diff --git a/.github/actions/start-aws-runner/action.yml b/.github/actions/start-aws-runner/action.yml index 0a02e96c4f288..ff3a6d426c27c 100644 --- a/.github/actions/start-aws-runner/action.yml +++ b/.github/actions/start-aws-runner/action.yml @@ -35,7 +35,7 @@ runs: using: "composite" steps: - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1-node16 + uses: aws-actions/configure-aws-credentials@v1 with: aws-access-key-id: ${{ inputs.aws-access-key-id }} aws-secret-access-key: ${{ inputs.aws-secret-access-key }} diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml index f47c72cd2e639..663244410497b 100644 --- a/.github/workflows/gradle.yml +++ b/.github/workflows/gradle.yml @@ -75,7 +75,7 @@ jobs: connectors: - 'airbyte-cdk/**' - 'airbyte-integrations/**' - - 'airbyte-commons-worker/**' + - 'airbyte-connector-test-harnesses/acceptance-test-harness/**' db: - 'airbyte-db/**' diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerConfigs.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerConfigs.java deleted file mode 100644 index aff60d5b72961..0000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerConfigs.java +++ /dev/null @@ -1,226 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers; - -import io.airbyte.config.Configs; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.TolerationPOJO; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import lombok.AllArgsConstructor; - -@AllArgsConstructor -public class WorkerConfigs { - - private final Configs.WorkerEnvironment workerEnvironment; - private final ResourceRequirements resourceRequirements; - private final List workerKubeTolerations; - private final Map workerKubeNodeSelectors; - private final Optional> workerIsolatedKubeNodeSelectors; - private final Map workerKubeAnnotations; - private final List jobImagePullSecrets; - private final String jobImagePullPolicy; - private final String sidecarImagePullPolicy; - private final String jobSocatImage; - private final String jobBusyboxImage; - private final String jobCurlImage; - private final Map envMap; - - /** - * Constructs a job-type-agnostic WorkerConfigs. For WorkerConfigs customized for specific - * job-types, use static `build*JOBTYPE*WorkerConfigs` method if one exists. - */ - public WorkerConfigs(final Configs configs) { - this( - configs.getWorkerEnvironment(), - new ResourceRequirements() - .withCpuRequest(configs.getJobMainContainerCpuRequest()) - .withCpuLimit(configs.getJobMainContainerCpuLimit()) - .withMemoryRequest(configs.getJobMainContainerMemoryRequest()) - .withMemoryLimit(configs.getJobMainContainerMemoryLimit()), - configs.getJobKubeTolerations(), - configs.getJobKubeNodeSelectors(), - configs.getUseCustomKubeNodeSelector() ? Optional.of(configs.getIsolatedJobKubeNodeSelectors()) : Optional.empty(), - configs.getJobKubeAnnotations(), - configs.getJobKubeMainContainerImagePullSecrets(), - configs.getJobKubeMainContainerImagePullPolicy(), - configs.getJobKubeSidecarContainerImagePullPolicy(), - configs.getJobKubeSocatImage(), - configs.getJobKubeBusyboxImage(), - configs.getJobKubeCurlImage(), - configs.getJobDefaultEnvMap()); - } - - /** - * Builds a WorkerConfigs with some configs that are specific to the Spec job type. 
- */ - public static WorkerConfigs buildSpecWorkerConfigs(final Configs configs) { - final Map nodeSelectors = configs.getSpecJobKubeNodeSelectors() != null - ? configs.getSpecJobKubeNodeSelectors() - : configs.getJobKubeNodeSelectors(); - - final Map annotations = configs.getSpecJobKubeAnnotations() != null - ? configs.getSpecJobKubeAnnotations() - : configs.getJobKubeAnnotations(); - - return new WorkerConfigs( - configs.getWorkerEnvironment(), - new ResourceRequirements() - .withCpuRequest(configs.getJobMainContainerCpuRequest()) - .withCpuLimit(configs.getJobMainContainerCpuLimit()) - .withMemoryRequest(configs.getJobMainContainerMemoryRequest()) - .withMemoryLimit(configs.getJobMainContainerMemoryLimit()), - configs.getJobKubeTolerations(), - nodeSelectors, - configs.getUseCustomKubeNodeSelector() ? Optional.of(configs.getIsolatedJobKubeNodeSelectors()) : Optional.empty(), - annotations, - configs.getJobKubeMainContainerImagePullSecrets(), - configs.getJobKubeMainContainerImagePullPolicy(), - configs.getJobKubeSidecarContainerImagePullPolicy(), - configs.getJobKubeSocatImage(), - configs.getJobKubeBusyboxImage(), - configs.getJobKubeCurlImage(), - configs.getJobDefaultEnvMap()); - } - - /** - * Builds a WorkerConfigs with some configs that are specific to the Check job type. - */ - public static WorkerConfigs buildCheckWorkerConfigs(final Configs configs) { - final Map nodeSelectors = configs.getCheckJobKubeNodeSelectors() != null - ? configs.getCheckJobKubeNodeSelectors() - : configs.getJobKubeNodeSelectors(); - - final Map annotations = configs.getCheckJobKubeAnnotations() != null - ? configs.getCheckJobKubeAnnotations() - : configs.getJobKubeAnnotations(); - - return new WorkerConfigs( - configs.getWorkerEnvironment(), - new ResourceRequirements() - .withCpuRequest(configs.getCheckJobMainContainerCpuRequest()) - .withCpuLimit(configs.getCheckJobMainContainerCpuLimit()) - .withMemoryRequest(configs.getCheckJobMainContainerMemoryRequest()) - .withMemoryLimit(configs.getCheckJobMainContainerMemoryLimit()), - configs.getJobKubeTolerations(), - nodeSelectors, - configs.getUseCustomKubeNodeSelector() ? Optional.of(configs.getIsolatedJobKubeNodeSelectors()) : Optional.empty(), - annotations, - configs.getJobKubeMainContainerImagePullSecrets(), - configs.getJobKubeMainContainerImagePullPolicy(), - configs.getJobKubeSidecarContainerImagePullPolicy(), - configs.getJobKubeSocatImage(), - configs.getJobKubeBusyboxImage(), - configs.getJobKubeCurlImage(), - configs.getJobDefaultEnvMap()); - } - - /** - * Builds a WorkerConfigs with some configs that are specific to the Discover job type. - */ - public static WorkerConfigs buildDiscoverWorkerConfigs(final Configs configs) { - final Map nodeSelectors = configs.getDiscoverJobKubeNodeSelectors() != null - ? configs.getDiscoverJobKubeNodeSelectors() - : configs.getJobKubeNodeSelectors(); - - final Map annotations = configs.getDiscoverJobKubeAnnotations() != null - ? configs.getDiscoverJobKubeAnnotations() - : configs.getJobKubeAnnotations(); - - return new WorkerConfigs( - configs.getWorkerEnvironment(), - new ResourceRequirements() - .withCpuRequest(configs.getJobMainContainerCpuRequest()) - .withCpuLimit(configs.getJobMainContainerCpuLimit()) - .withMemoryRequest(configs.getJobMainContainerMemoryRequest()) - .withMemoryLimit(configs.getJobMainContainerMemoryLimit()), - configs.getJobKubeTolerations(), - nodeSelectors, - configs.getUseCustomKubeNodeSelector() ? 
Optional.of(configs.getIsolatedJobKubeNodeSelectors()) : Optional.empty(), - annotations, - configs.getJobKubeMainContainerImagePullSecrets(), - configs.getJobKubeMainContainerImagePullPolicy(), - configs.getJobKubeSidecarContainerImagePullPolicy(), - configs.getJobKubeSocatImage(), - configs.getJobKubeBusyboxImage(), - configs.getJobKubeCurlImage(), - configs.getJobDefaultEnvMap()); - } - - public static WorkerConfigs buildReplicationWorkerConfigs(final Configs configs) { - return new WorkerConfigs( - configs.getWorkerEnvironment(), - new ResourceRequirements() - .withCpuRequest(configs.getReplicationOrchestratorCpuRequest()) - .withCpuLimit(configs.getReplicationOrchestratorCpuLimit()) - .withMemoryRequest(configs.getReplicationOrchestratorMemoryRequest()) - .withMemoryLimit(configs.getReplicationOrchestratorMemoryLimit()), - configs.getJobKubeTolerations(), - configs.getJobKubeNodeSelectors(), - configs.getUseCustomKubeNodeSelector() ? Optional.of(configs.getIsolatedJobKubeNodeSelectors()) : Optional.empty(), - configs.getJobKubeAnnotations(), - configs.getJobKubeMainContainerImagePullSecrets(), - configs.getJobKubeMainContainerImagePullPolicy(), - configs.getJobKubeSidecarContainerImagePullPolicy(), - configs.getJobKubeSocatImage(), - configs.getJobKubeBusyboxImage(), - configs.getJobKubeCurlImage(), - configs.getJobDefaultEnvMap()); - } - - public Configs.WorkerEnvironment getWorkerEnvironment() { - return workerEnvironment; - } - - public ResourceRequirements getResourceRequirements() { - return resourceRequirements; - } - - public List getWorkerKubeTolerations() { - return workerKubeTolerations; - } - - public Map getworkerKubeNodeSelectors() { - return workerKubeNodeSelectors; - } - - public Optional> getWorkerIsolatedKubeNodeSelectors() { - return workerIsolatedKubeNodeSelectors; - } - - public Map getWorkerKubeAnnotations() { - return workerKubeAnnotations; - } - - public List getJobImagePullSecrets() { - return jobImagePullSecrets; - } - - public String getJobImagePullPolicy() { - return jobImagePullPolicy; - } - - public String getSidecarImagePullPolicy() { - return sidecarImagePullPolicy; - } - - public String getJobSocatImage() { - return jobSocatImage; - } - - public String getJobBusyboxImage() { - return jobBusyboxImage; - } - - public String getJobCurlImage() { - return jobCurlImage; - } - - public Map getEnvMap() { - return envMap; - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/exception/WorkerException.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/exception/WorkerException.java deleted file mode 100644 index 9b701364c1ad4..0000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/exception/WorkerException.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.exception; - -public class WorkerException extends Exception { - - public WorkerException(final String message) { - super(message); - } - - public WorkerException(final String message, final Throwable cause) { - super(message, cause); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/WorkerConfigsTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/WorkerConfigsTest.java deleted file mode 100644 index 2c6baff9bd15a..0000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/WorkerConfigsTest.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.Configs; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.ResourceRequirements; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -class WorkerConfigsTest { - - private static final String JOB = "job"; - private static final Map DEFAULT_NODE_SELECTORS = ImmutableMap.of(JOB, "default"); - private static final Map SPEC_NODE_SELECTORS = ImmutableMap.of(JOB, "spec"); - private static final Map CHECK_NODE_SELECTORS = ImmutableMap.of(JOB, "check"); - private static final Map DISCOVER_NODE_SELECTORS = ImmutableMap.of(JOB, "discover"); - private static final String DEFAULT_CPU_REQUEST = "0.1"; - private static final String DEFAULT_CPU_LIMIT = "0.2"; - private static final String DEFAULT_MEMORY_REQUEST = "100Mi"; - private static final String DEFAULT_MEMORY_LIMIT = "200Mi"; - private static final ResourceRequirements DEFAULT_RESOURCE_REQUIREMENTS = new ResourceRequirements() - .withCpuRequest(DEFAULT_CPU_REQUEST) - .withCpuLimit(DEFAULT_CPU_LIMIT) - .withMemoryRequest(DEFAULT_MEMORY_REQUEST) - .withMemoryLimit(DEFAULT_MEMORY_LIMIT); - - private static final String REPLICATION_CPU_REQUEST = "0.3"; - private static final String REPLICATION_CPU_LIMIT = "0.4"; - private static final String REPLICATION_MEMORY_REQUEST = "300Mi"; - private static final String REPLICATION_MEMORY_LIMIT = "400Mi"; - private static final ResourceRequirements REPLICATION_RESOURCE_REQUIREMENTS = new ResourceRequirements() - .withCpuRequest(REPLICATION_CPU_REQUEST) - .withCpuLimit(REPLICATION_CPU_LIMIT) - .withMemoryRequest(REPLICATION_MEMORY_REQUEST) - .withMemoryLimit(REPLICATION_MEMORY_LIMIT); - - private Configs configs; - - @BeforeEach - void setup() { - configs = mock(EnvConfigs.class); - when(configs.getJobKubeNodeSelectors()).thenReturn(DEFAULT_NODE_SELECTORS); - when(configs.getJobMainContainerCpuRequest()).thenReturn(DEFAULT_CPU_REQUEST); - when(configs.getJobMainContainerCpuLimit()).thenReturn(DEFAULT_CPU_LIMIT); - when(configs.getJobMainContainerMemoryRequest()).thenReturn(DEFAULT_MEMORY_REQUEST); - when(configs.getJobMainContainerMemoryLimit()).thenReturn(DEFAULT_MEMORY_LIMIT); - } - - @Test - @DisplayName("default workerConfigs use default node selectors") - void testDefaultNodeSelectors() { - final WorkerConfigs defaultWorkerConfigs = new WorkerConfigs(configs); - - Assertions.assertEquals(DEFAULT_NODE_SELECTORS, defaultWorkerConfigs.getworkerKubeNodeSelectors()); - } - - @Test - @DisplayName("spec, check, and discover workerConfigs use job-specific node selectors if set") - void testCustomNodeSelectors() { - when(configs.getCheckJobKubeNodeSelectors()).thenReturn(CHECK_NODE_SELECTORS); - when(configs.getSpecJobKubeNodeSelectors()).thenReturn(SPEC_NODE_SELECTORS); - when(configs.getDiscoverJobKubeNodeSelectors()).thenReturn(DISCOVER_NODE_SELECTORS); - - final WorkerConfigs specWorkerConfigs = WorkerConfigs.buildSpecWorkerConfigs(configs); - final WorkerConfigs checkWorkerConfigs = WorkerConfigs.buildCheckWorkerConfigs(configs); - final WorkerConfigs discoverWorkerConfigs = WorkerConfigs.buildDiscoverWorkerConfigs(configs); - - Assertions.assertEquals(SPEC_NODE_SELECTORS, specWorkerConfigs.getworkerKubeNodeSelectors()); - Assertions.assertEquals(CHECK_NODE_SELECTORS, 
checkWorkerConfigs.getworkerKubeNodeSelectors()); - Assertions.assertEquals(DISCOVER_NODE_SELECTORS, discoverWorkerConfigs.getworkerKubeNodeSelectors()); - } - - @Test - @DisplayName("spec, check, and discover workerConfigs use default node selectors when custom selectors are not set") - void testNodeSelectorsFallbackToDefault() { - when(configs.getCheckJobKubeNodeSelectors()).thenReturn(null); - when(configs.getSpecJobKubeNodeSelectors()).thenReturn(null); - when(configs.getDiscoverJobKubeNodeSelectors()).thenReturn(null); - - final WorkerConfigs specWorkerConfigs = WorkerConfigs.buildSpecWorkerConfigs(configs); - final WorkerConfigs checkWorkerConfigs = WorkerConfigs.buildCheckWorkerConfigs(configs); - final WorkerConfigs discoverWorkerConfigs = WorkerConfigs.buildDiscoverWorkerConfigs(configs); - - Assertions.assertEquals(DEFAULT_NODE_SELECTORS, specWorkerConfigs.getworkerKubeNodeSelectors()); - Assertions.assertEquals(DEFAULT_NODE_SELECTORS, checkWorkerConfigs.getworkerKubeNodeSelectors()); - Assertions.assertEquals(DEFAULT_NODE_SELECTORS, discoverWorkerConfigs.getworkerKubeNodeSelectors()); - } - - @Test - @DisplayName("default workerConfigs use default resourceRequirements") - void testDefaultResourceRequirements() { - final WorkerConfigs defaultWorkerConfigs = new WorkerConfigs(configs); - - Assertions.assertEquals(DEFAULT_RESOURCE_REQUIREMENTS, defaultWorkerConfigs.getResourceRequirements()); - } - - @Test - @DisplayName("replication workerConfigs use replication-specific resourceRequirements") - void testCustomResourceRequirements() { - when(configs.getReplicationOrchestratorCpuRequest()).thenReturn(REPLICATION_CPU_REQUEST); - when(configs.getReplicationOrchestratorCpuLimit()).thenReturn(REPLICATION_CPU_LIMIT); - when(configs.getReplicationOrchestratorMemoryRequest()).thenReturn(REPLICATION_MEMORY_REQUEST); - when(configs.getReplicationOrchestratorMemoryLimit()).thenReturn(REPLICATION_MEMORY_LIMIT); - - final WorkerConfigs replicationWorkerConfigs = WorkerConfigs.buildReplicationWorkerConfigs(configs); - - Assertions.assertEquals(REPLICATION_RESOURCE_REQUIREMENTS, replicationWorkerConfigs.getResourceRequirements()); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/DefaultAirbyteSourceTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/DefaultAirbyteSourceTest.java deleted file mode 100644 index 9b7b796312983..0000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/DefaultAirbyteSourceTest.java +++ /dev/null @@ -1,242 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.internal; - -import static io.airbyte.commons.logging.LoggingHelper.RESET; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.RETURNS_DEEP_STUBS; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.logging.LoggingHelper.Color; -import io.airbyte.commons.protocol.DefaultProtocolSerializer; -import io.airbyte.commons.protocol.ProtocolSerializer; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.State; -import io.airbyte.config.WorkerSourceConfig; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.process.IntegrationLauncher; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.List; -import java.util.Map; -import java.util.stream.Stream; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class DefaultAirbyteSourceTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultAirbyteSourceTest.class); - private static final String NAMESPACE = "unused"; - private static final Path TEST_ROOT = Path.of("/tmp/airbyte_tests"); - private static final String STREAM_NAME = "user_preferences"; - private static final String FIELD_NAME = "favorite_color"; - - private static final JsonNode STATE = Jsons.jsonNode(ImmutableMap.of("checkpoint", "the future.")); - private static final JsonNode CONFIG = Jsons.jsonNode(Map.of( - "apiKey", "123", - "region", "us-east")); - private static final ConfiguredAirbyteCatalog CATALOG = CatalogHelpers.createConfiguredAirbyteCatalog( - "hudi:latest", - NAMESPACE, - Field.of(FIELD_NAME, JsonSchemaType.STRING)); - - private static final WorkerSourceConfig SOURCE_CONFIG = new WorkerSourceConfig() - .withState(new State().withState(STATE)) - .withSourceConnectionConfiguration(CONFIG) - .withCatalog(CATALOG); - - private static final List MESSAGES = Lists.newArrayList( - AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "blue"), - AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "yellow")); - - private static Path logJobRoot; - - private static final 
FeatureFlags featureFlags = new EnvVariableFeatureFlags(); - - static { - try { - logJobRoot = Files.createTempDirectory(Path.of("/tmp"), "mdc_test"); - LogClientSingleton.getInstance().setJobMdc(WorkerEnvironment.DOCKER, LogConfigs.EMPTY, logJobRoot); - } catch (final IOException e) { - LOGGER.error(e.toString()); - } - } - - private Path jobRoot; - private IntegrationLauncher integrationLauncher; - private Process process; - private AirbyteStreamFactory streamFactory; - private HeartbeatMonitor heartbeatMonitor; - private final ProtocolSerializer protocolSerializer = new DefaultProtocolSerializer(); - - @BeforeEach - void setup() throws IOException, WorkerException { - jobRoot = Files.createTempDirectory(Files.createDirectories(TEST_ROOT), "test"); - - integrationLauncher = mock(IntegrationLauncher.class, RETURNS_DEEP_STUBS); - process = mock(Process.class, RETURNS_DEEP_STUBS); - heartbeatMonitor = mock(HeartbeatMonitor.class); - final InputStream inputStream = mock(InputStream.class); - when(integrationLauncher.read( - jobRoot, - WorkerConstants.SOURCE_CONFIG_JSON_FILENAME, - Jsons.serialize(CONFIG), - WorkerConstants.SOURCE_CATALOG_JSON_FILENAME, - Jsons.serialize(CATALOG), - WorkerConstants.INPUT_STATE_JSON_FILENAME, - Jsons.serialize(STATE))).thenReturn(process); - when(process.isAlive()).thenReturn(true); - when(process.getInputStream()).thenReturn(inputStream); - when(process.getErrorStream()).thenReturn(new ByteArrayInputStream("qwer".getBytes(StandardCharsets.UTF_8))); - - streamFactory = noop -> MESSAGES.stream(); - - LogClientSingleton.getInstance().setJobMdc(WorkerEnvironment.DOCKER, LogConfigs.EMPTY, logJobRoot); - } - - @AfterEach - void tearDown() throws IOException { - // The log file needs to be present and empty - final Path logFile = logJobRoot.resolve(LogClientSingleton.LOG_FILENAME); - if (Files.exists(logFile)) { - Files.delete(logFile); - } - Files.createFile(logFile); - } - - @SuppressWarnings({"OptionalGetWithoutIsPresent", "BusyWait"}) - @Test - void testSuccessfulLifecycle() throws Exception { - when(process.getErrorStream()).thenReturn(new ByteArrayInputStream("qwer".getBytes(StandardCharsets.UTF_8))); - - when(heartbeatMonitor.isBeating()).thenReturn(true).thenReturn(false); - - final AirbyteSource source = new DefaultAirbyteSource(integrationLauncher, streamFactory, heartbeatMonitor, protocolSerializer, featureFlags); - source.start(SOURCE_CONFIG, jobRoot); - - final List messages = Lists.newArrayList(); - - assertFalse(source.isFinished()); - messages.add(source.attemptRead().get()); - assertFalse(source.isFinished()); - messages.add(source.attemptRead().get()); - assertFalse(source.isFinished()); - - when(process.isAlive()).thenReturn(false); - assertTrue(source.isFinished()); - verify(heartbeatMonitor, times(2)).beat(); - - source.close(); - - assertEquals(MESSAGES, messages); - - Assertions.assertTimeout(Duration.ofSeconds(5), () -> { - while (process.getErrorStream().available() != 0) { - Thread.sleep(50); - } - }); - - verify(process).exitValue(); - } - - @Test - void testTaggedLogs() throws Exception { - - when(process.getErrorStream()).thenReturn(new ByteArrayInputStream(("rewq").getBytes(StandardCharsets.UTF_8))); - - when(heartbeatMonitor.isBeating()).thenReturn(true).thenReturn(false); - - final AirbyteSource source = new DefaultAirbyteSource(integrationLauncher, streamFactory, - heartbeatMonitor, protocolSerializer, featureFlags); - source.start(SOURCE_CONFIG, jobRoot); - - final List messages = Lists.newArrayList(); - - 
messages.add(source.attemptRead().get()); - messages.add(source.attemptRead().get()); - - when(process.isAlive()).thenReturn(false); - - source.close(); - - final Path logPath = logJobRoot.resolve(LogClientSingleton.LOG_FILENAME); - final Stream logs = IOs.readFile(logPath).lines(); - - logs - .filter(line -> !line.contains("EnvConfigs(getEnvOrDefault)")) - .forEach(line -> { - org.assertj.core.api.Assertions.assertThat(line) - .startsWith(Color.BLUE_BACKGROUND.getCode() + "source" + RESET); - }); - } - - @Test - void testNonzeroExitCodeThrows() throws Exception { - final AirbyteSource tap = new DefaultAirbyteSource(integrationLauncher, streamFactory, heartbeatMonitor, protocolSerializer, featureFlags); - tap.start(SOURCE_CONFIG, jobRoot); - - when(process.exitValue()).thenReturn(1); - - Assertions.assertThrows(WorkerException.class, tap::close); - } - - @Test - void testIgnoredExitCodes() throws Exception { - final AirbyteSource tap = new DefaultAirbyteSource(integrationLauncher, streamFactory, heartbeatMonitor, protocolSerializer, featureFlags); - tap.start(SOURCE_CONFIG, jobRoot); - when(process.isAlive()).thenReturn(false); - - DefaultAirbyteSource.IGNORED_EXIT_CODES.forEach(exitCode -> { - when(process.exitValue()).thenReturn(exitCode); - Assertions.assertDoesNotThrow(tap::close); - }); - } - - @Test - void testGetExitValue() throws Exception { - final AirbyteSource source = new DefaultAirbyteSource(integrationLauncher, streamFactory, heartbeatMonitor, protocolSerializer, featureFlags); - source.start(SOURCE_CONFIG, jobRoot); - - when(process.isAlive()).thenReturn(false); - when(process.exitValue()).thenReturn(2); - - assertEquals(2, source.getExitValue()); - // call a second time to verify that exit value is cached - assertEquals(2, source.getExitValue()); - verify(process, times(1)).exitValue(); - } - -} diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/logging/MaskedDataInterceptor.java b/airbyte-commons/src/main/java/io/airbyte/commons/logging/MaskedDataInterceptor.java deleted file mode 100644 index 671fc166b8d6e..0000000000000 --- a/airbyte-commons/src/main/java/io/airbyte/commons/logging/MaskedDataInterceptor.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.logging; - -import com.fasterxml.jackson.core.type.TypeReference; -import io.airbyte.commons.constants.AirbyteSecretConstants; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.yaml.Yamls; -import java.nio.charset.Charset; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; -import org.apache.commons.io.IOUtils; -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.core.LogEvent; -import org.apache.logging.log4j.core.appender.rewrite.RewritePolicy; -import org.apache.logging.log4j.core.config.plugins.Plugin; -import org.apache.logging.log4j.core.config.plugins.PluginAttribute; -import org.apache.logging.log4j.core.config.plugins.PluginFactory; -import org.apache.logging.log4j.core.impl.Log4jLogEvent; -import org.apache.logging.log4j.message.SimpleMessage; -import org.apache.logging.log4j.status.StatusLogger; - -/** - * Custom Log4j2 {@link RewritePolicy} used to intercept all log messages and mask any JSON - * properties in the message that match the list of maskable properties. - *
- * The maskable properties file is generated by a Gradle task in the {@code :airbyte-config:specs} - * project. The file is named {@code specs_secrets_mask.yaml} and is located in the - * {@code src/main/resources/seed} directory of the {@link :airbyte-config:init} project. - */ -@Plugin(name = "MaskedDataInterceptor", - category = "Core", - elementType = "rewritePolicy", - printObject = true) -public class MaskedDataInterceptor implements RewritePolicy { - - protected static final Logger logger = StatusLogger.getLogger(); - - /** - * The pattern used to determine if a message contains sensitive data. - */ - private final Optional pattern; - - @PluginFactory - public static MaskedDataInterceptor createPolicy( - @PluginAttribute(value = "specMaskFile", - defaultString = "/seed/specs_secrets_mask.yaml") final String specMaskFile) { - return new MaskedDataInterceptor(specMaskFile); - } - - private MaskedDataInterceptor(final String specMaskFile) { - this.pattern = buildPattern(specMaskFile); - } - - @Override - public LogEvent rewrite(final LogEvent source) { - return Log4jLogEvent.newBuilder() - .setLoggerName(source.getLoggerName()) - .setMarker(source.getMarker()) - .setLoggerFqcn(source.getLoggerFqcn()) - .setLevel(source.getLevel()) - .setMessage(new SimpleMessage(applyMask(source.getMessage().getFormattedMessage()))) - .setThrown(source.getThrown()) - .setContextMap(source.getContextMap()) - .setContextStack(source.getContextStack()) - .setThreadName(source.getThreadName()) - .setSource(source.getSource()) - .setTimeMillis(source.getTimeMillis()) - .build(); - } - - /** - * Applies the mask to the message, if necessary. - * - * @param message The log message. - * @return The possibly masked log message. - */ - private String applyMask(final String message) { - if (pattern.isPresent()) { - return message.replaceAll(pattern.get(), "\"$1\":\"" + AirbyteSecretConstants.SECRETS_MASK + "\""); - } else { - return message; - } - } - - /** - * Loads the maskable properties from the provided file. - * - * @param specMaskFile The spec mask file. - * @return The set of maskable properties. - */ - private Set getMaskableProperties(final String specMaskFile) { - logger.info("Loading mask data from '{}", specMaskFile); - try { - final String maskFileContents = IOUtils.toString(getClass().getResourceAsStream(specMaskFile), Charset.defaultCharset()); - final Map> properties = Jsons.object(Yamls.deserialize(maskFileContents), new TypeReference<>() {}); - return properties.getOrDefault("properties", Set.of()); - } catch (final Exception e) { - logger.error("Unable to load mask data from '{}': {}.", specMaskFile, e.getMessage()); - return Set.of(); - } - } - - /** - * Builds the maskable property matching pattern. - * - * @param specMaskFile The spec mask file. - * @return The regular expression pattern used to find maskable properties. - */ - private Optional buildPattern(final String specMaskFile) { - final Set maskableProperties = getMaskableProperties(specMaskFile); - return !maskableProperties.isEmpty() ? Optional.of(generatePattern(maskableProperties)) : Optional.empty(); - } - - /** - * Generates the property matching pattern string from the provided set of properties. - * - * @param properties The set of properties to match. - * @return The generated regular expression pattern used to match the maskable properties. 
- */ - private String generatePattern(final Set properties) { - final StringBuilder builder = new StringBuilder(); - builder.append("(?i)"); // case insensitive - builder.append("\"("); - builder.append(properties.stream().collect(Collectors.joining("|"))); - builder.append(")\"\\s*:\\s*(\"(?:[^\"\\\\]|\\\\.)*\"|\\[[^]\\[]*]|\\d+)"); - return builder.toString(); - } - -} diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/util/MoreLists.java b/airbyte-commons/src/main/java/io/airbyte/commons/util/MoreLists.java deleted file mode 100644 index bfbedac25f8cc..0000000000000 --- a/airbyte-commons/src/main/java/io/airbyte/commons/util/MoreLists.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import com.google.common.base.Preconditions; -import java.util.List; -import java.util.stream.Stream; - -public class MoreLists { - - /** - * Concatenate multiple lists into one list. - * - * @param lists to concatenate - * @param type - * @return a new concatenated list - */ - @SafeVarargs - public static List concat(final List... lists) { - return Stream.of(lists).flatMap(List::stream).toList(); - } - - public static T getOrNull(final List list, final int index) { - Preconditions.checkNotNull(list); - if (list.size() > index) { - return list.get(index); - } else { - return null; - } - } - -} diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/util/MoreProperties.java b/airbyte-commons/src/main/java/io/airbyte/commons/util/MoreProperties.java deleted file mode 100644 index f63aedf64f287..0000000000000 --- a/airbyte-commons/src/main/java/io/airbyte/commons/util/MoreProperties.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.util.Properties; - -public class MoreProperties { - - /** - * Read an .env file into a Properties object. - * - * @param envFile - .env file to read - * @return properties object parsed from the contents of the .env - * @throws IOException throws an exception if there are errors while reading the file. - */ - public static Properties envFileToProperties(final File envFile) throws IOException { - final Properties prop = new Properties(); - prop.load(new FileInputStream(envFile)); - return prop; - } - -} diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/version/AirbyteProtocolVersionRange.java b/airbyte-commons/src/main/java/io/airbyte/commons/version/AirbyteProtocolVersionRange.java deleted file mode 100644 index 0855a6cee08bf..0000000000000 --- a/airbyte-commons/src/main/java/io/airbyte/commons/version/AirbyteProtocolVersionRange.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.version; - -public record AirbyteProtocolVersionRange(Version min, Version max) { - - public boolean isSupported(final Version v) { - final Integer major = getMajor(v); - return getMajor(min) <= major && major <= getMajor(max); - } - - private Integer getMajor(final Version v) { - return Integer.valueOf(v.getMajorVersion()); - } - -} diff --git a/airbyte-commons/src/test/java/io/airbyte/commons/logging/MaskedDataInterceptorTest.java b/airbyte-commons/src/test/java/io/airbyte/commons/logging/MaskedDataInterceptorTest.java deleted file mode 100644 index 37b1468022438..0000000000000 --- a/airbyte-commons/src/test/java/io/airbyte/commons/logging/MaskedDataInterceptorTest.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.logging; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.constants.AirbyteSecretConstants; -import io.airbyte.commons.json.Jsons; -import org.apache.logging.log4j.core.LogEvent; -import org.apache.logging.log4j.message.Message; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link MaskedDataInterceptor} Log4j rewrite policy. - */ -class MaskedDataInterceptorTest { - - private static final String FOO = "foo"; - private static final String OTHER = "other"; - private static final String JSON_WITH_STRING_SECRETS = "{\"" + FOO + "\":\"test\",\"" + OTHER + "\":{\"prop\":\"value\",\"bar\":\"1234\"}}"; - private static final String JSON_WITH_STRING_WITH_QUOTE_SECRETS = - "{\"" + FOO + "\":\"\\\"test\\\"\",\"" + OTHER + "\":{\"prop\":\"value\",\"bar\":\"1234\"}}"; - private static final String JSON_WITH_NUMBER_SECRETS = "{\"" + FOO + "\":\"test\",\"" + OTHER + "\":{\"prop\":\"value\",\"bar\":1234}}"; - private static final String JSON_WITHOUT_SECRETS = "{\"prop1\":\"test\",\"" + OTHER + "\":{\"prop2\":\"value\",\"prop3\":1234}}"; - public static final String TEST_SPEC_SECRET_MASK_YAML = "/test_spec_secret_mask.yaml"; - - @Test - void testMaskingMessageWithStringSecret() { - final Message message = mock(Message.class); - final LogEvent logEvent = mock(LogEvent.class); - when(message.getFormattedMessage()).thenReturn(JSON_WITH_STRING_SECRETS); - when(logEvent.getMessage()).thenReturn(message); - - final MaskedDataInterceptor interceptor = MaskedDataInterceptor.createPolicy(TEST_SPEC_SECRET_MASK_YAML); - - final LogEvent result = interceptor.rewrite(logEvent); - - final JsonNode json = Jsons.deserialize(result.getMessage().getFormattedMessage()); - assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(FOO).asText()); - assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("bar").asText()); - } - - @Test - void testMaskingMessageWithStringSecretWithQuotes() { - final Message message = mock(Message.class); - final LogEvent logEvent = mock(LogEvent.class); - when(message.getFormattedMessage()).thenReturn(JSON_WITH_STRING_WITH_QUOTE_SECRETS); - when(logEvent.getMessage()).thenReturn(message); - - final MaskedDataInterceptor interceptor = MaskedDataInterceptor.createPolicy(TEST_SPEC_SECRET_MASK_YAML); - final LogEvent result = interceptor.rewrite(logEvent); - - final 
JsonNode json = Jsons.deserialize(result.getMessage().getFormattedMessage()); - assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(FOO).asText()); - assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("bar").asText()); - } - - @Test - void testMaskingMessageWithNumberSecret() { - final Message message = mock(Message.class); - final LogEvent logEvent = mock(LogEvent.class); - when(message.getFormattedMessage()).thenReturn(JSON_WITH_NUMBER_SECRETS); - when(logEvent.getMessage()).thenReturn(message); - - final MaskedDataInterceptor interceptor = MaskedDataInterceptor.createPolicy(TEST_SPEC_SECRET_MASK_YAML); - - final LogEvent result = interceptor.rewrite(logEvent); - - final JsonNode json = Jsons.deserialize(result.getMessage().getFormattedMessage()); - assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(FOO).asText()); - assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("bar").asText()); - } - - @Test - void testMaskingMessageWithoutSecret() { - final Message message = mock(Message.class); - final LogEvent logEvent = mock(LogEvent.class); - when(message.getFormattedMessage()).thenReturn(JSON_WITHOUT_SECRETS); - when(logEvent.getMessage()).thenReturn(message); - - final MaskedDataInterceptor interceptor = MaskedDataInterceptor.createPolicy(TEST_SPEC_SECRET_MASK_YAML); - - final LogEvent result = interceptor.rewrite(logEvent); - - final JsonNode json = Jsons.deserialize(result.getMessage().getFormattedMessage()); - assertNotEquals(AirbyteSecretConstants.SECRETS_MASK, json.get("prop1").asText()); - assertNotEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("prop2").asText()); - assertNotEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("prop3").asText()); - } - - @Test - void testMaskingMessageThatDoesNotMatchPattern() { - final String actualMessage = "This is some log message that doesn't match the pattern."; - final Message message = mock(Message.class); - final LogEvent logEvent = mock(LogEvent.class); - when(message.getFormattedMessage()).thenReturn(actualMessage); - when(logEvent.getMessage()).thenReturn(message); - - final MaskedDataInterceptor interceptor = MaskedDataInterceptor.createPolicy(TEST_SPEC_SECRET_MASK_YAML); - - final LogEvent result = interceptor.rewrite(logEvent); - assertFalse(result.getMessage().getFormattedMessage().contains(AirbyteSecretConstants.SECRETS_MASK)); - assertEquals(actualMessage, result.getMessage().getFormattedMessage()); - } - - @Test - void testMissingMaskingFileDoesNotPreventLogging() { - final Message message = mock(Message.class); - final LogEvent logEvent = mock(LogEvent.class); - when(message.getFormattedMessage()).thenReturn(JSON_WITHOUT_SECRETS); - when(logEvent.getMessage()).thenReturn(message); - - assertDoesNotThrow(() -> { - final MaskedDataInterceptor interceptor = MaskedDataInterceptor.createPolicy("/does_not_exist.yaml"); - final LogEvent result = interceptor.rewrite(logEvent); - assertEquals(JSON_WITHOUT_SECRETS, result.getMessage().getFormattedMessage()); - }); - } - -} diff --git a/airbyte-commons/src/test/java/io/airbyte/commons/util/MoreListsTest.java b/airbyte-commons/src/test/java/io/airbyte/commons/util/MoreListsTest.java deleted file mode 100644 index e73df8b7a6fb1..0000000000000 --- a/airbyte-commons/src/test/java/io/airbyte/commons/util/MoreListsTest.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.util; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import java.util.List; -import org.junit.jupiter.api.Test; - -class MoreListsTest { - - @Test - void testConcat() { - final List> lists = List.of(List.of(1, 2, 3), List.of(4, 5, 6), List.of(7, 8, 9)); - final List expected = List.of(1, 2, 3, 4, 5, 6, 7, 8, 9); - final List actual = MoreLists.concat(lists.get(0), lists.get(1), lists.get(2)); - assertEquals(expected, actual); - } - - @Test - void testGetOrNull() { - assertThrows(NullPointerException.class, () -> MoreLists.getOrNull(null, 0)); - assertEquals(1, MoreLists.getOrNull(List.of(1, 2, 3), 0)); - assertEquals(2, MoreLists.getOrNull(List.of(1, 2, 3), 1)); - assertEquals(3, MoreLists.getOrNull(List.of(1, 2, 3), 2)); - assertNull(MoreLists.getOrNull(List.of(1, 2, 3), 3)); - } - -} diff --git a/airbyte-commons/src/test/java/io/airbyte/commons/util/MorePropertiesTest.java b/airbyte-commons/src/test/java/io/airbyte/commons/util/MorePropertiesTest.java deleted file mode 100644 index 46a1c63df61ae..0000000000000 --- a/airbyte-commons/src/test/java/io/airbyte/commons/util/MorePropertiesTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.commons.io.IOs; -import java.io.File; -import java.io.IOException; -import java.util.Properties; -import org.junit.jupiter.api.Test; - -class MorePropertiesTest { - - @Test - void testEnvFileToProperties() throws IOException { - final String envFileContents = "OPTION1=hello\n" - + "OPTION2=2\n" - + "OPTION3=\n"; - final File envFile = File.createTempFile("properties-test", ".env"); - IOs.writeFile(envFile.toPath(), envFileContents); - - final Properties actual = MoreProperties.envFileToProperties(envFile); - final Properties expected = new Properties(); - expected.put("OPTION1", "hello"); - expected.put("OPTION2", "2"); - expected.put("OPTION3", ""); - - assertEquals(expected, actual); - } - -} diff --git a/airbyte-commons/src/test/java/io/airbyte/commons/version/AirbyteProtocolVersionRangeTest.java b/airbyte-commons/src/test/java/io/airbyte/commons/version/AirbyteProtocolVersionRangeTest.java deleted file mode 100644 index fabf6c3c7c07b..0000000000000 --- a/airbyte-commons/src/test/java/io/airbyte/commons/version/AirbyteProtocolVersionRangeTest.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.version; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import org.junit.jupiter.api.Test; - -class AirbyteProtocolVersionRangeTest { - - @Test - void checkRanges() { - final AirbyteProtocolVersionRange range = new AirbyteProtocolVersionRange(new Version("1.2.3"), new Version("4.3.2")); - assertTrue(range.isSupported(new Version("2.0.0"))); - assertTrue(range.isSupported(new Version("1.2.3"))); - assertTrue(range.isSupported(new Version("4.3.2"))); - - // We should only be requiring major to be within range - assertTrue(range.isSupported(new Version("1.0.0"))); - assertTrue(range.isSupported(new Version("4.4.0"))); - - assertFalse(range.isSupported(new Version("0.2.3"))); - assertFalse(range.isSupported(new Version("5.0.0"))); - } - - @Test - void checkRangeWithOnlyOneMajor() { - final AirbyteProtocolVersionRange range = new AirbyteProtocolVersionRange(new Version("2.0.0"), new Version("2.1.2")); - - assertTrue(range.isSupported(new Version("2.0.0"))); - assertTrue(range.isSupported(new Version("2.5.0"))); - - assertFalse(range.isSupported(new Version("1.0.0"))); - assertFalse(range.isSupported(new Version("3.0.0"))); - } - -} diff --git a/airbyte-commons/src/test/resources/test_spec_secret_mask.yaml b/airbyte-commons/src/test/resources/test_spec_secret_mask.yaml deleted file mode 100644 index 5a10d04d80890..0000000000000 --- a/airbyte-commons/src/test/resources/test_spec_secret_mask.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -properties: - - foo - - bar - - baz diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/CatalogDefinitionsConfig.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/CatalogDefinitionsConfig.java index 8ade370a1947c..95d38f84aa2a8 100644 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/CatalogDefinitionsConfig.java +++ b/airbyte-config/config-models/src/main/java/io/airbyte/config/CatalogDefinitionsConfig.java @@ -14,11 +14,8 @@ public class CatalogDefinitionsConfig { private static final String DEFAULT_LOCAL_CONNECTOR_CATALOG_PATH = SEED_SUBDIRECTORY + LOCAL_CONNECTOR_CATALOG_FILE_NAME; - private static final String REMOTE_OSS_CATALOG_URL = - "https://storage.googleapis.com/prod-airbyte-cloud-connector-metadata-service/oss_catalog.json"; - public static String getLocalConnectorCatalogPath() { - Optional customCatalogPath = new EnvConfigs().getLocalCatalogPath(); + final Optional customCatalogPath = new EnvConfigs().getLocalCatalogPath(); if (customCatalogPath.isPresent()) { return customCatalogPath.get(); } @@ -33,10 +30,6 @@ public static String getLocalCatalogWritePath() { return DEFAULT_LOCAL_CONNECTOR_CATALOG_PATH; } - public static String getRemoteOssCatalogUrl() { - return REMOTE_OSS_CATALOG_URL; - } - public static String getIconSubdirectory() { return ICON_SUBDIRECTORY; } diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigSchema.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigSchema.java index ed37f48761f80..5ac15814abeac 100644 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigSchema.java +++ b/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigSchema.java @@ -13,16 +13,6 @@ public enum ConfigSchema implements AirbyteConfig { // workspace - STANDARD_WORKSPACE("StandardWorkspace.yaml", - StandardWorkspace.class, - standardWorkspace -> standardWorkspace.getWorkspaceId().toString(), - "workspaceId"), - - 
WORKSPACE_SERVICE_ACCOUNT("WorkspaceServiceAccount.yaml", - WorkspaceServiceAccount.class, - workspaceServiceAccount -> workspaceServiceAccount.getWorkspaceId().toString(), - "workspaceId"), - WORKSPACE_WEBHOOK_OPERATION_CONFIGS("WebhookOperationConfigs.yaml", WebhookOperationConfigs.class), @@ -46,19 +36,10 @@ public enum ConfigSchema implements AirbyteConfig { destinationConnection -> destinationConnection.getDestinationId().toString(), "destinationId"), - // sync (i.e. connection) - STANDARD_SYNC("StandardSync.yaml", - StandardSync.class, - standardSync -> standardSync.getConnectionId().toString(), - "connectionId"), STANDARD_SYNC_OPERATION("StandardSyncOperation.yaml", StandardSyncOperation.class, standardSyncOperation -> standardSyncOperation.getOperationId().toString(), "operationId"), - STANDARD_SYNC_STATE("StandardSyncState.yaml", - StandardSyncState.class, - standardSyncState -> standardSyncState.getConnectionId().toString(), - "connectionId"), SOURCE_OAUTH_PARAM("SourceOAuthParameter.yaml", SourceOAuthParameter.class, sourceOAuthParameter -> sourceOAuthParameter.getOauthParameterId().toString(), @@ -67,17 +48,8 @@ public enum ConfigSchema implements AirbyteConfig { destinationOAuthParameter -> destinationOAuthParameter.getOauthParameterId().toString(), "oauthParameterId"), - STANDARD_SYNC_SUMMARY("StandardSyncSummary.yaml", StandardSyncSummary.class), - - ACTOR_CATALOG("ActorCatalog.yaml", ActorCatalog.class), - ACTOR_CATALOG_FETCH_EVENT("ActorCatalogFetchEvent.yaml", ActorCatalogFetchEvent.class), - // worker STANDARD_SYNC_INPUT("StandardSyncInput.yaml", StandardSyncInput.class), - NORMALIZATION_INPUT("NormalizationInput.yaml", NormalizationInput.class), - OPERATOR_DBT_INPUT("OperatorDbtInput.yaml", OperatorDbtInput.class), - STANDARD_SYNC_OUTPUT("StandardSyncOutput.yaml", StandardSyncOutput.class), - REPLICATION_OUTPUT("ReplicationOutput.yaml", ReplicationOutput.class), STATE("State.yaml", State.class); static final Path KNOWN_SCHEMAS_ROOT = JsonSchemas.prepareSchemas("types", ConfigSchema.class); @@ -101,10 +73,10 @@ ConfigSchema(final String schemaFilename, final Class className) { this.schemaFilename = schemaFilename; this.className = className; - this.extractId = object -> { + extractId = object -> { throw new RuntimeException(className.getSimpleName() + " doesn't have an id"); }; - this.idFieldName = null; + idFieldName = null; } @Override diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigSchemaMigrationSupport.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigSchemaMigrationSupport.java deleted file mode 100644 index 9e00dde3c37ab..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigSchemaMigrationSupport.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config; - -import com.google.common.collect.ImmutableMap; -import java.util.Arrays; -import java.util.Map; -import java.util.stream.Collectors; - -/** - * When migrating configs, it is possible that some of the old config types have been removed from - * the codebase. So we cannot rely on the latest {@link ConfigSchema} to migrate them. This class - * provides backward compatibility for those legacy config types during migration. 
- */ -public class ConfigSchemaMigrationSupport { - - // a map from config schema to its id field names - public static final Map CONFIG_SCHEMA_ID_FIELD_NAMES; - - static { - final Map currentConfigSchemaIdNames = Arrays.stream(ConfigSchema.values()) - .filter(configSchema -> configSchema.getIdFieldName() != null) - .collect(Collectors.toMap(Enum::name, ConfigSchema::getIdFieldName)); - CONFIG_SCHEMA_ID_FIELD_NAMES = new ImmutableMap.Builder() - .putAll(currentConfigSchemaIdNames) - // add removed config schema and its id field names below - // https://github.com/airbytehq/airbyte/pull/41 - .put("SOURCE_CONNECTION_CONFIGURATION", "sourceSpecificationId") - .put("DESTINATION_CONNECTION_CONFIGURATION", "destinationSpecificationId") - // https://github.com/airbytehq/airbyte/pull/528 - .put("SOURCE_CONNECTION_SPECIFICATION", "sourceSpecificationId") - .put("DESTINATION_CONNECTION_SPECIFICATION", "destinationSpecificationId") - // https://github.com/airbytehq/airbyte/pull/564 - .put("STANDARD_SOURCE", "sourceId") - .put("STANDARD_DESTINATION", "destinationId") - .put("SOURCE_CONNECTION_IMPLEMENTATION", "sourceImplementationId") - .put("DESTINATION_CONNECTION_IMPLEMENTATION", "destinationImplementationId") - // https://github.com/airbytehq/airbyte/pull/3472 - .put("STANDARD_SYNC_SCHEDULE", "connectionId") - .build(); - } - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigWithMetadata.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigWithMetadata.java deleted file mode 100644 index da404420a7ddb..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigWithMetadata.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config; - -import java.time.Instant; -import java.util.Objects; - -@SuppressWarnings("PMD.ShortVariable") -public class ConfigWithMetadata { - - private final String configId; - private final String configType; - private final Instant createdAt; - private final Instant updatedAt; - private final T config; - - public ConfigWithMetadata(final String configId, final String configType, final Instant createdAt, final Instant updatedAt, final T config) { - this.configId = configId; - this.configType = configType; - this.createdAt = createdAt; - this.updatedAt = updatedAt; - this.config = config; - } - - public String getConfigId() { - return configId; - } - - public String getConfigType() { - return configType; - } - - public Instant getCreatedAt() { - return createdAt; - } - - public Instant getUpdatedAt() { - return updatedAt; - } - - public T getConfig() { - return config; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final ConfigWithMetadata that = (ConfigWithMetadata) o; - return Objects.equals(configId, that.configId) && Objects.equals(configType, that.configType) && Objects.equals( - createdAt, that.createdAt) && Objects.equals(updatedAt, that.updatedAt) && Objects.equals(config, that.config); - } - - @Override - public int hashCode() { - return Objects.hash(configId, configType, createdAt, updatedAt, config); - } - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/Configs.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/Configs.java index 2dac35e85694e..d7ead983d845f 100644 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/Configs.java +++ 
b/airbyte-config/config-models/src/main/java/io/airbyte/config/Configs.java @@ -4,17 +4,6 @@ package io.airbyte.config; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.commons.version.Version; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.config.storage.CloudStorageConfigs; -import java.net.URI; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; - /** * This interface defines the general variables for configuring Airbyte. *
@@ -31,763 +20,15 @@ @SuppressWarnings("PMD.BooleanGetMethodName") public interface Configs { - // CORE - // General - - /** - * Distinguishes internal Airbyte deployments. Internal-use only. - */ - String getAirbyteRole(); - - /** - * Defines the Airbyte deployment version. - */ - AirbyteVersion getAirbyteVersion(); - - /** - * Defines the max supported Airbyte Protocol Version - */ - Version getAirbyteProtocolVersionMax(); - - /** - * Defines the min supported Airbyte Protocol Version - */ - Version getAirbyteProtocolVersionMin(); - - String getAirbyteVersionOrWarning(); - /** * Defines the bucket for caching specs. This immensely speeds up spec operations. This is updated * when new versions are published. */ String getSpecCacheBucket(); - /** - * Distinguishes internal Airbyte deployments. Internal-use only. - */ - DeploymentMode getDeploymentMode(); - - /** - * Defines if the deployment is Docker or Kubernetes. Airbyte behaves accordingly. - */ - WorkerEnvironment getWorkerEnvironment(); - - /** - * Defines the configs directory. Applies only to Docker, and is present in Kubernetes for backward - * compatibility. - */ - Path getConfigRoot(); - - /** - * Defines the Airbyte workspace directory. Applies only to Docker, and is present in Kubernetes for - * backward compatibility. - */ - Path getWorkspaceRoot(); - - /** - * Defines the URL to pull the remote connector catalog from. - * - * @return - */ - Optional getRemoteConnectorCatalogUrl(); - - // Docker Only - - /** - * Defines the name of the Airbyte docker volume. - */ - String getWorkspaceDockerMount(); - - /** - * Defines the name of the docker mount that is used for local file handling. On Docker, this allows - * connector pods to interact with a volume for "local file" operations. - */ - String getLocalDockerMount(); - - /** - * Defines the docker network jobs are launched on with the new scheduler. - */ - String getDockerNetwork(); - - Path getLocalRoot(); - - // Secrets - - /** - * Defines the GCP Project to store secrets in. Alpha support. - */ - String getSecretStoreGcpProjectId(); - - /** - * Define the JSON credentials used to read/write Airbyte Configuration to Google Secret Manager. - * These credentials must have Secret Manager Read/Write access. Alpha support. - */ - String getSecretStoreGcpCredentials(); - - /** - * Defines the Secret Persistence type. None by default. Set to GOOGLE_SECRET_MANAGER to use Google - * Secret Manager. Set to TESTING_CONFIG_DB_TABLE to use the database as a test. Set to VAULT to use - * Hashicorp Vault. Alpha support. Undefined behavior will result if this is turned on and then off. - */ - SecretPersistenceType getSecretPersistenceType(); - - /** - * Define the vault address to read/write Airbyte Configuration to Hashicorp Vault. Alpha Support. - */ - String getVaultAddress(); - - /** - * Define the vault path prefix to read/write Airbyte Configuration to Hashicorp Vault. Empty by - * default. Alpha Support. - */ - String getVaultPrefix(); - - /** - * Define the vault token to read/write Airbyte Configuration to Hashicorp Vault. Empty by default. - * Alpha Support. - */ - String getVaultToken(); - - /** - * Defines thw aws_access_key configuration to use AWSSecretManager. - */ - String getAwsAccessKey(); - - /** - * Defines aws_secret_access_key to use for AWSSecretManager. - */ - String getAwsSecretAccessKey(); - - // Database - - /** - * Define the Jobs Database user. - */ - String getDatabaseUser(); - - /** - * Define the Jobs Database password. 
- */ - String getDatabasePassword(); - - /** - * Define the Jobs Database url in the form of - * jdbc:postgresql://${DATABASE_HOST}:${DATABASE_PORT/${DATABASE_DB}. Do not include username or - * password. - */ - String getDatabaseUrl(); - - /** - * Define the minimum flyway migration version the Jobs Database must be at. If this is not - * satisfied, applications will not successfully connect. Internal-use only. - */ - String getJobsDatabaseMinimumFlywayMigrationVersion(); - - /** - * Define the total time to wait for the Jobs Database to be initialized. This includes migrations. - */ - long getJobsDatabaseInitializationTimeoutMs(); - - /** - * Define the Configs Database user. Defaults to the Jobs Database user if empty. - */ - String getConfigDatabaseUser(); - - /** - * Define the Configs Database password. Defaults to the Jobs Database password if empty. - */ - String getConfigDatabasePassword(); - - /** - * Define the Configs Database url in the form of - * jdbc:postgresql://${DATABASE_HOST}:${DATABASE_PORT/${DATABASE_DB}. Defaults to the Jobs Database - * url if empty. - */ - String getConfigDatabaseUrl(); - - /** - * Define the minimum flyway migration version the Configs Database must be at. If this is not - * satisfied, applications will not successfully connect. Internal-use only. - */ - String getConfigsDatabaseMinimumFlywayMigrationVersion(); - - /** - * Define the total time to wait for the Configs Database to be initialized. This includes - * migrations. - */ - long getConfigsDatabaseInitializationTimeoutMs(); - - /** - * Define if the Bootloader should run migrations on start up. - */ - boolean runDatabaseMigrationOnStartup(); - - // Temporal Cloud - Internal-Use Only - - /** - * Define if Temporal Cloud should be used. Internal-use only. - */ - boolean temporalCloudEnabled(); - - /** - * Temporal Cloud target endpoint, usually with form ${namespace}.tmprl.cloud:7233. Internal-use - * only. - */ - String getTemporalCloudHost(); - - /** - * Temporal Cloud namespace. Internal-use only. - */ - String getTemporalCloudNamespace(); - - /** - * Temporal Cloud client cert for SSL. Internal-use only. - */ - String getTemporalCloudClientCert(); - - /** - * Temporal Cloud client key for SSL. Internal-use only. - */ - String getTemporalCloudClientKey(); - - // Airbyte Services - - /** - * Define the url where Temporal is hosted at. Please include the port. Airbyte services use this - * information. - */ - String getTemporalHost(); - - /** - * Define the number of retention days for the temporal history - */ - int getTemporalRetentionInDays(); - - /** - * Define the url where the Airbyte Server is hosted at. Airbyte services use this information. - * Manipulates the `INTERNAL_API_HOST` variable. - */ - String getAirbyteApiHost(); - - /** - * Define the port where the Airbyte Server is hosted at. Airbyte services use this information. - * Manipulates the `INTERNAL_API_HOST` variable. - */ - int getAirbyteApiPort(); - - /** - * Define the url the Airbyte Webapp is hosted at. Airbyte services use this information. - */ - String getWebappUrl(); - - // Jobs - - /** - * Define the number of attempts a sync will attempt before failing. - */ - int getSyncJobMaxAttempts(); - - /** - * Define the number of days a sync job will execute for before timing out. - */ - int getSyncJobMaxTimeoutDays(); - - /** - * Defines whether job creation uses connector-specific resource requirements when spawning jobs. - * Works on both Docker and Kubernetes. 
Defaults to false for ease of use in OSS trials of Airbyte - * but recommended for production deployments. - */ - boolean connectorSpecificResourceDefaultsEnabled(); - - /** - * Define the job container's minimum CPU usage. Units follow either Docker or Kubernetes, depending - * on the deployment. Defaults to none. - */ - String getJobMainContainerCpuRequest(); - - /** - * Define the job container's maximum CPU usage. Units follow either Docker or Kubernetes, depending - * on the deployment. Defaults to none. - */ - String getJobMainContainerCpuLimit(); - - /** - * Define the job container's minimum RAM usage. Units follow either Docker or Kubernetes, depending - * on the deployment. Defaults to none. - */ - String getJobMainContainerMemoryRequest(); - - /** - * Define the job container's maximum RAM usage. Units follow either Docker or Kubernetes, depending - * on the deployment. Defaults to none. - */ - String getJobMainContainerMemoryLimit(); - - /** - * Get datadog or OTEL metric client for Airbyte to emit metrics. Allows empty value - */ - String getMetricClient(); - - /** - * If choosing OTEL as the metric client, Airbyte will emit metrics and traces to this provided - * endpoint. - */ - String getOtelCollectorEndpoint(); - - /** - * If using a LaunchDarkly feature flag client, this API key will be used. - * - * @return LaunchDarkly API key as a string. - */ - String getLaunchDarklyKey(); - - /** - * Get the type of feature flag client to use. - * - * @return - */ - String getFeatureFlagClient(); - - /** - * Defines a default map of environment variables to use for any launched job containers. The - * expected format is a JSON encoded String -> String map. Make sure to escape properly. Defaults to - * an empty map. - */ - Map getJobDefaultEnvMap(); - - /** - * Defines the number of consecutive job failures required before a connection is auto-disabled if - * the AUTO_DISABLE_FAILING_CONNECTIONS flag is set to true. - */ - int getMaxFailedJobsInARowBeforeConnectionDisable(); - - /** - * Defines the required number of days with only failed jobs before a connection is auto-disabled if - * the AUTO_DISABLE_FAILING_CONNECTIONS flag is set to true. - */ - int getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(); - - // Jobs - Kube only - - /** - * Define the check job container's minimum CPU request. Defaults to - * {@link #getJobMainContainerCpuRequest()} if not set. Internal-use only. - */ - String getCheckJobMainContainerCpuRequest(); - - /** - * Define the check job container's maximum CPU usage. Defaults to - * {@link #getJobMainContainerCpuLimit()} if not set. Internal-use only. - */ - String getCheckJobMainContainerCpuLimit(); - - /** - * Define the check job container's minimum RAM usage. Defaults to - * {@link #getJobMainContainerMemoryRequest()} if not set. Internal-use only. - */ - String getCheckJobMainContainerMemoryRequest(); - - /** - * Define the check job container's maximum RAM usage. Defaults to - * {@link #getJobMainContainerMemoryLimit()} if not set. Internal-use only. - */ - String getCheckJobMainContainerMemoryLimit(); - - /** - * Define the normalization job container's minimum CPU request. Defaults to - * {@link #getJobMainContainerCpuRequest()} if not set. Internal-use only. - */ - String getNormalizationJobMainContainerCpuRequest(); - - /** - * Define the normalization job container's maximum CPU usage. Defaults to - * {@link #getJobMainContainerCpuLimit()} if not set. Internal-use only. 
- */ - String getNormalizationJobMainContainerCpuLimit(); - - /** - * Define the normalization job container's minimum RAM usage. Defaults to - * {@link #getJobMainContainerMemoryRequest()} if not set. Internal-use only. - */ - String getNormalizationJobMainContainerMemoryRequest(); - - /** - * Define the normalization job container's maximum RAM usage. Defaults to - * {@link #getJobMainContainerMemoryLimit()} if not set. Internal-use only. - */ - String getNormalizationJobMainContainerMemoryLimit(); - - /** - * Define one or more Job pod tolerations. Tolerations are separated by ';'. Each toleration - * contains k=v pairs mentioning some/all of key, effect, operator and value and separated by `,`. - */ - List getJobKubeTolerations(); - - /** - * Define one or more Job pod node selectors. Each kv-pair is separated by a `,`. Used for the sync - * job and as fallback in case job specific (spec, check, discover) node selectors are not defined. - */ - Map getJobKubeNodeSelectors(); - - /** - * Define an isolated kube node selectors, so we can run risky images in it. - */ - Map getIsolatedJobKubeNodeSelectors(); - - /** - * Define if we want to run custom connector related jobs in a separate node pool. - */ - boolean getUseCustomKubeNodeSelector(); - - /** - * Define node selectors for Spec job pods specifically. Each kv-pair is separated by a `,`. - */ - Map getSpecJobKubeNodeSelectors(); - - /** - * Define node selectors for Check job pods specifically. Each kv-pair is separated by a `,`. - */ - Map getCheckJobKubeNodeSelectors(); - - /** - * Define node selectors for Discover job pods specifically. Each kv-pair is separated by a `,`. - */ - Map getDiscoverJobKubeNodeSelectors(); - - /** - * Define one or more Job pod annotations. Each kv-pair is separated by a `,`. Used for the sync job - * and as fallback in case job specific (spec, check, discover) annotations are not defined. - */ - Map getJobKubeAnnotations(); - - /** - * Define annotations for Spec job pods specifically. Each kv-pair is separated by a `,`. - */ - Map getSpecJobKubeAnnotations(); - - /** - * Define annotations for Check job pods specifically. Each kv-pair is separated by a `,`. - */ - Map getCheckJobKubeAnnotations(); - - /** - * Define annotations for Discover job pods specifically. Each kv-pair is separated by a `,`. - */ - Map getDiscoverJobKubeAnnotations(); - - /** - * Define the Job pod connector image pull policy. - */ - String getJobKubeMainContainerImagePullPolicy(); - - /** - * Define the Job pod connector sidecar image pull policy. - */ - String getJobKubeSidecarContainerImagePullPolicy(); - - /** - * Define the Job pod connector image pull secret. Useful when hosting private images. - */ - List getJobKubeMainContainerImagePullSecrets(); - - /** - * Define the Memory request for the Sidecar - */ - String getSidecarMemoryRequest(); - - /** - * Define the Memory limit for the Sidecar - */ - String getSidecarKubeMemoryLimit(); - - /** - * Define the CPU request for the Sidecar - */ - String getSidecarKubeCpuRequest(); - - /** - * Define the CPU limit for the Sidecar - */ - String getSidecarKubeCpuLimit(); - - /** - * Define the CPU request for the SOCAT Sidecar - */ - String getJobKubeSocatImage(); - - /** - * Define the CPU limit for the SOCAT Sidecar - */ - String getSocatSidecarKubeCpuLimit(); - - /** - * Define the Job pod socat image. - */ - String getSocatSidecarKubeCpuRequest(); - - /** - * Define the Job pod busybox image. 
- */ - String getJobKubeBusyboxImage(); - - /** - * Define the Job pod curl image pull. - */ - String getJobKubeCurlImage(); - - /** - * Define the Kubernetes namespace Job pods are created in. - */ - String getJobKubeNamespace(); - - // Logging/Monitoring/Tracking - - /** - * Define either S3, Minio or GCS as a logging backend. Kubernetes only. Multiple variables are - * involved here. Please see {@link CloudStorageConfigs} for more info. - */ - LogConfigs getLogConfigs(); - - /** - * Defines the optional Google application credentials used for logging. - */ - String getGoogleApplicationCredentials(); - - /** - * Define either S3, Minio or GCS as a state storage backend. Multiple variables are involved here. - * Please see {@link CloudStorageConfigs} for more info. - */ - CloudStorageConfigs getStateStorageCloudConfigs(); - - /** - * Determine if Datadog tracking events should be published. Mainly for Airbyte internal use. - */ - boolean getPublishMetrics(); - - /** - * Set the Agent to publish Datadog metrics to. Only relevant if metrics should be published. Mainly - * for Airbyte internal use. - */ - String getDDAgentHost(); - - /** - * Set the port to publish Datadog metrics to. Only relevant if metrics should be published. Mainly - * for Airbyte internal use. - */ - String getDDDogStatsDPort(); - - /** - * Set constant tags to be attached to all metrics. Useful for distinguishing between environments. - * Example: airbyte_instance:dev,k8s-cluster:aws-dev - */ - List getDDConstantTags(); - - /** - * Define whether to publish tracking events to Segment or log-only. Airbyte internal use. - */ - TrackingStrategy getTrackingStrategy(); - - /** - * Define whether to send job failure events to Sentry or log-only. Airbyte internal use. - */ - JobErrorReportingStrategy getJobErrorReportingStrategy(); - - /** - * Determines the Sentry DSN that should be used when reporting connector job failures to Sentry. - * Used with SENTRY error reporting strategy. Airbyte internal use. - */ - String getJobErrorReportingSentryDSN(); - - // APPLICATIONS - // Worker - - /** - * Define the header name used to authenticate from an Airbyte Worker to the Airbyte API - */ - String getAirbyteApiAuthHeaderName(); - - /** - * Define the header value used to authenticate from an Airbyte Worker to the Airbyte API - */ - String getAirbyteApiAuthHeaderValue(); - - /** - * Define the maximum number of workers each Airbyte Worker container supports. Multiple variables - * are involved here. Please see {@link MaxWorkersConfig} for more info. - */ - MaxWorkersConfig getMaxWorkers(); - - /** - * Define if the worker should run get spec workflows. Defaults to true. Internal-use only. - */ - boolean shouldRunGetSpecWorkflows(); - - /** - * Define if the worker should run check connection workflows. Defaults to true. Internal-use only. - */ - boolean shouldRunCheckConnectionWorkflows(); - - /** - * Define if the worker should run discover workflows. Defaults to true. Internal-use only. - */ - boolean shouldRunDiscoverWorkflows(); - - /** - * Define if the worker should run sync workflows. Defaults to true. Internal-use only. - */ - boolean shouldRunSyncWorkflows(); - - /** - * Define if the worker should run connection manager workflows. Defaults to true. Internal-use - * only. - */ - boolean shouldRunConnectionManagerWorkflows(); - - /** - * Define if the worker should run notification workflows. Defaults to true. Internal-use only. 
- */ - public boolean shouldRunNotifyWorkflows(); - - // Worker - Data Plane configs - - /** - * Define a set of Temporal Task Queue names for which the worker should register handlers for to - * process tasks related to syncing data. - For workers within Airbyte's Control Plane, this returns - * the Control Plane's default task queue. - For workers within a Data Plane, this returns only task - * queue names specific to that Data Plane. Internal-use only. - */ - Set getDataSyncTaskQueues(); - - /** - * Return the Control Plane endpoint that workers in a Data Plane will hit for authentication. This - * is separate from the actual endpoint being hit for application logic. Internal-use only. - */ - String getControlPlaneAuthEndpoint(); - - /** - * Return the service account a data plane uses to authenticate with a control plane. Internal-use - * only. - */ - String getDataPlaneServiceAccountCredentialsPath(); - - /** - * Return the service account email a data plane uses to authenticate with a control plane. - * Internal-use only. - */ - String getDataPlaneServiceAccountEmail(); - - // Worker - Kube only - - /** - * Define the local ports the Airbyte Worker pod uses to connect to the various Job pods. - */ - Set getTemporalWorkerPorts(); - - // Container Orchestrator - - /** - * Define if Airbyte should use the container orchestrator. Internal-use only. Should always be set - * to true - otherwise causes syncs to be run on workers instead. - */ - boolean getContainerOrchestratorEnabled(); - - /** - * Get the name of the container orchestrator secret. Internal-use only. - */ - String getContainerOrchestratorSecretName(); - - /** - * Get the mount path for a secret that should be loaded onto container orchestrator pods. - * Internal-use only. - */ - String getContainerOrchestratorSecretMountPath(); - - /** - * Define the image to use for the container orchestrator. Defaults to the Airbyte version. - */ - String getContainerOrchestratorImage(); - - /** - * Define the replication orchestrator's minimum CPU usage. Defaults to none. - */ - String getReplicationOrchestratorCpuRequest(); - - /** - * Define the replication orchestrator's maximum CPU usage. Defaults to none. - */ - String getReplicationOrchestratorCpuLimit(); - - /** - * Define the replication orchestrator's minimum RAM usage. Defaults to none. - */ - String getReplicationOrchestratorMemoryRequest(); - - /** - * Define the replication orchestrator's maximum RAM usage. Defaults to none. 
- */ - String getReplicationOrchestratorMemoryLimit(); - - /** - * Get the longest duration of non long running activity - */ - int getMaxActivityTimeoutSecond(); - - /** - * Get initial delay in seconds between two activity attempts - */ - int getInitialDelayBetweenActivityAttemptsSeconds(); - - /** - * Get maximum delay in seconds between two activity attempts - */ - int getMaxDelayBetweenActivityAttemptsSeconds(); - - /** - * Get the delay in seconds between an activity failing and the workflow being restarted - */ - int getWorkflowFailureRestartDelaySeconds(); - - /** - * Get number of attempts of the non long running activities - */ - int getActivityNumberOfAttempt(); - - boolean getAutoDetectSchema(); - - boolean getApplyFieldSelection(); - - String getFieldSelectionWorkspaces(); - - String getStrictComparisonNormalizationWorkspaces(); - - String getStrictComparisonNormalizationTag(); - - enum TrackingStrategy { - SEGMENT, - LOGGING - } - - enum JobErrorReportingStrategy { - SENTRY, - LOGGING - } - - enum WorkerEnvironment { - DOCKER, - KUBERNETES - } - enum DeploymentMode { OSS, CLOUD } - enum SecretPersistenceType { - NONE, - TESTING_CONFIG_DB_TABLE, - GOOGLE_SECRET_MANAGER, - VAULT, - AWS_SECRET_MANAGER - } - } diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java index c05d9f61681b5..9be7c9b5fc7b3 100644 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java +++ b/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java @@ -4,33 +4,9 @@ package io.airbyte.config; -import com.google.common.base.Preconditions; -import com.google.common.base.Splitter; -import com.google.common.base.Strings; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.map.MoreMaps; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.commons.version.Version; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.config.storage.CloudStorageConfigs; -import io.airbyte.config.storage.CloudStorageConfigs.GcsConfig; -import io.airbyte.config.storage.CloudStorageConfigs.MinioConfig; -import io.airbyte.config.storage.CloudStorageConfigs.S3Config; -import java.net.URI; -import java.nio.file.Path; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; import java.util.Map; -import java.util.Map.Entry; -import java.util.Objects; import java.util.Optional; -import java.util.Set; import java.util.function.Function; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import java.util.stream.Stream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,212 +16,13 @@ public class EnvConfigs implements Configs { private static final Logger LOGGER = LoggerFactory.getLogger(EnvConfigs.class); // env variable names - public static final String AIRBYTE_ROLE = "AIRBYTE_ROLE"; - public static final String AIRBYTE_VERSION = "AIRBYTE_VERSION"; - public static final String INTERNAL_API_HOST = "INTERNAL_API_HOST"; - public static final String AIRBYTE_API_AUTH_HEADER_NAME = "AIRBYTE_API_AUTH_HEADER_NAME"; - public static final String AIRBYTE_API_AUTH_HEADER_VALUE = "AIRBYTE_API_AUTH_HEADER_VALUE"; - public static final String WORKER_ENVIRONMENT = "WORKER_ENVIRONMENT"; public static final String SPEC_CACHE_BUCKET = "SPEC_CACHE_BUCKET"; public static final String LOCAL_CONNECTOR_CATALOG_PATH = "LOCAL_CONNECTOR_CATALOG_PATH"; 
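Several getters deleted above document the same wire format: comma-separated k=v pairs (node selectors, annotations), with tolerations adding a ';' between entries. A hedged sketch of that parsing, using the example strings quoted in this diff's javadocs; the deleted splitKVPairsFromEnvString and parseToleration methods further down are the real implementations:

import java.util.Arrays;
import java.util.Map;
import java.util.stream.Collectors;

// Sketch only: parses the "key1=value1,key2=value2" format documented in this diff.
final class KvPairsSketch {

  static Map<String, String> splitKvPairs(final String input) {
    return Arrays.stream(input.split(","))
        .filter(s -> !s.isBlank() && s.contains("="))
        .map(s -> s.split("=", 2))
        .collect(Collectors.toMap(kv -> kv[0].trim(), kv -> kv[1].trim()));
  }

  public static void main(final String[] args) {
    // Annotation example quoted in this diff's javadocs.
    System.out.println(splitKvPairs("airbyte=server,type=preemptive"));
    // Toleration example quoted in this diff's javadocs: entries split on ';' first.
    final String tolerations =
        "key=airbyte-server,operator=Exists,effect=NoSchedule;"
            + "key=airbyte-server,operator=Equals,value=true,effect=NoSchedule";
    for (final String toleration : tolerations.split(";")) {
      System.out.println(splitKvPairs(toleration));
    }
  }

}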
- public static final String GITHUB_STORE_BRANCH = "GITHUB_STORE_BRANCH"; - public static final String WORKSPACE_ROOT = "WORKSPACE_ROOT"; - public static final String WORKSPACE_DOCKER_MOUNT = "WORKSPACE_DOCKER_MOUNT"; - public static final String LOCAL_ROOT = "LOCAL_ROOT"; - public static final String LOCAL_DOCKER_MOUNT = "LOCAL_DOCKER_MOUNT"; - public static final String CONFIG_ROOT = "CONFIG_ROOT"; - public static final String DOCKER_NETWORK = "DOCKER_NETWORK"; - public static final String TRACKING_STRATEGY = "TRACKING_STRATEGY"; - public static final String JOB_ERROR_REPORTING_STRATEGY = "JOB_ERROR_REPORTING_STRATEGY"; - public static final String JOB_ERROR_REPORTING_SENTRY_DSN = "JOB_ERROR_REPORTING_SENTRY_DSN"; - public static final String DEPLOYMENT_MODE = "DEPLOYMENT_MODE"; - public static final String DATABASE_USER = "DATABASE_USER"; - public static final String DATABASE_PASSWORD = "DATABASE_PASSWORD"; - public static final String DATABASE_URL = "DATABASE_URL"; - public static final String CONFIG_DATABASE_USER = "CONFIG_DATABASE_USER"; - public static final String CONFIG_DATABASE_PASSWORD = "CONFIG_DATABASE_PASSWORD"; - public static final String CONFIG_DATABASE_URL = "CONFIG_DATABASE_URL"; - public static final String RUN_DATABASE_MIGRATION_ON_STARTUP = "RUN_DATABASE_MIGRATION_ON_STARTUP"; - public static final String WEBAPP_URL = "WEBAPP_URL"; - public static final String JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_POLICY = "JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_POLICY"; - public static final String JOB_KUBE_SIDECAR_CONTAINER_IMAGE_PULL_POLICY = "JOB_KUBE_SIDECAR_CONTAINER_IMAGE_PULL_POLICY"; - public static final String JOB_KUBE_TOLERATIONS = "JOB_KUBE_TOLERATIONS"; - public static final String JOB_KUBE_NODE_SELECTORS = "JOB_KUBE_NODE_SELECTORS"; - public static final String JOB_ISOLATED_KUBE_NODE_SELECTORS = "JOB_ISOLATED_KUBE_NODE_SELECTORS"; - public static final String USE_CUSTOM_NODE_SELECTOR = "USE_CUSTOM_NODE_SELECTOR"; - public static final String JOB_KUBE_ANNOTATIONS = "JOB_KUBE_ANNOTATIONS"; - private static final String DEFAULT_SIDECAR_MEMORY_REQUEST = "25Mi"; - private static final String SIDECAR_MEMORY_REQUEST = "SIDECAR_MEMORY_REQUEST"; - private static final String DEFAULT_SIDECAR_KUBE_MEMORY_LIMIT = "50Mi"; - private static final String SIDECAR_KUBE_MEMORY_LIMIT = "SIDECAR_KUBE_MEMORY_LIMIT"; - private static final String DEFAULT_SIDECAR_KUBE_CPU_REQUEST = "0.1"; - private static final String SIDECAR_KUBE_CPU_REQUEST = "SIDECAR_KUBE_CPU_REQUEST"; - // Test show at least 1.5 CPU is required to hit >20 Mb/s. Overprovision to ensure sidecar resources - // do not cause bottlenecks. - // This is fine as the limit only affects whether the container is throttled by Kube. It does not - // affect scheduling. 
- private static final String DEFAULT_SIDECAR_KUBE_CPU_LIMIT = "2.0"; - private static final String SIDECAR_KUBE_CPU_LIMIT = "SIDECAR_KUBE_CPU_LIMIT"; - public static final String JOB_KUBE_SOCAT_IMAGE = "JOB_KUBE_SOCAT_IMAGE"; - public static final String SOCAT_KUBE_CPU_LIMIT = "SOCAT_KUBE_CPU_LIMIT"; - public static final String SOCAT_KUBE_CPU_REQUEST = "SOCAT_KUBE_CPU_REQUEST"; - public static final String JOB_KUBE_BUSYBOX_IMAGE = "JOB_KUBE_BUSYBOX_IMAGE"; - public static final String JOB_KUBE_CURL_IMAGE = "JOB_KUBE_CURL_IMAGE"; - public static final String SYNC_JOB_MAX_ATTEMPTS = "SYNC_JOB_MAX_ATTEMPTS"; - public static final String SYNC_JOB_MAX_TIMEOUT_DAYS = "SYNC_JOB_MAX_TIMEOUT_DAYS"; - private static final String CONNECTOR_SPECIFIC_RESOURCE_DEFAULTS_ENABLED = "CONNECTOR_SPECIFIC_RESOURCE_DEFAULTS_ENABLED"; - public static final String MAX_SPEC_WORKERS = "MAX_SPEC_WORKERS"; - public static final String MAX_CHECK_WORKERS = "MAX_CHECK_WORKERS"; - public static final String MAX_DISCOVER_WORKERS = "MAX_DISCOVER_WORKERS"; - public static final String MAX_SYNC_WORKERS = "MAX_SYNC_WORKERS"; - public static final String MAX_NOTIFY_WORKERS = "MAX_NOTIFY_WORKERS"; - private static final String TEMPORAL_HOST = "TEMPORAL_HOST"; - private static final String TEMPORAL_WORKER_PORTS = "TEMPORAL_WORKER_PORTS"; - private static final String TEMPORAL_HISTORY_RETENTION_IN_DAYS = "TEMPORAL_HISTORY_RETENTION_IN_DAYS"; - public static final String JOB_KUBE_NAMESPACE = "JOB_KUBE_NAMESPACE"; - public static final String JOB_MAIN_CONTAINER_CPU_REQUEST = "JOB_MAIN_CONTAINER_CPU_REQUEST"; - public static final String JOB_MAIN_CONTAINER_CPU_LIMIT = "JOB_MAIN_CONTAINER_CPU_LIMIT"; - public static final String JOB_MAIN_CONTAINER_MEMORY_REQUEST = "JOB_MAIN_CONTAINER_MEMORY_REQUEST"; - public static final String JOB_MAIN_CONTAINER_MEMORY_LIMIT = "JOB_MAIN_CONTAINER_MEMORY_LIMIT"; - public static final String JOB_DEFAULT_ENV_MAP = "JOB_DEFAULT_ENV_MAP"; - public static final String JOB_DEFAULT_ENV_PREFIX = "JOB_DEFAULT_ENV_"; - private static final String SECRET_PERSISTENCE = "SECRET_PERSISTENCE"; - public static final String JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET = "JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET"; - public static final String PUBLISH_METRICS = "PUBLISH_METRICS"; - public static final String DD_AGENT_HOST = "DD_AGENT_HOST"; - public static final String DD_DOGSTATSD_PORT = "DD_DOGSTATSD_PORT"; - private static final String CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION = "CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION"; - private static final String CONFIGS_DATABASE_INITIALIZATION_TIMEOUT_MS = "CONFIGS_DATABASE_INITIALIZATION_TIMEOUT_MS"; - private static final String JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION = "JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION"; - private static final String JOBS_DATABASE_INITIALIZATION_TIMEOUT_MS = "JOBS_DATABASE_INITIALIZATION_TIMEOUT_MS"; - private static final String CONTAINER_ORCHESTRATOR_ENABLED = "CONTAINER_ORCHESTRATOR_ENABLED"; - private static final String CONTAINER_ORCHESTRATOR_SECRET_NAME = "CONTAINER_ORCHESTRATOR_SECRET_NAME"; - private static final String CONTAINER_ORCHESTRATOR_SECRET_MOUNT_PATH = "CONTAINER_ORCHESTRATOR_SECRET_MOUNT_PATH"; - private static final String CONTAINER_ORCHESTRATOR_IMAGE = "CONTAINER_ORCHESTRATOR_IMAGE"; - public static final String DD_CONSTANT_TAGS = "DD_CONSTANT_TAGS"; - public static final String STATE_STORAGE_S3_BUCKET_NAME = "STATE_STORAGE_S3_BUCKET_NAME"; - public static final String STATE_STORAGE_S3_REGION = 
"STATE_STORAGE_S3_REGION"; - public static final String STATE_STORAGE_S3_ACCESS_KEY = "STATE_STORAGE_S3_ACCESS_KEY"; - public static final String STATE_STORAGE_S3_SECRET_ACCESS_KEY = "STATE_STORAGE_S3_SECRET_ACCESS_KEY"; - public static final String STATE_STORAGE_MINIO_BUCKET_NAME = "STATE_STORAGE_MINIO_BUCKET_NAME"; - public static final String STATE_STORAGE_MINIO_ENDPOINT = "STATE_STORAGE_MINIO_ENDPOINT"; - public static final String STATE_STORAGE_MINIO_ACCESS_KEY = "STATE_STORAGE_MINIO_ACCESS_KEY"; - public static final String STATE_STORAGE_MINIO_SECRET_ACCESS_KEY = "STATE_STORAGE_MINIO_SECRET_ACCESS_KEY"; - public static final String STATE_STORAGE_GCS_BUCKET_NAME = "STATE_STORAGE_GCS_BUCKET_NAME"; - public static final String STATE_STORAGE_GCS_APPLICATION_CREDENTIALS = "STATE_STORAGE_GCS_APPLICATION_CREDENTIALS"; - - private static final String TEMPORAL_CLOUD_ENABLED = "TEMPORAL_CLOUD_ENABLED"; - private static final String TEMPORAL_CLOUD_HOST = "TEMPORAL_CLOUD_HOST"; - private static final String TEMPORAL_CLOUD_NAMESPACE = "TEMPORAL_CLOUD_NAMESPACE"; - private static final String TEMPORAL_CLOUD_CLIENT_CERT = "TEMPORAL_CLOUD_CLIENT_CERT"; - private static final String TEMPORAL_CLOUD_CLIENT_KEY = "TEMPORAL_CLOUD_CLIENT_KEY"; - - public static final String ACTIVITY_MAX_TIMEOUT_SECOND = "ACTIVITY_MAX_TIMEOUT_SECOND"; - public static final String ACTIVITY_MAX_ATTEMPT = "ACTIVITY_MAX_ATTEMPT"; - public static final String ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS = "ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS"; - public static final String ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS = "ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS"; - public static final String WORKFLOW_FAILURE_RESTART_DELAY_SECONDS = "WORKFLOW_FAILURE_RESTART_DELAY_SECONDS"; - - private static final String SHOULD_RUN_GET_SPEC_WORKFLOWS = "SHOULD_RUN_GET_SPEC_WORKFLOWS"; - private static final String SHOULD_RUN_CHECK_CONNECTION_WORKFLOWS = "SHOULD_RUN_CHECK_CONNECTION_WORKFLOWS"; - private static final String SHOULD_RUN_DISCOVER_WORKFLOWS = "SHOULD_RUN_DISCOVER_WORKFLOWS"; - private static final String SHOULD_RUN_SYNC_WORKFLOWS = "SHOULD_RUN_SYNC_WORKFLOWS"; - private static final String SHOULD_RUN_CONNECTION_MANAGER_WORKFLOWS = "SHOULD_RUN_CONNECTION_MANAGER_WORKFLOWS"; - private static final String SHOULD_RUN_NOTIFY_WORKFLOWS = "SHOULD_RUN_NOTIFY_WORKFLOWS"; - - // Worker - Control plane configs - private static final String DEFAULT_DATA_SYNC_TASK_QUEUES = "SYNC"; // should match TemporalJobType.SYNC.name() - - // Worker - Data Plane configs - private static final String DATA_SYNC_TASK_QUEUES = "DATA_SYNC_TASK_QUEUES"; - private static final String CONTROL_PLANE_AUTH_ENDPOINT = "CONTROL_PLANE_AUTH_ENDPOINT"; - private static final String DATA_PLANE_SERVICE_ACCOUNT_CREDENTIALS_PATH = "DATA_PLANE_SERVICE_ACCOUNT_CREDENTIALS_PATH"; - private static final String DATA_PLANE_SERVICE_ACCOUNT_EMAIL = "DATA_PLANE_SERVICE_ACCOUNT_EMAIL"; - - private static final String MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE = "MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE"; - private static final String MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE = "MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE"; - - public static final String METRIC_CLIENT = "METRIC_CLIENT"; - private static final String OTEL_COLLECTOR_ENDPOINT = "OTEL_COLLECTOR_ENDPOINT"; - - public static final String REMOTE_CONNECTOR_CATALOG_URL = "REMOTE_CONNECTOR_CATALOG_URL"; - - // job-type-specific overrides - public static final String 
SPEC_JOB_KUBE_NODE_SELECTORS = "SPEC_JOB_KUBE_NODE_SELECTORS"; - public static final String CHECK_JOB_KUBE_NODE_SELECTORS = "CHECK_JOB_KUBE_NODE_SELECTORS"; - public static final String DISCOVER_JOB_KUBE_NODE_SELECTORS = "DISCOVER_JOB_KUBE_NODE_SELECTORS"; - public static final String SPEC_JOB_KUBE_ANNOTATIONS = "SPEC_JOB_KUBE_ANNOTATIONS"; - public static final String CHECK_JOB_KUBE_ANNOTATIONS = "CHECK_JOB_KUBE_ANNOTATIONS"; - public static final String DISCOVER_JOB_KUBE_ANNOTATIONS = "DISCOVER_JOB_KUBE_ANNOTATIONS"; - - private static final String REPLICATION_ORCHESTRATOR_CPU_REQUEST = "REPLICATION_ORCHESTRATOR_CPU_REQUEST"; - private static final String REPLICATION_ORCHESTRATOR_CPU_LIMIT = "REPLICATION_ORCHESTRATOR_CPU_LIMIT"; - private static final String REPLICATION_ORCHESTRATOR_MEMORY_REQUEST = "REPLICATION_ORCHESTRATOR_MEMORY_REQUEST"; - private static final String REPLICATION_ORCHESTRATOR_MEMORY_LIMIT = "REPLICATION_ORCHESTRATOR_MEMORY_LIMIT"; - - static final String CHECK_JOB_MAIN_CONTAINER_CPU_REQUEST = "CHECK_JOB_MAIN_CONTAINER_CPU_REQUEST"; - static final String CHECK_JOB_MAIN_CONTAINER_CPU_LIMIT = "CHECK_JOB_MAIN_CONTAINER_CPU_LIMIT"; - static final String CHECK_JOB_MAIN_CONTAINER_MEMORY_REQUEST = "CHECK_JOB_MAIN_CONTAINER_MEMORY_REQUEST"; - static final String CHECK_JOB_MAIN_CONTAINER_MEMORY_LIMIT = "CHECK_JOB_MAIN_CONTAINER_MEMORY_LIMIT"; - - static final String NORMALIZATION_JOB_MAIN_CONTAINER_CPU_REQUEST = "NORMALIZATION_JOB_MAIN_CONTAINER_CPU_REQUEST"; - static final String NORMALIZATION_JOB_MAIN_CONTAINER_CPU_LIMIT = "NORMALIZATION_JOB_MAIN_CONTAINER_CPU_LIMIT"; - static final String NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_REQUEST = "NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_REQUEST"; - static final String NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_LIMIT = "NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_LIMIT"; - - private static final String VAULT_ADDRESS = "VAULT_ADDRESS"; - private static final String VAULT_PREFIX = "VAULT_PREFIX"; - private static final String VAULT_AUTH_TOKEN = "VAULT_AUTH_TOKEN"; // defaults private static final String DEFAULT_SPEC_CACHE_BUCKET = "io-airbyte-cloud-spec-cache"; - private static final String DEFAULT_GITHUB_STORE_BRANCH = "master"; - private static final String DEFAULT_JOB_KUBE_NAMESPACE = "default"; - private static final String DEFAULT_JOB_CPU_REQUIREMENT = null; - private static final String DEFAULT_JOB_MEMORY_REQUIREMENT = null; - private static final String DEFAULT_JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_POLICY = "IfNotPresent"; - private static final String DEFAULT_JOB_KUBE_SIDECAR_CONTAINER_IMAGE_PULL_POLICY = "IfNotPresent"; - private static final String SECRET_STORE_GCP_PROJECT_ID = "SECRET_STORE_GCP_PROJECT_ID"; - private static final String SECRET_STORE_GCP_CREDENTIALS = "SECRET_STORE_GCP_CREDENTIALS"; - private static final String AWS_ACCESS_KEY = "AWS_ACCESS_KEY"; - private static final String AWS_SECRET_ACCESS_KEY = "AWS_SECRET_ACCESS_KEY"; - private static final String DEFAULT_JOB_KUBE_SOCAT_IMAGE = "alpine/socat:1.7.4.4-r0"; - private static final String DEFAULT_JOB_KUBE_BUSYBOX_IMAGE = "busybox:1.35"; - private static final String DEFAULT_JOB_KUBE_CURL_IMAGE = "curlimages/curl:7.87.0"; - private static final int DEFAULT_DATABASE_INITIALIZATION_TIMEOUT_MS = 60 * 1000; - private static final long DEFAULT_MAX_SPEC_WORKERS = 5; - private static final long DEFAULT_MAX_CHECK_WORKERS = 5; - private static final long DEFAULT_MAX_DISCOVER_WORKERS = 5; - private static final long DEFAULT_MAX_SYNC_WORKERS = 5; - private static final long 
DEFAULT_MAX_NOTIFY_WORKERS = 5; - private static final String DEFAULT_NETWORK = "host"; - private static final Version DEFAULT_AIRBYTE_PROTOCOL_VERSION_MAX = new Version("0.3.0"); - private static final Version DEFAULT_AIRBYTE_PROTOCOL_VERSION_MIN = new Version("0.0.0"); - private static final String AUTO_DETECT_SCHEMA = "AUTO_DETECT_SCHEMA"; - private static final String APPLY_FIELD_SELECTION = "APPLY_FIELD_SELECTION"; - private static final String FIELD_SELECTION_WORKSPACES = "FIELD_SELECTION_WORKSPACES"; - - private static final String STRICT_COMPARISON_NORMALIZATION_WORKSPACES = "STRICT_COMPARISON_NORMALIZATION_WORKSPACES"; - private static final String STRICT_COMPARISON_NORMALIZATION_TAG = "STRICT_COMPARISON_NORMALIZATION_TAG"; - - public static final Map> JOB_SHARED_ENVS = Map.of( - AIRBYTE_VERSION, (instance) -> instance.getAirbyteVersion().serialize(), - AIRBYTE_ROLE, EnvConfigs::getAirbyteRole, - DEPLOYMENT_MODE, (instance) -> instance.getDeploymentMode().name(), - WORKER_ENVIRONMENT, (instance) -> instance.getWorkerEnvironment().name()); - - public static final int DEFAULT_TEMPORAL_HISTORY_RETENTION_IN_DAYS = 30; - - public static final int DEFAULT_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE = 100; - public static final int DEFAULT_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE = 14; - public static final String LAUNCHDARKLY_KEY = "LAUNCHDARKLY_KEY"; - - public static final String FEATURE_FLAG_CLIENT = "FEATURE_FLAG_CLIENT"; private final Function getEnv; - private final Supplier> getAllEnvKeys; - private final LogConfigs logConfigs; - private final CloudStorageConfigs stateStorageCloudConfigs; /** * Constructs {@link EnvConfigs} from actual environment variables. @@ -259,85 +36,7 @@ public EnvConfigs() { * variables from a non-envvar source. 
*/ public EnvConfigs(final Map envMap) { - this.getEnv = envMap::get; - this.getAllEnvKeys = envMap::keySet; - this.logConfigs = new LogConfigs(getLogConfiguration()); - this.stateStorageCloudConfigs = getStateStorageConfiguration().orElse(null); - } - - private Optional getLogConfiguration() { - if (getEnv(LogClientSingleton.GCS_LOG_BUCKET) != null && !getEnv(LogClientSingleton.GCS_LOG_BUCKET).isBlank()) { - return Optional.of(CloudStorageConfigs.gcs(new GcsConfig( - getEnvOrDefault(LogClientSingleton.GCS_LOG_BUCKET, ""), - getEnvOrDefault(LogClientSingleton.GOOGLE_APPLICATION_CREDENTIALS, "")))); - } else if (getEnv(LogClientSingleton.S3_MINIO_ENDPOINT) != null && !getEnv(LogClientSingleton.S3_MINIO_ENDPOINT).isBlank()) { - return Optional.of(CloudStorageConfigs.minio(new MinioConfig( - getEnvOrDefault(LogClientSingleton.S3_LOG_BUCKET, ""), - getEnvOrDefault(LogClientSingleton.AWS_ACCESS_KEY_ID, ""), - getEnvOrDefault(LogClientSingleton.AWS_SECRET_ACCESS_KEY, ""), - getEnvOrDefault(LogClientSingleton.S3_MINIO_ENDPOINT, "")))); - } else if (getEnv(LogClientSingleton.S3_LOG_BUCKET_REGION) != null && !getEnv(LogClientSingleton.S3_LOG_BUCKET_REGION).isBlank()) { - return Optional.of(CloudStorageConfigs.s3(new S3Config( - getEnvOrDefault(LogClientSingleton.S3_LOG_BUCKET, ""), - getEnvOrDefault(LogClientSingleton.AWS_ACCESS_KEY_ID, ""), - getEnvOrDefault(LogClientSingleton.AWS_SECRET_ACCESS_KEY, ""), - getEnvOrDefault(LogClientSingleton.S3_LOG_BUCKET_REGION, "")))); - } else { - return Optional.empty(); - } - } - - private Optional getStateStorageConfiguration() { - if (getEnv(STATE_STORAGE_GCS_BUCKET_NAME) != null && !getEnv(STATE_STORAGE_GCS_BUCKET_NAME).isBlank()) { - return Optional.of(CloudStorageConfigs.gcs(new GcsConfig( - getEnvOrDefault(STATE_STORAGE_GCS_BUCKET_NAME, ""), - getEnvOrDefault(STATE_STORAGE_GCS_APPLICATION_CREDENTIALS, "")))); - } else if (getEnv(STATE_STORAGE_MINIO_ENDPOINT) != null && !getEnv(STATE_STORAGE_MINIO_ENDPOINT).isBlank()) { - return Optional.of(CloudStorageConfigs.minio(new MinioConfig( - getEnvOrDefault(STATE_STORAGE_MINIO_BUCKET_NAME, ""), - getEnvOrDefault(STATE_STORAGE_MINIO_ACCESS_KEY, ""), - getEnvOrDefault(STATE_STORAGE_MINIO_SECRET_ACCESS_KEY, ""), - getEnvOrDefault(STATE_STORAGE_MINIO_ENDPOINT, "")))); - } else if (getEnv(STATE_STORAGE_S3_REGION) != null && !getEnv(STATE_STORAGE_S3_REGION).isBlank()) { - return Optional.of(CloudStorageConfigs.s3(new S3Config( - getEnvOrDefault(STATE_STORAGE_S3_BUCKET_NAME, ""), - getEnvOrDefault(STATE_STORAGE_S3_ACCESS_KEY, ""), - getEnvOrDefault(STATE_STORAGE_S3_SECRET_ACCESS_KEY, ""), - getEnvOrDefault(STATE_STORAGE_S3_REGION, "")))); - } else { - return Optional.empty(); - } - } - - // CORE - // General - @Override - public String getAirbyteRole() { - return getEnv(AIRBYTE_ROLE); - } - - @Override - public AirbyteVersion getAirbyteVersion() { - return new AirbyteVersion(getEnsureEnv(AIRBYTE_VERSION)); - } - - @Override - public Version getAirbyteProtocolVersionMax() { - return DEFAULT_AIRBYTE_PROTOCOL_VERSION_MAX; - } - - @Override - public Version getAirbyteProtocolVersionMin() { - return DEFAULT_AIRBYTE_PROTOCOL_VERSION_MIN; - } - - @Override - public String getAirbyteVersionOrWarning() { - return Optional.ofNullable(getEnv(AIRBYTE_VERSION)).orElse("version not set"); - } - - public String getGithubStoreBranch() { - return getEnvOrDefault(GITHUB_STORE_BRANCH, DEFAULT_GITHUB_STORE_BRANCH); + getEnv = envMap::get; } @Override @@ -349,857 +48,13 @@ public Optional getLocalCatalogPath() { return 
Optional.ofNullable(getEnv(LOCAL_CONNECTOR_CATALOG_PATH)); } - @Override - public DeploymentMode getDeploymentMode() { - return getEnvOrDefault(DEPLOYMENT_MODE, DeploymentMode.OSS, s -> { - try { - return DeploymentMode.valueOf(s); - } catch (final IllegalArgumentException e) { - LOGGER.info(s + " not recognized, defaulting to " + DeploymentMode.OSS); - return DeploymentMode.OSS; - } - }); - } - - @Override - public WorkerEnvironment getWorkerEnvironment() { - return getEnvOrDefault(WORKER_ENVIRONMENT, WorkerEnvironment.DOCKER, s -> WorkerEnvironment.valueOf(s.toUpperCase())); - } - - @Override - public Path getConfigRoot() { - return getPath(CONFIG_ROOT); - } - - @Override - public Path getWorkspaceRoot() { - return getPath(WORKSPACE_ROOT); - } - - @Override - public Optional getRemoteConnectorCatalogUrl() { - final String remoteConnectorCatalogUrl = getEnvOrDefault(REMOTE_CONNECTOR_CATALOG_URL, null); - if (remoteConnectorCatalogUrl != null) { - return Optional.of(URI.create(remoteConnectorCatalogUrl)); - } else { - return Optional.empty(); - } - } - - // Docker Only - @Override - public String getWorkspaceDockerMount() { - return getEnvOrDefault(WORKSPACE_DOCKER_MOUNT, getWorkspaceRoot().toString()); - } - - @Override - public String getLocalDockerMount() { - return getEnvOrDefault(LOCAL_DOCKER_MOUNT, getLocalRoot().toString()); - } - - @Override - public String getDockerNetwork() { - return getEnvOrDefault(DOCKER_NETWORK, DEFAULT_NETWORK); - } - - @Override - public Path getLocalRoot() { - return getPath(LOCAL_ROOT); - } - - // Secrets - @Override - public String getSecretStoreGcpCredentials() { - return getEnv(SECRET_STORE_GCP_CREDENTIALS); - } - - @Override - public String getSecretStoreGcpProjectId() { - return getEnv(SECRET_STORE_GCP_PROJECT_ID); - } - - @Override - public SecretPersistenceType getSecretPersistenceType() { - final var secretPersistenceStr = getEnvOrDefault(SECRET_PERSISTENCE, SecretPersistenceType.TESTING_CONFIG_DB_TABLE.name()); - return SecretPersistenceType.valueOf(secretPersistenceStr); - } - - @Override - public String getVaultAddress() { - return getEnv(VAULT_ADDRESS); - } - - @Override - public String getVaultPrefix() { - return getEnvOrDefault(VAULT_PREFIX, ""); - } - - @Override - public String getVaultToken() { - return getEnv(VAULT_AUTH_TOKEN); - } - - @Override - public String getAwsAccessKey() { - return getEnv(AWS_ACCESS_KEY); - } - - @Override - public String getAwsSecretAccessKey() { - return getEnv(AWS_SECRET_ACCESS_KEY); - } - - // Database - @Override - public String getDatabaseUser() { - return getEnsureEnv(DATABASE_USER); - } - - @Override - public String getDatabasePassword() { - return getEnsureEnv(DATABASE_PASSWORD); - } - - @Override - public String getDatabaseUrl() { - return getEnsureEnv(DATABASE_URL); - } - - @Override - public String getJobsDatabaseMinimumFlywayMigrationVersion() { - return getEnsureEnv(JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION); - } - - @Override - public long getJobsDatabaseInitializationTimeoutMs() { - return getEnvOrDefault(JOBS_DATABASE_INITIALIZATION_TIMEOUT_MS, DEFAULT_DATABASE_INITIALIZATION_TIMEOUT_MS); - } - - @Override - public String getConfigDatabaseUser() { - // Default to reuse the job database - return getEnvOrDefault(CONFIG_DATABASE_USER, getDatabaseUser()); - } - - @Override - public String getConfigDatabasePassword() { - // Default to reuse the job database - return getEnvOrDefault(CONFIG_DATABASE_PASSWORD, getDatabasePassword(), true); - } - - @Override - public String getConfigDatabaseUrl() { - 
// Default to reuse the job database - return getEnvOrDefault(CONFIG_DATABASE_URL, getDatabaseUrl()); - } - - @Override - public String getConfigsDatabaseMinimumFlywayMigrationVersion() { - return getEnsureEnv(CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION); - } - - @Override - public long getConfigsDatabaseInitializationTimeoutMs() { - return getEnvOrDefault(CONFIGS_DATABASE_INITIALIZATION_TIMEOUT_MS, DEFAULT_DATABASE_INITIALIZATION_TIMEOUT_MS); - } - - @Override - public boolean runDatabaseMigrationOnStartup() { - return getEnvOrDefault(RUN_DATABASE_MIGRATION_ON_STARTUP, true); - } - - // Temporal Cloud - @Override - public boolean temporalCloudEnabled() { - return getEnvOrDefault(TEMPORAL_CLOUD_ENABLED, false); - } - - @Override - public String getTemporalCloudHost() { - return getEnvOrDefault(TEMPORAL_CLOUD_HOST, ""); - } - - @Override - public String getTemporalCloudNamespace() { - return getEnvOrDefault(TEMPORAL_CLOUD_NAMESPACE, ""); - } - - @Override - public String getTemporalCloudClientCert() { - return getEnvOrDefault(TEMPORAL_CLOUD_CLIENT_CERT, ""); - } - - @Override - public String getTemporalCloudClientKey() { - return getEnvOrDefault(TEMPORAL_CLOUD_CLIENT_KEY, ""); - } - - // Airbyte Services - @Override - public String getTemporalHost() { - return getEnvOrDefault(TEMPORAL_HOST, "airbyte-temporal:7233"); - } - - @Override - public int getTemporalRetentionInDays() { - return getEnvOrDefault(TEMPORAL_HISTORY_RETENTION_IN_DAYS, DEFAULT_TEMPORAL_HISTORY_RETENTION_IN_DAYS); - } - - @Override - public String getAirbyteApiHost() { - return getEnsureEnv(INTERNAL_API_HOST).split(":")[0]; - } - - @Override - public int getAirbyteApiPort() { - return Integer.parseInt(getEnsureEnv(INTERNAL_API_HOST).split(":")[1]); - } - - @Override - public String getAirbyteApiAuthHeaderName() { - return getEnvOrDefault(AIRBYTE_API_AUTH_HEADER_NAME, ""); - } - - @Override - public String getAirbyteApiAuthHeaderValue() { - return getEnvOrDefault(AIRBYTE_API_AUTH_HEADER_VALUE, ""); - } - - @Override - public String getWebappUrl() { - return getEnsureEnv(WEBAPP_URL); - } - - // Jobs - @Override - public int getSyncJobMaxAttempts() { - return Integer.parseInt(getEnvOrDefault(SYNC_JOB_MAX_ATTEMPTS, "3")); - } - - @Override - public int getSyncJobMaxTimeoutDays() { - return Integer.parseInt(getEnvOrDefault(SYNC_JOB_MAX_TIMEOUT_DAYS, "3")); - } - - @Override - public boolean connectorSpecificResourceDefaultsEnabled() { - return getEnvOrDefault(CONNECTOR_SPECIFIC_RESOURCE_DEFAULTS_ENABLED, false); - } - - /** - * Returns worker pod tolerations parsed from its own environment variable. The value of the env is - * a string that represents one or more tolerations. - *

- * <ul>
- * <li>Tolerations are separated by a `;`</li>
- * <li>Each toleration contains k=v pairs mentioning some/all of key, effect, operator and value and
- * separated by `,`</li>
- * </ul>
- * <p>
- * For example:- The following represents two tolerations, one checking existence and another
- * matching a value
- * <p>
- * key=airbyte-server,operator=Exists,effect=NoSchedule;key=airbyte-server,operator=Equals,value=true,effect=NoSchedule - * - * @return list of WorkerKubeToleration parsed from env - */ - @Override - public List getJobKubeTolerations() { - final String tolerationsStr = getEnvOrDefault(JOB_KUBE_TOLERATIONS, ""); - - final Stream tolerations = Strings.isNullOrEmpty(tolerationsStr) ? Stream.of() - : Splitter.on(";") - .splitToStream(tolerationsStr) - .filter(tolerationStr -> !Strings.isNullOrEmpty(tolerationStr)); - - return tolerations - .map(this::parseToleration) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } - - private TolerationPOJO parseToleration(final String tolerationStr) { - final Map tolerationMap = Splitter.on(",") - .splitToStream(tolerationStr) - .map(s -> s.split("=")) - .collect(Collectors.toMap(s -> s[0], s -> s[1])); - - if (tolerationMap.containsKey("key") && tolerationMap.containsKey("effect") && tolerationMap.containsKey("operator")) { - return new TolerationPOJO( - tolerationMap.get("key"), - tolerationMap.get("effect"), - tolerationMap.get("value"), - tolerationMap.get("operator")); - } else { - LOGGER.warn( - "Ignoring toleration {}, missing one of key,effect or operator", - tolerationStr); - return null; - } - } - - /** - * Returns a map of node selectors for any job type. Used as a default if a particular job type does - * not define its own node selector environment variable. - * - * @return map containing kv pairs of node selectors, or empty optional if none present. - */ - @Override - public Map getJobKubeNodeSelectors() { - return splitKVPairsFromEnvString(getEnvOrDefault(JOB_KUBE_NODE_SELECTORS, "")); - } - - @Override - public Map getIsolatedJobKubeNodeSelectors() { - return splitKVPairsFromEnvString(getEnvOrDefault(JOB_ISOLATED_KUBE_NODE_SELECTORS, "")); - } - - @Override - public boolean getUseCustomKubeNodeSelector() { - return getEnvOrDefault(USE_CUSTOM_NODE_SELECTOR, false); - } - - /** - * Returns a map of node selectors for Spec job pods specifically. - * - * @return map containing kv pairs of node selectors, or empty optional if none present. - */ - @Override - public Map getSpecJobKubeNodeSelectors() { - return splitKVPairsFromEnvString(getEnvOrDefault(SPEC_JOB_KUBE_NODE_SELECTORS, "")); - } - - /** - * Returns a map of node selectors for Check job pods specifically. - * - * @return map containing kv pairs of node selectors, or empty optional if none present. - */ - @Override - public Map getCheckJobKubeNodeSelectors() { - return splitKVPairsFromEnvString(getEnvOrDefault(CHECK_JOB_KUBE_NODE_SELECTORS, "")); - } - - /** - * Returns a map of node selectors for Discover job pods specifically. - * - * @return map containing kv pairs of node selectors, or empty optional if none present. - */ - @Override - public Map getDiscoverJobKubeNodeSelectors() { - return splitKVPairsFromEnvString(getEnvOrDefault(DISCOVER_JOB_KUBE_NODE_SELECTORS, "")); - } - - /** - * Returns a map of annotations from its own environment variable. The value of the env is a string - * that represents one or more annotations. Each kv-pair is separated by a `,` - *
- * <p>
- * For example:- The following represents two annotations
- * <p>
- * airbyte=server,type=preemptive - * - * @return map containing kv pairs of annotations - */ - @Override - public Map getJobKubeAnnotations() { - return splitKVPairsFromEnvString(getEnvOrDefault(JOB_KUBE_ANNOTATIONS, "")); - } - - /** - * Returns a map of node selectors for Spec job pods specifically. - * - * @return map containing kv pairs of node selectors, or empty optional if none present. - */ - @Override - public Map getSpecJobKubeAnnotations() { - return splitKVPairsFromEnvString(getEnvOrDefault(SPEC_JOB_KUBE_ANNOTATIONS, "")); - } - - /** - * Returns a map of node selectors for Check job pods specifically. - * - * @return map containing kv pairs of node selectors, or empty optional if none present. - */ - @Override - public Map getCheckJobKubeAnnotations() { - return splitKVPairsFromEnvString(getEnvOrDefault(CHECK_JOB_KUBE_ANNOTATIONS, "")); - } - - /** - * Returns a map of node selectors for Discover job pods specifically. - * - * @return map containing kv pairs of node selectors, or empty optional if none present. - */ - @Override - public Map getDiscoverJobKubeAnnotations() { - return splitKVPairsFromEnvString(getEnvOrDefault(DISCOVER_JOB_KUBE_ANNOTATIONS, "")); - } - - /** - * Splits key value pairs from the input string into a map. Each kv-pair is separated by a ','. The - * key and the value are separated by '='. - *
- * <p>
- * For example:- The following represents two map entries
- * <p>
- * key1=value1,key2=value2 - * - * @param input string - * @return map containing kv pairs - */ - public Map splitKVPairsFromEnvString(String input) { - if (input == null) { - input = ""; - } - final Map map = Splitter.on(",") - .splitToStream(input) - .filter(s -> !Strings.isNullOrEmpty(s) && s.contains("=")) - .map(s -> s.split("=")) - .collect(Collectors.toMap(s -> s[0].trim(), s -> s[1].trim())); - return map.isEmpty() ? null : map; - } - - @Override - public String getJobKubeMainContainerImagePullPolicy() { - return getEnvOrDefault(JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_POLICY, DEFAULT_JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_POLICY); - } - - @Override - public String getJobKubeSidecarContainerImagePullPolicy() { - return getEnvOrDefault(JOB_KUBE_SIDECAR_CONTAINER_IMAGE_PULL_POLICY, DEFAULT_JOB_KUBE_SIDECAR_CONTAINER_IMAGE_PULL_POLICY); - } - - /** - * Returns the name of the secret to be used when pulling down docker images for jobs. Automatically - * injected in the KubePodProcess class and used in the job pod templates. - *
- * <p>
- * Can provide multiple strings seperated by comma(,) to indicate pulling from different - * repositories. The empty string is a no-op value. - */ - @Override - public List getJobKubeMainContainerImagePullSecrets() { - final String secrets = getEnvOrDefault(JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET, ""); - return Arrays.stream(secrets.split(",")).collect(Collectors.toList()); - } - - @Override - public String getSidecarKubeCpuRequest() { - return getEnvOrDefault(SIDECAR_KUBE_CPU_REQUEST, DEFAULT_SIDECAR_KUBE_CPU_REQUEST); - } - - @Override - public String getSidecarKubeCpuLimit() { - return getEnvOrDefault(SIDECAR_KUBE_CPU_LIMIT, DEFAULT_SIDECAR_KUBE_CPU_LIMIT); - } - - @Override - public String getSidecarKubeMemoryLimit() { - return getEnvOrDefault(SIDECAR_KUBE_MEMORY_LIMIT, DEFAULT_SIDECAR_KUBE_MEMORY_LIMIT); - } - - @Override - public String getSidecarMemoryRequest() { - return getEnvOrDefault(SIDECAR_MEMORY_REQUEST, DEFAULT_SIDECAR_MEMORY_REQUEST); - } - - @Override - public String getJobKubeSocatImage() { - return getEnvOrDefault(JOB_KUBE_SOCAT_IMAGE, DEFAULT_JOB_KUBE_SOCAT_IMAGE); - } - - @Override - public String getSocatSidecarKubeCpuRequest() { - return getEnvOrDefault(SOCAT_KUBE_CPU_REQUEST, getSidecarKubeCpuRequest()); - } - - @Override - public String getSocatSidecarKubeCpuLimit() { - return getEnvOrDefault(SOCAT_KUBE_CPU_LIMIT, getSidecarKubeCpuLimit()); - } - - @Override - public String getJobKubeBusyboxImage() { - return getEnvOrDefault(JOB_KUBE_BUSYBOX_IMAGE, DEFAULT_JOB_KUBE_BUSYBOX_IMAGE); - } - - @Override - public String getJobKubeCurlImage() { - return getEnvOrDefault(JOB_KUBE_CURL_IMAGE, DEFAULT_JOB_KUBE_CURL_IMAGE); - } - - @Override - public String getJobKubeNamespace() { - return getEnvOrDefault(JOB_KUBE_NAMESPACE, DEFAULT_JOB_KUBE_NAMESPACE); - } - - @Override - public String getJobMainContainerCpuRequest() { - return getEnvOrDefault(JOB_MAIN_CONTAINER_CPU_REQUEST, DEFAULT_JOB_CPU_REQUIREMENT); - } - - @Override - public String getJobMainContainerCpuLimit() { - return getEnvOrDefault(JOB_MAIN_CONTAINER_CPU_LIMIT, DEFAULT_JOB_CPU_REQUIREMENT); - } - - @Override - public String getJobMainContainerMemoryRequest() { - return getEnvOrDefault(JOB_MAIN_CONTAINER_MEMORY_REQUEST, DEFAULT_JOB_MEMORY_REQUIREMENT); - } - - @Override - public String getJobMainContainerMemoryLimit() { - return getEnvOrDefault(JOB_MAIN_CONTAINER_MEMORY_LIMIT, DEFAULT_JOB_MEMORY_REQUIREMENT); - } - - @Override - public String getMetricClient() { - return getEnvOrDefault(METRIC_CLIENT, ""); - } - - @Override - public String getOtelCollectorEndpoint() { - return getEnvOrDefault(OTEL_COLLECTOR_ENDPOINT, ""); - } - - @Override - public String getLaunchDarklyKey() { - return getEnvOrDefault(LAUNCHDARKLY_KEY, ""); - } - - @Override - public String getFeatureFlagClient() { - return getEnvOrDefault(FEATURE_FLAG_CLIENT, ""); - } - - /** - * There are two types of environment variables available to the job container: - *

- * <ul>
- * <li>Exclusive variables prefixed with JOB_DEFAULT_ENV_PREFIX</li>
- * <li>Shared variables defined in JOB_SHARED_ENVS</li>
- * </ul>
- */ - @Override - public Map getJobDefaultEnvMap() { - final Map jobPrefixedEnvMap = getAllEnvKeys.get().stream() - .filter(key -> key.startsWith(JOB_DEFAULT_ENV_PREFIX)) - .collect(Collectors.toMap(key -> key.replace(JOB_DEFAULT_ENV_PREFIX, ""), getEnv)); - // This method assumes that these shared env variables are not critical to the execution - // of the jobs, and only serve as metadata. So any exception is swallowed and default to - // an empty string. Change this logic if this assumption no longer holds. - final Map jobSharedEnvMap = JOB_SHARED_ENVS.entrySet().stream().collect(Collectors.toMap( - Entry::getKey, - entry -> Exceptions.swallowWithDefault(() -> Objects.requireNonNullElse(entry.getValue().apply(this), ""), ""))); - return MoreMaps.merge(jobPrefixedEnvMap, jobSharedEnvMap); - } - - @Override - public int getMaxFailedJobsInARowBeforeConnectionDisable() { - return getEnvOrDefault(MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE, DEFAULT_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE); - } - - @Override - public int getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable() { - return getEnvOrDefault(MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE, DEFAULT_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE); - } - - @Override - public String getCheckJobMainContainerCpuRequest() { - return getEnvOrDefault(CHECK_JOB_MAIN_CONTAINER_CPU_REQUEST, getJobMainContainerCpuRequest()); - } - - @Override - public String getCheckJobMainContainerCpuLimit() { - return getEnvOrDefault(CHECK_JOB_MAIN_CONTAINER_CPU_LIMIT, getJobMainContainerCpuLimit()); - } - - @Override - public String getCheckJobMainContainerMemoryRequest() { - return getEnvOrDefault(CHECK_JOB_MAIN_CONTAINER_MEMORY_REQUEST, getJobMainContainerMemoryRequest()); - } - - @Override - public String getCheckJobMainContainerMemoryLimit() { - return getEnvOrDefault(CHECK_JOB_MAIN_CONTAINER_MEMORY_LIMIT, getJobMainContainerMemoryLimit()); - } - - @Override - public String getNormalizationJobMainContainerCpuRequest() { - return getEnvOrDefault(NORMALIZATION_JOB_MAIN_CONTAINER_CPU_REQUEST, getJobMainContainerCpuRequest()); - } - - @Override - public String getNormalizationJobMainContainerCpuLimit() { - return getEnvOrDefault(NORMALIZATION_JOB_MAIN_CONTAINER_CPU_LIMIT, getJobMainContainerCpuLimit()); - } - - @Override - public String getNormalizationJobMainContainerMemoryRequest() { - return getEnvOrDefault(NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_REQUEST, getJobMainContainerMemoryRequest()); - } - - @Override - public String getNormalizationJobMainContainerMemoryLimit() { - return getEnvOrDefault(NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_LIMIT, getJobMainContainerMemoryLimit()); - } - - @Override - public LogConfigs getLogConfigs() { - return logConfigs; - } - - @Override - public String getGoogleApplicationCredentials() { - return getEnvOrDefault(LogClientSingleton.GOOGLE_APPLICATION_CREDENTIALS, null); - } - - @Override - public CloudStorageConfigs getStateStorageCloudConfigs() { - return stateStorageCloudConfigs; - } - - @Override - public boolean getPublishMetrics() { - return getEnvOrDefault(PUBLISH_METRICS, false); - } - - @Override - public String getDDAgentHost() { - return getEnvOrDefault(DD_AGENT_HOST, ""); - } - - @Override - public String getDDDogStatsDPort() { - return getEnvOrDefault(DD_DOGSTATSD_PORT, ""); - } - - @Override - public List getDDConstantTags() { - final String tagsString = getEnvOrDefault(DD_CONSTANT_TAGS, ""); - return Splitter.on(",") - .splitToStream(tagsString) - .filter(s -> !s.trim().isBlank()) - 
.collect(Collectors.toList()); - } - - @Override - public TrackingStrategy getTrackingStrategy() { - return getEnvOrDefault(TRACKING_STRATEGY, TrackingStrategy.LOGGING, s -> { - try { - return TrackingStrategy.valueOf(s.toUpperCase()); - } catch (final IllegalArgumentException e) { - LOGGER.info(s + " not recognized, defaulting to " + TrackingStrategy.LOGGING); - return TrackingStrategy.LOGGING; - } - }); - } - - @Override - public JobErrorReportingStrategy getJobErrorReportingStrategy() { - return getEnvOrDefault(JOB_ERROR_REPORTING_STRATEGY, JobErrorReportingStrategy.LOGGING, s -> { - try { - return JobErrorReportingStrategy.valueOf(s.toUpperCase()); - } catch (final IllegalArgumentException e) { - LOGGER.info(s + " not recognized, defaulting to " + JobErrorReportingStrategy.LOGGING); - return JobErrorReportingStrategy.LOGGING; - } - }); - } - - @Override - public String getJobErrorReportingSentryDSN() { - return getEnvOrDefault(JOB_ERROR_REPORTING_SENTRY_DSN, ""); - } - - // APPLICATIONS - // Worker - @Override - public MaxWorkersConfig getMaxWorkers() { - return new MaxWorkersConfig( - Math.toIntExact(getEnvOrDefault(MAX_SPEC_WORKERS, DEFAULT_MAX_SPEC_WORKERS)), - Math.toIntExact(getEnvOrDefault(MAX_CHECK_WORKERS, DEFAULT_MAX_CHECK_WORKERS)), - Math.toIntExact(getEnvOrDefault(MAX_DISCOVER_WORKERS, DEFAULT_MAX_DISCOVER_WORKERS)), - Math.toIntExact(getEnvOrDefault(MAX_SYNC_WORKERS, DEFAULT_MAX_SYNC_WORKERS)), - Math.toIntExact(getEnvOrDefault(MAX_NOTIFY_WORKERS, DEFAULT_MAX_NOTIFY_WORKERS))); - } - - @Override - public boolean shouldRunGetSpecWorkflows() { - return getEnvOrDefault(SHOULD_RUN_GET_SPEC_WORKFLOWS, true); - } - - @Override - public boolean shouldRunCheckConnectionWorkflows() { - return getEnvOrDefault(SHOULD_RUN_CHECK_CONNECTION_WORKFLOWS, true); - } - - @Override - public boolean shouldRunDiscoverWorkflows() { - return getEnvOrDefault(SHOULD_RUN_DISCOVER_WORKFLOWS, true); - } - - @Override - public boolean shouldRunSyncWorkflows() { - return getEnvOrDefault(SHOULD_RUN_SYNC_WORKFLOWS, true); - } - - @Override - public boolean shouldRunConnectionManagerWorkflows() { - return getEnvOrDefault(SHOULD_RUN_CONNECTION_MANAGER_WORKFLOWS, true); - } - - @Override - public boolean shouldRunNotifyWorkflows() { - return getEnvOrDefault(SHOULD_RUN_NOTIFY_WORKFLOWS, false); - } - // Worker - Data plane - @Override - public Set getDataSyncTaskQueues() { - final var taskQueues = getEnvOrDefault(DATA_SYNC_TASK_QUEUES, DEFAULT_DATA_SYNC_TASK_QUEUES); - if (taskQueues.isEmpty()) { - return new HashSet<>(); - } - return Arrays.stream(taskQueues.split(",")).collect(Collectors.toSet()); - } - - @Override - public String getControlPlaneAuthEndpoint() { - return getEnvOrDefault(CONTROL_PLANE_AUTH_ENDPOINT, ""); - } - - @Override - public String getDataPlaneServiceAccountCredentialsPath() { - return getEnvOrDefault(DATA_PLANE_SERVICE_ACCOUNT_CREDENTIALS_PATH, ""); - } - - @Override - public String getDataPlaneServiceAccountEmail() { - return getEnvOrDefault(DATA_PLANE_SERVICE_ACCOUNT_EMAIL, ""); - } - - @Override - public Set getTemporalWorkerPorts() { - final var ports = getEnvOrDefault(TEMPORAL_WORKER_PORTS, ""); - if (ports.isEmpty()) { - return new HashSet<>(); - } - return Arrays.stream(ports.split(",")).map(Integer::valueOf).collect(Collectors.toSet()); - } - - @Override - public boolean getContainerOrchestratorEnabled() { - return getEnvOrDefault(CONTAINER_ORCHESTRATOR_ENABLED, false, Boolean::valueOf); - } - - @Override - public String getContainerOrchestratorSecretName() { - return 
getEnvOrDefault(CONTAINER_ORCHESTRATOR_SECRET_NAME, null); - } - - @Override - public String getContainerOrchestratorSecretMountPath() { - return getEnvOrDefault(CONTAINER_ORCHESTRATOR_SECRET_MOUNT_PATH, null); - } - - @Override - public String getContainerOrchestratorImage() { - return getEnvOrDefault(CONTAINER_ORCHESTRATOR_IMAGE, "airbyte/container-orchestrator:" + getAirbyteVersion().serialize()); - } - - @Override - public String getReplicationOrchestratorCpuRequest() { - return getEnvOrDefault(REPLICATION_ORCHESTRATOR_CPU_REQUEST, null); - } - - @Override - public String getReplicationOrchestratorCpuLimit() { - return getEnvOrDefault(REPLICATION_ORCHESTRATOR_CPU_LIMIT, null); - } - - @Override - public String getReplicationOrchestratorMemoryRequest() { - return getEnvOrDefault(REPLICATION_ORCHESTRATOR_MEMORY_REQUEST, null); - } - - @Override - public String getReplicationOrchestratorMemoryLimit() { - return getEnvOrDefault(REPLICATION_ORCHESTRATOR_MEMORY_LIMIT, null); - } - - @Override - public int getMaxActivityTimeoutSecond() { - return Integer.parseInt(getEnvOrDefault(ACTIVITY_MAX_TIMEOUT_SECOND, "120")); - } - - @Override - public int getInitialDelayBetweenActivityAttemptsSeconds() { - return Integer.parseInt(getEnvOrDefault(ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS, "30")); - } - - @Override - public int getMaxDelayBetweenActivityAttemptsSeconds() { - return Integer.parseInt(getEnvOrDefault(ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS, String.valueOf(10 * 60))); - } - - @Override - public int getWorkflowFailureRestartDelaySeconds() { - return Integer.parseInt(getEnvOrDefault(WORKFLOW_FAILURE_RESTART_DELAY_SECONDS, String.valueOf(10 * 60))); - } - - @Override - public boolean getAutoDetectSchema() { - return getEnvOrDefault(AUTO_DETECT_SCHEMA, true); - } - - @Override - public boolean getApplyFieldSelection() { - return getEnvOrDefault(APPLY_FIELD_SELECTION, false); - } - - @Override - public String getFieldSelectionWorkspaces() { - return getEnvOrDefault(FIELD_SELECTION_WORKSPACES, ""); - } - - @Override - public String getStrictComparisonNormalizationWorkspaces() { - return getEnvOrDefault(STRICT_COMPARISON_NORMALIZATION_WORKSPACES, ""); - } - - @Override - public String getStrictComparisonNormalizationTag() { - return getEnvOrDefault(STRICT_COMPARISON_NORMALIZATION_TAG, "strict_comparison"); - } - - @Override - public int getActivityNumberOfAttempt() { - return Integer.parseInt(getEnvOrDefault(ACTIVITY_MAX_ATTEMPT, "5")); - } - // Helpers public String getEnvOrDefault(final String key, final String defaultValue) { return getEnvOrDefault(key, defaultValue, Function.identity(), false); } - public String getEnvOrDefault(final String key, final String defaultValue, final boolean isSecret) { - return getEnvOrDefault(key, defaultValue, Function.identity(), isSecret); - } - - public long getEnvOrDefault(final String key, final long defaultValue) { - return getEnvOrDefault(key, defaultValue, Long::parseLong, false); - } - - public int getEnvOrDefault(final String key, final int defaultValue) { - return getEnvOrDefault(key, defaultValue, Integer::parseInt, false); - } - - public boolean getEnvOrDefault(final String key, final boolean defaultValue) { - return getEnvOrDefault(key, defaultValue, Boolean::parseBoolean); - } - - public T getEnvOrDefault(final String key, final T defaultValue, final Function parser) { - return getEnvOrDefault(key, defaultValue, parser, false); - } - public T getEnvOrDefault(final String key, final T defaultValue, final Function parser, final boolean 
isSecret) { final String value = getEnv.apply(key); if (value != null && !value.isEmpty()) { @@ -1214,19 +69,4 @@ public String getEnv(final String name) { return getEnv.apply(name); } - public String getEnsureEnv(final String name) { - final String value = getEnv(name); - Preconditions.checkArgument(value != null, "'%s' environment variable cannot be null", name); - - return value; - } - - private Path getPath(final String name) { - final String value = getEnv.apply(name); - if (value == null) { - throw new IllegalArgumentException("Env variable not defined: " + name); - } - return Path.of(value); - } - } diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/MaxWorkersConfig.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/MaxWorkersConfig.java deleted file mode 100644 index 922e1544cc342..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/MaxWorkersConfig.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config; - -public class MaxWorkersConfig { - - private final int maxSpecWorkers; - private final int maxCheckWorkers; - private final int maxDiscoverWorkers; - private final int maxSyncWorkers; - private final int maxNotifyWorkers; - - public MaxWorkersConfig(final int maxSpecWorkers, - final int maxCheckWorkers, - final int maxDiscoverWorkers, - final int maxSyncWorkers, - final int maxNotifyWorkers) { - this.maxSpecWorkers = maxSpecWorkers; - this.maxCheckWorkers = maxCheckWorkers; - this.maxDiscoverWorkers = maxDiscoverWorkers; - this.maxSyncWorkers = maxSyncWorkers; - this.maxNotifyWorkers = maxNotifyWorkers; - } - - public int getMaxSpecWorkers() { - return maxSpecWorkers; - } - - public int getMaxCheckWorkers() { - return maxCheckWorkers; - } - - public int getMaxDiscoverWorkers() { - return maxDiscoverWorkers; - } - - public int getMaxSyncWorkers() { - return maxSyncWorkers; - } - - public int getMaxNotifyWorkers() { - return maxNotifyWorkers; - } - - @Override - public String toString() { - return "MaxWorkersConfig{" + - "maxSpecWorkers=" + maxSpecWorkers + - ", maxCheckWorkers=" + maxCheckWorkers + - ", maxDiscoverWorkers=" + maxDiscoverWorkers + - ", maxSyncWorkers=" + maxSyncWorkers + - ", maxNotifyWorkers=" + maxNotifyWorkers + - '}'; - } - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/StreamResetRecord.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/StreamResetRecord.java deleted file mode 100644 index babfe778af4e8..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/StreamResetRecord.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config; - -import java.util.UUID; -import javax.annotation.Nullable; -import lombok.NonNull; - -/* - * A stream reset record is a reference to a stream that has a reset pending or running - */ -public record StreamResetRecord(@NonNull UUID connectionId, - @NonNull String streamName, - @Nullable String streamNamespace) { - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/TolerationPOJO.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/TolerationPOJO.java deleted file mode 100644 index c2d031e425b91..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/TolerationPOJO.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
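The EnvConfigs getters removed above all funnel through the `getEnvOrDefault` overloads: read the variable, parse it, and fall back to a default when it is unset, empty, or unparsable. A minimal standalone sketch of that pattern, assuming nothing beyond the JDK; the class name and the two-value enum are illustrative, not Airbyte's API:

```java
import java.util.function.Function;

// Standalone sketch of the typed env-lookup pattern used by the deleted
// EnvConfigs helpers. Names here are illustrative.
public final class EnvLookupSketch {

  private static <T> T getEnvOrDefault(final String key, final T defaultValue, final Function<String, T> parser) {
    final String value = System.getenv(key);
    if (value != null && !value.isEmpty()) {
      return parser.apply(value);
    }
    return defaultValue;
  }

  enum TrackingStrategy { SEGMENT, LOGGING }

  public static void main(String[] args) {
    // Numeric lookup with a parser and a default, mirroring the
    // getEnvOrDefault(key, defaultValue, Integer::parseInt, false) overload.
    final int attempts = getEnvOrDefault("ACTIVITY_MAX_ATTEMPT", 5, Integer::parseInt);

    // Enum lookup that swallows bad values and falls back, mirroring how
    // getTrackingStrategy() tolerates a typo instead of failing startup.
    final TrackingStrategy strategy = getEnvOrDefault("TRACKING_STRATEGY", TrackingStrategy.LOGGING, s -> {
      try {
        return TrackingStrategy.valueOf(s.toUpperCase());
      } catch (final IllegalArgumentException e) {
        return TrackingStrategy.LOGGING;
      }
    });

    System.out.println(attempts + " " + strategy);
  }
}
```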
- */ - -package io.airbyte.config; - -import java.util.Objects; - -/** - * Represents a minimal io.fabric8.kubernetes.api.model.Toleration - */ -@SuppressWarnings("PMD.ShortVariable") -public class TolerationPOJO { - - private final String key; - private final String effect; - private final String value; - private final String operator; - - public TolerationPOJO(final String key, final String effect, final String value, final String operator) { - this.key = key; - this.effect = effect; - this.value = value; - this.operator = operator; - } - - public String getKey() { - return key; - } - - public String getEffect() { - return effect; - } - - public String getValue() { - return value; - } - - public String getOperator() { - return operator; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final TolerationPOJO that = (TolerationPOJO) o; - return Objects.equals(key, that.key) && Objects.equals(effect, that.effect) - && Objects.equals(value, that.value) && Objects.equals(operator, - that.operator); - } - - @Override - public int hashCode() { - return Objects.hash(key, effect, value, operator); - } - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/WorkerEnvConstants.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/WorkerEnvConstants.java deleted file mode 100644 index c864b32bcb00d..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/WorkerEnvConstants.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config; - -/** - * These extra env variables are created on the fly and passed to the connector. - */ -public class WorkerEnvConstants { - - public static final String WORKER_CONNECTOR_IMAGE = "WORKER_CONNECTOR_IMAGE"; - public static final String WORKER_JOB_ID = "WORKER_JOB_ID"; - public static final String WORKER_JOB_ATTEMPT = "WORKER_JOB_ATTEMPT"; - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/WorkspaceRetentionConfig.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/WorkspaceRetentionConfig.java deleted file mode 100644 index 1255e55c07a34..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/WorkspaceRetentionConfig.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config; - -public class WorkspaceRetentionConfig { - - private final long minDays; - private final long maxDays; - private final long maxSizeMb; - - public WorkspaceRetentionConfig(final long minDays, final long maxDays, final long maxSizeMb) { - this.minDays = minDays; - this.maxDays = maxDays; - this.maxSizeMb = maxSizeMb; - } - - public long getMinDays() { - return minDays; - } - - public long getMaxDays() { - return maxDays; - } - - public long getMaxSizeMb() { - return maxSizeMb; - } - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/constants/AlwaysAllowedHosts.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/constants/AlwaysAllowedHosts.java deleted file mode 100644 index 01983a0367054..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/constants/AlwaysAllowedHosts.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
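TolerationPOJO, deleted above, is documented as a minimal mirror of io.fabric8.kubernetes.api.model.Toleration. A sketch of how such a POJO could be mapped onto the fabric8 builder; the mapper class and the sample values are mine, not code from this diff:

```java
import io.fabric8.kubernetes.api.model.Toleration;
import io.fabric8.kubernetes.api.model.TolerationBuilder;

// Illustrative mapping from the minimal TolerationPOJO fields onto the
// fabric8 model it mirrors; this helper is a sketch, not code from the diff.
public final class TolerationMapper {

  static Toleration toFabric8(final String key, final String operator, final String value, final String effect) {
    return new TolerationBuilder()
        .withKey(key)
        .withOperator(operator)
        .withValue(value)
        .withEffect(effect)
        .build();
  }

  public static void main(String[] args) {
    // e.g. a toleration sourced from JOB_KUBE_TOLERATIONS-style configuration.
    final Toleration t = toFabric8("airbyte-server", "Equal", "true", "NoSchedule");
    System.out.println(t.getKey() + " " + t.getEffect());
  }
}
```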
- */
-
-package io.airbyte.config.constants;
-
-import java.util.List;
-
-public class AlwaysAllowedHosts {
-
-  private final List<String> hosts = List.of(
-      // DataDog. See https://docs.datadoghq.com/agent/proxy/?tab=linux and switch the location tab for other regions
-      "*.datadoghq.com",
-      "*.datadoghq.eu",
-
-      // Sentry. See https://docs.sentry.io/api/ for more information
-      "*.sentry.io");
-
-  public List<String> getHosts() {
-    return hosts;
-  }
-
-}
diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/CloudLogs.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/CloudLogs.java
deleted file mode 100644
index 0be45cb2c97f1..0000000000000
--- a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/CloudLogs.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.config.helpers;
-
-import io.airbyte.config.storage.DefaultGcsClientFactory;
-import io.airbyte.config.storage.DefaultS3ClientFactory;
-import io.airbyte.config.storage.MinioS3ClientFactory;
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Interface for various Cloud Storage clients supporting Cloud log retrieval.
- *
- * The underlying assumptions are that 1) each file at the path is part of the entire log file
- * represented by that path and 2) log file names start with timestamps, making it possible to
- * extract the time the file was written from its name.
- */
-@SuppressWarnings("PMD.AvoidThrowingRawExceptionTypes")
-public interface CloudLogs {
-
-  Logger LOGGER = LoggerFactory.getLogger(CloudLogs.class);
-
-  /**
-   * Retrieve all objects at the given path in lexicographical order, and return their contents as one
-   * file.
-   */
-  File downloadCloudLog(LogConfigs configs, String logPath) throws IOException;
-
-  /**
-   * Assume all the lexicographically ordered objects at the given path form one giant log file and
-   * return the last numLines lines.
-   */
-  List<String> tailCloudLog(LogConfigs configs, String logPath, int numLines) throws IOException;
-
-  void deleteLogs(LogConfigs configs, String logPath);
-
-  static CloudLogs createCloudLogClient(final LogConfigs configs) {
-    switch (configs.getStorageConfigs().getType()) {
-      case S3 -> {
-        return new S3Logs(new DefaultS3ClientFactory(configs.getStorageConfigs().getS3Config()));
-      }
-      case MINIO -> {
-        return new S3Logs(new MinioS3ClientFactory(configs.getStorageConfigs().getMinioConfig()));
-      }
-      case GCS -> {
-        return new GcsLogs(new DefaultGcsClientFactory(configs.getStorageConfigs().getGcsConfig()));
-      }
-    }
-
-    throw new RuntimeException("No cloud credentials configured.");
-  }
-
-}
diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/GcsLogs.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/GcsLogs.java
deleted file mode 100644
index 52f5bbe322f9c..0000000000000
--- a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/GcsLogs.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.config.helpers; - -import com.google.api.gax.paging.Page; -import com.google.cloud.storage.Blob; -import com.google.cloud.storage.Blob.BlobSourceOption; -import com.google.cloud.storage.Storage; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; -import io.airbyte.commons.string.Strings; -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.List; -import java.util.function.Supplier; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings({"PMD.AvoidFileStream", "PMD.ShortVariable", "PMD.CloseResource", "PMD.AvoidInstantiatingObjectsInLoops"}) -public class GcsLogs implements CloudLogs { - - private static final Logger LOGGER = LoggerFactory.getLogger(GcsLogs.class); - - private static Storage gcs; - private final Supplier gcsClientFactory; - - public GcsLogs(final Supplier gcsClientFactory) { - this.gcsClientFactory = gcsClientFactory; - } - - @Override - public File downloadCloudLog(final LogConfigs configs, final String logPath) throws IOException { - return getFile(configs, logPath, LogClientSingleton.DEFAULT_PAGE_SIZE); - } - - private File getFile(final LogConfigs configs, final String logPath, final int pageSize) throws IOException { - return getFile(getOrCreateGcsClient(), configs, logPath, pageSize); - } - - @VisibleForTesting - static File getFile(final Storage gcsClient, final LogConfigs configs, final String logPath, final int pageSize) throws IOException { - LOGGER.debug("Retrieving logs from GCS path: {}", logPath); - - LOGGER.debug("Start GCS list request."); - final Page blobs = gcsClient.list( - configs.getStorageConfigs().getGcsConfig().getBucketName(), - Storage.BlobListOption.prefix(logPath), - Storage.BlobListOption.pageSize(pageSize)); - - final var randomName = Strings.addRandomSuffix("logs", "-", 5); - final var tmpOutputFile = new File("/tmp/" + randomName); - final var os = new FileOutputStream(tmpOutputFile); - LOGGER.debug("Start getting GCS objects."); - // Objects are returned in lexicographical order. 
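Both log clients lean on the comment above: because chunk names begin with a timestamp, the storage provider's lexicographical listing order is also chronological order, so concatenating objects in listing order reproduces the full log. A small self-contained illustration; the key names are made up:

```java
import java.util.List;

// Why the "objects are returned in lexicographical order" assumption works:
// chunk names begin with a sortable timestamp, so byte-wise ordering equals
// chronological ordering. Purely illustrative key names below.
public final class LexicographicOrderDemo {
  public static void main(String[] args) {
    final List<String> keys = List.of(
        "job-logging/sync/1/20230301120000_chunk.log",
        "job-logging/sync/1/20230301120500_chunk.log",
        "job-logging/sync/1/20230301121000_chunk.log");
    // Already sorted lexicographically == already sorted by time; downloading
    // and appending in this order yields the complete log file.
    System.out.println(keys.stream().sorted().toList().equals(keys)); // true
  }
}
```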
- for (final Blob blob : blobs.iterateAll()) { - blob.downloadTo(os); - } - os.close(); - LOGGER.debug("Done retrieving GCS logs: {}.", logPath); - return tmpOutputFile; - } - - @Override - public List tailCloudLog(final LogConfigs configs, final String logPath, final int numLines) throws IOException { - LOGGER.debug("Tailing logs from GCS path: {}", logPath); - final Storage gcsClient = getOrCreateGcsClient(); - - LOGGER.debug("Start GCS list request."); - - final Page blobs = gcsClient.list( - configs.getStorageConfigs().getGcsConfig().getBucketName(), - Storage.BlobListOption.prefix(logPath)); - - final var ascendingTimestampBlobs = new ArrayList(); - for (final Blob blob : blobs.iterateAll()) { - ascendingTimestampBlobs.add(blob); - } - final var descendingTimestampBlobs = Lists.reverse(ascendingTimestampBlobs); - - final var lines = new ArrayList(); - int linesRead = 0; - - LOGGER.debug("Start getting GCS objects."); - while (linesRead <= numLines && !descendingTimestampBlobs.isEmpty()) { - final var poppedBlob = descendingTimestampBlobs.remove(0); - try (final var inMemoryData = new ByteArrayOutputStream()) { - poppedBlob.downloadTo(inMemoryData); - final var currFileLines = inMemoryData.toString(StandardCharsets.UTF_8).split("\n"); - final List currFileLinesReversed = Lists.reverse(List.of(currFileLines)); - for (final var line : currFileLinesReversed) { - if (linesRead == numLines) { - break; - } - lines.add(0, line); - linesRead++; - } - } - } - - LOGGER.debug("Done retrieving GCS logs: {}.", logPath); - return lines; - } - - @Override - public void deleteLogs(final LogConfigs configs, final String logPath) { - LOGGER.debug("Retrieving logs from GCS path: {}", logPath); - final Storage gcsClient = getOrCreateGcsClient(); - - LOGGER.debug("Start GCS list and delete request."); - final Page blobs = gcsClient.list(configs.getStorageConfigs().getGcsConfig().getBucketName(), Storage.BlobListOption.prefix(logPath)); - for (final Blob blob : blobs.iterateAll()) { - blob.delete(BlobSourceOption.generationMatch()); - } - LOGGER.debug("Finished all deletes."); - } - - private Storage getOrCreateGcsClient() { - if (gcs == null) { - gcs = gcsClientFactory.get(); - } - return gcs; - } - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/LogClientSingleton.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/LogClientSingleton.java deleted file mode 100644 index 44a384d9898cd..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/LogClientSingleton.java +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.helpers; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.io.IOs; -import io.airbyte.config.Configs; -import io.airbyte.config.Configs.WorkerEnvironment; -import java.io.File; -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Collections; -import java.util.List; -import org.apache.commons.lang3.NotImplementedException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.slf4j.MDC; - -/** - * Airbyte's logging layer entrypoint. Handles logs written to local disk as well as logs written to - * cloud storages. - *
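The tailCloudLog implementation above follows one algorithm that S3Logs repeats later in this diff: walk the chunks newest-first, read each chunk's lines in reverse, and prepend until enough lines are collected. A standalone sketch of that loop over in-memory strings, assuming only Guava; names are mine:

```java
import java.util.ArrayList;
import java.util.List;

import com.google.common.collect.Lists;

// Standalone sketch of the tailing algorithm used by tailCloudLog above.
// Chunks here are plain strings standing in for downloaded log objects.
public final class TailSketch {

  static List<String> tail(final List<String> ascendingChunks, final int numLines) {
    final List<String> newestFirst = Lists.reverse(ascendingChunks);
    final List<String> lines = new ArrayList<>();
    int linesRead = 0;
    for (final String chunk : newestFirst) {
      if (linesRead == numLines) {
        break;
      }
      for (final String line : Lists.reverse(List.of(chunk.split("\n")))) {
        if (linesRead == numLines) {
          break;
        }
        lines.add(0, line); // prepend so the result stays in file order
        linesRead++;
      }
    }
    return lines;
  }

  public static void main(String[] args) {
    System.out.println(tail(List.of("a\nb", "c\nd\ne"), 3)); // [c, d, e]
  }
}
```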

- * Although the configuration is passed in as {@link Configs}, it is transformed to
- * {@link LogConfigs} within this class. Beyond this class, all configuration consumption happens
- * through the {@link LogConfigs} interface, which is what the {@link CloudLogs} clients consume.
- */
-@SuppressWarnings({"PMD.AvoidThrowingRawExceptionTypes", "PMD.AvoidSynchronizedAtMethodLevel"})
-public class LogClientSingleton {
-
-  private static final Logger LOGGER = LoggerFactory.getLogger(LogClientSingleton.class);
-  private static LogClientSingleton instance;
-
-  @VisibleForTesting
-  final static int LOG_TAIL_SIZE = 1000000;
-  @VisibleForTesting
-  CloudLogs logClient;
-
-  // Any changes to the following values must also be propagated to the log4j2.xml in main/resources.
-  public static final String WORKSPACE_MDC_KEY = "workspace_app_root";
-  public static final String CLOUD_WORKSPACE_MDC_KEY = "cloud_workspace_app_root";
-
-  public static final String JOB_LOG_PATH_MDC_KEY = "job_log_path";
-  public static final String CLOUD_JOB_LOG_PATH_MDC_KEY = "cloud_job_log_path";
-
-  // S3/Minio
-  public static final String S3_LOG_BUCKET = "S3_LOG_BUCKET";
-  public static final String S3_LOG_BUCKET_REGION = "S3_LOG_BUCKET_REGION";
-  public static final String AWS_ACCESS_KEY_ID = "AWS_ACCESS_KEY_ID";
-  public static final String AWS_SECRET_ACCESS_KEY = "AWS_SECRET_ACCESS_KEY";
-  public static final String S3_MINIO_ENDPOINT = "S3_MINIO_ENDPOINT";
-
-  // GCS
-  public static final String GCS_LOG_BUCKET = "GCS_LOG_BUCKET";
-  public static final String GOOGLE_APPLICATION_CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS";
-
-  public static final int DEFAULT_PAGE_SIZE = 1000;
-  public static final String LOG_FILENAME = "logs.log";
-  public static final String APP_LOGGING_CLOUD_PREFIX = "app-logging";
-  public static final String JOB_LOGGING_CLOUD_PREFIX = "job-logging";
-
-  public static synchronized LogClientSingleton getInstance() {
-    if (instance == null) {
-      instance = new LogClientSingleton();
-    }
-    return instance;
-  }
-
-  public Path getServerLogsRoot(final Path workspaceRoot) {
-    return workspaceRoot.resolve("server/logs");
-  }
-
-  public Path getSchedulerLogsRoot(final Path workspaceRoot) {
-    return workspaceRoot.resolve("scheduler/logs");
-  }
-
-  public File getServerLogFile(final Path workspaceRoot, final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs) {
-    if (shouldUseLocalLogs(workerEnvironment)) {
-      return getServerLogsRoot(workspaceRoot).resolve(LOG_FILENAME).toFile();
-    }
-    final var cloudLogPath = sanitisePath(APP_LOGGING_CLOUD_PREFIX, getServerLogsRoot(workspaceRoot));
-    try {
-      createCloudClientIfNull(logConfigs);
-      return logClient.downloadCloudLog(logConfigs, cloudLogPath);
-    } catch (final IOException e) {
-      throw new RuntimeException("Error retrieving log file: " + cloudLogPath + " from S3", e);
-    }
-  }
-
-  public File getSchedulerLogFile(final Path workspaceRoot, final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs) {
-    if (shouldUseLocalLogs(workerEnvironment)) {
-      return getSchedulerLogsRoot(workspaceRoot).resolve(LOG_FILENAME).toFile();
-    }
-
-    final var cloudLogPath = APP_LOGGING_CLOUD_PREFIX + getSchedulerLogsRoot(workspaceRoot);
-    try {
-      createCloudClientIfNull(logConfigs);
-      return logClient.downloadCloudLog(logConfigs, cloudLogPath);
-    } catch (final IOException e) {
-      throw new RuntimeException("Error retrieving log file: " + cloudLogPath + " from S3", e);
-    }
-  }
-
-  public List<String> getJobLogFile(final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs, final Path
logPath) throws IOException { - if (logPath == null || logPath.equals(Path.of(""))) { - return Collections.emptyList(); - } - - if (shouldUseLocalLogs(workerEnvironment)) { - return IOs.getTail(LOG_TAIL_SIZE, logPath); - } - - final var cloudLogPath = sanitisePath(JOB_LOGGING_CLOUD_PREFIX, logPath); - createCloudClientIfNull(logConfigs); - return logClient.tailCloudLog(logConfigs, cloudLogPath, LOG_TAIL_SIZE); - } - - /** - * Primarily to clean up logs after testing. Only valid for Kube logs. - */ - @VisibleForTesting - public void deleteLogs(final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs, final String logPath) { - if (logPath == null || logPath.equals("")) { - return; - } - - if (shouldUseLocalLogs(workerEnvironment)) { - throw new NotImplementedException("Local log deletes not supported."); - } - final var cloudLogPath = sanitisePath(JOB_LOGGING_CLOUD_PREFIX, Path.of(logPath)); - createCloudClientIfNull(logConfigs); - logClient.deleteLogs(logConfigs, cloudLogPath); - } - - public void setJobMdc(final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs, final Path path) { - if (shouldUseLocalLogs(workerEnvironment)) { - LOGGER.debug("Setting docker job mdc"); - final String resolvedPath = path.resolve(LogClientSingleton.LOG_FILENAME).toString(); - MDC.put(LogClientSingleton.JOB_LOG_PATH_MDC_KEY, resolvedPath); - } else { - LOGGER.debug("Setting kube job mdc"); - createCloudClientIfNull(logConfigs); - MDC.put(LogClientSingleton.CLOUD_JOB_LOG_PATH_MDC_KEY, path.resolve(LogClientSingleton.LOG_FILENAME).toString()); - } - } - - public void setWorkspaceMdc(final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs, final Path path) { - if (shouldUseLocalLogs(workerEnvironment)) { - LOGGER.debug("Setting docker workspace mdc"); - MDC.put(LogClientSingleton.WORKSPACE_MDC_KEY, path.toString()); - } else { - LOGGER.debug("Setting kube workspace mdc"); - createCloudClientIfNull(logConfigs); - MDC.put(LogClientSingleton.CLOUD_WORKSPACE_MDC_KEY, path.toString()); - } - } - - // This method should cease to exist here and become a property on the enum instead - // TODO handle this as part of refactor https://github.com/airbytehq/airbyte/issues/7545 - private static boolean shouldUseLocalLogs(final WorkerEnvironment workerEnvironment) { - return workerEnvironment.equals(WorkerEnvironment.DOCKER); - } - - private void createCloudClientIfNull(final LogConfigs configs) { - if (logClient == null) { - logClient = CloudLogs.createCloudLogClient(configs); - } - } - - /** - * Convenience wrapper for making sure paths are slash-separated. - */ - private static String sanitisePath(final String prefix, final Path path) { - return Paths.get(prefix, path.toString()).toString(); - } - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/LogConfigs.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/LogConfigs.java deleted file mode 100644 index af4a419375ae0..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/LogConfigs.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.helpers; - -import io.airbyte.config.storage.CloudStorageConfigs; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.util.Optional; - -/** - * Describes logging configuration. For now it just contains configuration around storage medium, - * but in the future will have other configuration options (e.g. 
json logging, etc). - */ -@Singleton -public class LogConfigs { - - public final static LogConfigs EMPTY = new LogConfigs(Optional.empty()); - - private final CloudStorageConfigs storageConfigs; - - public LogConfigs(@Named("logStorageConfigs") final Optional storageConfigs) { - this.storageConfigs = storageConfigs.orElse(null); - } - - public CloudStorageConfigs getStorageConfigs() { - return storageConfigs; - } - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/S3Logs.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/S3Logs.java deleted file mode 100644 index e1b63e005f37b..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/S3Logs.java +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.helpers; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; -import io.airbyte.commons.string.Strings; -import io.airbyte.config.storage.CloudStorageConfigs; -import io.airbyte.config.storage.CloudStorageConfigs.S3ApiWorkerStorageConfig; -import io.airbyte.config.storage.CloudStorageConfigs.WorkerStorageType; -import java.io.BufferedReader; -import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.List; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import software.amazon.awssdk.services.s3.S3Client; -import software.amazon.awssdk.services.s3.model.Delete; -import software.amazon.awssdk.services.s3.model.DeleteObjectsRequest; -import software.amazon.awssdk.services.s3.model.GetObjectRequest; -import software.amazon.awssdk.services.s3.model.ListObjectsV2Request; -import software.amazon.awssdk.services.s3.model.ObjectIdentifier; - -@SuppressWarnings({"PMD.ShortVariable", "PMD.CloseResource", "PMD.AvoidFileStream"}) -public class S3Logs implements CloudLogs { - - private static final Logger LOGGER = LoggerFactory.getLogger(S3Logs.class); - - private static S3Client s3; - - private final Supplier s3ClientFactory; - - public S3Logs(final Supplier s3ClientFactory) { - this.s3ClientFactory = s3ClientFactory; - } - - @Override - public File downloadCloudLog(final LogConfigs configs, final String logPath) throws IOException { - return getFile(configs, logPath, LogClientSingleton.DEFAULT_PAGE_SIZE); - } - - private static String getBucketName(final CloudStorageConfigs configs) { - final S3ApiWorkerStorageConfig config; - if (configs.getType() == WorkerStorageType.S3) { - config = configs.getS3Config(); - } else if (configs.getType() == WorkerStorageType.MINIO) { - config = configs.getMinioConfig(); - } else { - throw new IllegalArgumentException("config must be of type S3 or MINIO"); - } - return config.getBucketName(); - } - - private File getFile(final LogConfigs configs, final String logPath, final int pageSize) throws IOException { - return getFile(getOrCreateS3Client(), configs, logPath, pageSize); - } - - @VisibleForTesting - static File getFile(final S3Client s3Client, final LogConfigs configs, final String logPath, final int pageSize) throws IOException { - LOGGER.debug("Retrieving logs from S3 path: {}", logPath); - - final var s3Bucket = getBucketName(configs.getStorageConfigs()); - final var randomName = 
Strings.addRandomSuffix("logs", "-", 5); - final var tmpOutputFile = new File("/tmp/" + randomName); - final var os = new FileOutputStream(tmpOutputFile); - - LOGGER.debug("Start S3 list request."); - final var listObjReq = ListObjectsV2Request.builder().bucket(s3Bucket) - .prefix(logPath).maxKeys(pageSize).build(); - LOGGER.debug("Start getting S3 objects."); - // Objects are returned in lexicographical order. - for (final var page : s3Client.listObjectsV2Paginator(listObjReq)) { - for (final var objMetadata : page.contents()) { - final var getObjReq = GetObjectRequest.builder() - .key(objMetadata.key()) - .bucket(s3Bucket) - .build(); - final var data = s3Client.getObjectAsBytes(getObjReq).asByteArray(); - os.write(data); - } - } - os.close(); - - LOGGER.debug("Done retrieving S3 logs: {}.", logPath); - return tmpOutputFile; - } - - @Override - public List tailCloudLog(final LogConfigs configs, final String logPath, final int numLines) throws IOException { - LOGGER.debug("Tailing logs from S3 path: {}", logPath); - final S3Client s3Client = getOrCreateS3Client(); - - final var s3Bucket = getBucketName(configs.getStorageConfigs()); - LOGGER.debug("Start making S3 list request."); - final List ascendingTimestampKeys = getAscendingObjectKeys(s3Client, logPath, s3Bucket); - final var descendingTimestampKeys = Lists.reverse(ascendingTimestampKeys); - - final var lines = new ArrayList(); - int linesRead = 0; - - LOGGER.debug("Start getting S3 objects."); - while (linesRead <= numLines && !descendingTimestampKeys.isEmpty()) { - final var poppedKey = descendingTimestampKeys.remove(0); - final List currFileLinesReversed = Lists.reverse(getCurrFile(s3Client, s3Bucket, poppedKey)); - for (final var line : currFileLinesReversed) { - if (linesRead == numLines) { - break; - } - lines.add(0, line); - linesRead++; - } - } - - LOGGER.debug("Done retrieving S3 logs: {}.", logPath); - return lines; - } - - @Override - public void deleteLogs(final LogConfigs configs, final String logPath) { - LOGGER.debug("Deleting logs from S3 path: {}", logPath); - final S3Client s3Client = getOrCreateS3Client(); - - final var s3Bucket = getBucketName(configs.getStorageConfigs()); - final var keys = getAscendingObjectKeys(s3Client, logPath, s3Bucket) - .stream().map(key -> ObjectIdentifier.builder().key(key).build()) - .collect(Collectors.toList()); - final Delete del = Delete.builder() - .objects(keys) - .build(); - final DeleteObjectsRequest multiObjectDeleteRequest = DeleteObjectsRequest.builder() - .bucket(s3Bucket) - .delete(del) - .build(); - - s3Client.deleteObjects(multiObjectDeleteRequest); - LOGGER.debug("Multiple objects are deleted!"); - } - - private S3Client getOrCreateS3Client() { - if (s3 == null) { - s3 = s3ClientFactory.get(); - } - return s3; - } - - private static List getAscendingObjectKeys(final S3Client s3Client, final String logPath, final String s3Bucket) { - final var listObjReq = ListObjectsV2Request.builder().bucket(s3Bucket).prefix(logPath).build(); - final var ascendingTimestampObjs = new ArrayList(); - - // Objects are returned in lexicographical order. 
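getAscendingObjectKeys relies on the AWS SDK v2 paginator, which transparently issues the follow-up ListObjectsV2 requests as the loop advances. A minimal standalone version of that listing; the class and method names are mine:

```java
import java.util.ArrayList;
import java.util.List;

import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.ListObjectsV2Request;

// Minimal, standalone version of the paginated listing done above: the AWS
// SDK v2 paginator fetches additional pages on demand, so iterating the pages
// yields every key under the prefix in lexicographical order.
public final class ListKeysSketch {

  static List<String> listKeys(final S3Client s3, final String bucket, final String prefix) {
    final ListObjectsV2Request req = ListObjectsV2Request.builder()
        .bucket(bucket)
        .prefix(prefix)
        .build();
    final List<String> keys = new ArrayList<>();
    for (final var page : s3.listObjectsV2Paginator(req)) {
      for (final var obj : page.contents()) {
        keys.add(obj.key());
      }
    }
    return keys;
  }
}
```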
- for (final var page : s3Client.listObjectsV2Paginator(listObjReq)) { - for (final var objMetadata : page.contents()) { - ascendingTimestampObjs.add(objMetadata.key()); - } - } - return ascendingTimestampObjs; - } - - private static List getCurrFile(final S3Client s3Client, final String s3Bucket, final String poppedKey) throws IOException { - final var getObjReq = GetObjectRequest.builder() - .key(poppedKey) - .bucket(s3Bucket) - .build(); - - final var data = s3Client.getObjectAsBytes(getObjReq).asByteArray(); - final var is = new ByteArrayInputStream(data); - final var currentFileLines = new ArrayList(); - try (final var reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { - String temp = reader.readLine(); - while (temp != null) { - currentFileLines.add(temp); - temp = reader.readLine(); - } - } - return currentFileLines; - } - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/ScheduleHelpers.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/ScheduleHelpers.java deleted file mode 100644 index 385bbb08f7279..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/ScheduleHelpers.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.helpers; - -import io.airbyte.config.BasicSchedule; -import io.airbyte.config.Schedule; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.ScheduleType; -import java.util.concurrent.TimeUnit; - -@SuppressWarnings("PMD.AvoidThrowingRawExceptionTypes") -public class ScheduleHelpers { - - public static Long getSecondsInUnit(final Schedule.TimeUnit timeUnitEnum) { - switch (timeUnitEnum) { - case MINUTES: - return TimeUnit.MINUTES.toSeconds(1); - case HOURS: - return TimeUnit.HOURS.toSeconds(1); - case DAYS: - return TimeUnit.DAYS.toSeconds(1); - case WEEKS: - return TimeUnit.DAYS.toSeconds(1) * 7; - case MONTHS: - return TimeUnit.DAYS.toSeconds(1) * 30; - default: - throw new RuntimeException("Unhandled TimeUnitEnum: " + timeUnitEnum); - } - } - - public static Long getSecondsInUnit(final BasicSchedule.TimeUnit timeUnitEnum) { - switch (timeUnitEnum) { - case MINUTES: - return TimeUnit.MINUTES.toSeconds(1); - case HOURS: - return TimeUnit.HOURS.toSeconds(1); - case DAYS: - return TimeUnit.DAYS.toSeconds(1); - case WEEKS: - return TimeUnit.DAYS.toSeconds(1) * 7; - case MONTHS: - return TimeUnit.DAYS.toSeconds(1) * 30; - default: - throw new RuntimeException("Unhandled TimeUnitEnum: " + timeUnitEnum); - } - } - - public static Long getIntervalInSecond(final Schedule schedule) { - return getSecondsInUnit(schedule.getTimeUnit()) * schedule.getUnits(); - } - - public static Long getIntervalInSecond(final BasicSchedule schedule) { - return getSecondsInUnit(schedule.getTimeUnit()) * schedule.getUnits(); - } - - public static boolean isScheduleTypeMismatch(final StandardSync standardSync) { - if (standardSync.getScheduleType() == null) { - return false; - } - return (standardSync.getManual() && standardSync.getScheduleType() != ScheduleType.MANUAL) || (!standardSync.getManual() - && standardSync.getScheduleType() == ScheduleType.MANUAL); - } - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/CloudStorageConfigs.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/CloudStorageConfigs.java deleted file mode 100644 index ba0124282c066..0000000000000 --- 
a/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/CloudStorageConfigs.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.storage; - -import com.google.common.base.Preconditions; - -/** - * Immutable configuration for configuring Cloud storage clients. We usually allow configuring one - * of 3 type of cloud storage clients in our env variables. We then want to opaquely pass that to - * wherever that cloud storage is used and then, based on the configuration, spin up the correct - * client. This configuration object allows us to do that. - */ -@SuppressWarnings("PMD.ShortMethodName") -public class CloudStorageConfigs { - - public enum WorkerStorageType { - S3, - MINIO, - GCS - } - - private final WorkerStorageType type; - private final S3Config s3Config; - private final MinioConfig minioConfig; - private final GcsConfig gcsConfig; - - public static CloudStorageConfigs s3(final S3Config config) { - return new CloudStorageConfigs(WorkerStorageType.S3, config, null, null); - } - - public static CloudStorageConfigs minio(final MinioConfig config) { - return new CloudStorageConfigs(WorkerStorageType.MINIO, null, config, null); - } - - public static CloudStorageConfigs gcs(final GcsConfig config) { - return new CloudStorageConfigs(WorkerStorageType.GCS, null, null, config); - } - - private CloudStorageConfigs(final WorkerStorageType type, - final S3Config s3Config, - final MinioConfig minioConfig, - final GcsConfig gcsConfig) { - validate(type, s3Config, minioConfig, gcsConfig); - - this.type = type; - this.s3Config = s3Config; - this.minioConfig = minioConfig; - this.gcsConfig = gcsConfig; - } - - private void validate(final WorkerStorageType type, - final S3Config s3Config, - final MinioConfig minioConfig, - final GcsConfig gcsConfig) { - switch (type) { - case S3 -> { - Preconditions.checkNotNull(s3Config); - Preconditions.checkArgument(minioConfig == null); - Preconditions.checkArgument(gcsConfig == null); - } - case MINIO -> { - Preconditions.checkArgument(s3Config == null); - Preconditions.checkNotNull(minioConfig); - Preconditions.checkArgument(gcsConfig == null); - } - case GCS -> { - Preconditions.checkArgument(s3Config == null); - Preconditions.checkArgument(minioConfig == null); - Preconditions.checkNotNull(gcsConfig); - } - } - } - - public WorkerStorageType getType() { - return type; - } - - public S3Config getS3Config() { - return s3Config; - } - - public MinioConfig getMinioConfig() { - return minioConfig; - } - - public GcsConfig getGcsConfig() { - return gcsConfig; - } - - public static class S3ApiWorkerStorageConfig { - - private final String bucketName; - private final String awsAccessKey; - private final String awsSecretAccessKey; - - protected S3ApiWorkerStorageConfig(final String bucketName, final String awsAccessKey, final String awsSecretAccessKey) { - this.bucketName = bucketName; - this.awsAccessKey = awsAccessKey; - this.awsSecretAccessKey = awsSecretAccessKey; - } - - public String getBucketName() { - return bucketName; - } - - public String getAwsAccessKey() { - return awsAccessKey; - } - - public String getAwsSecretAccessKey() { - return awsSecretAccessKey; - } - - } - - public static class S3Config extends S3ApiWorkerStorageConfig { - - private final String region; - - public S3Config(final String bucketName, final String awsAccessKey, final String awsSecretAccessKey, final String region) { - super(bucketName, awsAccessKey, awsSecretAccessKey); - this.region = 
region; - } - - public String getRegion() { - return region; - } - - } - - public static class MinioConfig extends S3ApiWorkerStorageConfig { - - private final String minioEndpoint; - - public MinioConfig(final String bucketName, final String awsAccessKey, final String awsSecretAccessKey, final String minioEndpoint) { - super(bucketName, awsAccessKey, awsSecretAccessKey); - this.minioEndpoint = minioEndpoint; - } - - public String getMinioEndpoint() { - return minioEndpoint; - } - - } - - public static class GcsConfig { - - private final String bucketName; - private final String googleApplicationCredentials; - - public GcsConfig(final String bucketName, final String googleApplicationCredentials) { - this.bucketName = bucketName; - this.googleApplicationCredentials = googleApplicationCredentials; - } - - public String getBucketName() { - return bucketName; - } - - public String getGoogleApplicationCredentials() { - return googleApplicationCredentials; - } - - } - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/DefaultGcsClientFactory.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/DefaultGcsClientFactory.java deleted file mode 100644 index 7373b45c991ca..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/DefaultGcsClientFactory.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.storage; - -import com.google.api.client.util.Preconditions; -import com.google.auth.oauth2.ServiceAccountCredentials; -import com.google.cloud.storage.Storage; -import com.google.cloud.storage.StorageOptions; -import io.airbyte.config.storage.CloudStorageConfigs.GcsConfig; -import java.io.ByteArrayInputStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.function.Supplier; - -/** - * Takes in the constructor our standard format for gcs configuration and provides a factory that - * uses that configuration to create a GCS client (Storage). - */ -@SuppressWarnings("PMD.AvoidThrowingRawExceptionTypes") -public class DefaultGcsClientFactory implements Supplier { - - private final GcsConfig config; - - public DefaultGcsClientFactory(final GcsConfig config) { - validate(config); - this.config = config; - } - - private static void validate(final GcsConfig config) { - Preconditions.checkArgument(!config.getBucketName().isBlank()); - Preconditions.checkArgument(!config.getGoogleApplicationCredentials().isBlank()); - } - - @Override - public Storage get() { - try { - final var credentialsByteStream = new ByteArrayInputStream(Files.readAllBytes(Path.of(config.getGoogleApplicationCredentials()))); - final var credentials = ServiceAccountCredentials.fromStream(credentialsByteStream); - return StorageOptions.newBuilder().setCredentials(credentials).build().getService(); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - -} diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/DefaultS3ClientFactory.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/DefaultS3ClientFactory.java deleted file mode 100644 index 5120f4d852b12..0000000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/DefaultS3ClientFactory.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */
-
-package io.airbyte.config.storage;
-
-import com.google.common.base.Preconditions;
-import io.airbyte.config.storage.CloudStorageConfigs.S3ApiWorkerStorageConfig;
-import io.airbyte.config.storage.CloudStorageConfigs.S3Config;
-import java.util.function.Supplier;
-import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
-import software.amazon.awssdk.regions.Region;
-import software.amazon.awssdk.services.s3.S3Client;
-
-/**
- * Takes in the constructor our standard format for S3 configuration and provides a factory that
- * uses that configuration to create an S3Client.
- */
-public class DefaultS3ClientFactory implements Supplier<S3Client> {
-
-  private final S3Config s3Config;
-
-  public DefaultS3ClientFactory(final S3Config s3Config) {
-    validate(s3Config);
-
-    this.s3Config = s3Config;
-  }
-
-  private static void validate(final S3Config config) {
-    Preconditions.checkNotNull(config);
-    validateBase(config);
-    Preconditions.checkArgument(!config.getRegion().isBlank());
-  }
-
-  static void validateBase(final S3ApiWorkerStorageConfig s3BaseConfig) {
-    Preconditions.checkArgument(!s3BaseConfig.getAwsAccessKey().isBlank());
-    Preconditions.checkArgument(!s3BaseConfig.getAwsSecretAccessKey().isBlank());
-    Preconditions.checkArgument(!s3BaseConfig.getBucketName().isBlank());
-  }
-
-  @Override
-  public S3Client get() {
-    final var builder = S3Client.builder();
-    builder.credentialsProvider(() -> AwsBasicCredentials.create(s3Config.getAwsAccessKey(), s3Config.getAwsSecretAccessKey()));
-    builder.region(Region.of(s3Config.getRegion()));
-    return builder.build();
-  }
-
-}
diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/MinioS3ClientFactory.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/MinioS3ClientFactory.java
deleted file mode 100644
index cef1bccf2de3a..0000000000000
--- a/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/MinioS3ClientFactory.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.config.storage;
-
-import com.google.common.base.Preconditions;
-import io.airbyte.config.storage.CloudStorageConfigs.MinioConfig;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.function.Supplier;
-import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
-import software.amazon.awssdk.regions.Region;
-import software.amazon.awssdk.services.s3.S3Client;
-
-/**
- * When using MinIO we can still leverage the S3Client; we just slightly change what information we
- * pass to it. Takes in the constructor our standard format for minio configuration and provides a
- * factory that uses that configuration to create an S3Client.
- */
-@SuppressWarnings("PMD.AvoidThrowingRawExceptionTypes")
-public class MinioS3ClientFactory implements Supplier<S3Client> {
-
-  private final MinioConfig minioConfig;
-
-  public MinioS3ClientFactory(final MinioConfig minioConfig) {
-    validate(minioConfig);
-    this.minioConfig = minioConfig;
-  }
-
-  private static void validate(final MinioConfig config) {
-    Preconditions.checkNotNull(config);
-    DefaultS3ClientFactory.validateBase(config);
-    Preconditions.checkArgument(!config.getMinioEndpoint().isBlank());
-  }
-
-  @Override
-  public S3Client get() {
-    final var builder = S3Client.builder();
-
-    // The Minio S3 client.
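For context on how the two factories fit together: both produce a Supplier<S3Client>, so the logging code stays agnostic about AWS versus MinIO. A hypothetical wiring against the removed API, with placeholder values for a local MinIO; the remainder of get() continues below:

```java
import java.util.function.Supplier;

import io.airbyte.config.storage.CloudStorageConfigs;
import io.airbyte.config.storage.CloudStorageConfigs.MinioConfig;
import io.airbyte.config.storage.MinioS3ClientFactory;
import software.amazon.awssdk.services.s3.S3Client;

// Hypothetical wiring of the deleted factories; the bucket, credentials, and
// endpoint below are placeholders for a local MinIO, not values from the diff.
public final class MinioWiringSketch {
  public static void main(String[] args) {
    final MinioConfig config = new MinioConfig("airbyte-logs", "minio", "minio123", "http://localhost:9000");
    final CloudStorageConfigs storage = CloudStorageConfigs.minio(config);
    final Supplier<S3Client> factory = new MinioS3ClientFactory(storage.getMinioConfig());
    try (S3Client s3 = factory.get()) {
      System.out.println(s3.listBuckets().buckets());
    }
  }
}
```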
- final var minioEndpoint = minioConfig.getMinioEndpoint(); - try { - final var minioUri = new URI(minioEndpoint); - builder.credentialsProvider(() -> AwsBasicCredentials.create(minioConfig.getAwsAccessKey(), minioConfig.getAwsSecretAccessKey())); - builder.endpointOverride(minioUri); - builder.region(Region.US_EAST_1); // Although this is not used, the S3 client will error out if this is not set. Set a stub value. - } catch (final URISyntaxException e) { - throw new RuntimeException("Error creating S3 log client to Minio", e); - } - - return builder.build(); - } - -} diff --git a/airbyte-config/config-models/src/main/resources/types/ActorCatalog.yaml b/airbyte-config/config-models/src/main/resources/types/ActorCatalog.yaml deleted file mode 100644 index a925ee891cdf6..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/ActorCatalog.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/AttemptFailureSummary.yaml -title: ActorCatalog -description: Catalog of an actor. -type: object -additionalProperties: false -required: - - id - - catalog - - catalogHash -properties: - id: - type: string - format: uuid - catalog: - type: object - existingJavaType: com.fasterxml.jackson.databind.JsonNode - catalogHash: - type: string diff --git a/airbyte-config/config-models/src/main/resources/types/ActorCatalogFetchEvent.yaml b/airbyte-config/config-models/src/main/resources/types/ActorCatalogFetchEvent.yaml deleted file mode 100644 index 3bb598c7bd59f..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/ActorCatalogFetchEvent.yaml +++ /dev/null @@ -1,30 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/AttemptFailureSummary.yaml -title: ActorCatalogFetchEvent -description: Link actor to their actual catalog -type: object -additionalProperties: false -required: - - id - - actorCatalogId - - actorId - - configHash - - connectorVersion -properties: - id: - type: string - format: uuid - actorId: - type: string - format: uuid - actorCatalogId: - type: string - format: uuid - configHash: - type: string - connectorVersion: - type: string - createdAt: - type: integer - format: int64 diff --git a/airbyte-config/config-models/src/main/resources/types/ActorCatalogWithUpdatedAt.yaml b/airbyte-config/config-models/src/main/resources/types/ActorCatalogWithUpdatedAt.yaml deleted file mode 100644 index 812b7c14eef10..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/ActorCatalogWithUpdatedAt.yaml +++ /dev/null @@ -1,23 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -title: ActorCatalogWithUpdatedAt -description: Catalog of an actor with its most recent ActorCatalogFetchEvent created_at timestamp. 
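The catalog schemas above declare `existingJavaType: com.fasterxml.jackson.databind.JsonNode`, so the generated models carry the catalog as a raw Jackson tree rather than a typed class. A small sketch of what that representation looks like in practice; the JSON itself is invented:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

// Sketch of the raw-tree representation implied by existingJavaType above.
public final class CatalogNodeSketch {
  public static void main(String[] args) throws Exception {
    final ObjectMapper mapper = new ObjectMapper();
    final JsonNode catalog = mapper.readTree(
        "{\"streams\":[{\"name\":\"users\",\"json_schema\":{\"type\":\"object\"}}]}");
    // Untyped navigation is the price of keeping the catalog as a JsonNode.
    System.out.println(catalog.get("streams").get(0).get("name").asText()); // users
  }
}
```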
-type: object -additionalProperties: false -required: - - id - - catalog - - catalogHash - - updatedAt -properties: - id: - type: string - format: uuid - catalog: - type: object - existingJavaType: com.fasterxml.jackson.databind.JsonNode - catalogHash: - type: string - updatedAt: - type: integer - format: int64 diff --git a/airbyte-config/config-models/src/main/resources/types/AttemptFailureSummary.yaml b/airbyte-config/config-models/src/main/resources/types/AttemptFailureSummary.yaml deleted file mode 100644 index d9e607f53ca77..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/AttemptFailureSummary.yaml +++ /dev/null @@ -1,18 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/AttemptFailureSummary.yaml -title: AttemptFailureSummary -description: Attempt-level summarization of failures that occurred during a sync workflow. -type: object -additionalProperties: false -required: - - failures -properties: - failures: - description: Ordered list of failures that occurred during the attempt. - type: array - items: - "$ref": FailureReason.yaml - partialSuccess: - description: True if the number of committed records for this attempt was greater than 0. False if 0 records were committed. Blank if number of committed records is unknown. - type: boolean diff --git a/airbyte-config/config-models/src/main/resources/types/AttemptSyncConfig.yaml b/airbyte-config/config-models/src/main/resources/types/AttemptSyncConfig.yaml deleted file mode 100644 index 7b28faea7bbf0..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/AttemptSyncConfig.yaml +++ /dev/null @@ -1,22 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/AttemptSyncConfig.yaml -title: AttemptSyncConfig -description: attempt sync config -type: object -additionalProperties: false -required: - - sourceConfiguration - - destinationConfiguration -properties: - sourceConfiguration: - description: Integration specific blob. Must be a valid JSON string. - type: object - existingJavaType: com.fasterxml.jackson.databind.JsonNode - destinationConfiguration: - description: Integration specific blob. Must be a valid JSON string. - type: object - existingJavaType: com.fasterxml.jackson.databind.JsonNode - state: - description: optional state of the previous run. this object is defined per integration. 
- "$ref": State.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/CustomerioNotificationConfiguration.yaml b/airbyte-config/config-models/src/main/resources/types/CustomerioNotificationConfiguration.yaml deleted file mode 100644 index 11ad66bb6d1ee..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/CustomerioNotificationConfiguration.yaml +++ /dev/null @@ -1,8 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/CustomerioNotificationConfiguration.yaml -title: CustomerioNotificationConfiguration -description: Customer.io Notification Settings -type: object -additionalProperties: false -properties: diff --git a/airbyte-config/config-models/src/main/resources/types/Geography.yaml b/airbyte-config/config-models/src/main/resources/types/Geography.yaml deleted file mode 100644 index f545f53c1d24d..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/Geography.yaml +++ /dev/null @@ -1,10 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/Geography.yaml -title: Geography -description: Geography Setting -type: string -enum: - - auto - - us - - eu diff --git a/airbyte-config/config-models/src/main/resources/types/JobCheckConnectionConfig.yaml b/airbyte-config/config-models/src/main/resources/types/JobCheckConnectionConfig.yaml deleted file mode 100644 index bf93ce6998723..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/JobCheckConnectionConfig.yaml +++ /dev/null @@ -1,29 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/JobCheckConnectionConfig.yaml -title: JobCheckConnectionConfig -description: job check connection config -type: object -additionalProperties: false -required: - - connectionConfiguration - - dockerImage -properties: - actorType: - "$ref": ActorType.yaml - actorId: - description: The ID of the actor being checked, so we can persist config updates - type: string - format: uuid - connectionConfiguration: - description: Integration specific blob. Must be a valid JSON string. - type: object - existingJavaType: com.fasterxml.jackson.databind.JsonNode - dockerImage: - type: string - protocolVersion: - type: object - existingJavaType: io.airbyte.commons.version.Version - isCustomConnector: - description: determine if the running image is a custom connector. 
- type: boolean diff --git a/airbyte-config/config-models/src/main/resources/types/JobConfig.yaml b/airbyte-config/config-models/src/main/resources/types/JobConfig.yaml deleted file mode 100644 index 824b18b753072..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/JobConfig.yaml +++ /dev/null @@ -1,29 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/JobConfig.yaml -title: JobConfig -description: job config -type: object -additionalProperties: false -required: - - configType -properties: - configType: - type: string - enum: - - checkConnectionSource - - checkConnectionDestination - - discoverSchema - - getSpec - - sync - - resetConnection - checkConnection: - "$ref": JobCheckConnectionConfig.yaml - discoverCatalog: - "$ref": JobDiscoverCatalogConfig.yaml - getSpec: - "$ref": JobGetSpecConfig.yaml - sync: - "$ref": JobSyncConfig.yaml - resetConnection: - "$ref": JobResetConnectionConfig.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/JobDiscoverCatalogConfig.yaml b/airbyte-config/config-models/src/main/resources/types/JobDiscoverCatalogConfig.yaml deleted file mode 100644 index 8f60458a3e112..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/JobDiscoverCatalogConfig.yaml +++ /dev/null @@ -1,38 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/JobDiscoverCatalogConfig.yaml -title: JobDiscoverCatalogConfig -description: job discover catalog config -type: object -additionalProperties: false -required: - - connectionConfiguration - - dockerImage - - sourceId - - connectorVersion - - configHash -properties: - connectionConfiguration: - description: Integration specific blob. Must be a valid JSON string. - type: object - existingJavaType: com.fasterxml.jackson.databind.JsonNode - dockerImage: - type: string - description: The connector image - example: airbyte/source-postgres:1.0.12 - sourceId: - description: The ID of the source being discovered, so we can persist this alongside the discovered catalog - type: string - connectorVersion: - description: Connector version, so we can persist this alongside the discovered catalog - type: string - example: 1.0.12 - configHash: - description: Hash of the source configuration -- see `configuration` field in SourceConnection.yaml -- so we can persist this alongside the discovered catalog. - type: string - protocolVersion: - type: object - existingJavaType: io.airbyte.commons.version.Version - isCustomConnector: - description: determine if the running image is a custom connector. 
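JobConfig above uses configType as a hand-rolled discriminator: rather than a oneOf, each job type gets its own optional payload property, and a well-formed instance populates exactly the one matching its configType. For example, paired with the JobDiscoverCatalogConfig schema in this same diff (values invented):

```yaml
# Hypothetical JobConfig instance; configType selects which payload property is set.
configType: discoverSchema
discoverCatalog:
  connectionConfiguration:
    host: db.example.internal
    port: 5432
  dockerImage: airbyte/source-postgres:1.0.12
  sourceId: 5f8a2d9e-1b3c-4a5d-8e7f-0a1b2c3d4e5f
  connectorVersion: "1.0.12"
  configHash: "3f1a9c2b" # hash of the source config, persisted with the catalog
```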
- type: boolean diff --git a/airbyte-config/config-models/src/main/resources/types/JobOutput.yaml b/airbyte-config/config-models/src/main/resources/types/JobOutput.yaml deleted file mode 100644 index a19c32dfd778d..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/JobOutput.yaml +++ /dev/null @@ -1,25 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/JobOutput.yaml -title: JobOutput -description: job output -type: object -additionalProperties: false -required: - - outputType -properties: - outputType: - type: string - enum: - - checkConnection - - discoverCatalog - - getSpec - - sync - checkConnection: - "$ref": StandardCheckConnectionOutput.yaml - discoverCatalog: - "$ref": StandardDiscoverCatalogOutput.yaml - getSpec: - "$ref": StandardGetSpecOutput.yaml - sync: - "$ref": StandardSyncOutput.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/JobResetConnectionConfig.yaml b/airbyte-config/config-models/src/main/resources/types/JobResetConnectionConfig.yaml deleted file mode 100644 index 73dcd898f93c9..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/JobResetConnectionConfig.yaml +++ /dev/null @@ -1,56 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/JobResetConnectionConfig.yaml -title: JobResetConnectionConfig -description: job reset connection config -type: object -additionalProperties: false -required: - - configuredAirbyteCatalog - - destinationDockerImage -properties: - namespaceDefinition: - "$ref": NamespaceDefinitionType.yaml - namespaceFormat: - type: string - default: null - example: "${SOURCE_NAMESPACE}" - prefix: - description: Prefix that will be prepended to the name of each stream when it is written to the destination. - type: string - configuredAirbyteCatalog: - description: the configured airbyte catalog - type: object - existingJavaType: io.airbyte.protocol.models.ConfiguredAirbyteCatalog - destinationDockerImage: - description: Image name of the destination with tag. - type: string - destinationProtocolVersion: - description: Airbyte Protocol Version of the destination - type: object - existingJavaType: io.airbyte.commons.version.Version - operationSequence: - description: Sequence of configurations of operations to apply as part of the sync - type: array - items: - "$ref": StandardSyncOperation.yaml - webhookOperationConfigs: - description: The webhook operation configs belonging to this workspace. Must conform to WebhookOperationConfigs.yaml. - type: object - existingJavaType: com.fasterxml.jackson.databind.JsonNode - resourceRequirements: - type: object - description: optional resource requirements to run sync workers - existingJavaType: io.airbyte.config.ResourceRequirements - resetSourceConfiguration: - "$ref": ResetSourceConfiguration.yaml - isSourceCustomConnector: - description: determine if the running image of the source is a custom connector. - type: boolean - isDestinationCustomConnector: - description: determine if the running image of the destination is a custom connector. 
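JobOutput above mirrors the same discriminator pattern on the output side. A minimal sketch, assuming StandardCheckConnectionOutput (not shown in this diff) carries a status field:

```yaml
# Hypothetical JobOutput instance; outputType selects the populated payload.
outputType: checkConnection
checkConnection:
  status: succeeded # assumed field; StandardCheckConnectionOutput.yaml is not shown
```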
- type: boolean - workspaceId: - description: The id of the workspace associated with the sync - type: string - format: uuid diff --git a/airbyte-config/config-models/src/main/resources/types/NormalizationInput.yaml b/airbyte-config/config-models/src/main/resources/types/NormalizationInput.yaml deleted file mode 100644 index 72fd7ffbcdb80..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/NormalizationInput.yaml +++ /dev/null @@ -1,27 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/NormalizationInput.yaml -title: NormalizationInput -description: job normalization config -type: object -additionalProperties: false -required: - - destinationConfiguration - - catalog -properties: - destinationConfiguration: - description: Integration specific blob. Must be a valid JSON string. - type: object - existingJavaType: com.fasterxml.jackson.databind.JsonNode - catalog: - description: the configured airbyte catalog. this version of the catalog represents the schema of the data in json blobs in the raw tables. - type: object - existingJavaType: io.airbyte.protocol.models.ConfiguredAirbyteCatalog - resourceRequirements: - type: object - description: optional resource requirements to run sync workers - existingJavaType: io.airbyte.config.ResourceRequirements - workspaceId: - description: The id of the workspace associated with this sync - type: string - format: uuid diff --git a/airbyte-config/config-models/src/main/resources/types/NormalizationSummary.yaml b/airbyte-config/config-models/src/main/resources/types/NormalizationSummary.yaml deleted file mode 100644 index 96e1244da4cb6..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/NormalizationSummary.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/NormalizationSummary.yaml -title: NormalizationSummary -description: information output by syncs for which a normalization step was performed -type: object -required: - - startTime - - endTime -additionalProperties: false -properties: - startTime: - type: integer - endTime: - type: integer - failures: - type: array - items: - "$ref": FailureReason.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/Notification.yaml b/airbyte-config/config-models/src/main/resources/types/Notification.yaml deleted file mode 100644 index f35a9c410e6c4..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/Notification.yaml +++ /dev/null @@ -1,24 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/Notification.yaml -title: Notification -description: Notification Settings -type: object -required: - - notificationType -additionalProperties: true -properties: - # Instead of this type field, we would prefer a json schema "oneOf" but unfortunately, - # the jsonschema2pojo does not seem to support it yet: https://github.com/joelittlejohn/jsonschema2pojo/issues/392 - notificationType: - "$ref": NotificationType.yaml - sendOnSuccess: - type: boolean - default: false - sendOnFailure: - type: boolean - default: true - slackConfiguration: - "$ref": SlackNotificationConfiguration.yaml - customerioConfiguration: - "$ref": CustomerioNotificationConfiguration.yaml diff --git 
a/airbyte-config/config-models/src/main/resources/types/OperatorDbtInput.yaml b/airbyte-config/config-models/src/main/resources/types/OperatorDbtInput.yaml deleted file mode 100644 index 2bdc9ca1d90aa..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/OperatorDbtInput.yaml +++ /dev/null @@ -1,17 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/OperatorDbtInput.yaml -title: Operator Dbt Input -description: Input configuration for DBT Transformation operator -type: object -additionalProperties: false -required: - - destinationConfiguration - - operatorDbt -properties: - destinationConfiguration: - description: Integration specific blob. Must be a valid JSON string. - type: object - existingJavaType: com.fasterxml.jackson.databind.JsonNode - operatorDbt: - "$ref": OperatorDbt.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/OperatorWebhookInput.yaml b/airbyte-config/config-models/src/main/resources/types/OperatorWebhookInput.yaml deleted file mode 100644 index e0a0016b527b9..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/OperatorWebhookInput.yaml +++ /dev/null @@ -1,24 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/OperatorWebhookInput.yaml -title: OperatorWebhookInput -description: Execution input for a webhook operation -type: object -required: - - executionUrl - - webhookConfigId -properties: - executionUrl: - description: URL to invoke the webhook via POST. - type: string - executionBody: - description: Message body to be POSTed. - type: string - webhookConfigId: - description: An id used to index into the workspaceWebhookConfigs, which has a list of webhook configs. - type: string - format: uuid - workspaceWebhookConfigs: - description: Webhook configs for this workspace. Must conform to WebhookOperationConfigs.yaml; any secrets must be hydrated before use. 
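The OperatorWebhookInput schema above bundles everything needed for a single webhook POST, plus the workspace's webhook config blob so the executor can resolve credentials by webhookConfigId. A conforming instance might look like this sketch; all values are invented, and the workspaceWebhookConfigs shape is an assumption since WebhookOperationConfigs.yaml is not shown in this diff.

```yaml
# Hypothetical OperatorWebhookInput instance (all values invented).
executionUrl: https://cloud.getdbt.com/api/v2/accounts/1/jobs/42/run/
executionBody: '{"cause": "airbyte sync"}'
webhookConfigId: 7c1d2e3f-4a5b-6c7d-8e9f-0a1b2c3d4e5f
workspaceWebhookConfigs: # shape assumed; must conform to WebhookOperationConfigs.yaml
  webhookConfigs:
    - id: 7c1d2e3f-4a5b-6c7d-8e9f-0a1b2c3d4e5f
      name: dbt-cloud
```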
- type: object - existingJavaType: com.fasterxml.jackson.databind.JsonNode diff --git a/airbyte-config/config-models/src/main/resources/types/ReplicationAttemptSummary.yaml b/airbyte-config/config-models/src/main/resources/types/ReplicationAttemptSummary.yaml deleted file mode 100644 index fc3eee68b54b9..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/ReplicationAttemptSummary.yaml +++ /dev/null @@ -1,33 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/ReplicationAttemptSummary.yaml -title: ReplicationAttemptSummary -type: object -required: - - status - - recordsSynced - - bytesSynced - - startTime - - endTime - - totalStats - - streamStats -additionalProperties: false -properties: - status: - "$ref": ReplicationStatus.yaml - recordsSynced: # TODO (parker) remove in favor of totalRecordsEmitted - type: integer - minValue: 0 - bytesSynced: # TODO (parker) remove in favor of totalBytesEmitted - type: integer - minValue: 0 - startTime: - type: integer - endTime: - type: integer - totalStats: - "$ref": SyncStats.yaml - streamStats: - type: array - items: - "$ref": StreamSyncStats.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/ReplicationOutput.yaml b/airbyte-config/config-models/src/main/resources/types/ReplicationOutput.yaml deleted file mode 100644 index 0182c02594056..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/ReplicationOutput.yaml +++ /dev/null @@ -1,22 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/ReplicationOutput.yaml -title: ReplicationOutput -description: metadata summary of a replication attempt -type: object -additionalProperties: false -required: - - replicationAttemptSummary - - state - - output_catalog -properties: - replicationAttemptSummary: - "$ref": ReplicationAttemptSummary.yaml - state: - "$ref": State.yaml - output_catalog: - existingJavaType: io.airbyte.protocol.models.ConfiguredAirbyteCatalog - failures: - type: array - items: - "$ref": FailureReason.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/ResetSourceConfiguration.yaml b/airbyte-config/config-models/src/main/resources/types/ResetSourceConfiguration.yaml deleted file mode 100644 index facea3cc60da2..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/ResetSourceConfiguration.yaml +++ /dev/null @@ -1,15 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/ResetSourceConfiguration.yaml -title: ResetSourceConfiguration -description: configuration of the reset source -type: object -additionalProperties: false -required: - - streamsToReset -properties: - streamsToReset: - type: array - items: - type: object - existingJavaType: io.airbyte.protocol.models.StreamDescriptor diff --git a/airbyte-config/config-models/src/main/resources/types/SlackNotificationConfiguration.yaml b/airbyte-config/config-models/src/main/resources/types/SlackNotificationConfiguration.yaml deleted file mode 100644 index 90546b93aeebf..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/SlackNotificationConfiguration.yaml +++ /dev/null @@ -1,12 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": 
https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/SlackNotificationConfiguration.yaml -title: SlackNotificationConfiguration -description: Slack Notification Settings -type: object -required: - - webhook -additionalProperties: false -properties: - webhook: - type: string diff --git a/airbyte-config/config-models/src/main/resources/types/StandardDiscoverCatalogOutput.yaml b/airbyte-config/config-models/src/main/resources/types/StandardDiscoverCatalogOutput.yaml deleted file mode 100644 index ada314dc9d6fb..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/StandardDiscoverCatalogOutput.yaml +++ /dev/null @@ -1,14 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StandardDiscoverCatalogOutput.yaml -title: StandardDiscoverCatalogOutput -description: describes the standard output for any discovery run. -type: object -required: - - catalog -additionalProperties: false -properties: - catalog: - description: describes the available schemas. - type: object - existingJavaType: io.airbyte.protocol.models.AirbyteCatalog diff --git a/airbyte-config/config-models/src/main/resources/types/StandardGetSpecOutput.yaml b/airbyte-config/config-models/src/main/resources/types/StandardGetSpecOutput.yaml deleted file mode 100644 index fd8394574440f..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/StandardGetSpecOutput.yaml +++ /dev/null @@ -1,12 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StandardGetSpecOutput.yaml -title: StandardGetSpecOutput -description: job get spec output -type: object -additionalProperties: false -required: - - specification -properties: - specification: - existingJavaType: io.airbyte.protocol.models.ConnectorSpecification diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSync.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSync.yaml deleted file mode 100644 index c4e679e408d8b..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/StandardSync.yaml +++ /dev/null @@ -1,134 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StandardSync.yaml -title: StandardSync -description: configuration required for sync for ALL sources -type: object -required: - - sourceId - - destinationId - - name - - catalog - - manual - - namespaceDefinition - - geography - - breakingChange -additionalProperties: false -properties: - namespaceDefinition: - "$ref": NamespaceDefinitionType.yaml - namespaceFormat: - type: string - default: null - example: "${SOURCE_NAMESPACE}" - prefix: - description: Prefix that will be prepended to the name of each stream when it is written to the destination. - type: string - sourceId: - type: string - format: uuid - destinationId: - type: string - format: uuid - operationIds: - type: array - items: - type: string - format: uuid - connectionId: - type: string - format: uuid - name: - type: string - catalog: - existingJavaType: io.airbyte.protocol.models.ConfiguredAirbyteCatalog - fieldSelectionData: - type: object - description: A map of StreamDescriptor to an indicator of whether field selection is enabled for that stream. 
- additionalProperties: - type: boolean - status: - type: string - enum: - - active - - inactive - - deprecated - # TODO(https://github.com/airbytehq/airbyte/issues/11432): remove. Prefer the scheduleType and scheduleData properties. - schedule: - type: object - required: - - timeUnit - - units - additionalProperties: false - properties: - timeUnit: - type: string - enum: - - minutes - - hours - - days - - weeks - - months - units: - type: integer - # When manual is true, schedule should be null, and will be ignored. - # TODO(https://github.com/airbytehq/airbyte/issues/11432): remove. Prefer setting the scheduleType property to Manual. - manual: - type: boolean - # If this property is set to BasicSchedule or Cron, the corresponding field in the scheduleData property should be set. - # NOTE: if this is set, it takes precedence over the `manual` property. - scheduleType: - type: string - enum: - - Manual - - BasicSchedule - - Cron - scheduleData: - type: object - additionalProperties: false - # Ideally basicSchedule and cron should be a union, but java codegen does not handle the union type properly. - properties: - # This should be populated when scheduleType is BasicSchedule. - basicSchedule: - type: object - required: - - timeUnit - - units - properties: - timeUnit: - type: string - enum: - - minutes - - hours - - days - - weeks - - months - units: - type: integer - # This should be populated when scheduleType is Cron. - cron: - type: object - required: - - cronExpression - - cronTimeZone - properties: - cronExpression: - type: string - cronTimeZone: - type: string - source_catalog_id: - type: string - format: uuid - resourceRequirements: - "$ref": ResourceRequirements.yaml - geography: - "$ref": Geography.yaml - breakingChange: - type: boolean - notifySchemaChanges: - type: boolean - nonBreakingChangesPreference: - type: string - enum: - - ignore - - disable diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml deleted file mode 100644 index ba0a14a340fdb..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml +++ /dev/null @@ -1,26 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StandardSyncOutput.yaml -title: StandardSyncOutput -description: job sync config -type: object -additionalProperties: false -required: - - standardSyncSummary - - state - - output_catalog -properties: - standardSyncSummary: - "$ref": StandardSyncSummary.yaml - normalizationSummary: - "$ref": NormalizationSummary.yaml - webhookOperationSummary: - "$ref": WebhookOperationSummary.yaml - state: - "$ref": State.yaml - output_catalog: - existingJavaType: io.airbyte.protocol.models.ConfiguredAirbyteCatalog - failures: - type: array - items: - "$ref": FailureReason.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSyncState.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSyncState.yaml deleted file mode 100644 index 964e084bc6968..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/StandardSyncState.yaml +++ /dev/null @@ -1,17 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StandardSyncState.yaml -title: StandardSyncState -description: The current state 
of a connection (i.e. StandardSync). -type: object -additionalProperties: false -required: - - connectionId -properties: - connectionId: - type: string - format: uuid - description: This is a foreign key that references a connection (i.e. StandardSync). - state: - "$ref": State.yaml - description: The current (latest) connection state. diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSyncSummary.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSyncSummary.yaml deleted file mode 100644 index a305f7fc44104..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/StandardSyncSummary.yaml +++ /dev/null @@ -1,36 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StandardSyncSummary.yaml -title: StandardSyncSummary -description: - standard information output by ALL sources for a sync step (our version - of state.json) -type: object -required: - - status - - recordsSynced - - bytesSynced - - startTime - - endTime - - totalStats - - streamStats -additionalProperties: false -properties: - status: - "$ref": ReplicationStatus.yaml - recordsSynced: # TODO (parker) remove in favor of totalRecordsEmitted - type: integer - minValue: 0 - bytesSynced: # TODO (parker) remove in favor of totalBytesEmitted - type: integer - minValue: 0 - startTime: - type: integer - endTime: - type: integer - totalStats: - "$ref": SyncStats.yaml - streamStats: - type: array - items: - "$ref": StreamSyncStats.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/StandardWorkspace.yaml b/airbyte-config/config-models/src/main/resources/types/StandardWorkspace.yaml deleted file mode 100644 index 03c786a207a0c..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/StandardWorkspace.yaml +++ /dev/null @@ -1,58 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StandardWorkspaceConfiguration.yaml -title: StandardWorkspace -description: workspace configuration -type: object -required: - - workspaceId - - name - - slug - - initialSetupComplete - - defaultGeography -additionalProperties: false -properties: - workspaceId: - type: string - format: uuid - customerId: - type: string - format: uuid - name: - type: string - slug: - type: string - email: - type: string - format: email - initialSetupComplete: - type: boolean - anonymousDataCollection: - type: boolean - news: - type: boolean - securityUpdates: - type: boolean - displaySetupWizard: - type: boolean - tombstone: - description: - if not set or false, the configuration is active. if true, then this - configuration is permanently off. - type: boolean - notifications: - type: array - items: - "$ref": Notification.yaml - firstCompletedSync: - type: boolean - feedbackDone: - type: boolean - defaultGeography: - "$ref": Geography.yaml - webhookOperationConfigs: - description: - Configurations for webhooks operations, stored as a JSON object so we can replace sensitive info with - coordinates in the secrets manager. Must conform to WebhookOperationConfigs.yaml. 
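StandardSync above accumulates three overlapping scheduling mechanisms, and its inline comments fix the precedence: scheduleType wins over the deprecated manual and schedule fields, and scheduleData must carry the member matching scheduleType. A conforming cron fragment might look like this (expression and zone values invented):

```yaml
# Hypothetical StandardSync scheduling fragment. With scheduleType set,
# the deprecated `manual` and `schedule` fields are ignored.
scheduleType: Cron
scheduleData:
  cron:
    cronExpression: "0 0 12 * * ?" # invented example value
    cronTimeZone: UTC
manual: false
```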
- type: object - existingJavaType: com.fasterxml.jackson.databind.JsonNode diff --git a/airbyte-config/config-models/src/main/resources/types/StreamSyncStats.yaml b/airbyte-config/config-models/src/main/resources/types/StreamSyncStats.yaml deleted file mode 100644 index 5ce73ce21d1d4..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/StreamSyncStats.yaml +++ /dev/null @@ -1,18 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StreamSyncStats.yaml -title: StreamSyncStats -description: Sync stats for a particular stream. -type: object -required: - - streamName - - stats -additionalProperties: false -properties: - streamName: - type: string - # Not required as not all sources emits a namespace for each Stream. - streamNamespace: - type: string - stats: - "$ref": SyncStats.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/WorkspaceServiceAccount.yaml b/airbyte-config/config-models/src/main/resources/types/WorkspaceServiceAccount.yaml deleted file mode 100644 index f18e2183edb0c..0000000000000 --- a/airbyte-config/config-models/src/main/resources/types/WorkspaceServiceAccount.yaml +++ /dev/null @@ -1,31 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/WorkspaceServiceAccount.yaml -title: WorkspaceServiceAccount -description: service account attached to a workspace -type: object -required: - - workspaceId - - serviceAccountId - - serviceAccountEmail - - jsonCredential - - hmacKey -additionalProperties: false -properties: - workspaceId: - type: string - format: uuid - serviceAccountId: - type: string - serviceAccountEmail: - type: string - jsonCredential: - # Ref : io.airbyte.config.persistence.MockData#workspaceServiceAccounts() for sample data - description: Represents the JSON key generated for the service account - type: object - existingJavaType: com.fasterxml.jackson.databind.JsonNode - hmacKey: - # Ref : io.airbyte.config.persistence.MockData#workspaceServiceAccounts() for sample data - description: Represents the secret and access id of generated HMAC key for the service account - type: object - existingJavaType: com.fasterxml.jackson.databind.JsonNode diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/EnvConfigsTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/EnvConfigsTest.java deleted file mode 100644 index 746f2cdee98d2..0000000000000 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/EnvConfigsTest.java +++ /dev/null @@ -1,486 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
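A WorkspaceServiceAccount instance conforming to the schema above might look like the sketch below. The schema types jsonCredential and hmacKey as opaque JsonNode blobs, so the inner fields shown are assumptions modeled on typical GCP service-account keys, not definitions from this diff.

```yaml
# Hypothetical WorkspaceServiceAccount instance (all values invented).
workspaceId: 1e2d3c4b-5a69-4887-96a5-b4c3d2e1f009
serviceAccountId: "123456789012345678901"
serviceAccountEmail: airbyte-ws@my-project.iam.gserviceaccount.com
jsonCredential: # opaque blob per the schema; inner fields assumed
  type: service_account
  project_id: my-project
hmacKey: # opaque blob per the schema; inner fields assumed
  access_id: GOOG1EXAMPLEACCESSID
  secret: "<redacted>"
```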
- */ - -package io.airbyte.config; - -import static org.junit.jupiter.api.Assertions.*; - -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Configs.DeploymentMode; -import io.airbyte.config.Configs.JobErrorReportingStrategy; -import io.airbyte.config.Configs.WorkerEnvironment; -import java.net.URI; -import java.nio.file.Paths; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.NullAssignment") -class EnvConfigsTest { - - private Map envMap; - private EnvConfigs config; - private final static String ABC = "abc"; - private final static String DEV = "dev"; - private final static String ABCDEF = "abc/def"; - private final static String ROOT = "root"; - private final static String KEY = "key"; - private final static String ONE = "one"; - private final static String TWO = "two"; - private final static String ONE_EQ_TWO = "one=two"; - private final static String NOTHING = "nothing"; - private final static String SOMETHING = "something"; - private final static String AIRBYTE = "airbyte"; - private final static String SERVER = "server"; - private final static String AIRB_SERV_SOME_NOTHING = "airbyte=server,something=nothing"; - private final static String ENV_STRING = "key=k,,;$%&^#"; - private final static String NODE_SELECTORS = ",,,"; - - @BeforeEach - void setUp() { - envMap = new HashMap<>(); - config = new EnvConfigs(envMap); - } - - @Test - void ensureGetEnvBehavior() { - assertNull(System.getenv("MY_RANDOM_VAR_1234")); - } - - @Test - void testAirbyteRole() { - envMap.put(EnvConfigs.AIRBYTE_ROLE, null); - assertNull(config.getAirbyteRole()); - - envMap.put(EnvConfigs.AIRBYTE_ROLE, DEV); - assertEquals(DEV, config.getAirbyteRole()); - } - - @Test - void testAirbyteVersion() { - envMap.put(EnvConfigs.AIRBYTE_VERSION, null); - assertThrows(IllegalArgumentException.class, () -> config.getAirbyteVersion()); - - envMap.put(EnvConfigs.AIRBYTE_VERSION, DEV); - assertEquals(new AirbyteVersion(DEV), config.getAirbyteVersion()); - } - - @Test - void testWorkspaceRoot() { - envMap.put(EnvConfigs.WORKSPACE_ROOT, null); - assertThrows(IllegalArgumentException.class, () -> config.getWorkspaceRoot()); - - envMap.put(EnvConfigs.WORKSPACE_ROOT, ABCDEF); - assertEquals(Paths.get(ABCDEF), config.getWorkspaceRoot()); - } - - @Test - void testLocalRoot() { - envMap.put(EnvConfigs.LOCAL_ROOT, null); - assertThrows(IllegalArgumentException.class, () -> config.getLocalRoot()); - - envMap.put(EnvConfigs.LOCAL_ROOT, ABCDEF); - assertEquals(Paths.get(ABCDEF), config.getLocalRoot()); - } - - @Test - void testConfigRoot() { - envMap.put(EnvConfigs.CONFIG_ROOT, null); - assertThrows(IllegalArgumentException.class, () -> config.getConfigRoot()); - - envMap.put(EnvConfigs.CONFIG_ROOT, "a/b"); - assertEquals(Paths.get("a/b"), config.getConfigRoot()); - } - - @Test - void testGetDatabaseUser() { - envMap.put(EnvConfigs.DATABASE_USER, null); - assertThrows(IllegalArgumentException.class, () -> config.getDatabaseUser()); - - envMap.put(EnvConfigs.DATABASE_USER, "user"); - assertEquals("user", config.getDatabaseUser()); - } - - @Test - void testGetDatabasePassword() { - envMap.put(EnvConfigs.DATABASE_PASSWORD, null); - assertThrows(IllegalArgumentException.class, () -> config.getDatabasePassword()); - - envMap.put(EnvConfigs.DATABASE_PASSWORD, "password"); - 
assertEquals("password", config.getDatabasePassword()); - } - - @Test - void testGetDatabaseUrl() { - envMap.put(EnvConfigs.DATABASE_URL, null); - assertThrows(IllegalArgumentException.class, () -> config.getDatabaseUrl()); - - envMap.put(EnvConfigs.DATABASE_URL, "url"); - assertEquals("url", config.getDatabaseUrl()); - } - - @Test - void testGetWorkspaceDockerMount() { - envMap.put(EnvConfigs.WORKSPACE_DOCKER_MOUNT, null); - envMap.put(EnvConfigs.WORKSPACE_ROOT, ABCDEF); - assertEquals(ABCDEF, config.getWorkspaceDockerMount()); - - envMap.put(EnvConfigs.WORKSPACE_DOCKER_MOUNT, ROOT); - envMap.put(EnvConfigs.WORKSPACE_ROOT, ABCDEF); - assertEquals(ROOT, config.getWorkspaceDockerMount()); - - envMap.put(EnvConfigs.WORKSPACE_DOCKER_MOUNT, null); - envMap.put(EnvConfigs.WORKSPACE_ROOT, null); - assertThrows(IllegalArgumentException.class, () -> config.getWorkspaceDockerMount()); - } - - @Test - void testGetLocalDockerMount() { - envMap.put(EnvConfigs.LOCAL_DOCKER_MOUNT, null); - envMap.put(EnvConfigs.LOCAL_ROOT, ABCDEF); - assertEquals(ABCDEF, config.getLocalDockerMount()); - - envMap.put(EnvConfigs.LOCAL_DOCKER_MOUNT, ROOT); - envMap.put(EnvConfigs.LOCAL_ROOT, ABCDEF); - assertEquals(ROOT, config.getLocalDockerMount()); - - envMap.put(EnvConfigs.LOCAL_DOCKER_MOUNT, null); - envMap.put(EnvConfigs.LOCAL_ROOT, null); - assertThrows(IllegalArgumentException.class, () -> config.getLocalDockerMount()); - } - - @Test - void testDockerNetwork() { - envMap.put(EnvConfigs.DOCKER_NETWORK, null); - assertEquals("host", config.getDockerNetwork()); - - envMap.put(EnvConfigs.DOCKER_NETWORK, ABC); - assertEquals(ABC, config.getDockerNetwork()); - } - - @Test - void testTrackingStrategy() { - envMap.put(EnvConfigs.TRACKING_STRATEGY, null); - assertEquals(Configs.TrackingStrategy.LOGGING, config.getTrackingStrategy()); - - envMap.put(EnvConfigs.TRACKING_STRATEGY, ABC); - assertEquals(Configs.TrackingStrategy.LOGGING, config.getTrackingStrategy()); - - envMap.put(EnvConfigs.TRACKING_STRATEGY, "logging"); - assertEquals(Configs.TrackingStrategy.LOGGING, config.getTrackingStrategy()); - - envMap.put(EnvConfigs.TRACKING_STRATEGY, "segment"); - assertEquals(Configs.TrackingStrategy.SEGMENT, config.getTrackingStrategy()); - - envMap.put(EnvConfigs.TRACKING_STRATEGY, "LOGGING"); - assertEquals(Configs.TrackingStrategy.LOGGING, config.getTrackingStrategy()); - } - - @Test - void testErrorReportingStrategy() { - envMap.put(EnvConfigs.JOB_ERROR_REPORTING_STRATEGY, null); - assertEquals(JobErrorReportingStrategy.LOGGING, config.getJobErrorReportingStrategy()); - - envMap.put(EnvConfigs.JOB_ERROR_REPORTING_STRATEGY, ABC); - assertEquals(JobErrorReportingStrategy.LOGGING, config.getJobErrorReportingStrategy()); - - envMap.put(EnvConfigs.JOB_ERROR_REPORTING_STRATEGY, "logging"); - assertEquals(JobErrorReportingStrategy.LOGGING, config.getJobErrorReportingStrategy()); - - envMap.put(EnvConfigs.JOB_ERROR_REPORTING_STRATEGY, "sentry"); - assertEquals(JobErrorReportingStrategy.SENTRY, config.getJobErrorReportingStrategy()); - - envMap.put(EnvConfigs.JOB_ERROR_REPORTING_STRATEGY, "LOGGING"); - assertEquals(JobErrorReportingStrategy.LOGGING, config.getJobErrorReportingStrategy()); - - envMap.put(EnvConfigs.JOB_ERROR_REPORTING_STRATEGY, "SENTRY"); - assertEquals(JobErrorReportingStrategy.SENTRY, config.getJobErrorReportingStrategy()); - } - - @Test - void testDeploymentMode() { - envMap.put(EnvConfigs.DEPLOYMENT_MODE, null); - assertEquals(Configs.DeploymentMode.OSS, config.getDeploymentMode()); - - 
envMap.put(EnvConfigs.DEPLOYMENT_MODE, "CLOUD"); - assertEquals(Configs.DeploymentMode.CLOUD, config.getDeploymentMode()); - - envMap.put(EnvConfigs.DEPLOYMENT_MODE, "oss"); - assertEquals(Configs.DeploymentMode.OSS, config.getDeploymentMode()); - - envMap.put(EnvConfigs.DEPLOYMENT_MODE, "OSS"); - assertEquals(Configs.DeploymentMode.OSS, config.getDeploymentMode()); - } - - @Test - void testworkerKubeTolerations() { - final String airbyteServer = "airbyte-server"; - final String noSchedule = "NoSchedule"; - - envMap.put(EnvConfigs.JOB_KUBE_TOLERATIONS, null); - assertEquals(config.getJobKubeTolerations(), List.of()); - - envMap.put(EnvConfigs.JOB_KUBE_TOLERATIONS, ";;;"); - assertEquals(config.getJobKubeTolerations(), List.of()); - - envMap.put(EnvConfigs.JOB_KUBE_TOLERATIONS, "key=k,value=v;"); - assertEquals(config.getJobKubeTolerations(), List.of()); - - envMap.put(EnvConfigs.JOB_KUBE_TOLERATIONS, "key=airbyte-server,operator=Exists,effect=NoSchedule"); - assertEquals(config.getJobKubeTolerations(), List.of(new TolerationPOJO(airbyteServer, noSchedule, null, "Exists"))); - - envMap.put(EnvConfigs.JOB_KUBE_TOLERATIONS, "key=airbyte-server,operator=Equals,value=true,effect=NoSchedule"); - assertEquals(config.getJobKubeTolerations(), List.of(new TolerationPOJO(airbyteServer, noSchedule, "true", "Equals"))); - - envMap.put(EnvConfigs.JOB_KUBE_TOLERATIONS, - "key=airbyte-server,operator=Exists,effect=NoSchedule;key=airbyte-server,operator=Equals,value=true,effect=NoSchedule"); - assertEquals(config.getJobKubeTolerations(), List.of( - new TolerationPOJO(airbyteServer, noSchedule, null, "Exists"), - new TolerationPOJO(airbyteServer, noSchedule, "true", "Equals"))); - } - - @Test - void testSplitKVPairsFromEnvString() { - String input = "key1=value1,key2=value2"; - Map map = config.splitKVPairsFromEnvString(input); - assertNotNull(map); - assertEquals(2, map.size()); - assertEquals(map, Map.of("key1", "value1", "key2", "value2")); - - input = ENV_STRING; - map = config.splitKVPairsFromEnvString(input); - assertNotNull(map); - assertEquals(map, Map.of(KEY, "k")); - - input = null; - map = config.splitKVPairsFromEnvString(input); - assertNull(map); - - input = " key1= value1, key2 = value2"; - map = config.splitKVPairsFromEnvString(input); - assertNotNull(map); - assertEquals(map, Map.of("key1", "value1", "key2", "value2")); - - input = "key1:value1,key2:value2"; - map = config.splitKVPairsFromEnvString(input); - assertNull(map); - } - - @Test - void testJobKubeNodeSelectors() { - envMap.put(EnvConfigs.JOB_KUBE_NODE_SELECTORS, null); - assertNull(config.getJobKubeNodeSelectors()); - - envMap.put(EnvConfigs.JOB_KUBE_NODE_SELECTORS, NODE_SELECTORS); - assertNull(config.getJobKubeNodeSelectors()); - - envMap.put(EnvConfigs.JOB_KUBE_NODE_SELECTORS, ENV_STRING); - assertEquals(config.getJobKubeNodeSelectors(), Map.of(KEY, "k")); - - envMap.put(EnvConfigs.JOB_KUBE_NODE_SELECTORS, ONE_EQ_TWO); - assertEquals(config.getJobKubeNodeSelectors(), Map.of(ONE, TWO)); - - envMap.put(EnvConfigs.JOB_KUBE_NODE_SELECTORS, AIRB_SERV_SOME_NOTHING); - assertEquals(config.getJobKubeNodeSelectors(), Map.of(AIRBYTE, SERVER, SOMETHING, NOTHING)); - } - - @Test - void testSpecKubeNodeSelectors() { - envMap.put(EnvConfigs.SPEC_JOB_KUBE_NODE_SELECTORS, null); - assertNull(config.getSpecJobKubeNodeSelectors()); - - envMap.put(EnvConfigs.SPEC_JOB_KUBE_NODE_SELECTORS, NODE_SELECTORS); - assertNull(config.getSpecJobKubeNodeSelectors()); - - envMap.put(EnvConfigs.SPEC_JOB_KUBE_NODE_SELECTORS, ENV_STRING); - 
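The toleration and node-selector tests above pin down the accepted string formats: tolerations are semicolon-separated clauses of key/operator/value/effect pairs, node selectors are comma-separated key=value pairs, and colon-separated pairs or stray delimiters parse to nothing. Environment settings consistent with those tests might look like:

```yaml
# Hypothetical worker env settings, using the exact formats the tests above accept.
JOB_KUBE_TOLERATIONS: "key=airbyte-server,operator=Exists,effect=NoSchedule;key=airbyte-server,operator=Equals,value=true,effect=NoSchedule"
JOB_KUBE_NODE_SELECTORS: "airbyte=server,something=nothing" # -> {airbyte: server, something: nothing}
SPEC_JOB_KUBE_NODE_SELECTORS: "one=two" # per-job-type selector, same format
```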
assertEquals(config.getSpecJobKubeNodeSelectors(), Map.of(KEY, "k")); - - envMap.put(EnvConfigs.SPEC_JOB_KUBE_NODE_SELECTORS, ONE_EQ_TWO); - assertEquals(config.getSpecJobKubeNodeSelectors(), Map.of(ONE, TWO)); - - envMap.put(EnvConfigs.SPEC_JOB_KUBE_NODE_SELECTORS, AIRB_SERV_SOME_NOTHING); - assertEquals(config.getSpecJobKubeNodeSelectors(), Map.of(AIRBYTE, SERVER, SOMETHING, NOTHING)); - } - - @Test - void testCheckKubeNodeSelectors() { - envMap.put(EnvConfigs.CHECK_JOB_KUBE_NODE_SELECTORS, null); - assertNull(config.getCheckJobKubeNodeSelectors()); - - envMap.put(EnvConfigs.CHECK_JOB_KUBE_NODE_SELECTORS, NODE_SELECTORS); - assertNull(config.getCheckJobKubeNodeSelectors()); - - envMap.put(EnvConfigs.CHECK_JOB_KUBE_NODE_SELECTORS, ENV_STRING); - assertEquals(config.getCheckJobKubeNodeSelectors(), Map.of(KEY, "k")); - - envMap.put(EnvConfigs.CHECK_JOB_KUBE_NODE_SELECTORS, ONE_EQ_TWO); - assertEquals(config.getCheckJobKubeNodeSelectors(), Map.of(ONE, TWO)); - - envMap.put(EnvConfigs.CHECK_JOB_KUBE_NODE_SELECTORS, AIRB_SERV_SOME_NOTHING); - assertEquals(config.getCheckJobKubeNodeSelectors(), Map.of(AIRBYTE, SERVER, SOMETHING, NOTHING)); - } - - @Test - void testDiscoverKubeNodeSelectors() { - envMap.put(EnvConfigs.DISCOVER_JOB_KUBE_NODE_SELECTORS, null); - assertNull(config.getDiscoverJobKubeNodeSelectors()); - - envMap.put(EnvConfigs.DISCOVER_JOB_KUBE_NODE_SELECTORS, NODE_SELECTORS); - assertNull(config.getDiscoverJobKubeNodeSelectors()); - - envMap.put(EnvConfigs.DISCOVER_JOB_KUBE_NODE_SELECTORS, ENV_STRING); - assertEquals(config.getDiscoverJobKubeNodeSelectors(), Map.of(KEY, "k")); - - envMap.put(EnvConfigs.DISCOVER_JOB_KUBE_NODE_SELECTORS, ONE_EQ_TWO); - assertEquals(config.getDiscoverJobKubeNodeSelectors(), Map.of(ONE, TWO)); - - envMap.put(EnvConfigs.DISCOVER_JOB_KUBE_NODE_SELECTORS, AIRB_SERV_SOME_NOTHING); - assertEquals(config.getDiscoverJobKubeNodeSelectors(), Map.of(AIRBYTE, SERVER, SOMETHING, NOTHING)); - } - - @Test - void testPublishMetrics() { - envMap.put(EnvConfigs.PUBLISH_METRICS, "true"); - assertTrue(config.getPublishMetrics()); - - envMap.put(EnvConfigs.PUBLISH_METRICS, "false"); - assertFalse(config.getPublishMetrics()); - - envMap.put(EnvConfigs.PUBLISH_METRICS, null); - assertFalse(config.getPublishMetrics()); - - envMap.put(EnvConfigs.PUBLISH_METRICS, ""); - assertFalse(config.getPublishMetrics()); - } - - @Test - @DisplayName("Should parse constant tags") - void testDDConstantTags() { - assertEquals(List.of(), config.getDDConstantTags()); - - envMap.put(EnvConfigs.DD_CONSTANT_TAGS, " "); - assertEquals(List.of(), config.getDDConstantTags()); - - envMap.put(EnvConfigs.DD_CONSTANT_TAGS, "airbyte_instance:dev,k8s-cluster:eks-dev"); - List expected = List.of("airbyte_instance:dev", "k8s-cluster:eks-dev"); - assertEquals(expected, config.getDDConstantTags()); - assertEquals(2, config.getDDConstantTags().size()); - } - - @Nested - @DisplayName("CheckJobResourceSettings") - class CheckJobResourceSettings { - - @Test - @DisplayName("should default to JobMainCpuRequest if not set") - void testCpuRequestDefaultToJobMainCpuRequest() { - envMap.put(EnvConfigs.CHECK_JOB_MAIN_CONTAINER_CPU_REQUEST, null); - envMap.put(EnvConfigs.JOB_MAIN_CONTAINER_CPU_REQUEST, "1"); - assertEquals("1", config.getCheckJobMainContainerCpuRequest()); - } - - @Test - @DisplayName("checkJobCpuRequest should take precedent if set") - void testCheckJobCpuRequestTakePrecedentIfSet() { - envMap.put(EnvConfigs.CHECK_JOB_MAIN_CONTAINER_CPU_REQUEST, "1"); - 
envMap.put(EnvConfigs.JOB_MAIN_CONTAINER_CPU_REQUEST, "2"); - assertEquals("1", config.getCheckJobMainContainerCpuRequest()); - } - - @Test - @DisplayName("should default to JobMainCpuLimit if not set") - void testCpuLimitDefaultToJobMainCpuLimit() { - envMap.put(EnvConfigs.CHECK_JOB_MAIN_CONTAINER_CPU_LIMIT, null); - envMap.put(EnvConfigs.JOB_MAIN_CONTAINER_CPU_LIMIT, "1"); - assertEquals("1", config.getCheckJobMainContainerCpuLimit()); - } - - @Test - @DisplayName("checkJobCpuLimit should take precedent if set") - void testCheckJobCpuLimitTakePrecedentIfSet() { - envMap.put(EnvConfigs.CHECK_JOB_MAIN_CONTAINER_CPU_LIMIT, "1"); - envMap.put(EnvConfigs.JOB_MAIN_CONTAINER_CPU_LIMIT, "2"); - assertEquals("1", config.getCheckJobMainContainerCpuLimit()); - } - - @Test - @DisplayName("should default to JobMainMemoryRequest if not set") - void testMemoryRequestDefaultToJobMainMemoryRequest() { - envMap.put(EnvConfigs.CHECK_JOB_MAIN_CONTAINER_MEMORY_REQUEST, null); - envMap.put(EnvConfigs.JOB_MAIN_CONTAINER_MEMORY_REQUEST, "1"); - assertEquals("1", config.getCheckJobMainContainerMemoryRequest()); - } - - @Test - @DisplayName("checkJobMemoryRequest should take precedent if set") - void testCheckJobMemoryRequestTakePrecedentIfSet() { - envMap.put(EnvConfigs.CHECK_JOB_MAIN_CONTAINER_MEMORY_REQUEST, "1"); - envMap.put(EnvConfigs.JOB_MAIN_CONTAINER_MEMORY_REQUEST, "2"); - assertEquals("1", config.getCheckJobMainContainerMemoryRequest()); - } - - @Test - @DisplayName("should default to JobMainMemoryLimit if not set") - void testMemoryLimitDefaultToJobMainMemoryLimit() { - envMap.put(EnvConfigs.CHECK_JOB_MAIN_CONTAINER_MEMORY_LIMIT, null); - envMap.put(EnvConfigs.JOB_MAIN_CONTAINER_MEMORY_LIMIT, "1"); - assertEquals("1", config.getCheckJobMainContainerMemoryLimit()); - } - - @Test - @DisplayName("checkJobMemoryLimit should take precedent if set") - void testCheckJobMemoryLimitTakePrecedentIfSet() { - envMap.put(EnvConfigs.CHECK_JOB_MAIN_CONTAINER_MEMORY_LIMIT, "1"); - envMap.put(EnvConfigs.JOB_MAIN_CONTAINER_MEMORY_LIMIT, "2"); - assertEquals("1", config.getCheckJobMainContainerMemoryLimit()); - } - - } - - @Test - void testSharedJobEnvMapRetrieval() { - envMap.put(EnvConfigs.AIRBYTE_VERSION, DEV); - envMap.put(EnvConfigs.WORKER_ENVIRONMENT, WorkerEnvironment.KUBERNETES.name()); - final Map expected = Map.of("AIRBYTE_VERSION", DEV, - "AIRBYTE_ROLE", "", - "DEPLOYMENT_MODE", "OSS", - "WORKER_ENVIRONMENT", "KUBERNETES"); - assertEquals(expected, config.getJobDefaultEnvMap()); - } - - @Test - void testAllJobEnvMapRetrieval() { - envMap.put(EnvConfigs.AIRBYTE_VERSION, DEV); - envMap.put(EnvConfigs.AIRBYTE_ROLE, "UNIT_TEST"); - envMap.put(EnvConfigs.JOB_DEFAULT_ENV_PREFIX + "ENV1", "VAL1"); - envMap.put(EnvConfigs.JOB_DEFAULT_ENV_PREFIX + "ENV2", "VAL\"2WithQuotesand$ymbols"); - envMap.put(EnvConfigs.DEPLOYMENT_MODE, DeploymentMode.CLOUD.name()); - - final Map expected = Map.of("ENV1", "VAL1", - "ENV2", "VAL\"2WithQuotesand$ymbols", - "AIRBYTE_VERSION", DEV, - "AIRBYTE_ROLE", "UNIT_TEST", - "DEPLOYMENT_MODE", "CLOUD", - "WORKER_ENVIRONMENT", "DOCKER"); - assertEquals(expected, config.getJobDefaultEnvMap()); - } - - @Test - void testRemoteConnectorCatalogUrl() { - envMap.put(EnvConfigs.REMOTE_CONNECTOR_CATALOG_URL, null); - assertEquals(Optional.empty(), config.getRemoteConnectorCatalogUrl()); - - envMap.put(EnvConfigs.REMOTE_CONNECTOR_CATALOG_URL, ""); - assertEquals(Optional.empty(), config.getRemoteConnectorCatalogUrl()); - - envMap.put(EnvConfigs.REMOTE_CONNECTOR_CATALOG_URL, "https://airbyte.com"); - 
assertEquals(Optional.of(URI.create("https://airbyte.com")), config.getRemoteConnectorCatalogUrl()); - } - -} diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/CloudLogsClientTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/CloudLogsClientTest.java deleted file mode 100644 index 06a8cdc70f394..0000000000000 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/CloudLogsClientTest.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.helpers; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.config.storage.CloudStorageConfigs; -import io.airbyte.config.storage.CloudStorageConfigs.GcsConfig; -import io.airbyte.config.storage.CloudStorageConfigs.MinioConfig; -import io.airbyte.config.storage.CloudStorageConfigs.S3Config; -import java.util.Optional; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class CloudLogsClientTest { - - @Test - void createCloudLogClientTestMinio() { - final var configs = new LogConfigs(Optional.of(CloudStorageConfigs.minio(new MinioConfig( - "test-bucket", - "access-key", - "access-key-secret", - "minio-endpoint")))); - - assertEquals(S3Logs.class, CloudLogs.createCloudLogClient(configs).getClass()); - } - - @Test - void createCloudLogClientTestAws() { - final var configs = new LogConfigs(Optional.of(CloudStorageConfigs.s3(new S3Config( - "test-bucket", - "access-key", - "access-key-secret", - "us-east-1")))); - - assertEquals(S3Logs.class, CloudLogs.createCloudLogClient(configs).getClass()); - } - - @Test - void createCloudLogClientTestGcs() { - final var configs = new LogConfigs(Optional.of(CloudStorageConfigs.gcs(new GcsConfig( - "storage-bucket", - "path/to/google/secret")))); - - assertEquals(GcsLogs.class, CloudLogs.createCloudLogClient(configs).getClass()); - } - -} diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/GcsLogsTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/GcsLogsTest.java deleted file mode 100644 index 2eee0cb235b08..0000000000000 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/GcsLogsTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.helpers; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.google.cloud.storage.Storage; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.storage.CloudStorageConfigs; -import io.airbyte.config.storage.DefaultGcsClientFactory; -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.List; -import lombok.extern.slf4j.Slf4j; -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Test; - -@Slf4j -@Tag("logger-client") -class GcsLogsTest { - - private static Storage getClientFactory() { - return new DefaultGcsClientFactory(new CloudStorageConfigs.GcsConfig( - System.getenv(LogClientSingleton.GCS_LOG_BUCKET), - System.getenv(LogClientSingleton.GOOGLE_APPLICATION_CREDENTIALS))).get(); - } - - /** - * The test files here were generated by {@link S3LogsTest}. See that class for more information. - * - * Generate enough files to force pagination and confirm all data is read. 
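CloudLogsClientTest above encodes the client-selection rule: both MinIO and S3 storage configs resolve to an S3Logs client, while a GCS config resolves to GcsLogs. GcsLogsTest then reads its settings from the environment; assuming the LogClientSingleton constants name the variables after themselves, a matching setup might be (bucket and path invented):

```yaml
# Hypothetical env settings under which createCloudLogClient returns a GcsLogs client.
GCS_LOG_BUCKET: my-airbyte-log-bucket
GOOGLE_APPLICATION_CREDENTIALS: /secrets/gcs-log-writer.json
```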
- */ - @Test - void testRetrieveAllLogs() throws IOException { - final File data; - data = GcsLogs.getFile(getClientFactory(), (new EnvConfigs()).getLogConfigs(), "paginate", 6); - final var retrieved = new ArrayList(); - Files.lines(data.toPath()).forEach(retrieved::add); - - final var expected = List.of("Line 0", "Line 1", "Line 2", "Line 3", "Line 4", "Line 5", "Line 6", "Line 7", "Line 8"); - - assertEquals(expected, retrieved); - - } - - /** - * The test files for this test have been pre-generated and uploaded into the bucket folder. The - * folder contains the following files with these contents: - *

- * first-file.txt - Line 1, Line 2, Line 3
- * second-file.txt - Line 4, Line 5, Line 6
- * third-file.txt - Line 7, Line 8, Line 9
  • - */ - @Test - void testTail() throws IOException { - final var data = new GcsLogs(GcsLogsTest::getClientFactory).tailCloudLog((new EnvConfigs()).getLogConfigs(), "tail", 6); - - final var expected = List.of("Line 4", "Line 5", "Line 6", "Line 7", "Line 8", "Line 9"); - assertEquals(data, expected); - } - -} diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/KubeLoggingConfigTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/KubeLoggingConfigTest.java deleted file mode 100644 index 80d3b980750d5..0000000000000 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/KubeLoggingConfigTest.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.helpers; - -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.commons.string.Strings; -import io.airbyte.config.EnvConfigs; -import java.io.IOException; -import java.nio.file.Path; -import java.util.List; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.slf4j.MDC; - -@Tag("log4j2-config") -class KubeLoggingConfigTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(KubeLoggingConfigTest.class); - // We publish every minute. See log4j2.xml. - private static final long LOG_PUBLISH_DELAY = 70 * 1000; - - private String logPath; - - @AfterEach - void cleanUpLogs() { - if (logPath != null) { - final EnvConfigs envConfigs = new EnvConfigs(); - LogClientSingleton.getInstance().deleteLogs(envConfigs.getWorkerEnvironment(), envConfigs.getLogConfigs(), logPath); - } - } - - /** - * Because this test tests our env var set up is compatible with our Log4j2 configuration, we are - * unable to perform injection, and instead rely on env vars set in - * ./tools/bin/cloud_storage_logging_test.sh. - * - * This test will fail if certain env vars aren't set. - */ - @Test - void testLoggingConfiguration() throws IOException, InterruptedException { - final EnvConfigs envConfigs = new EnvConfigs(); - final var randPath = Strings.addRandomSuffix("-", "", 5); - // This mirrors our Log4j2 set up. See log4j2.xml. - LogClientSingleton.getInstance().setJobMdc(envConfigs.getWorkerEnvironment(), envConfigs.getLogConfigs(), Path.of(randPath)); - - final var toLog = List.of("line 1", "line 2", "line 3"); - for (final String l : toLog) { - LOGGER.info(l); - } - // So we don't publish anything else. - MDC.clear(); - - // Sleep to make sure the logs appear. - Thread.sleep(LOG_PUBLISH_DELAY); - - logPath = randPath + "/logs.log/"; - // The same env vars that log4j2 uses to determine where to publish to determine how to retrieve the - // log file. - final var logs = LogClientSingleton.getInstance().getJobLogFile(envConfigs.getWorkerEnvironment(), envConfigs.getLogConfigs(), Path.of(logPath)); - // Each log line is of the form . Further, there might be - // other log lines from the system running. Join all the lines to simplify assertions. 
- final var logsLine = Strings.join(logs, " "); - for (final String l : toLog) { - assertTrue(logsLine.contains(l)); - } - } - -} diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/LogClientSingletonTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/LogClientSingletonTest.java deleted file mode 100644 index cd6a8ecc1e703..0000000000000 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/LogClientSingletonTest.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.helpers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.when; - -import io.airbyte.config.Configs; -import io.airbyte.config.Configs.WorkerEnvironment; -import java.io.IOException; -import java.nio.file.Path; -import java.util.Collections; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class LogClientSingletonTest { - - private Configs configs; - private CloudLogs mockLogClient; - - @BeforeEach - void setup() { - configs = mock(Configs.class); - mockLogClient = mock(CloudLogs.class); - LogClientSingleton.getInstance().logClient = mockLogClient; - } - - @Test - void testGetJobLogFileK8s() throws IOException { - when(configs.getWorkerEnvironment()).thenReturn(WorkerEnvironment.KUBERNETES); - assertEquals(Collections.emptyList(), - LogClientSingleton.getInstance().getJobLogFile(configs.getWorkerEnvironment(), configs.getLogConfigs(), Path.of("/job/1"))); - verify(mockLogClient).tailCloudLog(any(), eq("job-logging/job/1"), eq(LogClientSingleton.LOG_TAIL_SIZE)); - } - - @Test - void testGetJobLogFileNullPath() throws IOException { - assertEquals(Collections.emptyList(), - LogClientSingleton.getInstance().getJobLogFile(configs.getWorkerEnvironment(), configs.getLogConfigs(), null)); - verifyNoInteractions(mockLogClient); - } - - @Test - void testGetJobLogFileEmptyPath() throws IOException { - assertEquals(Collections.emptyList(), - LogClientSingleton.getInstance().getJobLogFile(configs.getWorkerEnvironment(), configs.getLogConfigs(), Path.of(""))); - verifyNoInteractions(mockLogClient); - } - -} diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/S3LogsTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/S3LogsTest.java deleted file mode 100644 index f43e257c8421e..0000000000000 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/S3LogsTest.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.helpers; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.config.storage.CloudStorageConfigs; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Test; -import software.amazon.awssdk.core.sync.RequestBody; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.s3.S3Client; -import software.amazon.awssdk.services.s3.model.PutObjectRequest; - -@Tag("logger-client") -class S3LogsTest { - - private static final String REGION_STRING = "us-west-2"; - private static final Region REGION = Region.of(REGION_STRING); - private static final String BUCKET_NAME = "airbyte-kube-integration-logging-test"; - - private static final LogConfigs LOG_CONFIGS = new LogConfigs(Optional.of(CloudStorageConfigs.s3(new CloudStorageConfigs.S3Config( - System.getenv(LogClientSingleton.S3_LOG_BUCKET), - System.getenv(LogClientSingleton.AWS_ACCESS_KEY_ID), - System.getenv(LogClientSingleton.AWS_SECRET_ACCESS_KEY), - System.getenv(LogClientSingleton.S3_LOG_BUCKET_REGION))))); - - private S3Client s3Client; - - @BeforeEach - void setup() { - s3Client = S3Client.builder().region(REGION).build(); - generatePaginateTestFiles(); - } - - /** - * The test files here were generated by {@link #generatePaginateTestFiles()}. - * - * Generate enough files to force pagination and confirm all data is read. - */ - @Test - void testRetrieveAllLogs() throws IOException { - final var data = S3Logs.getFile(s3Client, LOG_CONFIGS, "paginate", 6); - - final var retrieved = new ArrayList<String>(); - Files.lines(data.toPath()).forEach(retrieved::add); - - final var expected = List.of("Line 0", "Line 1", "Line 2", "Line 3", "Line 4", "Line 5", "Line 6", "Line 7", "Line 8"); - - assertEquals(expected, retrieved); - } - - /** - * The test files for this test have been pre-generated and uploaded into the bucket folder. The - * folder contains the following files with these contents: - * <ul> - * <li>first-file.txt - Line 1, Line 2, Line 3</li> - * <li>second-file.txt - Line 4, Line 5, Line 6</li> - * <li>third-file.txt - Line 7, Line 8, Line 9</li> - * </ul> - */ - @Test - void testTail() throws IOException { - final var data = new S3Logs(() -> s3Client).tailCloudLog(LOG_CONFIGS, "tail", 6); - final var expected = List.of("Line 4", "Line 5", "Line 6", "Line 7", "Line 8", "Line 9"); - assertEquals(expected, data); - } - - private void generatePaginateTestFiles() { - for (int i = 0; i < 9; i++) { - final var fileName = i + "-file"; - final var line = "Line " + i + "\n"; - final PutObjectRequest objectRequest = PutObjectRequest.builder() - .bucket(BUCKET_NAME) - .key("paginate/" + fileName) - .build(); - - s3Client.putObject(objectRequest, RequestBody.fromBytes(line.getBytes(StandardCharsets.UTF_8))); - } - } - -} diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/ScheduleHelpersTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/ScheduleHelpersTest.java deleted file mode 100644 index 061180d4fa7dd..0000000000000 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/ScheduleHelpersTest.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.helpers; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.config.Schedule; -import java.util.Arrays; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class ScheduleHelpersTest { - - @Test - void testGetSecondsInUnit() { - assertEquals(60, ScheduleHelpers.getSecondsInUnit(Schedule.TimeUnit.MINUTES)); - assertEquals(3600, ScheduleHelpers.getSecondsInUnit(Schedule.TimeUnit.HOURS)); - assertEquals(86_400, ScheduleHelpers.getSecondsInUnit(Schedule.TimeUnit.DAYS)); - assertEquals(604_800, ScheduleHelpers.getSecondsInUnit(Schedule.TimeUnit.WEEKS)); - assertEquals(2_592_000, ScheduleHelpers.getSecondsInUnit(Schedule.TimeUnit.MONTHS)); - } - - // Will throw if a new TimeUnit is added but an appropriate mapping is not included in this method. - @Test - void testAllOfTimeUnitEnumValues() { - Arrays.stream(Schedule.TimeUnit.values()).forEach(ScheduleHelpers::getSecondsInUnit); - } - -} diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/StateMessageHelperTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/StateMessageHelperTest.java deleted file mode 100644 index 0c09cd08f517c..0000000000000 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/StateMessageHelperTest.java +++ /dev/null @@ -1,252 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
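For context on the pagination that S3LogsTest above forces: with AWS SDK v2 the follow-up ListObjectsV2 calls can be delegated to the built-in paginator. A minimal sketch, independent of the S3Logs implementation (bucket and prefix are placeholders):

```java
// Sketch only: read every line of every object under a prefix. The paginator
// issues the follow-up ListObjectsV2 requests (1000 keys per page by default).
import java.util.ArrayList;
import java.util.List;
import software.amazon.awssdk.core.ResponseBytes;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.GetObjectRequest;
import software.amazon.awssdk.services.s3.model.ListObjectsV2Request;

final class S3PaginationSketch {

  static List<String> readAllUnderPrefix(final S3Client s3, final String bucket, final String prefix) {
    final List<String> lines = new ArrayList<>();
    final ListObjectsV2Request request = ListObjectsV2Request.builder()
        .bucket(bucket)
        .prefix(prefix)
        .build();
    // contents() iterates across pages transparently.
    s3.listObjectsV2Paginator(request).contents().forEach(object -> {
      final ResponseBytes<?> bytes = s3.getObjectAsBytes(
          GetObjectRequest.builder().bucket(bucket).key(object.key()).build());
      lines.addAll(bytes.asUtf8String().lines().toList());
    });
    return lines;
  }

}
```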
- */ - -package io.airbyte.config.helpers; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.State; -import io.airbyte.config.StateType; -import io.airbyte.config.StateWrapper; -import io.airbyte.protocol.models.AirbyteGlobalState; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.AirbyteStreamState; -import io.airbyte.protocol.models.StreamDescriptor; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.Test; - -class StateMessageHelperTest { - - private static final boolean USE_STREAM_CAPABLE_STATE = true; - private static final boolean DONT_USE_STREAM_CAPABALE_STATE = false; - - @Test - void testEmpty() { - final Optional stateWrapper = StateMessageHelper.getTypedState(null, USE_STREAM_CAPABLE_STATE); - Assertions.assertThat(stateWrapper).isEmpty(); - } - - @Test - void testEmptyList() { - final Optional stateWrapper = StateMessageHelper.getTypedState(Jsons.arrayNode(), USE_STREAM_CAPABLE_STATE); - Assertions.assertThat(stateWrapper).isEmpty(); - } - - @Test - void testLegacy() { - final Optional stateWrapper = StateMessageHelper.getTypedState(Jsons.emptyObject(), USE_STREAM_CAPABLE_STATE); - Assertions.assertThat(stateWrapper).isNotEmpty(); - Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.LEGACY); - } - - @Test - void testLegacyInList() { - final JsonNode jsonState = Jsons.jsonNode(List.of(Map.of("Any", "value"))); - - final Optional stateWrapper = StateMessageHelper.getTypedState(jsonState, USE_STREAM_CAPABLE_STATE); - Assertions.assertThat(stateWrapper).isNotEmpty(); - Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.LEGACY); - Assertions.assertThat(stateWrapper.get().getLegacyState()).isEqualTo(jsonState); - } - - @Test - void testLegacyInNewFormat() { - final AirbyteStateMessage stateMessage = new AirbyteStateMessage() - .withType(AirbyteStateType.LEGACY) - .withData(Jsons.emptyObject()); - final Optional stateWrapper = StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage)), USE_STREAM_CAPABLE_STATE); - Assertions.assertThat(stateWrapper).isNotEmpty(); - Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.LEGACY); - } - - @Test - void testGlobal() { - final AirbyteStateMessage stateMessage = new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal( - new AirbyteGlobalState() - .withSharedState(Jsons.emptyObject()) - .withStreamStates(List.of( - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject()), - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())))); - final Optional stateWrapper = - StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage)), USE_STREAM_CAPABLE_STATE); - Assertions.assertThat(stateWrapper).isNotEmpty(); - Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.GLOBAL); - Assertions.assertThat(stateWrapper.get().getGlobal()).isEqualTo(stateMessage); - } - - @Test - void testGlobalForceLegacy() { - final JsonNode legacyState = Jsons.jsonNode(1); - final AirbyteStateMessage stateMessage = new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - 
.withGlobal( - new AirbyteGlobalState() - .withSharedState(Jsons.emptyObject()) - .withStreamStates(List.of( - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject()), - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())))) - .withData(legacyState); - final Optional stateWrapper = - StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage)), DONT_USE_STREAM_CAPABALE_STATE); - Assertions.assertThat(stateWrapper).isNotEmpty(); - Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.LEGACY); - Assertions.assertThat(stateWrapper.get().getLegacyState()).isEqualTo(legacyState); - } - - @Test - void testStream() { - final AirbyteStateMessage stateMessage1 = new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream( - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject())); - final AirbyteStateMessage stateMessage2 = new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream( - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())); - final Optional stateWrapper = - StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage1, stateMessage2)), USE_STREAM_CAPABLE_STATE); - Assertions.assertThat(stateWrapper).isNotEmpty(); - Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.STREAM); - Assertions.assertThat(stateWrapper.get().getStateMessages()).containsExactlyInAnyOrder(stateMessage1, stateMessage2); - } - - @Test - void testStreamForceLegacy() { - final JsonNode firstEmittedLegacyState = Jsons.jsonNode(1); - final AirbyteStateMessage stateMessage1 = new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream( - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject())) - .withData(firstEmittedLegacyState); - final JsonNode secondEmittedLegacyState = Jsons.jsonNode(2); - final AirbyteStateMessage stateMessage2 = new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream( - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())) - .withData(secondEmittedLegacyState); - final Optional stateWrapper = - StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage1, stateMessage2)), DONT_USE_STREAM_CAPABALE_STATE); - Assertions.assertThat(stateWrapper).isNotEmpty(); - Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.LEGACY); - Assertions.assertThat(stateWrapper.get().getLegacyState()).isEqualTo(secondEmittedLegacyState); - } - - @Test - void testInvalidMixedState() { - final AirbyteStateMessage stateMessage1 = new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream( - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject())); - final AirbyteStateMessage stateMessage2 = new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal( - new AirbyteGlobalState() - .withSharedState(Jsons.emptyObject()) - .withStreamStates(List.of( - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject()), - new AirbyteStreamState().withStreamDescriptor(new 
StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())))); - Assertions - .assertThatThrownBy( - () -> StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage1, stateMessage2)), USE_STREAM_CAPABLE_STATE)) - .isInstanceOf(IllegalStateException.class); - } - - @Test - void testDuplicatedGlobalState() { - final AirbyteStateMessage stateMessage1 = new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal( - new AirbyteGlobalState() - .withSharedState(Jsons.emptyObject()) - .withStreamStates(List.of( - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject()), - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())))); - final AirbyteStateMessage stateMessage2 = new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal( - new AirbyteGlobalState() - .withSharedState(Jsons.emptyObject()) - .withStreamStates(List.of( - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject()), - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())))); - Assertions - .assertThatThrownBy( - () -> StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage1, stateMessage2)), USE_STREAM_CAPABLE_STATE)) - .isInstanceOf(IllegalStateException.class); - } - - @Test - void testLegacyStateConversion() { - final StateWrapper stateWrapper = new StateWrapper() - .withStateType(StateType.LEGACY) - .withLegacyState(Jsons.deserialize("{\"json\": \"blob\"}")); - final State expectedState = new State().withState(Jsons.deserialize("{\"json\": \"blob\"}")); - - final State convertedState = StateMessageHelper.getState(stateWrapper); - Assertions.assertThat(convertedState).isEqualTo(expectedState); - } - - @Test - void testGlobalStateConversion() { - final StateWrapper stateWrapper = new StateWrapper() - .withStateType(StateType.GLOBAL) - .withGlobal( - new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal( - new AirbyteGlobalState() - .withSharedState(Jsons.deserialize("\"shared\"")) - .withStreamStates(Collections.singletonList( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withNamespace("ns").withName("name")) - .withStreamState(Jsons.deserialize("\"stream state\"")))))); - final State expectedState = new State().withState(Jsons.deserialize( - """ - [{ - "type":"GLOBAL", - "global":{ - "shared_state":"shared", - "stream_states":[ - {"stream_descriptor":{"name":"name","namespace":"ns"},"stream_state":"stream state"} - ] - } - }] - """)); - - final State convertedState = StateMessageHelper.getState(stateWrapper); - Assertions.assertThat(convertedState).isEqualTo(expectedState); - } - - @Test - void testStreamStateConversion() { - final StateWrapper stateWrapper = new StateWrapper() - .withStateType(StateType.STREAM) - .withStateMessages(Arrays.asList( - new AirbyteStateMessage().withType(AirbyteStateType.STREAM).withStream( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withNamespace("ns1").withName("name1")) - .withStreamState(Jsons.deserialize("\"state1\""))), - new AirbyteStateMessage().withType(AirbyteStateType.STREAM).withStream( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withNamespace("ns2").withName("name2")) - .withStreamState(Jsons.deserialize("\"state2\""))))); - final State expectedState = 
new State().withState(Jsons.deserialize( - """ - [ - {"type":"STREAM","stream":{"stream_descriptor":{"name":"name1","namespace":"ns1"},"stream_state":"state1"}}, - {"type":"STREAM","stream":{"stream_descriptor":{"name":"name2","namespace":"ns2"},"stream_state":"state2"}} - ] - """)); - - final State convertedState = StateMessageHelper.getState(stateWrapper); - Assertions.assertThat(convertedState).isEqualTo(expectedState); - } - -} diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/CloudLogsClientTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/CloudLogsClientTest.java deleted file mode 100644 index 9e4ebcfe2302c..0000000000000 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/CloudLogsClientTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.storage; - -import static org.junit.jupiter.api.Assertions.assertThrows; - -import io.airbyte.config.storage.CloudStorageConfigs.GcsConfig; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class CloudLogsClientTest { - - @Test - void testGcsMissingBucket() { - final var configs = Mockito.mock(GcsConfig.class); - Mockito.when(configs.getBucketName()).thenReturn(""); - Mockito.when(configs.getGoogleApplicationCredentials()).thenReturn("path/to/google/secret"); - - assertThrows(RuntimeException.class, () -> new DefaultGcsClientFactory(configs)); - } - - @Test - void testGcs() { - final var configs = Mockito.mock(GcsConfig.class); - Mockito.when(configs.getBucketName()).thenReturn("storage-bucket"); - Mockito.when(configs.getGoogleApplicationCredentials()).thenReturn("path/to/google/secret"); - - new DefaultGcsClientFactory(configs); - } - -} diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/DefaultS3ClientFactoryTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/DefaultS3ClientFactoryTest.java deleted file mode 100644 index d70f08a545e26..0000000000000 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/DefaultS3ClientFactoryTest.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.storage; - -import static org.junit.jupiter.api.Assertions.assertThrows; - -import io.airbyte.config.storage.CloudStorageConfigs.S3Config; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class DefaultS3ClientFactoryTest { - - @Test - void testS3() { - final var s3Config = Mockito.mock(S3Config.class); - Mockito.when(s3Config.getAwsAccessKey()).thenReturn("access-key"); - Mockito.when(s3Config.getAwsSecretAccessKey()).thenReturn("access-key-secret"); - Mockito.when(s3Config.getBucketName()).thenReturn("test-bucket"); - Mockito.when(s3Config.getRegion()).thenReturn("us-east-1"); - - new DefaultS3ClientFactory(s3Config).get(); - } - - @Test - void testS3RegionNotSet() { - final var s3Config = Mockito.mock(S3Config.class); - // Missing bucket and access key. 
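The storage-factory tests in this region all pin down the same fail-fast rule: blank required settings must abort construction (IllegalArgumentException here, as the assertion just below verifies) before any client call is attempted. A hedged sketch of that validation pattern; the helper name and messages are illustrative, not the production code:

```java
// Illustrative fail-fast validation in the spirit of DefaultS3ClientFactoryTest:
// misconfiguration surfaces at construction time, not on first use.
final class FactoryValidationSketch {

  static void validate(final String region, final String bucket) {
    // Blank region must fail, mirroring testS3RegionNotSet.
    if (region == null || region.isBlank()) {
      throw new IllegalArgumentException("S3 log region is not set");
    }
    if (bucket == null || bucket.isBlank()) {
      throw new IllegalArgumentException("S3 log bucket is not set");
    }
  }

}
```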
- Mockito.when(s3Config.getAwsAccessKey()).thenReturn(""); - Mockito.when(s3Config.getAwsSecretAccessKey()).thenReturn("access-key-secret"); - Mockito.when(s3Config.getBucketName()).thenReturn(""); - Mockito.when(s3Config.getRegion()).thenReturn(""); - - assertThrows(IllegalArgumentException.class, () -> new DefaultS3ClientFactory(s3Config)); - } - -} diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/MinioS3ClientFactoryTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/MinioS3ClientFactoryTest.java deleted file mode 100644 index b89c5b8a410b2..0000000000000 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/MinioS3ClientFactoryTest.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.storage; - -import static org.junit.jupiter.api.Assertions.assertThrows; - -import io.airbyte.config.storage.CloudStorageConfigs.MinioConfig; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class MinioS3ClientFactoryTest { - - @Test - void testMinio() { - final var minioConfig = Mockito.mock(MinioConfig.class); - Mockito.when(minioConfig.getAwsAccessKey()).thenReturn("access-key"); - Mockito.when(minioConfig.getAwsSecretAccessKey()).thenReturn("access-key-secret"); - Mockito.when(minioConfig.getBucketName()).thenReturn("test-bucket"); - Mockito.when(minioConfig.getMinioEndpoint()).thenReturn("https://minio-endpoint"); - - new MinioS3ClientFactory(minioConfig).get(); - } - - @Test - void testMinioEndpointMissing() { - final var minioConfig = Mockito.mock(MinioConfig.class); - // Missing bucket and access key. - Mockito.when(minioConfig.getAwsAccessKey()).thenReturn("access-key"); - Mockito.when(minioConfig.getAwsSecretAccessKey()).thenReturn("access-key-secret"); - Mockito.when(minioConfig.getBucketName()).thenReturn("test-bucket"); - Mockito.when(minioConfig.getMinioEndpoint()).thenReturn(""); - - assertThrows(IllegalArgumentException.class, () -> new MinioS3ClientFactory(minioConfig)); - } - -} diff --git a/airbyte-config/config-persistence/build.gradle b/airbyte-config/config-persistence/build.gradle deleted file mode 100644 index 39b1a627b85c1..0000000000000 --- a/airbyte-config/config-persistence/build.gradle +++ /dev/null @@ -1,32 +0,0 @@ -plugins { - id 'java-library' - id 'airbyte-integration-test-java' -} - -configurations.all { - exclude group: 'io.micronaut.flyway' -} - -dependencies { - implementation project(':airbyte-commons') - implementation project(':airbyte-commons-protocol') - implementation project(':airbyte-config:config-models') - implementation project(':airbyte-db:db-lib') - implementation project(':airbyte-db:jooq') - implementation project(':airbyte-json-validation') - implementation libs.airbyte.protocol - - implementation 'commons-io:commons-io:2.7' - implementation 'com.google.cloud:google-cloud-secretmanager:2.0.5' - implementation 'com.bettercloud:vault-java-driver:5.1.0' - implementation 'com.amazonaws.secretsmanager:aws-secretsmanager-caching-java:1.0.2' - - testImplementation 'org.hamcrest:hamcrest-all:1.3' - testImplementation libs.platform.testcontainers.postgresql - testImplementation libs.flyway.core - testImplementation project(':airbyte-test-utils') - testImplementation "org.testcontainers:vault:1.17.2" - integrationTestJavaImplementation project(':airbyte-config:config-persistence') -} - -Task publishArtifactsTask = 
getPublishArtifactsTask("$rootProject.ext.version", project) diff --git a/airbyte-config/config-persistence/readme.md b/airbyte-config/config-persistence/readme.md deleted file mode 100644 index b314dd62bf35c..0000000000000 --- a/airbyte-config/config-persistence/readme.md +++ /dev/null @@ -1,7 +0,0 @@ -# airbyte-config:config-persistence - -This module contains the logic for accessing the config database. This database is primarily used by the `airbyte-server` but is also accessed from `airbyte-workers`. It contains all configuration information for Airbyte. - -## Key files -* `ConfigPersistence.java` is the interface over "low-level" access to the db. The most commonly used implementation of it is `DatabaseConfigPersistence.java`. -* `ConfigRepository.java` is what is most used for accessing the databases. The `ConfigPersistence` iface was hard to work with. `ConfigRepository` builds on top of it and houses any databases queries to keep them from proliferating throughout the codebase. diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ActorDefinitionMigrator.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ActorDefinitionMigrator.java deleted file mode 100644 index 9b0e95302b542..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ActorDefinitionMigrator.java +++ /dev/null @@ -1,356 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; -import static org.jooq.impl.DSL.asterisk; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Sets; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.util.MoreIterators; -import io.airbyte.commons.version.AirbyteProtocolVersion; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.AirbyteConfig; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSourceDefinition.SourceType; -import io.airbyte.db.ExceptionWrappingDatabase; -import io.airbyte.db.instance.configs.jooq.generated.enums.ActorType; -import io.airbyte.db.instance.configs.jooq.generated.enums.ReleaseStage; -import io.airbyte.protocol.models.ConnectorSpecification; -import java.io.IOException; -import java.sql.SQLException; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; -import org.jooq.DSLContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Takes in most up-to-date source and destination definitions from the Airbyte connector catalog - * and merges them with those already present in the database. See javadocs on methods for rules. 
- */ -public class ActorDefinitionMigrator { - - private static final Logger LOGGER = LoggerFactory.getLogger(ActorDefinitionMigrator.class); - - private static final String UNKNOWN_CONFIG_TYPE = "Unknown Config Type "; - - private final ExceptionWrappingDatabase database; - - public ActorDefinitionMigrator(final ExceptionWrappingDatabase database) { - this.database = database; - } - - public void migrate(final List latestSources, final List latestDestinations) - throws IOException { - database.transaction(ctx -> { - try { - updateConfigsFromSeed(ctx, latestSources, latestDestinations); - } catch (final IOException e) { - throw new SQLException(e); - } - return null; - }); - } - - @VisibleForTesting - void updateConfigsFromSeed(final DSLContext ctx, - final List latestSources, - final List latestDestinations) - throws IOException { - LOGGER.info("Updating connector definitions from the seed if necessary..."); - - final Set connectorRepositoriesInUse = ConfigWriter.getConnectorRepositoriesInUse(ctx); - LOGGER.info("Connectors in use: {}", connectorRepositoriesInUse); - - final Map connectorRepositoryToInfoMap = getConnectorRepositoryToInfoMap(ctx); - LOGGER.info("Current connector versions: {}", connectorRepositoryToInfoMap.values()); - - int newConnectorCount = 0; - int updatedConnectorCount = 0; - - final ConnectorCounter sourceConnectorCounter = updateConnectorDefinitions(ctx, ConfigSchema.STANDARD_SOURCE_DEFINITION, - latestSources, connectorRepositoriesInUse, connectorRepositoryToInfoMap); - newConnectorCount += sourceConnectorCounter.newCount; - updatedConnectorCount += sourceConnectorCounter.updateCount; - - final ConnectorCounter destinationConnectorCounter = updateConnectorDefinitions(ctx, ConfigSchema.STANDARD_DESTINATION_DEFINITION, - latestDestinations, connectorRepositoriesInUse, connectorRepositoryToInfoMap); - newConnectorCount += destinationConnectorCounter.newCount; - updatedConnectorCount += destinationConnectorCounter.updateCount; - - LOGGER.info("Connector definitions have been updated ({} new connectors, and {} updates)", newConnectorCount, updatedConnectorCount); - } - - /** - * @return A map about current connectors (both source and destination). It maps from connector - * repository to its definition id and docker image tag. We identify a connector by its - * repository name instead of definition id because connectors can be added manually by - * users, and are not always the same as those in the seed. - */ - @VisibleForTesting - Map getConnectorRepositoryToInfoMap(final DSLContext ctx) { - return ctx.select(asterisk()) - .from(ACTOR_DEFINITION) - .where(ACTOR_DEFINITION.RELEASE_STAGE.isNull() - .or(ACTOR_DEFINITION.RELEASE_STAGE.ne(ReleaseStage.custom).or(ACTOR_DEFINITION.CUSTOM))) - .fetch() - .stream() - .collect(Collectors.toMap( - row -> row.getValue(ACTOR_DEFINITION.DOCKER_REPOSITORY), - row -> { - final JsonNode jsonNode; - if (row.get(ACTOR_DEFINITION.ACTOR_TYPE) == ActorType.source) { - jsonNode = Jsons.jsonNode(new StandardSourceDefinition() - .withSourceDefinitionId(row.get(ACTOR_DEFINITION.ID)) - .withDockerImageTag(row.get(ACTOR_DEFINITION.DOCKER_IMAGE_TAG)) - .withIcon(row.get(ACTOR_DEFINITION.ICON)) - .withDockerRepository(row.get(ACTOR_DEFINITION.DOCKER_REPOSITORY)) - .withDocumentationUrl(row.get(ACTOR_DEFINITION.DOCUMENTATION_URL)) - .withName(row.get(ACTOR_DEFINITION.NAME)) - .withPublic(row.get(ACTOR_DEFINITION.PUBLIC)) - .withCustom(row.get(ACTOR_DEFINITION.CUSTOM)) - .withSourceType(row.get(ACTOR_DEFINITION.SOURCE_TYPE) == null ? 
null - : Enums.toEnum(row.get(ACTOR_DEFINITION.SOURCE_TYPE, String.class), SourceType.class).orElseThrow()) - .withSpec(Jsons.deserialize(row.get(ACTOR_DEFINITION.SPEC).data(), ConnectorSpecification.class))); - } else if (row.get(ACTOR_DEFINITION.ACTOR_TYPE) == ActorType.destination) { - jsonNode = Jsons.jsonNode(new StandardDestinationDefinition() - .withDestinationDefinitionId(row.get(ACTOR_DEFINITION.ID)) - .withDockerImageTag(row.get(ACTOR_DEFINITION.DOCKER_IMAGE_TAG)) - .withIcon(row.get(ACTOR_DEFINITION.ICON)) - .withDockerRepository(row.get(ACTOR_DEFINITION.DOCKER_REPOSITORY)) - .withDocumentationUrl(row.get(ACTOR_DEFINITION.DOCUMENTATION_URL)) - .withName(row.get(ACTOR_DEFINITION.NAME)) - .withPublic(row.get(ACTOR_DEFINITION.PUBLIC)) - .withCustom(row.get(ACTOR_DEFINITION.CUSTOM)) - .withSpec(Jsons.deserialize(row.get(ACTOR_DEFINITION.SPEC).data(), ConnectorSpecification.class))); - } else { - throw new RuntimeException("Unknown Actor Type " + row.get(ACTOR_DEFINITION.ACTOR_TYPE)); - } - return new ConnectorInfo(row.getValue(ACTOR_DEFINITION.ID).toString(), jsonNode); - }, - (c1, c2) -> { - final AirbyteVersion v1 = new AirbyteVersion(c1.dockerImageTag); - final AirbyteVersion v2 = new AirbyteVersion(c2.dockerImageTag); - LOGGER.warn("Duplicated connector version found for {}: {} ({}) vs {} ({})", - c1.dockerRepository, c1.dockerImageTag, c1.definitionId, c2.dockerImageTag, c2.definitionId); - final int comparison = v1.patchVersionCompareTo(v2); - if (comparison >= 0) { - return c1; - } else { - return c2; - } - })); - } - - /** - * Custom connectors are not present in the seed, so there is no need to validate their latest - * version. This method filters them out. - * - * @param connectorRepositoryToIdVersionMap map from connector repository name to its info - * @param configType the definition type being processed - * @return the map without custom connectors - */ - @VisibleForTesting - Map<String, ConnectorInfo> filterCustomConnector(final Map<String, ConnectorInfo> connectorRepositoryToIdVersionMap, - final AirbyteConfig configType) { - return connectorRepositoryToIdVersionMap.entrySet().stream() - // The validation of the connector name is based on the seed, which doesn't contain any custom - // connectors. They can thus be filtered out. - .filter(entry -> { - if (configType == ConfigSchema.STANDARD_SOURCE_DEFINITION) { - return !Jsons.object(entry.getValue().definition, StandardSourceDefinition.class).getCustom(); - } else if (configType == ConfigSchema.STANDARD_DESTINATION_DEFINITION) { - return !Jsons.object(entry.getValue().definition, StandardDestinationDefinition.class).getCustom(); - } else { - return true; - } - }) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - } - - /** - * @param connectorRepositoriesInUse when a connector is used in any standard sync, its definition - * will not be updated. This is necessary because the new connector version may not be - * backward compatible.
- */ - @VisibleForTesting - ConnectorCounter updateConnectorDefinitions(final DSLContext ctx, - final AirbyteConfig configType, - final List latestDefinitions, - final Set connectorRepositoriesInUse, - final Map connectorRepositoryToIdVersionMap) - throws IOException { - int newCount = 0; - int updatedCount = 0; - - for (final T definition : latestDefinitions) { - final JsonNode latestDefinition = Jsons.jsonNode(definition); - final String repository = latestDefinition.get("dockerRepository").asText(); - - final Map connectorRepositoryToIdVersionMapWithoutCustom = filterCustomConnector(connectorRepositoryToIdVersionMap, - configType); - - // Add new connector - if (!connectorRepositoryToIdVersionMapWithoutCustom.containsKey(repository)) { - LOGGER.info("Adding new connector {}: {}", repository, latestDefinition); - writeOrUpdateStandardDefinition(ctx, configType, latestDefinition); - newCount++; - continue; - } - - final ConnectorInfo connectorInfo = connectorRepositoryToIdVersionMapWithoutCustom.get(repository); - final JsonNode currentDefinition = connectorInfo.definition; - - // todo (lmossman) - this logic to remove the "spec" field is temporary; it is necessary to avoid - // breaking users who are actively using an old connector version, otherwise specs from the most - // recent connector versions may be inserted into the db which could be incompatible with the - // version they are actually using. - // Once the faux major version bump has been merged, this "new field" logic will be removed - // entirely. - final Set newFields = Sets.difference(getNewFields(currentDefinition, latestDefinition), Set.of("spec")); - - // Process connector in use - if (connectorRepositoriesInUse.contains(repository)) { - final String latestImageTag = latestDefinition.get("dockerImageTag").asText(); - if (hasNewPatchVersion(connectorInfo.dockerImageTag, latestImageTag)) { - // Update connector to the latest patch version - LOGGER.info("Connector {} needs update: {} vs {}", repository, connectorInfo.dockerImageTag, latestImageTag); - writeOrUpdateStandardDefinition(ctx, configType, latestDefinition); - updatedCount++; - } else if (newFields.isEmpty()) { - LOGGER.info("Connector {} is in use and has all fields; skip updating", repository); - } else { - // Add new fields to the connector definition - final JsonNode definitionToUpdate = getDefinitionWithNewFields(currentDefinition, latestDefinition, newFields); - LOGGER.info("Connector {} has new fields: {}", repository, String.join(", ", newFields)); - writeOrUpdateStandardDefinition(ctx, configType, definitionToUpdate); - updatedCount++; - } - continue; - } - - // Process unused connector - final String latestImageTag = latestDefinition.get("dockerImageTag").asText(); - if (hasNewVersion(connectorInfo.dockerImageTag, latestImageTag)) { - // Update connector to the latest version - LOGGER.info("Connector {} needs update: {} vs {}", repository, connectorInfo.dockerImageTag, latestImageTag); - writeOrUpdateStandardDefinition(ctx, configType, latestDefinition); - updatedCount++; - } else if (!newFields.isEmpty()) { - // Add new fields to the connector definition - final JsonNode definitionToUpdate = getDefinitionWithNewFields(currentDefinition, latestDefinition, newFields); - LOGGER.info("Connector {} has new fields: {}", repository, String.join(", ", newFields)); - writeOrUpdateStandardDefinition(ctx, configType, definitionToUpdate); - updatedCount++; - } else { - LOGGER.info("Connector {} does not need update: {}", repository, connectorInfo.dockerImageTag); - 
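The branches above encode the migrator's upgrade policy: definitions in use by a sync only follow patch releases (plus new fields), while unused definitions follow any newer version. A condensed sketch of that gate using the AirbyteVersion helpers defined further down in this file; shouldUpgrade is an illustrative name, not the production method:

```java
// Sketch of the version-gating policy described above.
import io.airbyte.commons.version.AirbyteVersion;

final class UpgradePolicySketch {

  static boolean shouldUpgrade(final boolean inUse, final String current, final String latest) {
    final AirbyteVersion currentVersion = new AirbyteVersion(current);
    final AirbyteVersion latestVersion = new AirbyteVersion(latest);
    if (inUse) {
      // In-use connectors only auto-upgrade across patch releases, e.g. 0.2.1 -> 0.2.3,
      // because a minor or major bump may not be backward compatible.
      return latestVersion.checkOnlyPatchVersionIsUpdatedComparedTo(currentVersion);
    }
    // Unused connectors take any strictly newer version.
    return latestVersion.patchVersionCompareTo(currentVersion) > 0;
  }

}
```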
} - } - - return new ConnectorCounter(newCount, updatedCount); - } - - private void writeOrUpdateStandardDefinition(final DSLContext ctx, - final AirbyteConfig configType, - final JsonNode definition) { - if (configType == ConfigSchema.STANDARD_SOURCE_DEFINITION) { - final StandardSourceDefinition sourceDef = Jsons.object(definition, StandardSourceDefinition.class); - sourceDef.withProtocolVersion(getProtocolVersion(sourceDef.getSpec())); - ConfigWriter.writeStandardSourceDefinition(Collections.singletonList(sourceDef), ctx); - } else if (configType == ConfigSchema.STANDARD_DESTINATION_DEFINITION) { - final StandardDestinationDefinition destDef = Jsons.object(definition, StandardDestinationDefinition.class); - destDef.withProtocolVersion(getProtocolVersion(destDef.getSpec())); - ConfigWriter.writeStandardDestinationDefinition(Collections.singletonList(destDef), ctx); - } else { - throw new IllegalArgumentException(UNKNOWN_CONFIG_TYPE + configType); - } - } - - private static String getProtocolVersion(final ConnectorSpecification spec) { - return AirbyteProtocolVersion.getWithDefault(spec != null ? spec.getProtocolVersion() : null).serialize(); - } - - @VisibleForTesting - static Set getNewFields(final JsonNode currentDefinition, final JsonNode latestDefinition) { - final Set currentFields = MoreIterators.toSet(currentDefinition.fieldNames()); - final Set latestFields = MoreIterators.toSet(latestDefinition.fieldNames()); - return Sets.difference(latestFields, currentFields); - } - - /** - * @return a clone of the current definition with the new fields from the latest definition. - */ - @VisibleForTesting - static JsonNode getDefinitionWithNewFields(final JsonNode currentDefinition, final JsonNode latestDefinition, final Set newFields) { - final ObjectNode currentClone = (ObjectNode) Jsons.clone(currentDefinition); - newFields.forEach(field -> currentClone.set(field, latestDefinition.get(field))); - return currentClone; - } - - @VisibleForTesting - static boolean hasNewVersion(final String currentVersion, final String latestVersion) { - try { - return new AirbyteVersion(latestVersion).patchVersionCompareTo(new AirbyteVersion(currentVersion)) > 0; - } catch (final Exception e) { - LOGGER.error("Failed to check version: {} vs {}", currentVersion, latestVersion); - return false; - } - } - - @VisibleForTesting - static boolean hasNewPatchVersion(final String currentVersion, final String latestVersion) { - try { - return new AirbyteVersion(latestVersion).checkOnlyPatchVersionIsUpdatedComparedTo(new AirbyteVersion(currentVersion)); - } catch (final Exception e) { - LOGGER.error("Failed to check version: {} vs {}", currentVersion, latestVersion); - return false; - } - } - - static class ConnectorInfo { - - final String definitionId; - final JsonNode definition; - final String dockerRepository; - final String dockerImageTag; - - ConnectorInfo(final String definitionId, final JsonNode definition) { - this.definitionId = definitionId; - this.definition = definition; - dockerRepository = definition.get("dockerRepository").asText(); - dockerImageTag = definition.get("dockerImageTag").asText(); - } - - @Override - public String toString() { - return String.format("%s: %s (%s)", dockerRepository, dockerImageTag, definitionId); - } - - } - - private static class ConnectorCounter { - - private final int newCount; - private final int updateCount; - - private ConnectorCounter(final int newCount, final int updateCount) { - this.newCount = newCount; - this.updateCount = updateCount; - } - - } - -} diff --git 
a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigPersistence.java deleted file mode 100644 index 56dc16ea30efa..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigPersistence.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import io.airbyte.config.AirbyteConfig; -import io.airbyte.config.ConfigWithMetadata; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.List; -import java.util.Map; - -/** - * We are migrating away from this interface entirely. Use ConfigRepository instead. - */ -@Deprecated(forRemoval = true) -public interface ConfigPersistence { - - <T> T getConfig(AirbyteConfig configType, String configId, Class<T> clazz) throws ConfigNotFoundException, JsonValidationException, IOException; - - <T> List<T> listConfigs(AirbyteConfig configType, Class<T> clazz) throws JsonValidationException, IOException; - - <T> ConfigWithMetadata<T> getConfigWithMetadata(AirbyteConfig configType, String configId, Class<T> clazz) - throws ConfigNotFoundException, JsonValidationException, IOException; - - <T> List<ConfigWithMetadata<T>> listConfigsWithMetadata(AirbyteConfig configType, Class<T> clazz) throws JsonValidationException, IOException; - - <T> void writeConfig(AirbyteConfig configType, String configId, T config) throws JsonValidationException, IOException; - - <T> void writeConfigs(AirbyteConfig configType, Map<String, T> configs) throws IOException, JsonValidationException; - - void deleteConfig(AirbyteConfig configType, String configId) throws ConfigNotFoundException, IOException; - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java deleted file mode 100644 index 09cbb87b09e6f..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java +++ /dev/null @@ -1,1607 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
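Before moving on to ConfigRepository: a quick illustration of why call sites outgrew the ConfigPersistence interface above. Every read needs the config-type enum, a stringly-typed id, and a class token. Sketch only; the three checked exceptions are collapsed to Exception here:

```java
// Old-style call against the deprecated interface; names other than the
// ConfigPersistence signatures themselves are illustrative.
import io.airbyte.config.ConfigSchema;
import io.airbyte.config.StandardWorkspace;

final class PersistenceUsageSketch {

  static StandardWorkspace load(final ConfigPersistence persistence, final String workspaceId) throws Exception {
    // Enum + string id + class token on every call site.
    return persistence.getConfig(ConfigSchema.STANDARD_WORKSPACE, workspaceId, StandardWorkspace.class);
  }

}
```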
- */ - -package io.airbyte.config.persistence; - -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_CATALOG; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_CATALOG_FETCH_EVENT; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION_WORKSPACE_GRANT; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_OAUTH_PARAMETER; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION_OPERATION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.OPERATION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.WORKSPACE; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.WORKSPACE_SERVICE_ACCOUNT; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.JOBS; -import static org.jooq.impl.DSL.asterisk; -import static org.jooq.impl.DSL.groupConcat; -import static org.jooq.impl.DSL.noCondition; -import static org.jooq.impl.DSL.select; -import static org.jooq.impl.SQLDataType.VARCHAR; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Charsets; -import com.google.common.collect.Sets; -import com.google.common.hash.HashFunction; -import com.google.common.hash.Hashing; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.version.AirbyteProtocolVersion; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorCatalog; -import io.airbyte.config.ActorCatalogFetchEvent; -import io.airbyte.config.ActorCatalogWithUpdatedAt; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.Geography; -import io.airbyte.config.OperatorDbt; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.OperatorWebhook; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncOperation.OperatorType; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.WorkspaceServiceAccount; -import io.airbyte.db.Database; -import io.airbyte.db.ExceptionWrappingDatabase; -import io.airbyte.db.instance.configs.jooq.generated.enums.ActorType; -import io.airbyte.db.instance.configs.jooq.generated.enums.ReleaseStage; -import io.airbyte.db.instance.configs.jooq.generated.enums.StatusType; -import io.airbyte.protocol.models.AirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.StreamDescriptor; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.annotation.Nonnull; -import java.io.IOException; -import java.time.OffsetDateTime; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.Set; -import 
java.util.UUID; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.apache.commons.lang3.ArrayUtils; -import org.jooq.Condition; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.JSONB; -import org.jooq.JoinType; -import org.jooq.Record; -import org.jooq.Record1; -import org.jooq.Record2; -import org.jooq.Result; -import org.jooq.SelectJoinStep; -import org.jooq.Table; -import org.jooq.impl.DSL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings({"PMD.AvoidThrowingRawExceptionTypes", "PMD.CyclomaticComplexity", "PMD.AvoidLiteralsInIfCondition", - "OptionalUsedAsFieldOrParameterType"}) -public class ConfigRepository { - - public record StandardSyncQuery(@Nonnull UUID workspaceId, List sourceId, List destinationId, boolean includeDeleted) {} - - private static final Logger LOGGER = LoggerFactory.getLogger(ConfigRepository.class); - private static final String OPERATION_IDS_AGG_FIELD = "operation_ids_agg"; - private static final String OPERATION_IDS_AGG_DELIMITER = ","; - public static final String PRIMARY_KEY = "id"; - - private final ExceptionWrappingDatabase database; - private final ActorDefinitionMigrator actorDefinitionMigrator; - private final StandardSyncPersistence standardSyncPersistence; - - public ConfigRepository(final Database database) { - this(database, new ActorDefinitionMigrator(new ExceptionWrappingDatabase(database)), new StandardSyncPersistence(database)); - } - - ConfigRepository(final Database database, - final ActorDefinitionMigrator actorDefinitionMigrator, - final StandardSyncPersistence standardSyncPersistence) { - this.database = new ExceptionWrappingDatabase(database); - this.actorDefinitionMigrator = actorDefinitionMigrator; - this.standardSyncPersistence = standardSyncPersistence; - } - - /** - * Conduct a health check by attempting to read from the database. Since there isn't an - * out-of-the-box call for this, mimic doing so by reading the ID column from the Workspace table's - * first row. This query needs to be fast as this call can be made multiple times a second. - * - * @return true if read succeeds, even if the table is empty, and false if any error happens. 
- */ - public boolean healthCheck() { - try { - database.query(ctx -> ctx.select(WORKSPACE.ID).from(WORKSPACE).limit(1).fetch()).stream().count(); - } catch (final Exception e) { - LOGGER.error("Health check error: ", e); - return false; - } - return true; - } - - public StandardWorkspace getStandardWorkspaceNoSecrets(final UUID workspaceId, final boolean includeTombstone) - throws JsonValidationException, IOException, ConfigNotFoundException { - return listWorkspaceQuery(Optional.of(workspaceId), includeTombstone) - .findFirst() - .orElseThrow(() -> new ConfigNotFoundException(ConfigSchema.STANDARD_WORKSPACE, workspaceId)); - } - - public Optional getWorkspaceBySlugOptional(final String slug, final boolean includeTombstone) - throws IOException { - final Result result; - if (includeTombstone) { - result = database.query(ctx -> ctx.select(WORKSPACE.asterisk()) - .from(WORKSPACE) - .where(WORKSPACE.SLUG.eq(slug))).fetch(); - } else { - result = database.query(ctx -> ctx.select(WORKSPACE.asterisk()) - .from(WORKSPACE) - .where(WORKSPACE.SLUG.eq(slug)).andNot(WORKSPACE.TOMBSTONE)).fetch(); - } - - return result.stream().findFirst().map(DbConverter::buildStandardWorkspace); - } - - public StandardWorkspace getWorkspaceBySlug(final String slug, final boolean includeTombstone) throws IOException, ConfigNotFoundException { - return getWorkspaceBySlugOptional(slug, includeTombstone).orElseThrow(() -> new ConfigNotFoundException(ConfigSchema.STANDARD_WORKSPACE, slug)); - } - - public List listStandardWorkspaces(final boolean includeTombstone) throws IOException { - return listWorkspaceQuery(Optional.empty(), includeTombstone).toList(); - } - - private Stream listWorkspaceQuery(final Optional workspaceId, final boolean includeTombstone) throws IOException { - return database.query(ctx -> ctx.select(WORKSPACE.asterisk()) - .from(WORKSPACE) - .where(includeTombstone ? noCondition() : WORKSPACE.TOMBSTONE.notEqual(true)) - .and(workspaceId.map(WORKSPACE.ID::eq).orElse(noCondition())) - .fetch()) - .stream() - .map(DbConverter::buildStandardWorkspace); - } - - /** - * MUST NOT ACCEPT SECRETS - Should only be called from { @link SecretsRepositoryWriter } - * - * Write a StandardWorkspace to the database. 
- * - * @param workspace - The configuration of the workspace - * @throws JsonValidationException - throws if the workspace is invalid - * @throws IOException - you never know when you IO - */ - public void writeStandardWorkspaceNoSecrets(final StandardWorkspace workspace) throws JsonValidationException, IOException { - database.transaction(ctx -> { - final OffsetDateTime timestamp = OffsetDateTime.now(); - final boolean isExistingConfig = ctx.fetchExists(select() - .from(WORKSPACE) - .where(WORKSPACE.ID.eq(workspace.getWorkspaceId()))); - - if (isExistingConfig) { - ctx.update(WORKSPACE) - .set(WORKSPACE.ID, workspace.getWorkspaceId()) - .set(WORKSPACE.CUSTOMER_ID, workspace.getCustomerId()) - .set(WORKSPACE.NAME, workspace.getName()) - .set(WORKSPACE.SLUG, workspace.getSlug()) - .set(WORKSPACE.EMAIL, workspace.getEmail()) - .set(WORKSPACE.INITIAL_SETUP_COMPLETE, workspace.getInitialSetupComplete()) - .set(WORKSPACE.ANONYMOUS_DATA_COLLECTION, workspace.getAnonymousDataCollection()) - .set(WORKSPACE.SEND_NEWSLETTER, workspace.getNews()) - .set(WORKSPACE.SEND_SECURITY_UPDATES, workspace.getSecurityUpdates()) - .set(WORKSPACE.DISPLAY_SETUP_WIZARD, workspace.getDisplaySetupWizard()) - .set(WORKSPACE.TOMBSTONE, workspace.getTombstone() != null && workspace.getTombstone()) - .set(WORKSPACE.NOTIFICATIONS, JSONB.valueOf(Jsons.serialize(workspace.getNotifications()))) - .set(WORKSPACE.FIRST_SYNC_COMPLETE, workspace.getFirstCompletedSync()) - .set(WORKSPACE.FEEDBACK_COMPLETE, workspace.getFeedbackDone()) - .set(WORKSPACE.GEOGRAPHY, Enums.toEnum( - workspace.getDefaultGeography().value(), - io.airbyte.db.instance.configs.jooq.generated.enums.GeographyType.class).orElseThrow()) - .set(WORKSPACE.UPDATED_AT, timestamp) - .set(WORKSPACE.WEBHOOK_OPERATION_CONFIGS, workspace.getWebhookOperationConfigs() == null ? null - : JSONB.valueOf(Jsons.serialize(workspace.getWebhookOperationConfigs()))) - .where(WORKSPACE.ID.eq(workspace.getWorkspaceId())) - .execute(); - } else { - ctx.insertInto(WORKSPACE) - .set(WORKSPACE.ID, workspace.getWorkspaceId()) - .set(WORKSPACE.CUSTOMER_ID, workspace.getCustomerId()) - .set(WORKSPACE.NAME, workspace.getName()) - .set(WORKSPACE.SLUG, workspace.getSlug()) - .set(WORKSPACE.EMAIL, workspace.getEmail()) - .set(WORKSPACE.INITIAL_SETUP_COMPLETE, workspace.getInitialSetupComplete()) - .set(WORKSPACE.ANONYMOUS_DATA_COLLECTION, workspace.getAnonymousDataCollection()) - .set(WORKSPACE.SEND_NEWSLETTER, workspace.getNews()) - .set(WORKSPACE.SEND_SECURITY_UPDATES, workspace.getSecurityUpdates()) - .set(WORKSPACE.DISPLAY_SETUP_WIZARD, workspace.getDisplaySetupWizard()) - .set(WORKSPACE.TOMBSTONE, workspace.getTombstone() != null && workspace.getTombstone()) - .set(WORKSPACE.NOTIFICATIONS, JSONB.valueOf(Jsons.serialize(workspace.getNotifications()))) - .set(WORKSPACE.FIRST_SYNC_COMPLETE, workspace.getFirstCompletedSync()) - .set(WORKSPACE.FEEDBACK_COMPLETE, workspace.getFeedbackDone()) - .set(WORKSPACE.CREATED_AT, timestamp) - .set(WORKSPACE.UPDATED_AT, timestamp) - .set(WORKSPACE.GEOGRAPHY, Enums.toEnum( - workspace.getDefaultGeography().value(), - io.airbyte.db.instance.configs.jooq.generated.enums.GeographyType.class).orElseThrow()) - .set(WORKSPACE.WEBHOOK_OPERATION_CONFIGS, workspace.getWebhookOperationConfigs() == null ?
null - : JSONB.valueOf(Jsons.serialize(workspace.getWebhookOperationConfigs()))) - .execute(); - } - return null; - - }); - } - - public void setFeedback(final UUID workflowId) throws IOException { - database.query(ctx -> ctx.update(WORKSPACE).set(WORKSPACE.FEEDBACK_COMPLETE, true).where(WORKSPACE.ID.eq(workflowId)).execute()); - } - - public StandardSourceDefinition getStandardSourceDefinition(final UUID sourceDefinitionId) - throws JsonValidationException, IOException, ConfigNotFoundException { - return sourceDefQuery(Optional.of(sourceDefinitionId), true) - .findFirst() - .orElseThrow(() -> new ConfigNotFoundException(ConfigSchema.STANDARD_SOURCE_DEFINITION, sourceDefinitionId)); - } - - public StandardSourceDefinition getSourceDefinitionFromSource(final UUID sourceId) { - try { - final SourceConnection source = getSourceConnection(sourceId); - return getStandardSourceDefinition(source.getSourceDefinitionId()); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - public StandardSourceDefinition getSourceDefinitionFromConnection(final UUID connectionId) { - try { - final StandardSync sync = getStandardSync(connectionId); - return getSourceDefinitionFromSource(sync.getSourceId()); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - public StandardWorkspace getStandardWorkspaceFromConnection(final UUID connectionId, final boolean isTombstone) { - try { - final StandardSync sync = getStandardSync(connectionId); - final SourceConnection source = getSourceConnection(sync.getSourceId()); - return getStandardWorkspaceNoSecrets(source.getWorkspaceId(), isTombstone); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - public List listStandardSourceDefinitions(final boolean includeTombstone) throws IOException { - return sourceDefQuery(Optional.empty(), includeTombstone).toList(); - } - - private Stream sourceDefQuery(final Optional sourceDefId, final boolean includeTombstone) throws IOException { - return database.query(ctx -> ctx.select(ACTOR_DEFINITION.asterisk()) - .from(ACTOR_DEFINITION) - .where(ACTOR_DEFINITION.ACTOR_TYPE.eq(ActorType.source)) - .and(sourceDefId.map(ACTOR_DEFINITION.ID::eq).orElse(noCondition())) - .and(includeTombstone ? noCondition() : ACTOR_DEFINITION.TOMBSTONE.notEqual(true)) - .fetch()) - .stream() - .map(DbConverter::buildStandardSourceDefinition) - // Ensure version is set. Needed for connectors not upgraded since we added versioning. 
- .map(def -> def.withProtocolVersion(AirbyteProtocolVersion.getWithDefault(def.getProtocolVersion()).serialize())); - } - - public Map> getActorDefinitionToProtocolVersionMap() throws IOException { - return database.query(ConfigWriter::getActorDefinitionsInUseToProtocolVersion); - } - - public List listPublicSourceDefinitions(final boolean includeTombstone) throws IOException { - return listStandardActorDefinitions( - ActorType.source, - DbConverter::buildStandardSourceDefinition, - includeTombstones(ACTOR_DEFINITION.TOMBSTONE, includeTombstone), - ACTOR_DEFINITION.PUBLIC.eq(true)); - } - - public List listGrantedSourceDefinitions(final UUID workspaceId, final boolean includeTombstones) - throws IOException { - return listActorDefinitionsJoinedWithGrants( - workspaceId, - JoinType.JOIN, - ActorType.source, - DbConverter::buildStandardSourceDefinition, - includeTombstones(ACTOR_DEFINITION.TOMBSTONE, includeTombstones)); - } - - public List> listGrantableSourceDefinitions(final UUID workspaceId, - final boolean includeTombstones) - throws IOException { - return listActorDefinitionsJoinedWithGrants( - workspaceId, - JoinType.LEFT_OUTER_JOIN, - ActorType.source, - record -> actorDefinitionWithGrantStatus(record, DbConverter::buildStandardSourceDefinition), - ACTOR_DEFINITION.CUSTOM.eq(false), - includeTombstones(ACTOR_DEFINITION.TOMBSTONE, includeTombstones)); - } - - public void writeStandardSourceDefinition(final StandardSourceDefinition sourceDefinition) throws JsonValidationException, IOException { - database.transaction(ctx -> { - ConfigWriter.writeStandardSourceDefinition(Collections.singletonList(sourceDefinition), ctx); - return null; - }); - } - - public void writeCustomSourceDefinition(final StandardSourceDefinition sourceDefinition, final UUID workspaceId) - throws IOException { - database.transaction(ctx -> { - ConfigWriter.writeStandardSourceDefinition(Collections.singletonList(sourceDefinition), ctx); - writeActorDefinitionWorkspaceGrant(sourceDefinition.getSourceDefinitionId(), workspaceId, ctx); - return null; - }); - } - - private Stream destDefQuery(final Optional destDefId, final boolean includeTombstone) throws IOException { - return database.query(ctx -> ctx.select(ACTOR_DEFINITION.asterisk()) - .from(ACTOR_DEFINITION) - .where(ACTOR_DEFINITION.ACTOR_TYPE.eq(ActorType.destination)) - .and(destDefId.map(ACTOR_DEFINITION.ID::eq).orElse(noCondition())) - .and(includeTombstone ? noCondition() : ACTOR_DEFINITION.TOMBSTONE.notEqual(true)) - .fetch()) - .stream() - .map(DbConverter::buildStandardDestinationDefinition) - // Ensure version is set. Needed for connectors not upgraded since we added versioning. 
- .map(def -> def.withProtocolVersion(AirbyteProtocolVersion.getWithDefault(def.getProtocolVersion()).serialize())); - } - - public StandardDestinationDefinition getStandardDestinationDefinition(final UUID destinationDefinitionId) - throws JsonValidationException, IOException, ConfigNotFoundException { - return destDefQuery(Optional.of(destinationDefinitionId), true) - .findFirst() - .orElseThrow(() -> new ConfigNotFoundException(ConfigSchema.STANDARD_DESTINATION_DEFINITION, destinationDefinitionId)); - } - - public StandardDestinationDefinition getDestinationDefinitionFromDestination(final UUID destinationId) { - try { - final DestinationConnection destination = getDestinationConnection(destinationId); - return getStandardDestinationDefinition(destination.getDestinationDefinitionId()); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - public StandardDestinationDefinition getDestinationDefinitionFromConnection(final UUID connectionId) { - try { - final StandardSync sync = getStandardSync(connectionId); - return getDestinationDefinitionFromDestination(sync.getDestinationId()); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - public List listStandardDestinationDefinitions(final boolean includeTombstone) throws IOException { - return destDefQuery(Optional.empty(), includeTombstone).toList(); - } - - public List listPublicDestinationDefinitions(final boolean includeTombstone) throws IOException { - return listStandardActorDefinitions( - ActorType.destination, - DbConverter::buildStandardDestinationDefinition, - includeTombstones(ACTOR_DEFINITION.TOMBSTONE, includeTombstone), - ACTOR_DEFINITION.PUBLIC.eq(true)); - } - - public List listGrantedDestinationDefinitions(final UUID workspaceId, final boolean includeTombstones) - throws IOException { - return listActorDefinitionsJoinedWithGrants( - workspaceId, - JoinType.JOIN, - ActorType.destination, - DbConverter::buildStandardDestinationDefinition, - includeTombstones(ACTOR_DEFINITION.TOMBSTONE, includeTombstones)); - } - - public List> listGrantableDestinationDefinitions(final UUID workspaceId, - final boolean includeTombstones) - throws IOException { - return listActorDefinitionsJoinedWithGrants( - workspaceId, - JoinType.LEFT_OUTER_JOIN, - ActorType.destination, - record -> actorDefinitionWithGrantStatus(record, DbConverter::buildStandardDestinationDefinition), - ACTOR_DEFINITION.CUSTOM.eq(false), - includeTombstones(ACTOR_DEFINITION.TOMBSTONE, includeTombstones)); - } - - public void writeStandardDestinationDefinition(final StandardDestinationDefinition destinationDefinition) - throws JsonValidationException, IOException { - database.transaction(ctx -> { - ConfigWriter.writeStandardDestinationDefinition(Collections.singletonList(destinationDefinition), ctx); - return null; - }); - } - - public void writeCustomDestinationDefinition(final StandardDestinationDefinition destinationDefinition, final UUID workspaceId) - throws IOException { - database.transaction(ctx -> { - ConfigWriter.writeStandardDestinationDefinition(List.of(destinationDefinition), ctx); - writeActorDefinitionWorkspaceGrant(destinationDefinition.getDestinationDefinitionId(), workspaceId, ctx); - return null; - }); - } - - public void deleteStandardSync(final UUID syncId) throws IOException { - standardSyncPersistence.deleteStandardSync(syncId); - } - - public void writeActorDefinitionWorkspaceGrant(final UUID actorDefinitionId, final UUID workspaceId) throws IOException { - database.query(ctx -> 
writeActorDefinitionWorkspaceGrant(actorDefinitionId, workspaceId, ctx)); - } - - private int writeActorDefinitionWorkspaceGrant(final UUID actorDefinitionId, final UUID workspaceId, final DSLContext ctx) { - return ctx.insertInto(ACTOR_DEFINITION_WORKSPACE_GRANT) - .set(ACTOR_DEFINITION_WORKSPACE_GRANT.ACTOR_DEFINITION_ID, actorDefinitionId) - .set(ACTOR_DEFINITION_WORKSPACE_GRANT.WORKSPACE_ID, workspaceId) - .execute(); - } - - public boolean actorDefinitionWorkspaceGrantExists(final UUID actorDefinitionId, final UUID workspaceId) throws IOException { - final Integer count = database.query(ctx -> ctx.fetchCount( - DSL.selectFrom(ACTOR_DEFINITION_WORKSPACE_GRANT) - .where(ACTOR_DEFINITION_WORKSPACE_GRANT.ACTOR_DEFINITION_ID.eq(actorDefinitionId)) - .and(ACTOR_DEFINITION_WORKSPACE_GRANT.WORKSPACE_ID.eq(workspaceId)))); - return count == 1; - } - - public void deleteActorDefinitionWorkspaceGrant(final UUID actorDefinitionId, final UUID workspaceId) throws IOException { - database.query(ctx -> ctx.deleteFrom(ACTOR_DEFINITION_WORKSPACE_GRANT) - .where(ACTOR_DEFINITION_WORKSPACE_GRANT.ACTOR_DEFINITION_ID.eq(actorDefinitionId)) - .and(ACTOR_DEFINITION_WORKSPACE_GRANT.WORKSPACE_ID.eq(workspaceId)) - .execute()); - } - - public boolean workspaceCanUseDefinition(final UUID actorDefinitionId, final UUID workspaceId) - throws IOException { - final Result records = actorDefinitionsJoinedWithGrants( - workspaceId, - JoinType.LEFT_OUTER_JOIN, - ACTOR_DEFINITION.ID.eq(actorDefinitionId), - ACTOR_DEFINITION.PUBLIC.eq(true).or(ACTOR_DEFINITION_WORKSPACE_GRANT.WORKSPACE_ID.eq(workspaceId))); - return records.isNotEmpty(); - } - - public boolean workspaceCanUseCustomDefinition(final UUID actorDefinitionId, final UUID workspaceId) - throws IOException { - final Result records = actorDefinitionsJoinedWithGrants( - workspaceId, - JoinType.JOIN, - ACTOR_DEFINITION.ID.eq(actorDefinitionId), - ACTOR_DEFINITION.CUSTOM.eq(true)); - return records.isNotEmpty(); - } - - private List listStandardActorDefinitions(final ActorType actorType, - final Function recordToActorDefinition, - final Condition... conditions) - throws IOException { - final Result records = database.query(ctx -> ctx.select(asterisk()).from(ACTOR_DEFINITION) - .where(conditions) - .and(ACTOR_DEFINITION.ACTOR_TYPE.eq(actorType)) - .fetch()); - - return records.stream() - .map(recordToActorDefinition) - .toList(); - } - - private List listActorDefinitionsJoinedWithGrants(final UUID workspaceId, - final JoinType joinType, - final ActorType actorType, - final Function recordToReturnType, - final Condition... conditions) - throws IOException { - final Result records = actorDefinitionsJoinedWithGrants( - workspaceId, - joinType, - ArrayUtils.addAll(conditions, - ACTOR_DEFINITION.ACTOR_TYPE.eq(actorType), - ACTOR_DEFINITION.PUBLIC.eq(false))); - - return records.stream() - .map(recordToReturnType) - .toList(); - } - - private Entry actorDefinitionWithGrantStatus(final Record outerJoinRecord, - final Function recordToActorDefinition) { - final T actorDefinition = recordToActorDefinition.apply(outerJoinRecord); - final boolean granted = outerJoinRecord.get(ACTOR_DEFINITION_WORKSPACE_GRANT.WORKSPACE_ID) != null; - return Map.entry(actorDefinition, granted); - } - - private Result actorDefinitionsJoinedWithGrants(final UUID workspaceId, - final JoinType joinType, - final Condition... 
conditions) - throws IOException { - return database.query(ctx -> ctx.select(asterisk()).from(ACTOR_DEFINITION) - .join(ACTOR_DEFINITION_WORKSPACE_GRANT, joinType) - .on(ACTOR_DEFINITION.ID.eq(ACTOR_DEFINITION_WORKSPACE_GRANT.ACTOR_DEFINITION_ID), - ACTOR_DEFINITION_WORKSPACE_GRANT.WORKSPACE_ID.eq(workspaceId)) - .where(conditions) - .fetch()); - } - - private Stream listSourceQuery(final Optional configId) throws IOException { - final Result result = database.query(ctx -> { - final SelectJoinStep query = ctx.select(asterisk()).from(ACTOR); - if (configId.isPresent()) { - return query.where(ACTOR.ACTOR_TYPE.eq(ActorType.source), ACTOR.ID.eq(configId.get())).fetch(); - } - return query.where(ACTOR.ACTOR_TYPE.eq(ActorType.source)).fetch(); - }); - - return result.map(DbConverter::buildSourceConnection).stream(); - } - - /** - * Returns source with a given id. Does not contain secrets. To hydrate with secrets see { @link - * SecretsRepositoryReader#getSourceConnectionWithSecrets(final UUID sourceId) }. - * - * @param sourceId - id of source to fetch. - * @return sources - * @throws JsonValidationException - throws if returned sources are invalid - * @throws IOException - you never know when you IO - * @throws ConfigNotFoundException - throws if no source with that id can be found. - */ - public SourceConnection getSourceConnection(final UUID sourceId) throws JsonValidationException, ConfigNotFoundException, IOException { - return listSourceQuery(Optional.of(sourceId)) - .findFirst() - .orElseThrow(() -> new ConfigNotFoundException(ConfigSchema.SOURCE_CONNECTION, sourceId)); - } - - /** - * MUST NOT ACCEPT SECRETS - Should only be called from { @link SecretsRepositoryWriter } - * - * Write a SourceConnection to the database. The configuration of the Source will be a partial - * configuration (no secrets, just pointer to the secrets store). 
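The "no secrets" contract spelled out above is easiest to see in miniature. The sketch below is illustrative only: SecretStore, its put method, and the key-name secret detection are assumptions rather than the real SecretsRepositoryWriter behavior. The point is the shape of a partial config: secret values are replaced by store coordinates before anything reaches the actor table.

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

final class PartialConfigSketch {

  // Hypothetical stand-in for the secrets store: accepts a value, returns a coordinate.
  interface SecretStore {
    String put(String secretValue);
  }

  // Replaces secret-looking entries with a pointer object; everything else passes through.
  static Map<String, Object> withoutSecrets(final Map<String, Object> config, final SecretStore store) {
    final Map<String, Object> partial = new HashMap<>();
    config.forEach((key, value) -> {
      // Toy rule: in the real system the connector spec marks which fields are secret.
      if (key.equals("password") || key.equals("api_key")) {
        partial.put(key, Map.of("_secret", store.put(String.valueOf(value))));
      } else {
        partial.put(key, value);
      }
    });
    return partial;
  }

  public static void main(final String[] args) {
    final SecretStore store = secret -> "airbyte_secret_v1_" + UUID.randomUUID();
    // Prints a config whose password is now only a coordinate into the store.
    System.out.println(withoutSecrets(Map.of("host", "db.local", "password", "hunter2"), store));
  }
}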
- * - * @param partialSource - The configuration of the Source will be a partial configuration (no - * secrets, just pointer to the secrets store) - * @throws IOException - you never know when you IO - */ - public void writeSourceConnectionNoSecrets(final SourceConnection partialSource) throws IOException { - database.transaction(ctx -> { - writeSourceConnection(Collections.singletonList(partialSource), ctx); - return null; - }); - } - - private void writeSourceConnection(final List configs, final DSLContext ctx) { - final OffsetDateTime timestamp = OffsetDateTime.now(); - configs.forEach((sourceConnection) -> { - final boolean isExistingConfig = ctx.fetchExists(select() - .from(ACTOR) - .where(ACTOR.ID.eq(sourceConnection.getSourceId()))); - - if (isExistingConfig) { - ctx.update(ACTOR) - .set(ACTOR.ID, sourceConnection.getSourceId()) - .set(ACTOR.WORKSPACE_ID, sourceConnection.getWorkspaceId()) - .set(ACTOR.ACTOR_DEFINITION_ID, sourceConnection.getSourceDefinitionId()) - .set(ACTOR.NAME, sourceConnection.getName()) - .set(ACTOR.CONFIGURATION, JSONB.valueOf(Jsons.serialize(sourceConnection.getConfiguration()))) - .set(ACTOR.ACTOR_TYPE, ActorType.source) - .set(ACTOR.TOMBSTONE, sourceConnection.getTombstone() != null && sourceConnection.getTombstone()) - .set(ACTOR.UPDATED_AT, timestamp) - .where(ACTOR.ID.eq(sourceConnection.getSourceId())) - .execute(); - } else { - ctx.insertInto(ACTOR) - .set(ACTOR.ID, sourceConnection.getSourceId()) - .set(ACTOR.WORKSPACE_ID, sourceConnection.getWorkspaceId()) - .set(ACTOR.ACTOR_DEFINITION_ID, sourceConnection.getSourceDefinitionId()) - .set(ACTOR.NAME, sourceConnection.getName()) - .set(ACTOR.CONFIGURATION, JSONB.valueOf(Jsons.serialize(sourceConnection.getConfiguration()))) - .set(ACTOR.ACTOR_TYPE, ActorType.source) - .set(ACTOR.TOMBSTONE, sourceConnection.getTombstone() != null && sourceConnection.getTombstone()) - .set(ACTOR.CREATED_AT, timestamp) - .set(ACTOR.UPDATED_AT, timestamp) - .execute(); - } - }); - } - - public boolean deleteSource(final UUID sourceId) throws JsonValidationException, ConfigNotFoundException, IOException { - return deleteById(ACTOR, sourceId); - } - - /** - * Returns all sources in the database. Does not contain secrets. To hydrate with secrets see - * { @link SecretsRepositoryReader#listSourceConnectionWithSecrets() }. - * - * @return sources - * @throws IOException - you never know when you IO - */ - public List listSourceConnection() throws IOException { - return listSourceQuery(Optional.empty()).toList(); - } - - /** - * Returns all sources for a workspace. Does not contain secrets. 
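writeSourceConnection above is a check-then-write upsert: probe for the row, then branch to UPDATE or INSERT. A condensed JDBC restatement of the same shape (table and column names mirror the jOOQ constants; the real code stays in jOOQ):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.UUID;

final class UpsertSketch {

  static void upsertActorName(final Connection conn, final UUID id, final String name) throws SQLException {
    final boolean exists;
    // Probe: does the row exist already?
    try (PreparedStatement check = conn.prepareStatement("SELECT 1 FROM actor WHERE id = ?")) {
      check.setObject(1, id);
      try (ResultSet rs = check.executeQuery()) {
        exists = rs.next();
      }
    }
    // Branch: UPDATE if it does, INSERT if it does not.
    final String sql = exists
        ? "UPDATE actor SET name = ?, updated_at = now() WHERE id = ?"
        : "INSERT INTO actor (name, id) VALUES (?, ?)";
    try (PreparedStatement write = conn.prepareStatement(sql)) {
      write.setString(1, name);
      write.setObject(2, id);
      write.executeUpdate();
    }
  }
}

Note that the probe and the write are only race-free because they run inside one transaction, exactly as database.transaction arranges here; on Postgres the same effect could be had with a single INSERT ... ON CONFLICT (id) DO UPDATE.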
- * - * @param workspaceId - id of the workspace - * @return sources - * @throws IOException - you never know when you IO - */ - public List listWorkspaceSourceConnection(final UUID workspaceId) throws IOException { - final Result result = database.query(ctx -> ctx.select(asterisk()) - .from(ACTOR) - .where(ACTOR.ACTOR_TYPE.eq(ActorType.source)) - .and(ACTOR.WORKSPACE_ID.eq(workspaceId)) - .andNot(ACTOR.TOMBSTONE).fetch()); - return result.stream().map(DbConverter::buildSourceConnection).collect(Collectors.toList()); - } - - private Stream listDestinationQuery(final Optional configId) throws IOException { - final Result result = database.query(ctx -> { - final SelectJoinStep query = ctx.select(asterisk()).from(ACTOR); - if (configId.isPresent()) { - return query.where(ACTOR.ACTOR_TYPE.eq(ActorType.destination), ACTOR.ID.eq(configId.get())).fetch(); - } - return query.where(ACTOR.ACTOR_TYPE.eq(ActorType.destination)).fetch(); - }); - - return result.map(DbConverter::buildDestinationConnection).stream(); - } - - /** - * Returns destination with a given id. Does not contain secrets. To hydrate with secrets see - * { @link SecretsRepositoryReader#getDestinationConnectionWithSecrets(final UUID destinationId) }. - * - * @param destinationId - id of destination to fetch. - * @return destinations - * @throws JsonValidationException - throws if returned destinations are invalid - * @throws IOException - you never know when you IO - * @throws ConfigNotFoundException - throws if no destination with that id can be found. - */ - public DestinationConnection getDestinationConnection(final UUID destinationId) - throws JsonValidationException, IOException, ConfigNotFoundException { - return listDestinationQuery(Optional.of(destinationId)) - .findFirst() - .orElseThrow(() -> new ConfigNotFoundException(ConfigSchema.DESTINATION_CONNECTION, destinationId)); - } - - /** - * MUST NOT ACCEPT SECRETS - Should only be called from { @link SecretsRepositoryWriter } - * - * Write a DestinationConnection to the database. The configuration of the Destination will be a - * partial configuration (no secrets, just pointer to the secrets store). 
- * - * @param partialDestination - The configuration of the Destination will be a partial configuration - * (no secrets, just pointer to the secrets store) - * @throws IOException - you never know when you IO - */ - public void writeDestinationConnectionNoSecrets(final DestinationConnection partialDestination) throws IOException { - database.transaction(ctx -> { - writeDestinationConnection(Collections.singletonList(partialDestination), ctx); - return null; - }); - } - - private void writeDestinationConnection(final List configs, final DSLContext ctx) { - final OffsetDateTime timestamp = OffsetDateTime.now(); - configs.forEach((destinationConnection) -> { - final boolean isExistingConfig = ctx.fetchExists(select() - .from(ACTOR) - .where(ACTOR.ID.eq(destinationConnection.getDestinationId()))); - - if (isExistingConfig) { - ctx.update(ACTOR) - .set(ACTOR.ID, destinationConnection.getDestinationId()) - .set(ACTOR.WORKSPACE_ID, destinationConnection.getWorkspaceId()) - .set(ACTOR.ACTOR_DEFINITION_ID, destinationConnection.getDestinationDefinitionId()) - .set(ACTOR.NAME, destinationConnection.getName()) - .set(ACTOR.CONFIGURATION, JSONB.valueOf(Jsons.serialize(destinationConnection.getConfiguration()))) - .set(ACTOR.ACTOR_TYPE, ActorType.destination) - .set(ACTOR.TOMBSTONE, destinationConnection.getTombstone() != null && destinationConnection.getTombstone()) - .set(ACTOR.UPDATED_AT, timestamp) - .where(ACTOR.ID.eq(destinationConnection.getDestinationId())) - .execute(); - - } else { - ctx.insertInto(ACTOR) - .set(ACTOR.ID, destinationConnection.getDestinationId()) - .set(ACTOR.WORKSPACE_ID, destinationConnection.getWorkspaceId()) - .set(ACTOR.ACTOR_DEFINITION_ID, destinationConnection.getDestinationDefinitionId()) - .set(ACTOR.NAME, destinationConnection.getName()) - .set(ACTOR.CONFIGURATION, JSONB.valueOf(Jsons.serialize(destinationConnection.getConfiguration()))) - .set(ACTOR.ACTOR_TYPE, ActorType.destination) - .set(ACTOR.TOMBSTONE, destinationConnection.getTombstone() != null && destinationConnection.getTombstone()) - .set(ACTOR.CREATED_AT, timestamp) - .set(ACTOR.UPDATED_AT, timestamp) - .execute(); - } - }); - } - - /** - * Returns all destinations in the database. Does not contain secrets. To hydrate with secrets see - * { @link SecretsRepositoryReader#listDestinationConnectionWithSecrets() }. - * - * @return destinations - * @throws IOException - you never know when you IO - */ - public List listDestinationConnection() throws IOException { - return listDestinationQuery(Optional.empty()).toList(); - } - - /** - * Returns all destinations for a workspace. Does not contain secrets. - * - * @param workspaceId - id of the workspace - * @return destinations - * @throws IOException - you never know when you IO - */ - public List listWorkspaceDestinationConnection(final UUID workspaceId) throws IOException { - final Result result = database.query(ctx -> ctx.select(asterisk()) - .from(ACTOR) - .where(ACTOR.ACTOR_TYPE.eq(ActorType.destination)) - .and(ACTOR.WORKSPACE_ID.eq(workspaceId)) - .andNot(ACTOR.TOMBSTONE).fetch()); - return result.stream().map(DbConverter::buildDestinationConnection).collect(Collectors.toList()); - } - - /** - * List workspace IDs with most recently running jobs within a given time window (in hours). - * - * @param timeWindowInHours - integer, e.g. 
24, 48, etc - * @return List - list of workspace IDs - * @throws IOException - failed to query data - */ - public List listWorkspacesByMostRecentlyRunningJobs(final int timeWindowInHours) throws IOException { - final Result> records = database.query(ctx -> ctx.selectDistinct(ACTOR.WORKSPACE_ID) - .from(ACTOR) - .join(CONNECTION) - .on(CONNECTION.SOURCE_ID.eq(ACTOR.ID)) - .join(JOBS) - .on(CONNECTION.ID.cast(VARCHAR(255)).eq(JOBS.SCOPE)) - .where(JOBS.UPDATED_AT.greaterOrEqual(OffsetDateTime.now().minusHours(timeWindowInHours))) - .fetch()); - return records.stream().map(record -> record.get(ACTOR.WORKSPACE_ID)).collect(Collectors.toList()); - } - - /** - * Returns all active sources using a definition - * - * @param definitionId - id for the definition - * @return sources - * @throws IOException - exception while interacting with the db - */ - public List listSourcesForDefinition(final UUID definitionId) throws IOException { - final Result result = database.query(ctx -> ctx.select(asterisk()) - .from(ACTOR) - .where(ACTOR.ACTOR_TYPE.eq(ActorType.source)) - .and(ACTOR.ACTOR_DEFINITION_ID.eq(definitionId)) - .andNot(ACTOR.TOMBSTONE).fetch()); - return result.stream().map(DbConverter::buildSourceConnection).collect(Collectors.toList()); - } - - /** - * Returns all active destinations using a definition - * - * @param definitionId - id for the definition - * @return destinations - * @throws IOException - exception while interacting with the db - */ - public List listDestinationsForDefinition(final UUID definitionId) throws IOException { - final Result result = database.query(ctx -> ctx.select(asterisk()) - .from(ACTOR) - .where(ACTOR.ACTOR_TYPE.eq(ActorType.destination)) - .and(ACTOR.ACTOR_DEFINITION_ID.eq(definitionId)) - .andNot(ACTOR.TOMBSTONE).fetch()); - return result.stream().map(DbConverter::buildDestinationConnection).collect(Collectors.toList()); - } - - public StandardSync getStandardSync(final UUID connectionId) throws JsonValidationException, IOException, ConfigNotFoundException { - return standardSyncPersistence.getStandardSync(connectionId); - } - - public void writeStandardSync(final StandardSync standardSync) throws IOException { - standardSyncPersistence.writeStandardSync(standardSync); - } - - /** - * For the StandardSyncs related to actorDefinitionId, clear the unsupported protocol version flag - * if both connectors are now within support range. - * - * @param actorDefinitionId the actorDefinitionId to query - * @param actorType the ActorType of actorDefinitionId - * @param supportedRange the supported range of protocol versions - */ - // We have conflicting imports here, ActorType is imported from jooq for most internal uses. Since - // this is a public method, we should be using the ActorType from airbyte-config. 
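The import-conflict note above is a small pattern worth making explicit: import the ActorType used most often (the jOOQ enum) and fully qualify the config-model one at public boundaries, as the signature that follows does. The mapping helper below is invented for illustration; only the two class names come from this file.

import io.airbyte.db.instance.configs.jooq.generated.enums.ActorType;

class ActorTypeBoundarySketch {

  // Public boundary: the config-model enum is spelled out in full.
  ActorType toJooq(final io.airbyte.config.ActorType configType) {
    // Both enums cover the same two values; an exhaustive switch keeps the mapping visible.
    return switch (configType) {
      case SOURCE -> ActorType.source;
      case DESTINATION -> ActorType.destination;
    };
  }
}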
- public void clearUnsupportedProtocolVersionFlag(final UUID actorDefinitionId, - final io.airbyte.config.ActorType actorType, - final AirbyteProtocolVersionRange supportedRange) - throws IOException { - standardSyncPersistence.clearUnsupportedProtocolVersionFlag(actorDefinitionId, actorType, supportedRange); - } - - public List listStandardSyncs() throws IOException { - return standardSyncPersistence.listStandardSync(); - } - - public List listStandardSyncsUsingOperation(final UUID operationId) - throws IOException { - - final Result connectionAndOperationIdsResult = database.query(ctx -> ctx - // SELECT connection.* plus the connection's associated operationIds as a concatenated list - .select( - CONNECTION.asterisk(), - groupConcat(CONNECTION_OPERATION.OPERATION_ID).separator(OPERATION_IDS_AGG_DELIMITER).as(OPERATION_IDS_AGG_FIELD)) - .from(CONNECTION) - - // inner join with all connection_operation rows that match the connection's id - .join(CONNECTION_OPERATION).on(CONNECTION_OPERATION.CONNECTION_ID.eq(CONNECTION.ID)) - - // only keep rows for connections that have an operationId that matches the input. - // needs to be a sub query because we want to keep all operationIds for matching connections - // in the main query - .where(CONNECTION.ID.in( - select(CONNECTION.ID).from(CONNECTION).join(CONNECTION_OPERATION).on(CONNECTION_OPERATION.CONNECTION_ID.eq(CONNECTION.ID)) - .where(CONNECTION_OPERATION.OPERATION_ID.eq(operationId)))) - - // group by connection.id so that the groupConcat above works - .groupBy(CONNECTION.ID)).fetch(); - - return getStandardSyncsFromResult(connectionAndOperationIdsResult); - } - - public List listWorkspaceStandardSyncs(final UUID workspaceId, final boolean includeDeleted) throws IOException { - return listWorkspaceStandardSyncs(new StandardSyncQuery(workspaceId, null, null, includeDeleted)); - } - - public List listWorkspaceStandardSyncs(final StandardSyncQuery standardSyncQuery) throws IOException { - final Result connectionAndOperationIdsResult = database.query(ctx -> ctx - // SELECT connection.* plus the connection's associated operationIds as a concatenated list - .select( - CONNECTION.asterisk(), - groupConcat(CONNECTION_OPERATION.OPERATION_ID).separator(OPERATION_IDS_AGG_DELIMITER).as(OPERATION_IDS_AGG_FIELD)) - .from(CONNECTION) - - // left join with all connection_operation rows that match the connection's id. - // left join includes connections that don't have any connection_operations - .leftJoin(CONNECTION_OPERATION).on(CONNECTION_OPERATION.CONNECTION_ID.eq(CONNECTION.ID)) - - // join with source actors so that we can filter by workspaceId - .join(ACTOR).on(CONNECTION.SOURCE_ID.eq(ACTOR.ID)) - .where(ACTOR.WORKSPACE_ID.eq(standardSyncQuery.workspaceId) - .and(standardSyncQuery.destinationId == null || standardSyncQuery.destinationId.isEmpty() ? noCondition() - : CONNECTION.DESTINATION_ID.in(standardSyncQuery.destinationId)) - .and(standardSyncQuery.sourceId == null || standardSyncQuery.sourceId.isEmpty() ? noCondition() - : CONNECTION.SOURCE_ID.in(standardSyncQuery.sourceId)) - .and(standardSyncQuery.includeDeleted ? 
noCondition() : CONNECTION.STATUS.notEqual(StatusType.deprecated))) - - // group by connection.id so that the groupConcat above works - .groupBy(CONNECTION.ID)).fetch(); - - return getStandardSyncsFromResult(connectionAndOperationIdsResult); - } - - public List listConnectionsBySource(final UUID sourceId, final boolean includeDeleted) throws IOException { - final Result connectionAndOperationIdsResult = database.query(ctx -> ctx - .select( - CONNECTION.asterisk(), - groupConcat(CONNECTION_OPERATION.OPERATION_ID).separator(OPERATION_IDS_AGG_DELIMITER).as(OPERATION_IDS_AGG_FIELD)) - .from(CONNECTION) - .leftJoin(CONNECTION_OPERATION).on(CONNECTION_OPERATION.CONNECTION_ID.eq(CONNECTION.ID)) - .where(CONNECTION.SOURCE_ID.eq(sourceId) - .and(includeDeleted ? noCondition() : CONNECTION.STATUS.notEqual(StatusType.deprecated))) - .groupBy(CONNECTION.ID)).fetch(); - - return getStandardSyncsFromResult(connectionAndOperationIdsResult); - } - - private List getStandardSyncsFromResult(final Result connectionAndOperationIdsResult) { - final List standardSyncs = new ArrayList<>(); - - for (final Record record : connectionAndOperationIdsResult) { - final String operationIdsFromRecord = record.get(OPERATION_IDS_AGG_FIELD, String.class); - - // can be null when connection has no connectionOperations - final List operationIds = operationIdsFromRecord == null - ? Collections.emptyList() - : Arrays.stream(operationIdsFromRecord.split(OPERATION_IDS_AGG_DELIMITER)).map(UUID::fromString).toList(); - - standardSyncs.add(DbConverter.buildStandardSync(record, operationIds)); - } - - return standardSyncs; - } - - private Stream listStandardSyncOperationQuery(final Optional configId) throws IOException { - final Result result = database.query(ctx -> { - final SelectJoinStep query = ctx.select(asterisk()).from(OPERATION); - if (configId.isPresent()) { - return query.where(OPERATION.ID.eq(configId.get())).fetch(); - } - return query.fetch(); - }); - - return result.map(ConfigRepository::buildStandardSyncOperation).stream(); - } - - private static StandardSyncOperation buildStandardSyncOperation(final Record record) { - return new StandardSyncOperation() - .withOperationId(record.get(OPERATION.ID)) - .withName(record.get(OPERATION.NAME)) - .withWorkspaceId(record.get(OPERATION.WORKSPACE_ID)) - .withOperatorType(Enums.toEnum(record.get(OPERATION.OPERATOR_TYPE, String.class), OperatorType.class).orElseThrow()) - .withOperatorNormalization(Jsons.deserialize(record.get(OPERATION.OPERATOR_NORMALIZATION).data(), OperatorNormalization.class)) - .withOperatorDbt(Jsons.deserialize(record.get(OPERATION.OPERATOR_DBT).data(), OperatorDbt.class)) - .withOperatorWebhook(record.get(OPERATION.OPERATOR_WEBHOOK) == null ? 
null - : Jsons.deserialize(record.get(OPERATION.OPERATOR_WEBHOOK).data(), OperatorWebhook.class)) - .withTombstone(record.get(OPERATION.TOMBSTONE)); - } - - public StandardSyncOperation getStandardSyncOperation(final UUID operationId) throws JsonValidationException, IOException, ConfigNotFoundException { - return listStandardSyncOperationQuery(Optional.of(operationId)) - .findFirst() - .orElseThrow(() -> new ConfigNotFoundException(ConfigSchema.STANDARD_SYNC_OPERATION, operationId)); - } - - public void writeStandardSyncOperation(final StandardSyncOperation standardSyncOperation) throws IOException { - database.transaction(ctx -> { - writeStandardSyncOperation(Collections.singletonList(standardSyncOperation), ctx); - return null; - }); - } - - private void writeStandardSyncOperation(final List configs, final DSLContext ctx) { - final OffsetDateTime timestamp = OffsetDateTime.now(); - configs.forEach((standardSyncOperation) -> { - final boolean isExistingConfig = ctx.fetchExists(select() - .from(OPERATION) - .where(OPERATION.ID.eq(standardSyncOperation.getOperationId()))); - - if (isExistingConfig) { - ctx.update(OPERATION) - .set(OPERATION.ID, standardSyncOperation.getOperationId()) - .set(OPERATION.WORKSPACE_ID, standardSyncOperation.getWorkspaceId()) - .set(OPERATION.NAME, standardSyncOperation.getName()) - .set(OPERATION.OPERATOR_TYPE, Enums.toEnum(standardSyncOperation.getOperatorType().value(), - io.airbyte.db.instance.configs.jooq.generated.enums.OperatorType.class).orElseThrow()) - .set(OPERATION.OPERATOR_NORMALIZATION, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorNormalization()))) - .set(OPERATION.OPERATOR_DBT, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorDbt()))) - .set(OPERATION.OPERATOR_WEBHOOK, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorWebhook()))) - .set(OPERATION.TOMBSTONE, standardSyncOperation.getTombstone() != null && standardSyncOperation.getTombstone()) - .set(OPERATION.UPDATED_AT, timestamp) - .where(OPERATION.ID.eq(standardSyncOperation.getOperationId())) - .execute(); - - } else { - ctx.insertInto(OPERATION) - .set(OPERATION.ID, standardSyncOperation.getOperationId()) - .set(OPERATION.WORKSPACE_ID, standardSyncOperation.getWorkspaceId()) - .set(OPERATION.NAME, standardSyncOperation.getName()) - .set(OPERATION.OPERATOR_TYPE, Enums.toEnum(standardSyncOperation.getOperatorType().value(), - io.airbyte.db.instance.configs.jooq.generated.enums.OperatorType.class).orElseThrow()) - .set(OPERATION.OPERATOR_NORMALIZATION, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorNormalization()))) - .set(OPERATION.OPERATOR_DBT, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorDbt()))) - .set(OPERATION.OPERATOR_WEBHOOK, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorWebhook()))) - .set(OPERATION.TOMBSTONE, standardSyncOperation.getTombstone() != null && standardSyncOperation.getTombstone()) - .set(OPERATION.CREATED_AT, timestamp) - .set(OPERATION.UPDATED_AT, timestamp) - .execute(); - } - }); - } - - public List listStandardSyncOperations() throws IOException, JsonValidationException { - return listStandardSyncOperationQuery(Optional.empty()).toList(); - } - - /** - * Updates {@link io.airbyte.db.instance.configs.jooq.generated.tables.ConnectionOperation} records - * for the given {@code connectionId}. 
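The update described above reduces to set algebra: keep the intersection, delete existing links that are no longer wanted, insert wanted links that do not yet exist. A JDK-only restatement of that plan (the method body that follows computes the same two sets with Guava's Sets.intersection and Sets.difference, inside one transaction):

import java.util.HashSet;
import java.util.Set;
import java.util.UUID;

final class ReconcileSketch {

  record Plan(Set<UUID> toDelete, Set<UUID> toAdd) {}

  static Plan reconcile(final Set<UUID> existing, final Set<UUID> desired) {
    final Set<UUID> toDelete = new HashSet<>(existing);
    toDelete.removeAll(desired); // links present in the DB but absent from the input
    final Set<UUID> toAdd = new HashSet<>(desired);
    toAdd.removeAll(existing);   // links requested but not yet persisted
    return new Plan(toDelete, toAdd);
  }

  public static void main(final String[] args) {
    final UUID a = UUID.randomUUID();
    final UUID b = UUID.randomUUID();
    final UUID c = UUID.randomUUID();
    // Existing {a, b}, desired {b, c}: delete a, add c, leave b untouched.
    final Plan plan = reconcile(Set.of(a, b), Set.of(b, c));
    System.out.println("delete=" + plan.toDelete() + " add=" + plan.toAdd());
  }
}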
- * - * @param connectionId ID of the associated connection to update operations for - * @param newOperationIds Set of all operationIds that should be associated to the connection - * @throws IOException - exception while interacting with the db - */ - public void updateConnectionOperationIds(final UUID connectionId, final Set newOperationIds) throws IOException { - database.transaction(ctx -> { - final Set existingOperationIds = ctx - .selectFrom(CONNECTION_OPERATION) - .where(CONNECTION_OPERATION.CONNECTION_ID.eq(connectionId)) - .fetchSet(CONNECTION_OPERATION.OPERATION_ID); - - final Set existingOperationIdsToKeep = Sets.intersection(existingOperationIds, newOperationIds); - - // DELETE existing connection_operation records that aren't in the input list - final Set operationIdsToDelete = Sets.difference(existingOperationIds, existingOperationIdsToKeep); - - ctx.deleteFrom(CONNECTION_OPERATION) - .where(CONNECTION_OPERATION.CONNECTION_ID.eq(connectionId)) - .and(CONNECTION_OPERATION.OPERATION_ID.in(operationIdsToDelete)) - .execute(); - - // INSERT connection_operation records that are in the input list and don't yet exist - final Set operationIdsToAdd = Sets.difference(newOperationIds, existingOperationIdsToKeep); - - operationIdsToAdd.forEach(operationId -> ctx - .insertInto(CONNECTION_OPERATION) - .columns(CONNECTION_OPERATION.ID, CONNECTION_OPERATION.CONNECTION_ID, CONNECTION_OPERATION.OPERATION_ID) - .values(UUID.randomUUID(), connectionId, operationId) - .execute()); - - return null; - }); - } - - public void deleteStandardSyncOperation(final UUID standardSyncOperationId) throws IOException { - database.transaction(ctx -> { - ctx.deleteFrom(CONNECTION_OPERATION) - .where(CONNECTION_OPERATION.OPERATION_ID.eq(standardSyncOperationId)).execute(); - ctx.update(OPERATION) - .set(OPERATION.TOMBSTONE, true) - .where(OPERATION.ID.eq(standardSyncOperationId)).execute(); - return null; - }); - } - - private Stream listSourceOauthParamQuery(final Optional configId) throws IOException { - final Result result = database.query(ctx -> { - final SelectJoinStep query = ctx.select(asterisk()).from(ACTOR_OAUTH_PARAMETER); - if (configId.isPresent()) { - return query.where(ACTOR_OAUTH_PARAMETER.ACTOR_TYPE.eq(ActorType.source), ACTOR_OAUTH_PARAMETER.ID.eq(configId.get())).fetch(); - } - return query.where(ACTOR_OAUTH_PARAMETER.ACTOR_TYPE.eq(ActorType.source)).fetch(); - }); - - return result.map(DbConverter::buildSourceOAuthParameter).stream(); - } - - public Optional getSourceOAuthParamByDefinitionIdOptional(final UUID workspaceId, final UUID sourceDefinitionId) - throws IOException { - final Result result = database.query(ctx -> { - final SelectJoinStep query = ctx.select(asterisk()).from(ACTOR_OAUTH_PARAMETER); - return query.where(ACTOR_OAUTH_PARAMETER.ACTOR_TYPE.eq(ActorType.source), - ACTOR_OAUTH_PARAMETER.WORKSPACE_ID.eq(workspaceId), - ACTOR_OAUTH_PARAMETER.ACTOR_DEFINITION_ID.eq(sourceDefinitionId)).fetch(); - }); - - return result.stream().findFirst().map(DbConverter::buildSourceOAuthParameter); - } - - public void writeSourceOAuthParam(final SourceOAuthParameter sourceOAuthParameter) throws IOException { - database.transaction(ctx -> { - writeSourceOauthParameter(Collections.singletonList(sourceOAuthParameter), ctx); - return null; - }); - } - - private void writeSourceOauthParameter(final List configs, final DSLContext ctx) { - final OffsetDateTime timestamp = OffsetDateTime.now(); - configs.forEach((sourceOAuthParameter) -> { - final boolean isExistingConfig = ctx.fetchExists(select() - 
.from(ACTOR_OAUTH_PARAMETER) - .where(ACTOR_OAUTH_PARAMETER.ID.eq(sourceOAuthParameter.getOauthParameterId()))); - - if (isExistingConfig) { - ctx.update(ACTOR_OAUTH_PARAMETER) - .set(ACTOR_OAUTH_PARAMETER.ID, sourceOAuthParameter.getOauthParameterId()) - .set(ACTOR_OAUTH_PARAMETER.WORKSPACE_ID, sourceOAuthParameter.getWorkspaceId()) - .set(ACTOR_OAUTH_PARAMETER.ACTOR_DEFINITION_ID, sourceOAuthParameter.getSourceDefinitionId()) - .set(ACTOR_OAUTH_PARAMETER.CONFIGURATION, JSONB.valueOf(Jsons.serialize(sourceOAuthParameter.getConfiguration()))) - .set(ACTOR_OAUTH_PARAMETER.ACTOR_TYPE, ActorType.source) - .set(ACTOR_OAUTH_PARAMETER.UPDATED_AT, timestamp) - .where(ACTOR_OAUTH_PARAMETER.ID.eq(sourceOAuthParameter.getOauthParameterId())) - .execute(); - } else { - ctx.insertInto(ACTOR_OAUTH_PARAMETER) - .set(ACTOR_OAUTH_PARAMETER.ID, sourceOAuthParameter.getOauthParameterId()) - .set(ACTOR_OAUTH_PARAMETER.WORKSPACE_ID, sourceOAuthParameter.getWorkspaceId()) - .set(ACTOR_OAUTH_PARAMETER.ACTOR_DEFINITION_ID, sourceOAuthParameter.getSourceDefinitionId()) - .set(ACTOR_OAUTH_PARAMETER.CONFIGURATION, JSONB.valueOf(Jsons.serialize(sourceOAuthParameter.getConfiguration()))) - .set(ACTOR_OAUTH_PARAMETER.ACTOR_TYPE, ActorType.source) - .set(ACTOR_OAUTH_PARAMETER.CREATED_AT, timestamp) - .set(ACTOR_OAUTH_PARAMETER.UPDATED_AT, timestamp) - .execute(); - } - }); - } - - public List listSourceOAuthParam() throws JsonValidationException, IOException { - return listSourceOauthParamQuery(Optional.empty()).toList(); - } - - private Stream listDestinationOauthParamQuery(final Optional configId) - throws IOException { - final Result result = database.query(ctx -> { - final SelectJoinStep query = ctx.select(asterisk()).from(ACTOR_OAUTH_PARAMETER); - if (configId.isPresent()) { - return query.where(ACTOR_OAUTH_PARAMETER.ACTOR_TYPE.eq(ActorType.destination), ACTOR_OAUTH_PARAMETER.ID.eq(configId.get())).fetch(); - } - return query.where(ACTOR_OAUTH_PARAMETER.ACTOR_TYPE.eq(ActorType.destination)).fetch(); - }); - - return result.map(DbConverter::buildDestinationOAuthParameter).stream(); - } - - public Optional getDestinationOAuthParamByDefinitionIdOptional(final UUID workspaceId, - final UUID destinationDefinitionId) - throws IOException { - final Result result = database.query(ctx -> { - final SelectJoinStep query = ctx.select(asterisk()).from(ACTOR_OAUTH_PARAMETER); - return query.where(ACTOR_OAUTH_PARAMETER.ACTOR_TYPE.eq(ActorType.destination), - ACTOR_OAUTH_PARAMETER.WORKSPACE_ID.eq(workspaceId), - ACTOR_OAUTH_PARAMETER.ACTOR_DEFINITION_ID.eq(destinationDefinitionId)).fetch(); - }); - - return result.stream().findFirst().map(DbConverter::buildDestinationOAuthParameter); - } - - public void writeDestinationOAuthParam(final DestinationOAuthParameter destinationOAuthParameter) throws IOException { - database.transaction(ctx -> { - writeDestinationOauthParameter(Collections.singletonList(destinationOAuthParameter), ctx); - return null; - }); - } - - private void writeDestinationOauthParameter(final List configs, final DSLContext ctx) { - final OffsetDateTime timestamp = OffsetDateTime.now(); - configs.forEach((destinationOAuthParameter) -> { - final boolean isExistingConfig = ctx.fetchExists(select() - .from(ACTOR_OAUTH_PARAMETER) - .where(ACTOR_OAUTH_PARAMETER.ID.eq(destinationOAuthParameter.getOauthParameterId()))); - - if (isExistingConfig) { - ctx.update(ACTOR_OAUTH_PARAMETER) - .set(ACTOR_OAUTH_PARAMETER.ID, destinationOAuthParameter.getOauthParameterId()) - .set(ACTOR_OAUTH_PARAMETER.WORKSPACE_ID, 
destinationOAuthParameter.getWorkspaceId()) - .set(ACTOR_OAUTH_PARAMETER.ACTOR_DEFINITION_ID, destinationOAuthParameter.getDestinationDefinitionId()) - .set(ACTOR_OAUTH_PARAMETER.CONFIGURATION, JSONB.valueOf(Jsons.serialize(destinationOAuthParameter.getConfiguration()))) - .set(ACTOR_OAUTH_PARAMETER.ACTOR_TYPE, ActorType.destination) - .set(ACTOR_OAUTH_PARAMETER.UPDATED_AT, timestamp) - .where(ACTOR_OAUTH_PARAMETER.ID.eq(destinationOAuthParameter.getOauthParameterId())) - .execute(); - - } else { - ctx.insertInto(ACTOR_OAUTH_PARAMETER) - .set(ACTOR_OAUTH_PARAMETER.ID, destinationOAuthParameter.getOauthParameterId()) - .set(ACTOR_OAUTH_PARAMETER.WORKSPACE_ID, destinationOAuthParameter.getWorkspaceId()) - .set(ACTOR_OAUTH_PARAMETER.ACTOR_DEFINITION_ID, destinationOAuthParameter.getDestinationDefinitionId()) - .set(ACTOR_OAUTH_PARAMETER.CONFIGURATION, JSONB.valueOf(Jsons.serialize(destinationOAuthParameter.getConfiguration()))) - .set(ACTOR_OAUTH_PARAMETER.ACTOR_TYPE, ActorType.destination) - .set(ACTOR_OAUTH_PARAMETER.CREATED_AT, timestamp) - .set(ACTOR_OAUTH_PARAMETER.UPDATED_AT, timestamp) - .execute(); - } - }); - - } - - public List listDestinationOAuthParam() throws JsonValidationException, IOException { - return listDestinationOauthParamQuery(Optional.empty()).toList(); - } - - private Map findCatalogByHash(final String catalogHash, final DSLContext context) { - final Result> records = context.select(ACTOR_CATALOG.ID, ACTOR_CATALOG.CATALOG) - .from(ACTOR_CATALOG) - .where(ACTOR_CATALOG.CATALOG_HASH.eq(catalogHash)).fetch(); - - final Map result = new HashMap<>(); - for (final Record record : records) { - // We do not apply the on-the-fly migration here because the only caller is getOrInsertActorCatalog - // which is using this to figure out if the catalog has already been inserted. Migrating on the fly - // here will cause us to add a duplicate each time we check for existence of a catalog. - final AirbyteCatalog catalog = Jsons.deserialize(record.get(ACTOR_CATALOG.CATALOG).toString(), AirbyteCatalog.class); - result.put(record.get(ACTOR_CATALOG.ID), catalog); - } - return result; - } - - /** - * Updates the database with the most up-to-date source and destination definitions in the connector - * catalog. - * - * @param seedSourceDefs - most up-to-date source definitions - * @param seedDestDefs - most up-to-date destination definitions - * @throws IOException - throws if exception when interacting with db - */ - public void seedActorDefinitions(final List seedSourceDefs, final List seedDestDefs) - throws IOException { - actorDefinitionMigrator.migrate(seedSourceDefs, seedDestDefs); - } - - // Data-carrier records to hold combined result of query for a Source or Destination and its - // corresponding Definition. This enables the API layer to - // process combined information about a Source/Destination/Definition pair without requiring two - // separate queries and in-memory join operation, - // because the config models are grouped immediately in the repository layer. 
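The comment above names a pattern rather than an algorithm: pair both halves of a joined row in one carrier so the API layer never issues a second lookup. Stripped of the jOOQ plumbing, with hypothetical stand-in types:

import java.util.List;

final class JoinPairSketch {

  record Source(String name) {}

  record Definition(String dockerRepository) {}

  // One carrier per joined row: callers get both halves from a single query.
  record SourceAndDefinition(Source source, Definition definition) {}

  public static void main(final String[] args) {
    // In the repository each element is built from one joined row, not from two queries.
    final List<SourceAndDefinition> joined = List.of(
        new SourceAndDefinition(new Source("my-postgres"), new Definition("airbyte/source-postgres")));
    joined.forEach(pair -> System.out.println(pair.source().name() + " -> " + pair.definition().dockerRepository()));
  }
}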
-  @VisibleForTesting
-  public record SourceAndDefinition(SourceConnection source, StandardSourceDefinition definition) {
-
-  }
-
-  @VisibleForTesting
-  public record DestinationAndDefinition(DestinationConnection destination, StandardDestinationDefinition definition) {
-
-  }
-
-  public List<SourceAndDefinition> getSourceAndDefinitionsFromSourceIds(final List<UUID> sourceIds) throws IOException {
-    final Result<Record> records = database.query(ctx -> ctx
-        .select(ACTOR.asterisk(), ACTOR_DEFINITION.asterisk())
-        .from(ACTOR)
-        .join(ACTOR_DEFINITION)
-        .on(ACTOR.ACTOR_DEFINITION_ID.eq(ACTOR_DEFINITION.ID))
-        .where(ACTOR.ACTOR_TYPE.eq(ActorType.source), ACTOR.ID.in(sourceIds))
-        .fetch());
-
-    final List<SourceAndDefinition> sourceAndDefinitions = new ArrayList<>();
-
-    for (final Record record : records) {
-      final SourceConnection source = DbConverter.buildSourceConnection(record);
-      final StandardSourceDefinition definition = DbConverter.buildStandardSourceDefinition(record);
-      sourceAndDefinitions.add(new SourceAndDefinition(source, definition));
-    }
-
-    return sourceAndDefinitions;
-  }
-
-  public List<DestinationAndDefinition> getDestinationAndDefinitionsFromDestinationIds(final List<UUID> destinationIds) throws IOException {
-    final Result<Record> records = database.query(ctx -> ctx
-        .select(ACTOR.asterisk(), ACTOR_DEFINITION.asterisk())
-        .from(ACTOR)
-        .join(ACTOR_DEFINITION)
-        .on(ACTOR.ACTOR_DEFINITION_ID.eq(ACTOR_DEFINITION.ID))
-        .where(ACTOR.ACTOR_TYPE.eq(ActorType.destination), ACTOR.ID.in(destinationIds))
-        .fetch());
-
-    final List<DestinationAndDefinition> destinationAndDefinitions = new ArrayList<>();
-
-    for (final Record record : records) {
-      final DestinationConnection destination = DbConverter.buildDestinationConnection(record);
-      final StandardDestinationDefinition definition = DbConverter.buildStandardDestinationDefinition(record);
-      destinationAndDefinitions.add(new DestinationAndDefinition(destination, definition));
-    }
-
-    return destinationAndDefinitions;
-  }
-
-  public ActorCatalog getActorCatalogById(final UUID actorCatalogId)
-      throws IOException, ConfigNotFoundException {
-    final Result<Record> result = database.query(ctx -> ctx.select(ACTOR_CATALOG.asterisk())
-        .from(ACTOR_CATALOG).where(ACTOR_CATALOG.ID.eq(actorCatalogId))).fetch();
-
-    if (result.size() > 0) {
-      return DbConverter.buildActorCatalog(result.get(0));
-    }
-    throw new ConfigNotFoundException(ConfigSchema.ACTOR_CATALOG, actorCatalogId);
-  }
-
-  /**
-   * Store an Airbyte catalog in DB if it is not present already.
-   *
-   * Checks in the config DB if the catalog is present already; if so, returns its identifier. If it
-   * is not present, it is inserted in DB with a new identifier and that identifier is returned.
-   *
-   * @param airbyteCatalog An Airbyte catalog to cache
-   * @param context - db context
-   * @return the db identifier for the cached catalog.
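A self-contained restatement of that dedup flow, using the same Guava murmur3_32_fixed function the implementation below uses. An in-memory map stands in for the actor_catalog table, and unlike the real query it keeps a single row per hash, so it glosses over genuine 32-bit collisions between different catalogs:

import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

final class CatalogDedupSketch {

  record Row(UUID id, String json) {}

  private static final HashFunction HASH = Hashing.murmur3_32_fixed();

  // hash -> stored catalog; the database version indexes actor_catalog by catalog_hash.
  private final Map<String, Row> byHash = new HashMap<>();

  UUID getOrInsert(final String catalogJson) {
    final String hash = HASH.hashBytes(catalogJson.getBytes(StandardCharsets.UTF_8)).toString();
    final Row hit = byHash.get(hash);
    // The hash only narrows the search; payload equality is still checked, because
    // a 32-bit hash can collide.
    if (hit != null && hit.json().equals(catalogJson)) {
      return hit.id();
    }
    final UUID id = UUID.randomUUID();
    byHash.put(hash, new Row(id, catalogJson));
    return id;
  }
}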
- */ - private UUID getOrInsertActorCatalog(final AirbyteCatalog airbyteCatalog, - final DSLContext context, - final OffsetDateTime timestamp) { - final HashFunction hashFunction = Hashing.murmur3_32_fixed(); - final String catalogHash = hashFunction.hashBytes(Jsons.serialize(airbyteCatalog).getBytes( - Charsets.UTF_8)).toString(); - final Map catalogs = findCatalogByHash(catalogHash, context); - - for (final Map.Entry entry : catalogs.entrySet()) { - if (entry.getValue().equals(airbyteCatalog)) { - return entry.getKey(); - } - } - - final UUID catalogId = UUID.randomUUID(); - context.insertInto(ACTOR_CATALOG) - .set(ACTOR_CATALOG.ID, catalogId) - .set(ACTOR_CATALOG.CATALOG, JSONB.valueOf(Jsons.serialize(airbyteCatalog))) - .set(ACTOR_CATALOG.CATALOG_HASH, catalogHash) - .set(ACTOR_CATALOG.CREATED_AT, timestamp) - .set(ACTOR_CATALOG.MODIFIED_AT, timestamp).execute(); - return catalogId; - } - - public Optional getActorCatalog(final UUID actorId, - final String actorVersion, - final String configHash) - throws IOException { - final Result records = database.transaction(ctx -> ctx.select(ACTOR_CATALOG.asterisk()) - .from(ACTOR_CATALOG).join(ACTOR_CATALOG_FETCH_EVENT) - .on(ACTOR_CATALOG.ID.eq(ACTOR_CATALOG_FETCH_EVENT.ACTOR_CATALOG_ID)) - .where(ACTOR_CATALOG_FETCH_EVENT.ACTOR_ID.eq(actorId)) - .and(ACTOR_CATALOG_FETCH_EVENT.ACTOR_VERSION.eq(actorVersion)) - .and(ACTOR_CATALOG_FETCH_EVENT.CONFIG_HASH.eq(configHash)) - .orderBy(ACTOR_CATALOG_FETCH_EVENT.CREATED_AT.desc()).limit(1)).fetch(); - - return records.stream().findFirst().map(DbConverter::buildActorCatalog); - } - - public Optional getMostRecentSourceActorCatalog(final UUID sourceId) throws IOException { - final Result records = database.query(ctx -> ctx.select(ACTOR_CATALOG.asterisk(), ACTOR_CATALOG_FETCH_EVENT.CREATED_AT) - .from(ACTOR_CATALOG) - .join(ACTOR_CATALOG_FETCH_EVENT) - .on(ACTOR_CATALOG_FETCH_EVENT.ACTOR_CATALOG_ID.eq(ACTOR_CATALOG.ID)) - .where(ACTOR_CATALOG_FETCH_EVENT.ACTOR_ID.eq(sourceId)) - .orderBy(ACTOR_CATALOG_FETCH_EVENT.CREATED_AT.desc()).limit(1).fetch()); - return records.stream().findFirst().map(DbConverter::buildActorCatalogWithUpdatedAt); - } - - public Optional getMostRecentActorCatalogForSource(final UUID sourceId) throws IOException { - final Result records = database.query(ctx -> ctx.select(ACTOR_CATALOG.asterisk()) - .from(ACTOR_CATALOG) - .join(ACTOR_CATALOG_FETCH_EVENT) - .on(ACTOR_CATALOG_FETCH_EVENT.ACTOR_CATALOG_ID.eq(ACTOR_CATALOG.ID)) - .where(ACTOR_CATALOG_FETCH_EVENT.ACTOR_ID.eq(sourceId)) - .orderBy(ACTOR_CATALOG_FETCH_EVENT.CREATED_AT.desc()).limit(1).fetch()); - return records.stream().findFirst().map(DbConverter::buildActorCatalog); - } - - public Optional getMostRecentActorCatalogFetchEventForSource(final UUID sourceId) throws IOException { - - final Result records = database.query(ctx -> ctx.select(ACTOR_CATALOG_FETCH_EVENT.asterisk()) - .from(ACTOR_CATALOG_FETCH_EVENT) - .where(ACTOR_CATALOG_FETCH_EVENT.ACTOR_ID.eq(sourceId)) - .orderBy(ACTOR_CATALOG_FETCH_EVENT.CREATED_AT.desc()).limit(1).fetch()); - return records.stream().findFirst().map(DbConverter::buildActorCatalogFetchEvent); - } - - @SuppressWarnings({"unused", "SqlNoDataSourceInspection"}) - public Map getMostRecentActorCatalogFetchEventForSources(final List sourceIds) throws IOException { - // noinspection SqlResolve - if (sourceIds.isEmpty()) { - return Collections.emptyMap(); - } - return database.query(ctx -> ctx.fetch( - """ - select distinct actor_catalog_id, actor_id, created_at from - (select actor_catalog_id, 
actor_id, created_at, row_number() over (partition by actor_id order by created_at desc) as creation_order_row_number - from public.actor_catalog_fetch_event - where actor_id in ({0}) - ) table_with_rank - where creation_order_row_number = 1; - """, - DSL.list(sourceIds.stream().map(DSL::value).collect(Collectors.toList())))) - .stream().map(DbConverter::buildActorCatalogFetchEvent) - .collect(Collectors.toMap(ActorCatalogFetchEvent::getActorId, record -> record)); - } - - /** - * Stores source catalog information. - * - * This function is called each time the schema of a source is fetched. This can occur because the - * source is set up for the first time, because the configuration or version of the connector - * changed or because the user explicitly requested a schema refresh. Schemas are stored separately - * and de-duplicated upon insertion. Once a schema has been successfully stored, a call to - * getActorCatalog(actorId, connectionVersion, configurationHash) will return the most recent schema - * stored for those parameters. - * - * @param catalog - catalog that was fetched. - * @param actorId - actor the catalog was fetched by - * @param connectorVersion - version of the connector when catalog was fetched - * @param configurationHash - hash of the config of the connector when catalog was fetched - * @return The identifier (UUID) of the fetch event inserted in the database - * @throws IOException - error while interacting with db - */ - public UUID writeActorCatalogFetchEvent(final AirbyteCatalog catalog, - final UUID actorId, - final String connectorVersion, - final String configurationHash) - throws IOException { - final OffsetDateTime timestamp = OffsetDateTime.now(); - final UUID fetchEventID = UUID.randomUUID(); - return database.transaction(ctx -> { - final UUID catalogId = getOrInsertActorCatalog(catalog, ctx, timestamp); - ctx.insertInto(ACTOR_CATALOG_FETCH_EVENT) - .set(ACTOR_CATALOG_FETCH_EVENT.ID, fetchEventID) - .set(ACTOR_CATALOG_FETCH_EVENT.ACTOR_ID, actorId) - .set(ACTOR_CATALOG_FETCH_EVENT.ACTOR_CATALOG_ID, catalogId) - .set(ACTOR_CATALOG_FETCH_EVENT.CONFIG_HASH, configurationHash) - .set(ACTOR_CATALOG_FETCH_EVENT.ACTOR_VERSION, connectorVersion) - .set(ACTOR_CATALOG_FETCH_EVENT.MODIFIED_AT, timestamp) - .set(ACTOR_CATALOG_FETCH_EVENT.CREATED_AT, timestamp).execute(); - return catalogId; - }); - } - - public int countConnectionsForWorkspace(final UUID workspaceId) throws IOException { - return database.query(ctx -> ctx.selectCount() - .from(CONNECTION) - .join(ACTOR).on(CONNECTION.SOURCE_ID.eq(ACTOR.ID)) - .where(ACTOR.WORKSPACE_ID.eq(workspaceId)) - .and(CONNECTION.STATUS.notEqual(StatusType.deprecated)) - .andNot(ACTOR.TOMBSTONE)).fetchOne().into(int.class); - } - - public int countSourcesForWorkspace(final UUID workspaceId) throws IOException { - return database.query(ctx -> ctx.selectCount() - .from(ACTOR) - .where(ACTOR.WORKSPACE_ID.equal(workspaceId)) - .and(ACTOR.ACTOR_TYPE.eq(ActorType.source)) - .andNot(ACTOR.TOMBSTONE)).fetchOne().into(int.class); - } - - public int countDestinationsForWorkspace(final UUID workspaceId) throws IOException { - return database.query(ctx -> ctx.selectCount() - .from(ACTOR) - .where(ACTOR.WORKSPACE_ID.equal(workspaceId)) - .and(ACTOR.ACTOR_TYPE.eq(ActorType.destination)) - .andNot(ACTOR.TOMBSTONE)).fetchOne().into(int.class); - } - - /** - * The following methods are present to allow the JobCreationAndStatusUpdateActivity class to emit - * metrics without exposing the underlying database connection. 
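That comment describes a deliberate narrowing: the caller gets numbers, never a database handle. A minimal sketch of the same idea with invented names (WorkspaceCounts and emitMetrics are illustrations, not Airbyte APIs):

import java.io.IOException;
import java.util.UUID;

final class MetricsFacadeSketch {

  // The only surface the metrics-emitting caller sees: counts, no DSLContext.
  interface WorkspaceCounts {

    int connections(UUID workspaceId) throws IOException;

    int sources(UUID workspaceId) throws IOException;

    int destinations(UUID workspaceId) throws IOException;

  }

  static void emitMetrics(final WorkspaceCounts counts, final UUID workspaceId) throws IOException {
    // Stand-in for the real metric client.
    System.out.printf("connections=%d sources=%d destinations=%d%n",
        counts.connections(workspaceId), counts.sources(workspaceId), counts.destinations(workspaceId));
  }
}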
- */ - - private Condition includeTombstones(final Field tombstoneField, final boolean includeTombstones) { - if (includeTombstones) { - return DSL.trueCondition(); - } else { - return tombstoneField.eq(false); - } - } - - public WorkspaceServiceAccount getWorkspaceServiceAccountNoSecrets(final UUID workspaceId) throws IOException, ConfigNotFoundException { - // breaking the pattern of doing a list query, because we never want to list this resource without - // scoping by workspace id. - return database.query(ctx -> ctx.select(asterisk()).from(WORKSPACE_SERVICE_ACCOUNT) - .where(WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID.eq(workspaceId)) - .fetch()) - .map(DbConverter::buildWorkspaceServiceAccount) - .stream() - .findFirst() - .orElseThrow(() -> new ConfigNotFoundException(ConfigSchema.WORKSPACE_SERVICE_ACCOUNT, workspaceId)); - } - - public void writeWorkspaceServiceAccountNoSecrets(final WorkspaceServiceAccount workspaceServiceAccount) throws IOException { - database.transaction(ctx -> { - writeWorkspaceServiceAccount(Collections.singletonList(workspaceServiceAccount), ctx); - return null; - }); - } - - private void writeWorkspaceServiceAccount(final List configs, final DSLContext ctx) { - final OffsetDateTime timestamp = OffsetDateTime.now(); - configs.forEach((workspaceServiceAccount) -> { - final boolean isExistingConfig = ctx.fetchExists(select() - .from(WORKSPACE_SERVICE_ACCOUNT) - .where(WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID.eq(workspaceServiceAccount.getWorkspaceId()))); - - if (isExistingConfig) { - ctx.update(WORKSPACE_SERVICE_ACCOUNT) - .set(WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID, workspaceServiceAccount.getWorkspaceId()) - .set(WORKSPACE_SERVICE_ACCOUNT.SERVICE_ACCOUNT_ID, workspaceServiceAccount.getServiceAccountId()) - .set(WORKSPACE_SERVICE_ACCOUNT.SERVICE_ACCOUNT_EMAIL, workspaceServiceAccount.getServiceAccountEmail()) - .set(WORKSPACE_SERVICE_ACCOUNT.JSON_CREDENTIAL, JSONB.valueOf(Jsons.serialize(workspaceServiceAccount.getJsonCredential()))) - .set(WORKSPACE_SERVICE_ACCOUNT.HMAC_KEY, JSONB.valueOf(Jsons.serialize(workspaceServiceAccount.getHmacKey()))) - .set(WORKSPACE_SERVICE_ACCOUNT.UPDATED_AT, timestamp) - .where(WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID.eq(workspaceServiceAccount.getWorkspaceId())) - .execute(); - } else { - ctx.insertInto(WORKSPACE_SERVICE_ACCOUNT) - .set(WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID, workspaceServiceAccount.getWorkspaceId()) - .set(WORKSPACE_SERVICE_ACCOUNT.SERVICE_ACCOUNT_ID, workspaceServiceAccount.getServiceAccountId()) - .set(WORKSPACE_SERVICE_ACCOUNT.SERVICE_ACCOUNT_EMAIL, workspaceServiceAccount.getServiceAccountEmail()) - .set(WORKSPACE_SERVICE_ACCOUNT.JSON_CREDENTIAL, JSONB.valueOf(Jsons.serialize(workspaceServiceAccount.getJsonCredential()))) - .set(WORKSPACE_SERVICE_ACCOUNT.HMAC_KEY, JSONB.valueOf(Jsons.serialize(workspaceServiceAccount.getHmacKey()))) - .set(WORKSPACE_SERVICE_ACCOUNT.CREATED_AT, timestamp) - .set(WORKSPACE_SERVICE_ACCOUNT.UPDATED_AT, timestamp) - .execute(); - } - }); - } - - public List getAllStreamsForConnection(final UUID connectionId) throws ConfigNotFoundException, IOException { - return standardSyncPersistence.getAllStreamsForConnection(connectionId); - } - - public ConfiguredAirbyteCatalog getConfiguredCatalogForConnection(final UUID connectionId) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardSync standardSync = getStandardSync(connectionId); - return standardSync.getCatalog(); - } - - public Geography getGeographyForConnection(final UUID connectionId) throws 
IOException { - return database.query(ctx -> ctx.select(CONNECTION.GEOGRAPHY) - .from(CONNECTION) - .where(CONNECTION.ID.eq(connectionId)) - .limit(1)) - .fetchOneInto(Geography.class); - } - - public Geography getGeographyForWorkspace(final UUID workspaceId) throws IOException { - return database.query(ctx -> ctx.select(WORKSPACE.GEOGRAPHY) - .from(WORKSPACE) - .where(WORKSPACE.ID.eq(workspaceId)) - .limit(1)) - .fetchOneInto(Geography.class); - } - - /** - * Specialized query for efficiently determining eligibility for the Free Connector Program. If a - * workspace has at least one Alpha or Beta connector, users of that workspace will be prompted to - * sign up for the program. This check is performed on nearly every page load so the query needs to - * be as efficient as possible. - * - * @param workspaceId ID of the workspace to check connectors for - * @return boolean indicating if an alpha or beta connector exists within the workspace - */ - public boolean getWorkspaceHasAlphaOrBetaConnector(final UUID workspaceId) throws IOException { - final Condition releaseStageAlphaOrBeta = ACTOR_DEFINITION.RELEASE_STAGE.eq(ReleaseStage.alpha) - .or(ACTOR_DEFINITION.RELEASE_STAGE.eq(ReleaseStage.beta)); - - final Integer countResult = database.query(ctx -> ctx.selectCount() - .from(ACTOR) - .join(ACTOR_DEFINITION).on(ACTOR_DEFINITION.ID.eq(ACTOR.ACTOR_DEFINITION_ID)) - .where(ACTOR.WORKSPACE_ID.eq(workspaceId)) - .and(ACTOR.TOMBSTONE.notEqual(true)) - .and(releaseStageAlphaOrBeta)) - .fetchOneInto(Integer.class); - - return countResult > 0; - } - - /** - * Specialized query for efficiently determining a connection's eligibility for the Free Connector - * Program. If a connection has at least one Alpha or Beta connector, it will be free to use as long - * as the workspace is enrolled in the Free Connector Program. This check is used to allow free - * connections to continue running even when a workspace runs out of credits. - * - * @param connectionId ID of the connection to check connectors for - * @return boolean indicating if an alpha or beta connector is used by the connection - */ - public boolean getConnectionHasAlphaOrBetaConnector(final UUID connectionId) throws IOException { - final Condition releaseStageAlphaOrBeta = ACTOR_DEFINITION.RELEASE_STAGE.eq(ReleaseStage.alpha) - .or(ACTOR_DEFINITION.RELEASE_STAGE.eq(ReleaseStage.beta)); - - final Integer countResult = database.query(ctx -> ctx.selectCount() - .from(CONNECTION) - .join(ACTOR).on(ACTOR.ID.eq(CONNECTION.SOURCE_ID).or(ACTOR.ID.eq(CONNECTION.DESTINATION_ID))) - .join(ACTOR_DEFINITION).on(ACTOR_DEFINITION.ID.eq(ACTOR.ACTOR_DEFINITION_ID)) - .where(CONNECTION.ID.eq(connectionId)) - .and(releaseStageAlphaOrBeta)) - .fetchOneInto(Integer.class); - - return countResult > 0; - } - - /** - * Deletes all records with given id. If it deletes anything, returns true. Otherwise, false. - * - * @param table - table from which to delete the record - * @param id - id of the record to delete - * @return true if anything was deleted, otherwise false. 
-   * @throws IOException - you never know when you IO
-   */
-  @SuppressWarnings("SameParameterValue")
-  private boolean deleteById(final Table table, final UUID id) throws IOException {
-    return database.transaction(ctx -> ctx.deleteFrom(table).where(DSL.field(DSL.name(PRIMARY_KEY)).eq(id)).execute()) > 0;
-  }
-
-}
diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigWriter.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigWriter.java
deleted file mode 100644
index 0464079b1268c..0000000000000
--- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigWriter.java
+++ /dev/null
@@ -1,248 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.config.persistence;
-
-import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR;
-import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION;
-
-import io.airbyte.commons.enums.Enums;
-import io.airbyte.commons.json.Jsons;
-import io.airbyte.commons.version.AirbyteProtocolVersion;
-import io.airbyte.commons.version.Version;
-import io.airbyte.config.StandardDestinationDefinition;
-import io.airbyte.config.StandardSourceDefinition;
-import io.airbyte.db.instance.configs.jooq.generated.Tables;
-import io.airbyte.db.instance.configs.jooq.generated.enums.ActorType;
-import io.airbyte.db.instance.configs.jooq.generated.enums.ReleaseStage;
-import io.airbyte.db.instance.configs.jooq.generated.enums.SourceType;
-import java.time.LocalDate;
-import java.time.OffsetDateTime;
-import java.util.*;
-import java.util.Map.Entry;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import org.jooq.DSLContext;
-import org.jooq.JSONB;
-import org.jooq.Record4;
-import org.jooq.impl.DSL;
-
-/**
- * This class can be used to store DB queries for persisting configs that we may want to reuse
- * across this package.
- * <p>
    - * Currently this class is used to move write queries out of {@link ConfigPersistence} so that they - * can be reused/composed in {@link ConfigRepository}. - */ -@SuppressWarnings("PMD.CognitiveComplexity") -public class ConfigWriter { - - /** - * @return A set of connectors (both source and destination) that are already used in standard - * syncs. We identify connectors by its repository name instead of definition id because - * connectors can be added manually by users, and their config ids are not always the same - * as those in the seed. - */ - static Set getConnectorRepositoriesInUse(final DSLContext ctx) { - return getActorDefinitionsInUse(ctx) - .map(r -> r.get(ACTOR_DEFINITION.DOCKER_REPOSITORY)) - .collect(Collectors.toSet()); - } - - /** - * Get a map of connector to protocol version for all the connectors that are used in a standard - * syncs. - */ - static Map> getActorDefinitionsInUseToProtocolVersion(final DSLContext ctx) { - return getActorDefinitionsInUse(ctx) - .collect(Collectors.toMap(r -> r.get(ACTOR_DEFINITION.ID), - r -> Map.entry( - r.get(ACTOR_DEFINITION.ACTOR_TYPE) == ActorType.source ? io.airbyte.config.ActorType.SOURCE : io.airbyte.config.ActorType.DESTINATION, - AirbyteProtocolVersion.getWithDefault(r.get(ACTOR_DEFINITION.PROTOCOL_VERSION))), - // We may have duplicated entries from the data. We can pick any values in the merge function - (lhs, rhs) -> lhs)); - } - - private static Stream> getActorDefinitionsInUse(final DSLContext ctx) { - return ctx.select(ACTOR_DEFINITION.ID, ACTOR_DEFINITION.DOCKER_REPOSITORY, ACTOR_DEFINITION.ACTOR_TYPE, ACTOR_DEFINITION.PROTOCOL_VERSION) - .from(ACTOR_DEFINITION) - .join(ACTOR).on(ACTOR.ACTOR_DEFINITION_ID.equal(ACTOR_DEFINITION.ID)) - .fetch() - .stream(); - } - - static void writeStandardSourceDefinition(final List configs, final DSLContext ctx) { - final OffsetDateTime timestamp = OffsetDateTime.now(); - configs.forEach((standardSourceDefinition) -> { - final boolean isExistingConfig = ctx.fetchExists(DSL.select() - .from(Tables.ACTOR_DEFINITION) - .where(Tables.ACTOR_DEFINITION.ID.eq(standardSourceDefinition.getSourceDefinitionId()))); - - if (isExistingConfig) { - ctx.update(Tables.ACTOR_DEFINITION) - .set(Tables.ACTOR_DEFINITION.ID, standardSourceDefinition.getSourceDefinitionId()) - .set(Tables.ACTOR_DEFINITION.NAME, standardSourceDefinition.getName()) - .set(Tables.ACTOR_DEFINITION.DOCKER_REPOSITORY, standardSourceDefinition.getDockerRepository()) - .set(Tables.ACTOR_DEFINITION.DOCKER_IMAGE_TAG, standardSourceDefinition.getDockerImageTag()) - .set(Tables.ACTOR_DEFINITION.DOCUMENTATION_URL, standardSourceDefinition.getDocumentationUrl()) - .set(Tables.ACTOR_DEFINITION.ICON, standardSourceDefinition.getIcon()) - .set(Tables.ACTOR_DEFINITION.ACTOR_TYPE, ActorType.source) - .set(Tables.ACTOR_DEFINITION.SOURCE_TYPE, - standardSourceDefinition.getSourceType() == null ? null - : Enums.toEnum(standardSourceDefinition.getSourceType().value(), - SourceType.class).orElseThrow()) - .set(Tables.ACTOR_DEFINITION.SPEC, JSONB.valueOf(Jsons.serialize(standardSourceDefinition.getSpec()))) - .set(Tables.ACTOR_DEFINITION.PROTOCOL_VERSION, standardSourceDefinition.getProtocolVersion()) - .set(Tables.ACTOR_DEFINITION.TOMBSTONE, standardSourceDefinition.getTombstone()) - .set(Tables.ACTOR_DEFINITION.PUBLIC, standardSourceDefinition.getPublic()) - .set(Tables.ACTOR_DEFINITION.CUSTOM, standardSourceDefinition.getCustom()) - .set(Tables.ACTOR_DEFINITION.RELEASE_STAGE, standardSourceDefinition.getReleaseStage() == null ? 
null - : Enums.toEnum(standardSourceDefinition.getReleaseStage().value(), - ReleaseStage.class).orElseThrow()) - .set(Tables.ACTOR_DEFINITION.RELEASE_DATE, standardSourceDefinition.getReleaseDate() == null ? null - : LocalDate.parse(standardSourceDefinition.getReleaseDate())) - .set(Tables.ACTOR_DEFINITION.RESOURCE_REQUIREMENTS, - standardSourceDefinition.getResourceRequirements() == null ? null - : JSONB.valueOf(Jsons.serialize(standardSourceDefinition.getResourceRequirements()))) - .set(Tables.ACTOR_DEFINITION.ALLOWED_HOSTS, standardSourceDefinition.getAllowedHosts() == null ? null - : JSONB.valueOf(Jsons.serialize(standardSourceDefinition.getAllowedHosts()))) - .set(ACTOR_DEFINITION.SUGGESTED_STREAMS, standardSourceDefinition.getSuggestedStreams() == null ? null - : JSONB.valueOf(Jsons.serialize(standardSourceDefinition.getSuggestedStreams()))) - .set(Tables.ACTOR_DEFINITION.UPDATED_AT, timestamp) - .where(Tables.ACTOR_DEFINITION.ID.eq(standardSourceDefinition.getSourceDefinitionId())) - .execute(); - - } else { - ctx.insertInto(Tables.ACTOR_DEFINITION) - .set(Tables.ACTOR_DEFINITION.ID, standardSourceDefinition.getSourceDefinitionId()) - .set(Tables.ACTOR_DEFINITION.NAME, standardSourceDefinition.getName()) - .set(Tables.ACTOR_DEFINITION.DOCKER_REPOSITORY, standardSourceDefinition.getDockerRepository()) - .set(Tables.ACTOR_DEFINITION.DOCKER_IMAGE_TAG, standardSourceDefinition.getDockerImageTag()) - .set(Tables.ACTOR_DEFINITION.DOCUMENTATION_URL, standardSourceDefinition.getDocumentationUrl()) - .set(Tables.ACTOR_DEFINITION.ICON, standardSourceDefinition.getIcon()) - .set(Tables.ACTOR_DEFINITION.ACTOR_TYPE, ActorType.source) - .set(Tables.ACTOR_DEFINITION.SOURCE_TYPE, - standardSourceDefinition.getSourceType() == null ? null - : Enums.toEnum(standardSourceDefinition.getSourceType().value(), - SourceType.class).orElseThrow()) - .set(Tables.ACTOR_DEFINITION.SPEC, JSONB.valueOf(Jsons.serialize(standardSourceDefinition.getSpec()))) - .set(Tables.ACTOR_DEFINITION.PROTOCOL_VERSION, standardSourceDefinition.getProtocolVersion()) - .set(Tables.ACTOR_DEFINITION.TOMBSTONE, standardSourceDefinition.getTombstone() != null && standardSourceDefinition.getTombstone()) - .set(Tables.ACTOR_DEFINITION.PUBLIC, standardSourceDefinition.getPublic()) - .set(Tables.ACTOR_DEFINITION.CUSTOM, standardSourceDefinition.getCustom()) - .set(Tables.ACTOR_DEFINITION.RELEASE_STAGE, - standardSourceDefinition.getReleaseStage() == null ? null - : Enums.toEnum(standardSourceDefinition.getReleaseStage().value(), - ReleaseStage.class).orElseThrow()) - .set(Tables.ACTOR_DEFINITION.RELEASE_DATE, standardSourceDefinition.getReleaseDate() == null ? null - : LocalDate.parse(standardSourceDefinition.getReleaseDate())) - .set(Tables.ACTOR_DEFINITION.RESOURCE_REQUIREMENTS, - standardSourceDefinition.getResourceRequirements() == null ? null - : JSONB.valueOf(Jsons.serialize(standardSourceDefinition.getResourceRequirements()))) - .set(ACTOR_DEFINITION.ALLOWED_HOSTS, standardSourceDefinition.getAllowedHosts() == null ? null - : JSONB.valueOf(Jsons.serialize(standardSourceDefinition.getAllowedHosts()))) - .set(ACTOR_DEFINITION.SUGGESTED_STREAMS, standardSourceDefinition.getSuggestedStreams() == null ? 
null - : JSONB.valueOf(Jsons.serialize(standardSourceDefinition.getSuggestedStreams()))) - .set(Tables.ACTOR_DEFINITION.CREATED_AT, timestamp) - .set(Tables.ACTOR_DEFINITION.UPDATED_AT, timestamp) - .execute(); - } - }); - } - - static void writeStandardDestinationDefinition(final List configs, final DSLContext ctx) { - final OffsetDateTime timestamp = OffsetDateTime.now(); - configs.forEach((standardDestinationDefinition) -> { - final boolean isExistingConfig = ctx.fetchExists(DSL.select() - .from(Tables.ACTOR_DEFINITION) - .where(Tables.ACTOR_DEFINITION.ID.eq(standardDestinationDefinition.getDestinationDefinitionId()))); - - if (isExistingConfig) { - ctx.update(Tables.ACTOR_DEFINITION) - .set(Tables.ACTOR_DEFINITION.ID, standardDestinationDefinition.getDestinationDefinitionId()) - .set(Tables.ACTOR_DEFINITION.NAME, standardDestinationDefinition.getName()) - .set(Tables.ACTOR_DEFINITION.DOCKER_REPOSITORY, standardDestinationDefinition.getDockerRepository()) - .set(Tables.ACTOR_DEFINITION.DOCKER_IMAGE_TAG, standardDestinationDefinition.getDockerImageTag()) - .set(Tables.ACTOR_DEFINITION.DOCUMENTATION_URL, standardDestinationDefinition.getDocumentationUrl()) - .set(Tables.ACTOR_DEFINITION.ICON, standardDestinationDefinition.getIcon()) - .set(Tables.ACTOR_DEFINITION.ACTOR_TYPE, ActorType.destination) - .set(Tables.ACTOR_DEFINITION.SPEC, JSONB.valueOf(Jsons.serialize(standardDestinationDefinition.getSpec()))) - .set(Tables.ACTOR_DEFINITION.PROTOCOL_VERSION, standardDestinationDefinition.getProtocolVersion()) - .set(Tables.ACTOR_DEFINITION.TOMBSTONE, standardDestinationDefinition.getTombstone()) - .set(Tables.ACTOR_DEFINITION.PUBLIC, standardDestinationDefinition.getPublic()) - .set(Tables.ACTOR_DEFINITION.CUSTOM, standardDestinationDefinition.getCustom()) - .set(Tables.ACTOR_DEFINITION.RELEASE_STAGE, standardDestinationDefinition.getReleaseStage() == null ? null - : Enums.toEnum(standardDestinationDefinition.getReleaseStage().value(), - ReleaseStage.class).orElseThrow()) - .set(Tables.ACTOR_DEFINITION.RELEASE_DATE, standardDestinationDefinition.getReleaseDate() == null ? null - : LocalDate.parse(standardDestinationDefinition.getReleaseDate())) - .set(Tables.ACTOR_DEFINITION.RESOURCE_REQUIREMENTS, - standardDestinationDefinition.getResourceRequirements() == null ? null - : JSONB.valueOf(Jsons.serialize(standardDestinationDefinition.getResourceRequirements()))) - .set(Tables.ACTOR_DEFINITION.NORMALIZATION_REPOSITORY, - Objects.nonNull(standardDestinationDefinition.getNormalizationConfig()) - ? standardDestinationDefinition.getNormalizationConfig().getNormalizationRepository() - : null) - .set(Tables.ACTOR_DEFINITION.NORMALIZATION_TAG, - Objects.nonNull(standardDestinationDefinition.getNormalizationConfig()) - ? standardDestinationDefinition.getNormalizationConfig().getNormalizationTag() - : null) - .set(Tables.ACTOR_DEFINITION.SUPPORTS_DBT, standardDestinationDefinition.getSupportsDbt()) - .set(Tables.ACTOR_DEFINITION.NORMALIZATION_INTEGRATION_TYPE, - Objects.nonNull(standardDestinationDefinition.getNormalizationConfig()) - ? standardDestinationDefinition.getNormalizationConfig().getNormalizationIntegrationType() - : null) - .set(ACTOR_DEFINITION.ALLOWED_HOSTS, standardDestinationDefinition.getAllowedHosts() == null ? 
null - : JSONB.valueOf(Jsons.serialize(standardDestinationDefinition.getAllowedHosts()))) - .set(Tables.ACTOR_DEFINITION.UPDATED_AT, timestamp) - .where(Tables.ACTOR_DEFINITION.ID.eq(standardDestinationDefinition.getDestinationDefinitionId())) - .execute(); - - } else { - ctx.insertInto(Tables.ACTOR_DEFINITION) - .set(Tables.ACTOR_DEFINITION.ID, standardDestinationDefinition.getDestinationDefinitionId()) - .set(Tables.ACTOR_DEFINITION.NAME, standardDestinationDefinition.getName()) - .set(Tables.ACTOR_DEFINITION.DOCKER_REPOSITORY, standardDestinationDefinition.getDockerRepository()) - .set(Tables.ACTOR_DEFINITION.DOCKER_IMAGE_TAG, standardDestinationDefinition.getDockerImageTag()) - .set(Tables.ACTOR_DEFINITION.DOCUMENTATION_URL, standardDestinationDefinition.getDocumentationUrl()) - .set(Tables.ACTOR_DEFINITION.ICON, standardDestinationDefinition.getIcon()) - .set(Tables.ACTOR_DEFINITION.ACTOR_TYPE, ActorType.destination) - .set(Tables.ACTOR_DEFINITION.SPEC, JSONB.valueOf(Jsons.serialize(standardDestinationDefinition.getSpec()))) - .set(Tables.ACTOR_DEFINITION.PROTOCOL_VERSION, standardDestinationDefinition.getProtocolVersion()) - .set(Tables.ACTOR_DEFINITION.TOMBSTONE, - standardDestinationDefinition.getTombstone() != null && standardDestinationDefinition.getTombstone()) - .set(Tables.ACTOR_DEFINITION.PUBLIC, standardDestinationDefinition.getPublic()) - .set(Tables.ACTOR_DEFINITION.CUSTOM, standardDestinationDefinition.getCustom()) - .set(Tables.ACTOR_DEFINITION.RELEASE_STAGE, - standardDestinationDefinition.getReleaseStage() == null ? null - : Enums.toEnum(standardDestinationDefinition.getReleaseStage().value(), - ReleaseStage.class).orElseThrow()) - .set(Tables.ACTOR_DEFINITION.RELEASE_DATE, standardDestinationDefinition.getReleaseDate() == null ? null - : LocalDate.parse(standardDestinationDefinition.getReleaseDate())) - .set(Tables.ACTOR_DEFINITION.RESOURCE_REQUIREMENTS, - standardDestinationDefinition.getResourceRequirements() == null ? null - : JSONB.valueOf(Jsons.serialize(standardDestinationDefinition.getResourceRequirements()))) - .set(Tables.ACTOR_DEFINITION.NORMALIZATION_REPOSITORY, - Objects.nonNull(standardDestinationDefinition.getNormalizationConfig()) - ? standardDestinationDefinition.getNormalizationConfig().getNormalizationRepository() - : null) - .set(Tables.ACTOR_DEFINITION.NORMALIZATION_TAG, - Objects.nonNull(standardDestinationDefinition.getNormalizationConfig()) - ? standardDestinationDefinition.getNormalizationConfig().getNormalizationTag() - : null) - .set(Tables.ACTOR_DEFINITION.SUPPORTS_DBT, standardDestinationDefinition.getSupportsDbt()) - .set(Tables.ACTOR_DEFINITION.NORMALIZATION_INTEGRATION_TYPE, - Objects.nonNull(standardDestinationDefinition.getNormalizationConfig()) - ? standardDestinationDefinition.getNormalizationConfig().getNormalizationIntegrationType() - : null) - .set(ACTOR_DEFINITION.ALLOWED_HOSTS, standardDestinationDefinition.getAllowedHosts() == null ? 
null - : JSONB.valueOf(Jsons.serialize(standardDestinationDefinition.getAllowedHosts()))) - .set(Tables.ACTOR_DEFINITION.CREATED_AT, timestamp) - .set(Tables.ACTOR_DEFINITION.UPDATED_AT, timestamp) - .execute(); - } - }); - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/DbConverter.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/DbConverter.java deleted file mode 100644 index 4120dedf63bf9..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/DbConverter.java +++ /dev/null @@ -1,278 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_CATALOG; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_CATALOG_FETCH_EVENT; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_OAUTH_PARAMETER; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.WORKSPACE; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.WORKSPACE_SERVICE_ACCOUNT; - -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.protocol.migrations.v1.CatalogMigrationV1Helper; -import io.airbyte.config.ActorCatalog; -import io.airbyte.config.ActorCatalogFetchEvent; -import io.airbyte.config.ActorCatalogWithUpdatedAt; -import io.airbyte.config.ActorDefinitionResourceRequirements; -import io.airbyte.config.AllowedHosts; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.FieldSelectionData; -import io.airbyte.config.Geography; -import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.config.NormalizationDestinationDefinitionConfig; -import io.airbyte.config.Notification; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.Schedule; -import io.airbyte.config.ScheduleData; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSourceDefinition.SourceType; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.NonBreakingChangesPreference; -import io.airbyte.config.StandardSync.ScheduleType; -import io.airbyte.config.StandardSync.Status; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.SuggestedStreams; -import io.airbyte.config.WorkspaceServiceAccount; -import io.airbyte.protocol.models.AirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConnectorSpecification; -import java.time.LocalDateTime; -import java.time.ZoneOffset; -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -import java.util.UUID; -import org.jooq.Record; - -/** - * Provides static methods for converting from repository layer results (often in the form of a jooq - * {@link Record}) to config models. 
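- * <p>
- * A minimal usage sketch (hypothetical names; {@code record} is assumed to be a row fetched from
- * the CONNECTION table and {@code operationIds} the ids of its associated operations):
- *
- * <pre>{@code
- * final Record record = ctx.select(asterisk()).from(CONNECTION).where(CONNECTION.ID.eq(connectionId)).fetchOne();
- * final StandardSync sync = DbConverter.buildStandardSync(record, operationIds);
- * }</pre>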
- */ -public class DbConverter { - - public static StandardSync buildStandardSync(final Record record, final List connectionOperationId) { - return new StandardSync() - .withConnectionId(record.get(CONNECTION.ID)) - .withNamespaceDefinition( - Enums.toEnum(record.get(CONNECTION.NAMESPACE_DEFINITION, String.class), NamespaceDefinitionType.class) - .orElseThrow()) - .withNamespaceFormat(record.get(CONNECTION.NAMESPACE_FORMAT)) - .withPrefix(record.get(CONNECTION.PREFIX)) - .withSourceId(record.get(CONNECTION.SOURCE_ID)) - .withDestinationId(record.get(CONNECTION.DESTINATION_ID)) - .withName(record.get(CONNECTION.NAME)) - .withCatalog(parseConfiguredAirbyteCatalog(record.get(CONNECTION.CATALOG).data())) - .withFieldSelectionData(record.get(CONNECTION.FIELD_SELECTION_DATA) == null ? null - : Jsons.deserialize(record.get(CONNECTION.FIELD_SELECTION_DATA).data(), FieldSelectionData.class)) - .withStatus( - record.get(CONNECTION.STATUS) == null ? null - : Enums.toEnum(record.get(CONNECTION.STATUS, String.class), Status.class).orElseThrow()) - .withSchedule(Jsons.deserialize(record.get(CONNECTION.SCHEDULE).data(), Schedule.class)) - .withManual(record.get(CONNECTION.MANUAL)) - .withScheduleType(record.get(CONNECTION.SCHEDULE_TYPE) == null ? null - : Enums.toEnum(record.get(CONNECTION.SCHEDULE_TYPE, String.class), ScheduleType.class).orElseThrow()) - .withScheduleData( - record.get(CONNECTION.SCHEDULE_DATA) == null ? null - : Jsons.deserialize(record.get(CONNECTION.SCHEDULE_DATA).data(), ScheduleData.class)) - .withOperationIds(connectionOperationId) - .withResourceRequirements( - Jsons.deserialize(record.get(CONNECTION.RESOURCE_REQUIREMENTS).data(), ResourceRequirements.class)) - .withSourceCatalogId(record.get(CONNECTION.SOURCE_CATALOG_ID)) - .withBreakingChange(record.get(CONNECTION.BREAKING_CHANGE)) - .withGeography(Enums.toEnum(record.get(CONNECTION.GEOGRAPHY, String.class), Geography.class).orElseThrow()) - .withNonBreakingChangesPreference( - Enums.toEnum(record.get(CONNECTION.NON_BREAKING_CHANGE_PREFERENCE, String.class), NonBreakingChangesPreference.class).orElseThrow()) - .withNotifySchemaChanges(record.get(CONNECTION.NOTIFY_SCHEMA_CHANGES)); - } - - private static ConfiguredAirbyteCatalog parseConfiguredAirbyteCatalog(final String configuredAirbyteCatalogString) { - final ConfiguredAirbyteCatalog configuredAirbyteCatalog = Jsons.deserialize(configuredAirbyteCatalogString, ConfiguredAirbyteCatalog.class); - // On-the-fly migration of persisted data types related objects (protocol v0->v1) - // TODO feature flag this for data types rollout - // CatalogMigrationV1Helper.upgradeSchemaIfNeeded(configuredAirbyteCatalog); - CatalogMigrationV1Helper.downgradeSchemaIfNeeded(configuredAirbyteCatalog); - return configuredAirbyteCatalog; - } - - public static StandardWorkspace buildStandardWorkspace(final Record record) { - final List notificationList = new ArrayList<>(); - final List fetchedNotifications = Jsons.deserialize(record.get(WORKSPACE.NOTIFICATIONS).data(), List.class); - for (final Object notification : fetchedNotifications) { - notificationList.add(Jsons.convertValue(notification, Notification.class)); - } - return new StandardWorkspace() - .withWorkspaceId(record.get(WORKSPACE.ID)) - .withName(record.get(WORKSPACE.NAME)) - .withSlug(record.get(WORKSPACE.SLUG)) - .withInitialSetupComplete(record.get(WORKSPACE.INITIAL_SETUP_COMPLETE)) - .withCustomerId(record.get(WORKSPACE.CUSTOMER_ID)) - .withEmail(record.get(WORKSPACE.EMAIL)) - 
.withAnonymousDataCollection(record.get(WORKSPACE.ANONYMOUS_DATA_COLLECTION)) - .withNews(record.get(WORKSPACE.SEND_NEWSLETTER)) - .withSecurityUpdates(record.get(WORKSPACE.SEND_SECURITY_UPDATES)) - .withDisplaySetupWizard(record.get(WORKSPACE.DISPLAY_SETUP_WIZARD)) - .withTombstone(record.get(WORKSPACE.TOMBSTONE)) - .withNotifications(notificationList) - .withFirstCompletedSync(record.get(WORKSPACE.FIRST_SYNC_COMPLETE)) - .withFeedbackDone(record.get(WORKSPACE.FEEDBACK_COMPLETE)) - .withDefaultGeography( - Enums.toEnum(record.get(WORKSPACE.GEOGRAPHY, String.class), Geography.class).orElseThrow()) - .withWebhookOperationConfigs(record.get(WORKSPACE.WEBHOOK_OPERATION_CONFIGS) == null ? null - : Jsons.deserialize(record.get(WORKSPACE.WEBHOOK_OPERATION_CONFIGS).data())); - } - - public static SourceConnection buildSourceConnection(final Record record) { - return new SourceConnection() - .withSourceId(record.get(ACTOR.ID)) - .withConfiguration(Jsons.deserialize(record.get(ACTOR.CONFIGURATION).data())) - .withWorkspaceId(record.get(ACTOR.WORKSPACE_ID)) - .withSourceDefinitionId(record.get(ACTOR.ACTOR_DEFINITION_ID)) - .withTombstone(record.get(ACTOR.TOMBSTONE)) - .withName(record.get(ACTOR.NAME)); - } - - public static DestinationConnection buildDestinationConnection(final Record record) { - return new DestinationConnection() - .withDestinationId(record.get(ACTOR.ID)) - .withConfiguration(Jsons.deserialize(record.get(ACTOR.CONFIGURATION).data())) - .withWorkspaceId(record.get(ACTOR.WORKSPACE_ID)) - .withDestinationDefinitionId(record.get(ACTOR.ACTOR_DEFINITION_ID)) - .withTombstone(record.get(ACTOR.TOMBSTONE)) - .withName(record.get(ACTOR.NAME)); - } - - public static StandardSourceDefinition buildStandardSourceDefinition(final Record record) { - return new StandardSourceDefinition() - .withSourceDefinitionId(record.get(ACTOR_DEFINITION.ID)) - .withDockerImageTag(record.get(ACTOR_DEFINITION.DOCKER_IMAGE_TAG)) - .withIcon(record.get(ACTOR_DEFINITION.ICON)) - .withDockerRepository(record.get(ACTOR_DEFINITION.DOCKER_REPOSITORY)) - .withDocumentationUrl(record.get(ACTOR_DEFINITION.DOCUMENTATION_URL)) - .withName(record.get(ACTOR_DEFINITION.NAME)) - .withSourceType(record.get(ACTOR_DEFINITION.SOURCE_TYPE) == null ? null - : Enums.toEnum(record.get(ACTOR_DEFINITION.SOURCE_TYPE, String.class), SourceType.class).orElseThrow()) - .withSpec(Jsons.deserialize(record.get(ACTOR_DEFINITION.SPEC).data(), ConnectorSpecification.class)) - .withProtocolVersion(record.get(ACTOR_DEFINITION.PROTOCOL_VERSION, String.class)) - .withTombstone(record.get(ACTOR_DEFINITION.TOMBSTONE)) - .withPublic(record.get(ACTOR_DEFINITION.PUBLIC)) - .withCustom(record.get(ACTOR_DEFINITION.CUSTOM)) - .withReleaseStage(record.get(ACTOR_DEFINITION.RELEASE_STAGE) == null ? null - : Enums.toEnum(record.get(ACTOR_DEFINITION.RELEASE_STAGE, String.class), StandardSourceDefinition.ReleaseStage.class).orElseThrow()) - .withReleaseDate(record.get(ACTOR_DEFINITION.RELEASE_DATE) == null ? null - : record.get(ACTOR_DEFINITION.RELEASE_DATE).toString()) - .withResourceRequirements(record.get(ACTOR_DEFINITION.RESOURCE_REQUIREMENTS) == null - ? null - : Jsons.deserialize(record.get(ACTOR_DEFINITION.RESOURCE_REQUIREMENTS).data(), ActorDefinitionResourceRequirements.class)) - .withAllowedHosts(record.get(ACTOR_DEFINITION.ALLOWED_HOSTS) == null - ? null - : Jsons.deserialize(record.get(ACTOR_DEFINITION.ALLOWED_HOSTS).data(), AllowedHosts.class)) - .withSuggestedStreams(record.get(ACTOR_DEFINITION.SUGGESTED_STREAMS) == null - ? 
null - : Jsons.deserialize(record.get(ACTOR_DEFINITION.SUGGESTED_STREAMS).data(), SuggestedStreams.class)); - } - - public static StandardDestinationDefinition buildStandardDestinationDefinition(final Record record) { - return new StandardDestinationDefinition() - .withDestinationDefinitionId(record.get(ACTOR_DEFINITION.ID)) - .withDockerImageTag(record.get(ACTOR_DEFINITION.DOCKER_IMAGE_TAG)) - .withIcon(record.get(ACTOR_DEFINITION.ICON)) - .withDockerRepository(record.get(ACTOR_DEFINITION.DOCKER_REPOSITORY)) - .withDocumentationUrl(record.get(ACTOR_DEFINITION.DOCUMENTATION_URL)) - .withName(record.get(ACTOR_DEFINITION.NAME)) - .withSpec(Jsons.deserialize(record.get(ACTOR_DEFINITION.SPEC).data(), ConnectorSpecification.class)) - .withProtocolVersion(record.get(ACTOR_DEFINITION.PROTOCOL_VERSION, String.class)) - .withTombstone(record.get(ACTOR_DEFINITION.TOMBSTONE)) - .withPublic(record.get(ACTOR_DEFINITION.PUBLIC)) - .withCustom(record.get(ACTOR_DEFINITION.CUSTOM)) - .withReleaseStage(record.get(ACTOR_DEFINITION.RELEASE_STAGE) == null ? null - : Enums.toEnum(record.get(ACTOR_DEFINITION.RELEASE_STAGE, String.class), StandardDestinationDefinition.ReleaseStage.class).orElseThrow()) - .withReleaseDate(record.get(ACTOR_DEFINITION.RELEASE_DATE) == null ? null - : record.get(ACTOR_DEFINITION.RELEASE_DATE).toString()) - .withSupportsDbt(record.get(ACTOR_DEFINITION.SUPPORTS_DBT) == null ? null - : record.get(ACTOR_DEFINITION.SUPPORTS_DBT)) - .withNormalizationConfig( - Objects.nonNull(record.get(ACTOR_DEFINITION.NORMALIZATION_REPOSITORY)) && Objects.nonNull(record.get(ACTOR_DEFINITION.NORMALIZATION_TAG)) - && - Objects.nonNull(record.get(ACTOR_DEFINITION.NORMALIZATION_INTEGRATION_TYPE)) - ? new NormalizationDestinationDefinitionConfig() - .withNormalizationRepository(record.get(ACTOR_DEFINITION.NORMALIZATION_REPOSITORY)) - .withNormalizationTag(record.get(ACTOR_DEFINITION.NORMALIZATION_TAG)) - .withNormalizationIntegrationType(record.get(ACTOR_DEFINITION.NORMALIZATION_INTEGRATION_TYPE)) - : null) - .withResourceRequirements(record.get(ACTOR_DEFINITION.RESOURCE_REQUIREMENTS) == null - ? null - : Jsons.deserialize(record.get(ACTOR_DEFINITION.RESOURCE_REQUIREMENTS).data(), ActorDefinitionResourceRequirements.class)) - .withAllowedHosts(record.get(ACTOR_DEFINITION.ALLOWED_HOSTS) == null - ? 
null - : Jsons.deserialize(record.get(ACTOR_DEFINITION.ALLOWED_HOSTS).data(), AllowedHosts.class)); - } - - public static DestinationOAuthParameter buildDestinationOAuthParameter(final Record record) { - return new DestinationOAuthParameter() - .withOauthParameterId(record.get(ACTOR_OAUTH_PARAMETER.ID)) - .withConfiguration(Jsons.deserialize(record.get(ACTOR_OAUTH_PARAMETER.CONFIGURATION).data())) - .withWorkspaceId(record.get(ACTOR_OAUTH_PARAMETER.WORKSPACE_ID)) - .withDestinationDefinitionId(record.get(ACTOR_OAUTH_PARAMETER.ACTOR_DEFINITION_ID)); - } - - public static SourceOAuthParameter buildSourceOAuthParameter(final Record record) { - return new SourceOAuthParameter() - .withOauthParameterId(record.get(ACTOR_OAUTH_PARAMETER.ID)) - .withConfiguration(Jsons.deserialize(record.get(ACTOR_OAUTH_PARAMETER.CONFIGURATION).data())) - .withWorkspaceId(record.get(ACTOR_OAUTH_PARAMETER.WORKSPACE_ID)) - .withSourceDefinitionId(record.get(ACTOR_OAUTH_PARAMETER.ACTOR_DEFINITION_ID)); - } - - public static ActorCatalog buildActorCatalog(final Record record) { - return new ActorCatalog() - .withId(record.get(ACTOR_CATALOG.ID)) - .withCatalog(Jsons.jsonNode(parseAirbyteCatalog(record.get(ACTOR_CATALOG.CATALOG).toString()))) - .withCatalogHash(record.get(ACTOR_CATALOG.CATALOG_HASH)); - } - - public static ActorCatalogWithUpdatedAt buildActorCatalogWithUpdatedAt(final Record record) { - return new ActorCatalogWithUpdatedAt() - .withId(record.get(ACTOR_CATALOG.ID)) - .withCatalog(Jsons.jsonNode(parseAirbyteCatalog(record.get(ACTOR_CATALOG.CATALOG).toString()))) - .withCatalogHash(record.get(ACTOR_CATALOG.CATALOG_HASH)) - .withUpdatedAt(record.get(ACTOR_CATALOG_FETCH_EVENT.CREATED_AT, LocalDateTime.class).toEpochSecond(ZoneOffset.UTC)); - } - - public static AirbyteCatalog parseAirbyteCatalog(final String airbyteCatalogString) { - final AirbyteCatalog airbyteCatalog = Jsons.deserialize(airbyteCatalogString, AirbyteCatalog.class); - // On-the-fly migration of persisted data types related objects (protocol v0->v1) - // TODO feature flag this for data types rollout - // CatalogMigrationV1Helper.upgradeSchemaIfNeeded(airbyteCatalog); - CatalogMigrationV1Helper.downgradeSchemaIfNeeded(airbyteCatalog); - return airbyteCatalog; - } - - public static ActorCatalogFetchEvent buildActorCatalogFetchEvent(final Record record) { - return new ActorCatalogFetchEvent() - .withActorId(record.get(ACTOR_CATALOG_FETCH_EVENT.ACTOR_ID)) - .withActorCatalogId(record.get(ACTOR_CATALOG_FETCH_EVENT.ACTOR_CATALOG_ID)) - .withCreatedAt(record.get(ACTOR_CATALOG_FETCH_EVENT.CREATED_AT, LocalDateTime.class).toEpochSecond(ZoneOffset.UTC)); - } - - public static WorkspaceServiceAccount buildWorkspaceServiceAccount(final Record record) { - return new WorkspaceServiceAccount() - .withWorkspaceId(record.get(WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID)) - .withServiceAccountId(record.get(WORKSPACE_SERVICE_ACCOUNT.SERVICE_ACCOUNT_ID)) - .withServiceAccountEmail(record.get(WORKSPACE_SERVICE_ACCOUNT.SERVICE_ACCOUNT_EMAIL)) - .withJsonCredential(record.get(WORKSPACE_SERVICE_ACCOUNT.JSON_CREDENTIAL) == null ? null - : Jsons.deserialize(record.get(WORKSPACE_SERVICE_ACCOUNT.JSON_CREDENTIAL).data())) - .withHmacKey(record.get(WORKSPACE_SERVICE_ACCOUNT.HMAC_KEY) == null ? 
null - : Jsons.deserialize(record.get(WORKSPACE_SERVICE_ACCOUNT.HMAC_KEY).data())); - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/PersistenceHelpers.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/PersistenceHelpers.java deleted file mode 100644 index 0fea850d14f29..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/PersistenceHelpers.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static org.jooq.impl.DSL.select; - -import java.util.UUID; -import org.jooq.Condition; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.Record; -import org.jooq.TableField; -import org.jooq.impl.TableImpl; - -public class PersistenceHelpers { - - /** - * Helper function to handle null or equal case for the optional strings - * - * We need to have an explicit check for null values because NULL != "str" is NULL, not a boolean. - * - * @param field the targeted field - * @param value the value to check - * @return The Condition that performs the desired check - */ - public static Condition isNullOrEquals(final Field field, final String value) { - return value != null ? field.eq(value) : field.isNull(); - } - - /** - * Helper to delete records from the database - * - * @param table the table to delete from - * @param keyColumn the column to use as a key - * @param configId the id of the object to delete, must be from the keyColumn - * @param ctx the db context to use - */ - public static void deleteConfig(final TableImpl table, - final TableField keyColumn, - final UUID configId, - final DSLContext ctx) { - final boolean isExistingConfig = ctx.fetchExists(select() - .from(table) - .where(keyColumn.eq(configId))); - - if (isExistingConfig) { - ctx.deleteFrom(table) - .where(keyColumn.eq(configId)) - .execute(); - } - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryReader.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryReader.java deleted file mode 100644 index be11b794109e7..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryReader.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.WorkspaceServiceAccount; -import io.airbyte.config.persistence.split_secrets.SecretsHydrator; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -/** - * This class is responsible for fetching both connectors and their secrets (from separate secrets - * stores). All methods in this class return secrets! Use it carefully. 
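- * <p>
- * A minimal usage sketch (ids hypothetical; the returned config contains plaintext secrets and
- * must not be logged or persisted as-is):
- *
- * <pre>{@code
- * final SecretsRepositoryReader reader = new SecretsRepositoryReader(configRepository, secretsHydrator);
- * final SourceConnection source = reader.getSourceConnectionWithSecrets(sourceId);
- * final JsonNode fullConfig = source.getConfiguration(); // hydrated, secrets included
- * }</pre>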
- */ -public class SecretsRepositoryReader { - - private final ConfigRepository configRepository; - private final SecretsHydrator secretsHydrator; - - public SecretsRepositoryReader(final ConfigRepository configRepository, - final SecretsHydrator secretsHydrator) { - this.configRepository = configRepository; - this.secretsHydrator = secretsHydrator; - } - - public SourceConnection getSourceConnectionWithSecrets(final UUID sourceId) throws JsonValidationException, IOException, ConfigNotFoundException { - final var source = configRepository.getSourceConnection(sourceId); - return hydrateSourcePartialConfig(source); - } - - public List listSourceConnectionWithSecrets() throws JsonValidationException, IOException { - final var sources = configRepository.listSourceConnection(); - - return sources - .stream() - .map(partialSource -> Exceptions.toRuntime(() -> hydrateSourcePartialConfig(partialSource))) - .collect(Collectors.toList()); - } - - public DestinationConnection getDestinationConnectionWithSecrets(final UUID destinationId) - throws JsonValidationException, IOException, ConfigNotFoundException { - final var destination = configRepository.getDestinationConnection(destinationId); - return hydrateDestinationPartialConfig(destination); - } - - public List listDestinationConnectionWithSecrets() throws JsonValidationException, IOException { - final var destinations = configRepository.listDestinationConnection(); - - return destinations - .stream() - .map(partialDestination -> Exceptions.toRuntime(() -> hydrateDestinationPartialConfig(partialDestination))) - .collect(Collectors.toList()); - } - - private SourceConnection hydrateSourcePartialConfig(final SourceConnection sourceWithPartialConfig) { - final JsonNode hydratedConfig = secretsHydrator.hydrate(sourceWithPartialConfig.getConfiguration()); - return Jsons.clone(sourceWithPartialConfig).withConfiguration(hydratedConfig); - } - - private DestinationConnection hydrateDestinationPartialConfig(final DestinationConnection sourceWithPartialConfig) { - final JsonNode hydratedConfig = secretsHydrator.hydrate(sourceWithPartialConfig.getConfiguration()); - return Jsons.clone(sourceWithPartialConfig).withConfiguration(hydratedConfig); - } - - @SuppressWarnings("unused") - private void hydrateValuesIfKeyPresent(final String key, final Map> dump) { - if (dump.containsKey(key)) { - final Stream augmentedValue = dump.get(key).map(secretsHydrator::hydrate); - dump.put(key, augmentedValue); - } - } - - public WorkspaceServiceAccount getWorkspaceServiceAccountWithSecrets(final UUID workspaceId) - throws JsonValidationException, ConfigNotFoundException, IOException { - final WorkspaceServiceAccount workspaceServiceAccount = configRepository.getWorkspaceServiceAccountNoSecrets(workspaceId); - - final JsonNode jsonCredential = - workspaceServiceAccount.getJsonCredential() != null ? secretsHydrator.hydrateSecretCoordinate(workspaceServiceAccount.getJsonCredential()) - : null; - - final JsonNode hmacKey = - workspaceServiceAccount.getHmacKey() != null ? 
secretsHydrator.hydrateSecretCoordinate(workspaceServiceAccount.getHmacKey()) : null; - - return Jsons.clone(workspaceServiceAccount).withJsonCredential(jsonCredential).withHmacKey(hmacKey); - } - - public StandardWorkspace getWorkspaceWithSecrets(final UUID workspaceId, final boolean includeTombstone) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, includeTombstone); - final JsonNode webhookConfigs = secretsHydrator.hydrate(workspace.getWebhookOperationConfigs()); - workspace.withWebhookOperationConfigs(webhookConfigs); - return workspace; - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryWriter.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryWriter.java deleted file mode 100644 index 929e202b62f72..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryWriter.java +++ /dev/null @@ -1,283 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.WorkspaceServiceAccount; -import io.airbyte.config.persistence.split_secrets.SecretCoordinateToPayload; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHelpers; -import io.airbyte.config.persistence.split_secrets.SplitSecretConfig; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.Optional; -import java.util.UUID; - -/** - * This class takes secrets as arguments but never returns secrets as return values (even the ones - * that are passed in as arguments). It is responsible for writing connector secrets to the correct - * secrets store and then making sure the remainder of the configuration is written to the Config - * Database.
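- * <p>
- * For example (a sketch; {@code source} is assumed to be a fully hydrated {@link SourceConnection}
- * and {@code spec} its connector specification):
- *
- * <pre>{@code
- * final SecretsRepositoryWriter writer =
- *     new SecretsRepositoryWriter(configRepository, longLivedSecretPersistence, ephemeralSecretPersistence);
- * writer.writeSourceConnection(source, spec); // secrets to the store, partial config to the db
- * }</pre>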
- */ -@SuppressWarnings({"OptionalUsedAsFieldOrParameterType", "PMD.AvoidThrowingRawExceptionTypes"}) -public class SecretsRepositoryWriter { - - private static final UUID NO_WORKSPACE = UUID.fromString("00000000-0000-0000-0000-000000000000"); - - private final ConfigRepository configRepository; - private final JsonSchemaValidator validator; - private final Optional longLivedSecretPersistence; - private final Optional ephemeralSecretPersistence; - - public SecretsRepositoryWriter(final ConfigRepository configRepository, - final Optional longLivedSecretPersistence, - final Optional ephemeralSecretPersistence) { - this(configRepository, new JsonSchemaValidator(), longLivedSecretPersistence, ephemeralSecretPersistence); - } - - @VisibleForTesting - SecretsRepositoryWriter(final ConfigRepository configRepository, - final JsonSchemaValidator validator, - final Optional longLivedSecretPersistence, - final Optional ephemeralSecretPersistence) { - this.configRepository = configRepository; - this.validator = validator; - this.longLivedSecretPersistence = longLivedSecretPersistence; - this.ephemeralSecretPersistence = ephemeralSecretPersistence; - } - - private Optional getSourceIfExists(final UUID sourceId) throws JsonValidationException, IOException { - try { - return Optional.of(configRepository.getSourceConnection(sourceId)); - } catch (final ConfigNotFoundException e) { - return Optional.empty(); - } - } - - // validates too! - public void writeSourceConnection(final SourceConnection source, final ConnectorSpecification connectorSpecification) - throws JsonValidationException, IOException { - final var previousSourceConnection = getSourceIfExists(source.getSourceId()) - .map(SourceConnection::getConfiguration); - - // strip secrets - final JsonNode partialConfig = statefulUpdateSecrets( - source.getWorkspaceId(), - previousSourceConnection, - source.getConfiguration(), - connectorSpecification.getConnectionSpecification(), - source.getTombstone() == null || !source.getTombstone()); - final SourceConnection partialSource = Jsons.clone(source).withConfiguration(partialConfig); - - configRepository.writeSourceConnectionNoSecrets(partialSource); - } - - private Optional getDestinationIfExists(final UUID destinationId) throws JsonValidationException, IOException { - try { - return Optional.of(configRepository.getDestinationConnection(destinationId)); - } catch (final ConfigNotFoundException e) { - return Optional.empty(); - } - } - - public void writeDestinationConnection(final DestinationConnection destination, final ConnectorSpecification connectorSpecification) - throws JsonValidationException, IOException { - final var previousDestinationConnection = getDestinationIfExists(destination.getDestinationId()) - .map(DestinationConnection::getConfiguration); - - final JsonNode partialConfig = statefulUpdateSecrets( - destination.getWorkspaceId(), - previousDestinationConnection, - destination.getConfiguration(), - connectorSpecification.getConnectionSpecification(), - destination.getTombstone() == null || !destination.getTombstone()); - final DestinationConnection partialDestination = Jsons.clone(destination).withConfiguration(partialConfig); - - configRepository.writeDestinationConnectionNoSecrets(partialDestination); - } - - /** - * Detects secrets in the configuration. Writes them to the secrets store. It returns the config - * stripped of secrets (replaced with pointers to the secrets store). 
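- * <p>
- * Concretely (illustration only; the coordinate is shortened): a config like
- * <pre>{"host": "db", "password": "hunter2"}</pre>
- * comes back as
- * <pre>{"host": "db", "password": {"_secret": "airbyte_workspace_..._secret_..."}}</pre>
- * and the secret payload itself is written to the secrets store under that coordinate.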
- * - * @param workspaceId workspace id for the config - * @param fullConfig full config - * @param spec connector specification - * @return partial config - */ - @SuppressWarnings("unused") - private JsonNode statefulSplitSecrets(final UUID workspaceId, final JsonNode fullConfig, final ConnectorSpecification spec) { - return splitSecretConfig(workspaceId, fullConfig, spec, longLivedSecretPersistence); - } - - // todo (cgardens) - the contract on this method is hard to follow, because it sometimes returns - // secrets (i.e. when there is no longLivedSecretPersistence). If we treated all secrets the same - // (i.e. used a separate db for secrets when the user didn't provide a store), this would be easier - // to reason about. - /** - * If a secrets store is present, this method attempts to fetch the existing config and merge its - * secrets with the passed in config. If there is no secrets store, it just returns the passed in - * config. Also validates the config. - * - * @param workspaceId workspace id for the config - * @param oldConfig old full config - * @param fullConfig new full config - * @param spec connector specification - * @param validate should the spec be validated, tombstone entries should not be validated - * @return partial config - */ - private JsonNode statefulUpdateSecrets(final UUID workspaceId, - final Optional<JsonNode> oldConfig, - final JsonNode fullConfig, - final JsonNode spec, - final boolean validate) - throws JsonValidationException { - if (validate) { - validator.ensure(spec, fullConfig); - } - - if (longLivedSecretPersistence.isEmpty()) { - return fullConfig; - } - - final SplitSecretConfig splitSecretConfig; - if (oldConfig.isPresent()) { - splitSecretConfig = SecretsHelpers.splitAndUpdateConfig( - workspaceId, - oldConfig.get(), - fullConfig, - spec, - longLivedSecretPersistence.get()); - } else { - splitSecretConfig = SecretsHelpers.splitConfig( - workspaceId, - fullConfig, - spec); - } - splitSecretConfig.getCoordinateToPayload().forEach(longLivedSecretPersistence.get()::write); - return splitSecretConfig.getPartialConfig(); - } - - /** - * @param fullConfig full config - * @param spec connector specification - * @return partial config - */ - public JsonNode statefulSplitEphemeralSecrets(final JsonNode fullConfig, final ConnectorSpecification spec) { - return splitSecretConfig(NO_WORKSPACE, fullConfig, spec, ephemeralSecretPersistence); - } - - private JsonNode splitSecretConfig(final UUID workspaceId, - final JsonNode fullConfig, - final ConnectorSpecification spec, - final Optional<SecretPersistence> secretPersistence) { - if (secretPersistence.isPresent()) { - final SplitSecretConfig splitSecretConfig = SecretsHelpers.splitConfig(workspaceId, fullConfig, spec.getConnectionSpecification()); - splitSecretConfig.getCoordinateToPayload().forEach(secretPersistence.get()::write); - return splitSecretConfig.getPartialConfig(); - } else { - return fullConfig; - } - } - - public void writeServiceAccountJsonCredentials(final WorkspaceServiceAccount workspaceServiceAccount) - throws JsonValidationException, IOException { - final WorkspaceServiceAccount workspaceServiceAccountForDB = getWorkspaceServiceAccountWithSecretCoordinate(workspaceServiceAccount); - configRepository.writeWorkspaceServiceAccountNoSecrets(workspaceServiceAccountForDB); - } - - /** - * This method encrypts the secret JSON key and HMAC key of a GCP service account associated - with a workspace. If, in the future, we build a similar feature, e.g. an AWS account associated with a - workspace, we will have to build a new implementation for it. - */ - private WorkspaceServiceAccount getWorkspaceServiceAccountWithSecretCoordinate(final WorkspaceServiceAccount workspaceServiceAccount) - throws JsonValidationException, IOException { - if (longLivedSecretPersistence.isPresent()) { - final WorkspaceServiceAccount clonedWorkspaceServiceAccount = Jsons.clone(workspaceServiceAccount); - final Optional<WorkspaceServiceAccount> optionalWorkspaceServiceAccount = getOptionalWorkspaceServiceAccount( - workspaceServiceAccount.getWorkspaceId()); - // Convert the JSON key of the Service Account into a secret coordinate. Ref: - // https://cloud.google.com/iam/docs/service-accounts#key-types - if (workspaceServiceAccount.getJsonCredential() != null) { - final SecretCoordinateToPayload jsonCredSecretCoordinateToPayload = - SecretsHelpers.convertServiceAccountCredsToSecret(workspaceServiceAccount.getJsonCredential().toPrettyString(), - longLivedSecretPersistence.get(), - workspaceServiceAccount.getWorkspaceId(), - UUID::randomUUID, - optionalWorkspaceServiceAccount.map(WorkspaceServiceAccount::getJsonCredential).orElse(null), - "json"); - longLivedSecretPersistence.get().write(jsonCredSecretCoordinateToPayload.secretCoordinate(), jsonCredSecretCoordinateToPayload.payload()); - clonedWorkspaceServiceAccount.setJsonCredential(jsonCredSecretCoordinateToPayload.secretCoordinateForDB()); - } - // Convert the HMAC key of the Service Account into a secret coordinate. Ref: - // https://cloud.google.com/storage/docs/authentication/hmackeys - if (workspaceServiceAccount.getHmacKey() != null) { - final SecretCoordinateToPayload hmacKeySecretCoordinateToPayload = - SecretsHelpers.convertServiceAccountCredsToSecret(workspaceServiceAccount.getHmacKey().toString(), - longLivedSecretPersistence.get(), - workspaceServiceAccount.getWorkspaceId(), - UUID::randomUUID, - optionalWorkspaceServiceAccount.map(WorkspaceServiceAccount::getHmacKey).orElse(null), - "hmac"); - longLivedSecretPersistence.get().write(hmacKeySecretCoordinateToPayload.secretCoordinate(), hmacKeySecretCoordinateToPayload.payload()); - clonedWorkspaceServiceAccount.setHmacKey(hmacKeySecretCoordinateToPayload.secretCoordinateForDB()); - } - return clonedWorkspaceServiceAccount; - } - return workspaceServiceAccount; - } - - public Optional<WorkspaceServiceAccount> getOptionalWorkspaceServiceAccount(final UUID workspaceId) - throws JsonValidationException, IOException { - try { - return Optional.of(configRepository.getWorkspaceServiceAccountNoSecrets(workspaceId)); - } catch (final ConfigNotFoundException e) { - return Optional.empty(); - } - } - - public void writeWorkspace(final StandardWorkspace workspace) - throws JsonValidationException, IOException { - // Get the schema for the webhook config so we can split out any secret fields. - final JsonNode webhookConfigSchema = Jsons.jsonNodeFromFile(ConfigSchema.WORKSPACE_WEBHOOK_OPERATION_CONFIGS.getConfigSchemaFile()); - // Check if there's an existing config, so we can re-use the secret coordinates. - final var previousWorkspace = getWorkspaceIfExists(workspace.getWorkspaceId(), false); - Optional<JsonNode> previousWebhookConfigs = Optional.empty(); - if (previousWorkspace.isPresent() && previousWorkspace.get().getWebhookOperationConfigs() != null) { - previousWebhookConfigs = Optional.of(previousWorkspace.get().getWebhookOperationConfigs()); - } - // Split out the secrets from the webhook config. - final JsonNode partialConfig = workspace.getWebhookOperationConfigs() == null ?
null - : statefulUpdateSecrets( - workspace.getWorkspaceId(), - previousWebhookConfigs, - workspace.getWebhookOperationConfigs(), - webhookConfigSchema, true); - final StandardWorkspace partialWorkspace = Jsons.clone(workspace); - if (partialConfig != null) { - partialWorkspace.withWebhookOperationConfigs(partialConfig); - } - configRepository.writeStandardWorkspaceNoSecrets(partialWorkspace); - } - - private Optional getWorkspaceIfExists(final UUID workspaceId, final boolean includeTombstone) { - try { - final StandardWorkspace existingWorkspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, includeTombstone); - return existingWorkspace == null ? Optional.empty() : Optional.of(existingWorkspace); - } catch (final JsonValidationException | IOException | ConfigNotFoundException e) { - return Optional.empty(); - } - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StandardSyncPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StandardSyncPersistence.java deleted file mode 100644 index 866c6ef8ae1d4..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StandardSyncPersistence.java +++ /dev/null @@ -1,311 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION_OPERATION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.STATE; -import static org.jooq.impl.DSL.asterisk; -import static org.jooq.impl.DSL.select; - -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.version.AirbyteProtocolVersion; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorType; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.ConfigWithMetadata; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.NonBreakingChangesPreference; -import io.airbyte.config.helpers.ScheduleHelpers; -import io.airbyte.db.Database; -import io.airbyte.db.ExceptionWrappingDatabase; -import io.airbyte.db.instance.configs.jooq.generated.tables.Actor; -import io.airbyte.db.instance.configs.jooq.generated.tables.ActorDefinition; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.StreamDescriptor; -import java.io.IOException; -import java.time.OffsetDateTime; -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import java.util.stream.Stream; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.Result; -import org.jooq.SelectJoinStep; - -public class StandardSyncPersistence { - - private record StandardSyncIdsWithProtocolVersions( - UUID standardSyncId, - UUID sourceDefId, - Version sourceProtocolVersion, - UUID destinationDefId, - Version destinationProtocolVersion) {} - - private final ExceptionWrappingDatabase database; - - public StandardSyncPersistence(final Database database) { - this.database = new ExceptionWrappingDatabase(database); - } - - public StandardSync getStandardSync(final UUID 
connectionId) throws IOException, ConfigNotFoundException { - return getStandardSyncWithMetadata(connectionId).getConfig(); - } - - public ConfigWithMetadata<StandardSync> getStandardSyncWithMetadata(final UUID connectionId) throws IOException, ConfigNotFoundException { - final List<ConfigWithMetadata<StandardSync>> result = listStandardSyncWithMetadata(Optional.of(connectionId)); - - final boolean foundMoreThanOneConfig = result.size() > 1; - if (result.isEmpty()) { - throw new ConfigNotFoundException(ConfigSchema.STANDARD_SYNC, connectionId.toString()); - } else if (foundMoreThanOneConfig) { - throw new IllegalStateException(String.format("Multiple %s configs found for ID %s: %s", ConfigSchema.STANDARD_SYNC, connectionId, result)); - } - return result.get(0); - } - - public List<StandardSync> listStandardSync() throws IOException { - return listStandardSyncWithMetadata(Optional.empty()).stream().map(ConfigWithMetadata::getConfig).toList(); - } - - public void writeStandardSync(final StandardSync standardSync) throws IOException { - database.transaction(ctx -> { - writeStandardSync(standardSync, ctx); - return null; - }); - } - - /** - * Deletes a connection (sync) and all of its dependent resources (state and connection_operations). - * - * @param standardSyncId - id of the sync (a.k.a. connection_id) - * @throws IOException - error while accessing the db. - */ - public void deleteStandardSync(final UUID standardSyncId) throws IOException { - database.transaction(ctx -> { - PersistenceHelpers.deleteConfig(CONNECTION_OPERATION, CONNECTION_OPERATION.CONNECTION_ID, standardSyncId, ctx); - PersistenceHelpers.deleteConfig(STATE, STATE.CONNECTION_ID, standardSyncId, ctx); - PersistenceHelpers.deleteConfig(CONNECTION, CONNECTION.ID, standardSyncId, ctx); - return null; - }); - } - - /** - * For the StandardSyncs related to actorDefinitionId, clear the unsupported protocol version flag - * if both connectors are now within support range.
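- * <p>
- * For example (a sketch; the version range is hypothetical): after upgrading a source definition,
- *
- * <pre>{@code
- * standardSyncPersistence.clearUnsupportedProtocolVersionFlag(sourceDefId, ActorType.SOURCE,
- *     new AirbyteProtocolVersionRange(new Version("0.2.0"), new Version("1.0.0")));
- * }</pre>
- *
- * re-enables every disabled sync whose source and destination protocol versions both fall inside
- * that range.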
- * - * @param actorDefinitionId the actorDefinitionId to query - * @param actorType the ActorType of actorDefinitionId - * @param supportedRange the supported range of protocol versions - */ - public void clearUnsupportedProtocolVersionFlag(final UUID actorDefinitionId, - final ActorType actorType, - final AirbyteProtocolVersionRange supportedRange) - throws IOException { - final Stream candidateSyncs = database.query(ctx -> findDisabledSyncs(ctx, actorDefinitionId, actorType)); - final List standardSyncsToReEnable = candidateSyncs - .filter(sync -> supportedRange.isSupported(sync.sourceProtocolVersion()) && supportedRange.isSupported(sync.destinationProtocolVersion())) - .map(StandardSyncIdsWithProtocolVersions::standardSyncId) - .toList(); - database.query(ctx -> { - clearProtocolVersionFlag(ctx, standardSyncsToReEnable); - return null; - }); - } - - public List getAllStreamsForConnection(final UUID connectionId) throws ConfigNotFoundException, IOException { - final StandardSync standardSync = getStandardSync(connectionId); - return CatalogHelpers.extractStreamDescriptors(standardSync.getCatalog()); - } - - private List> listStandardSyncWithMetadata(final Optional configId) throws IOException { - final Result result = database.query(ctx -> { - final SelectJoinStep query = ctx.select(asterisk()).from(CONNECTION); - if (configId.isPresent()) { - return query.where(CONNECTION.ID.eq(configId.get())).fetch(); - } - return query.fetch(); - }); - - final List> standardSyncs = new ArrayList<>(); - for (final Record record : result) { - final StandardSync standardSync = DbConverter.buildStandardSync(record, connectionOperationIds(record.get(CONNECTION.ID))); - if (ScheduleHelpers.isScheduleTypeMismatch(standardSync)) { - throw new RuntimeException("unexpected schedule type mismatch"); - } - standardSyncs.add(new ConfigWithMetadata<>( - record.get(CONNECTION.ID).toString(), - ConfigSchema.STANDARD_SYNC.name(), - record.get(CONNECTION.CREATED_AT).toInstant(), - record.get(CONNECTION.UPDATED_AT).toInstant(), - standardSync)); - } - return standardSyncs; - } - - private List connectionOperationIds(final UUID connectionId) throws IOException { - final Result result = database.query(ctx -> ctx.select(asterisk()) - .from(CONNECTION_OPERATION) - .where(CONNECTION_OPERATION.CONNECTION_ID.eq(connectionId)) - .fetch()); - - final List ids = new ArrayList<>(); - for (final Record record : result) { - ids.add(record.get(CONNECTION_OPERATION.OPERATION_ID)); - } - - return ids; - } - - private void writeStandardSync(final StandardSync standardSync, final DSLContext ctx) { - final OffsetDateTime timestamp = OffsetDateTime.now(); - final boolean isExistingConfig = ctx.fetchExists(select() - .from(CONNECTION) - .where(CONNECTION.ID.eq(standardSync.getConnectionId()))); - - if (ScheduleHelpers.isScheduleTypeMismatch(standardSync)) { - throw new RuntimeException("unexpected schedule type mismatch"); - } - - if (isExistingConfig) { - ctx.update(CONNECTION) - .set(CONNECTION.ID, standardSync.getConnectionId()) - .set(CONNECTION.NAMESPACE_DEFINITION, Enums.toEnum(standardSync.getNamespaceDefinition().value(), - io.airbyte.db.instance.configs.jooq.generated.enums.NamespaceDefinitionType.class).orElseThrow()) - .set(CONNECTION.NAMESPACE_FORMAT, standardSync.getNamespaceFormat()) - .set(CONNECTION.PREFIX, standardSync.getPrefix()) - .set(CONNECTION.SOURCE_ID, standardSync.getSourceId()) - .set(CONNECTION.DESTINATION_ID, standardSync.getDestinationId()) - .set(CONNECTION.NAME, standardSync.getName()) - 
.set(CONNECTION.CATALOG, JSONB.valueOf(Jsons.serialize(standardSync.getCatalog()))) - .set(CONNECTION.FIELD_SELECTION_DATA, JSONB.valueOf(Jsons.serialize(standardSync.getFieldSelectionData()))) - .set(CONNECTION.STATUS, standardSync.getStatus() == null ? null - : Enums.toEnum(standardSync.getStatus().value(), - io.airbyte.db.instance.configs.jooq.generated.enums.StatusType.class).orElseThrow()) - .set(CONNECTION.SCHEDULE, JSONB.valueOf(Jsons.serialize(standardSync.getSchedule()))) - .set(CONNECTION.MANUAL, standardSync.getManual()) - .set(CONNECTION.SCHEDULE_TYPE, - standardSync.getScheduleType() == null ? null - : Enums.toEnum(standardSync.getScheduleType().value(), - io.airbyte.db.instance.configs.jooq.generated.enums.ScheduleType.class) - .orElseThrow()) - .set(CONNECTION.SCHEDULE_DATA, JSONB.valueOf(Jsons.serialize(standardSync.getScheduleData()))) - .set(CONNECTION.RESOURCE_REQUIREMENTS, - JSONB.valueOf(Jsons.serialize(standardSync.getResourceRequirements()))) - .set(CONNECTION.UPDATED_AT, timestamp) - .set(CONNECTION.SOURCE_CATALOG_ID, standardSync.getSourceCatalogId()) - .set(CONNECTION.BREAKING_CHANGE, standardSync.getBreakingChange()) - .set(CONNECTION.GEOGRAPHY, Enums.toEnum(standardSync.getGeography().value(), - io.airbyte.db.instance.configs.jooq.generated.enums.GeographyType.class).orElseThrow()) - .set(CONNECTION.NON_BREAKING_CHANGE_PREFERENCE, standardSync.getNonBreakingChangesPreference().value()) - .set(CONNECTION.NOTIFY_SCHEMA_CHANGES, standardSync.getNotifySchemaChanges()) - .where(CONNECTION.ID.eq(standardSync.getConnectionId())) - .execute(); - - ctx.deleteFrom(CONNECTION_OPERATION) - .where(CONNECTION_OPERATION.CONNECTION_ID.eq(standardSync.getConnectionId())) - .execute(); - for (final UUID operationIdFromStandardSync : standardSync.getOperationIds()) { - ctx.insertInto(CONNECTION_OPERATION) - .set(CONNECTION_OPERATION.ID, UUID.randomUUID()) - .set(CONNECTION_OPERATION.CONNECTION_ID, standardSync.getConnectionId()) - .set(CONNECTION_OPERATION.OPERATION_ID, operationIdFromStandardSync) - .set(CONNECTION_OPERATION.CREATED_AT, timestamp) - .set(CONNECTION_OPERATION.UPDATED_AT, timestamp) - .execute(); - } - } else { - ctx.insertInto(CONNECTION) - .set(CONNECTION.ID, standardSync.getConnectionId()) - .set(CONNECTION.NAMESPACE_DEFINITION, Enums.toEnum(standardSync.getNamespaceDefinition().value(), - io.airbyte.db.instance.configs.jooq.generated.enums.NamespaceDefinitionType.class).orElseThrow()) - .set(CONNECTION.NAMESPACE_FORMAT, standardSync.getNamespaceFormat()) - .set(CONNECTION.PREFIX, standardSync.getPrefix()) - .set(CONNECTION.SOURCE_ID, standardSync.getSourceId()) - .set(CONNECTION.DESTINATION_ID, standardSync.getDestinationId()) - .set(CONNECTION.NAME, standardSync.getName()) - .set(CONNECTION.CATALOG, JSONB.valueOf(Jsons.serialize(standardSync.getCatalog()))) - .set(CONNECTION.FIELD_SELECTION_DATA, JSONB.valueOf(Jsons.serialize(standardSync.getFieldSelectionData()))) - .set(CONNECTION.STATUS, standardSync.getStatus() == null ? null - : Enums.toEnum(standardSync.getStatus().value(), - io.airbyte.db.instance.configs.jooq.generated.enums.StatusType.class).orElseThrow()) - .set(CONNECTION.SCHEDULE, JSONB.valueOf(Jsons.serialize(standardSync.getSchedule()))) - .set(CONNECTION.MANUAL, standardSync.getManual()) - .set(CONNECTION.SCHEDULE_TYPE, - standardSync.getScheduleType() == null ? 
null - : Enums.toEnum(standardSync.getScheduleType().value(), - io.airbyte.db.instance.configs.jooq.generated.enums.ScheduleType.class) - .orElseThrow()) - .set(CONNECTION.SCHEDULE_DATA, JSONB.valueOf(Jsons.serialize(standardSync.getScheduleData()))) - .set(CONNECTION.RESOURCE_REQUIREMENTS, - JSONB.valueOf(Jsons.serialize(standardSync.getResourceRequirements()))) - .set(CONNECTION.SOURCE_CATALOG_ID, standardSync.getSourceCatalogId()) - .set(CONNECTION.GEOGRAPHY, Enums.toEnum(standardSync.getGeography().value(), - io.airbyte.db.instance.configs.jooq.generated.enums.GeographyType.class).orElseThrow()) - .set(CONNECTION.BREAKING_CHANGE, standardSync.getBreakingChange()) - .set(CONNECTION.NON_BREAKING_CHANGE_PREFERENCE, - standardSync.getNonBreakingChangesPreference() == null ? NonBreakingChangesPreference.IGNORE.value() - : standardSync.getNonBreakingChangesPreference().value()) - .set(CONNECTION.CREATED_AT, timestamp) - .set(CONNECTION.UPDATED_AT, timestamp) - .execute(); - for (final UUID operationIdFromStandardSync : standardSync.getOperationIds()) { - ctx.insertInto(CONNECTION_OPERATION) - .set(CONNECTION_OPERATION.ID, UUID.randomUUID()) - .set(CONNECTION_OPERATION.CONNECTION_ID, standardSync.getConnectionId()) - .set(CONNECTION_OPERATION.OPERATION_ID, operationIdFromStandardSync) - .set(CONNECTION_OPERATION.CREATED_AT, timestamp) - .set(CONNECTION_OPERATION.UPDATED_AT, timestamp) - .execute(); - } - } - } - - private Stream findDisabledSyncs(final DSLContext ctx, final UUID actorDefId, final ActorType actorType) { - // Table aliasing to help have a readable join - final Actor source = ACTOR.as("source"); - final Actor destination = ACTOR.as("destination"); - final ActorDefinition sourceDef = ACTOR_DEFINITION.as("sourceDef"); - final ActorDefinition destDef = ACTOR_DEFINITION.as("destDef"); - - // Retrieve all the connections currently disabled due to a bad protocol version - // where the actor definition is matching the one provided to this function - final Stream results = ctx - .select(CONNECTION.ID, sourceDef.ID, sourceDef.PROTOCOL_VERSION, destDef.ID, destDef.PROTOCOL_VERSION) - .from(CONNECTION) - .join(source).on(CONNECTION.SOURCE_ID.eq(source.ID)) - .join(sourceDef).on(source.ACTOR_DEFINITION_ID.eq(sourceDef.ID)) - .join(destination).on(CONNECTION.DESTINATION_ID.eq(destination.ID)) - .join(destDef).on(destination.ACTOR_DEFINITION_ID.eq(destDef.ID)) - .where( - CONNECTION.UNSUPPORTED_PROTOCOL_VERSION.eq(true).and( - (actorType == ActorType.DESTINATION ? 
destDef : sourceDef).ID.eq(actorDefId))) - .fetch() - .stream() - .map(r -> new StandardSyncIdsWithProtocolVersions( - r.get(CONNECTION.ID), - r.get(sourceDef.ID), - AirbyteProtocolVersion.getWithDefault(r.get(sourceDef.PROTOCOL_VERSION)), - r.get(destDef.ID), - AirbyteProtocolVersion.getWithDefault(r.get(destDef.PROTOCOL_VERSION)))); - return results; - } - - private void clearProtocolVersionFlag(final DSLContext ctx, final List standardSyncIds) { - ctx.update(CONNECTION) - .set(CONNECTION.UNSUPPORTED_PROTOCOL_VERSION, false) - .set(CONNECTION.UPDATED_AT, OffsetDateTime.now()) - .where(CONNECTION.ID.in(standardSyncIds)) - .execute(); - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java deleted file mode 100644 index 595a17a1c77b7..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java +++ /dev/null @@ -1,315 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static io.airbyte.db.instance.configs.jooq.generated.Tables.STATE; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.State; -import io.airbyte.config.StateType; -import io.airbyte.config.StateWrapper; -import io.airbyte.config.helpers.StateMessageHelper; -import io.airbyte.db.Database; -import io.airbyte.db.ExceptionWrappingDatabase; -import io.airbyte.protocol.models.AirbyteGlobalState; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.AirbyteStreamState; -import io.airbyte.protocol.models.StreamDescriptor; -import java.io.IOException; -import java.time.OffsetDateTime; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.RecordMapper; -import org.jooq.impl.DSL; - -/** - * State Persistence - * - * Handle persisting States to the Database. - * - * Supports migration from Legacy to Global or Stream. Other type migrations need to go through a - * reset. (an exception will be thrown) - */ -public class StatePersistence { - - private final ExceptionWrappingDatabase database; - - public StatePersistence(final Database database) { - this.database = new ExceptionWrappingDatabase(database); - } - - /** - * Get the current State of a Connection - * - * @param connectionId - * @return - * @throws IOException - */ - public Optional getCurrentState(final UUID connectionId) throws IOException { - final List records = this.database.query(ctx -> getStateRecords(ctx, connectionId)); - - if (records.isEmpty()) { - return Optional.empty(); - } - - return switch (getStateType(connectionId, records)) { - case GLOBAL -> Optional.of(buildGlobalState(records)); - case STREAM -> Optional.of(buildStreamState(records)); - default -> Optional.of(buildLegacyState(records)); - }; - } - - /** - * Create or update the states described in the StateWrapper. Null states will be deleted. - * - * The only state migrations supported are going from a Legacy state to either a Global or Stream - * state. 
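The migration rule just described (only a Legacy state may be migrated in place, and only to Global or Stream) can be captured in a few lines; the javadoc resumes below. The following is a minimal, self-contained sketch: the StateType enum and the inline isMigration check are simplified stand-ins for io.airbyte.config.StateType and StateMessageHelper.isMigration, not the real implementations.

import java.util.Optional;

// Simplified stand-in for io.airbyte.config.StateType.
enum StateType { LEGACY, GLOBAL, STREAM }

public class MigrationGuardDemo {

  // Mirrors the rule described above: the only permitted type change is
  // LEGACY -> (GLOBAL | STREAM); anything else must go through a reset.
  static void checkMigration(final Optional<StateType> previous, final StateType next) {
    final boolean isMigration = previous.isPresent()
        && previous.get() == StateType.LEGACY
        && next != StateType.LEGACY;
    if (!isMigration && previous.isPresent() && previous.get() != next) {
      throw new IllegalStateException(
          "Unexpected type migration from '" + previous.get() + "' to '" + next + "'");
    }
  }

  public static void main(String[] args) {
    checkMigration(Optional.of(StateType.LEGACY), StateType.STREAM); // allowed
    checkMigration(Optional.empty(), StateType.GLOBAL);              // first write, allowed
    try {
      checkMigration(Optional.of(StateType.GLOBAL), StateType.STREAM); // rejected
    } catch (IllegalStateException e) {
      System.out.println(e.getMessage());
    }
  }
}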
Other state type migrations should go through an explicit reset. An exception will be
- * thrown to prevent the system from getting into a bad state.
- *
- * @param connectionId
- * @param state
- * @throws IOException
- */
- public void updateOrCreateState(final UUID connectionId, final StateWrapper state)
-     throws IOException {
-   final Optional<StateWrapper> previousState = getCurrentState(connectionId);
-   final StateType currentStateType = state.getStateType();
-   final boolean isMigration = StateMessageHelper.isMigration(currentStateType, previousState);
-
-   // The only case where we allow a state migration is moving from LEGACY.
-   // We expect any other migration to go through an explicit reset.
-   if (!isMigration && previousState.isPresent() && previousState.get().getStateType() != currentStateType) {
-     throw new IllegalStateException("Unexpected type migration from '" + previousState.get().getStateType() + "' to '" + currentStateType +
-         "'. Migration of StateType needs to go through an explicit reset.");
-   }
-
-   this.database.transaction(ctx -> {
-     if (isMigration) {
-       clearLegacyState(ctx, connectionId);
-     }
-     switch (state.getStateType()) {
-       case GLOBAL -> saveGlobalState(ctx, connectionId, state.getGlobal().getGlobal());
-       case STREAM -> saveStreamState(ctx, connectionId, state.getStateMessages());
-       case LEGACY -> saveLegacyState(ctx, connectionId, state.getLegacyState());
-     }
-     return null;
-   });
- }
-
- private static void clearLegacyState(final DSLContext ctx, final UUID connectionId) {
-   writeStateToDb(ctx, connectionId, null, null, StateType.LEGACY, null);
- }
-
- private static void saveGlobalState(final DSLContext ctx, final UUID connectionId, final AirbyteGlobalState globalState) {
-   writeStateToDb(ctx, connectionId, null, null, StateType.GLOBAL, globalState.getSharedState());
-   for (final AirbyteStreamState streamState : globalState.getStreamStates()) {
-     writeStateToDb(ctx,
-         connectionId,
-         streamState.getStreamDescriptor().getName(),
-         streamState.getStreamDescriptor().getNamespace(),
-         StateType.GLOBAL,
-         streamState.getStreamState());
-   }
- }
-
- private static void saveStreamState(final DSLContext ctx, final UUID connectionId, final List<AirbyteStateMessage> stateMessages) {
-   for (final AirbyteStateMessage stateMessage : stateMessages) {
-     final AirbyteStreamState streamState = stateMessage.getStream();
-     writeStateToDb(ctx,
-         connectionId,
-         streamState.getStreamDescriptor().getName(),
-         streamState.getStreamDescriptor().getNamespace(),
-         StateType.STREAM,
-         streamState.getStreamState());
-   }
- }
-
- private static void saveLegacyState(final DSLContext ctx, final UUID connectionId, final JsonNode state) {
-   writeStateToDb(ctx, connectionId, null, null, StateType.LEGACY, state);
- }
-
- /**
-  * Performs the actual SQL operation depending on the state.
-  *
-  * If the state is null, it deletes the row; otherwise it does an insert, or an update on conflict.
-  */
- static void writeStateToDb(final DSLContext ctx,
-                            final UUID connectionId,
-                            final String streamName,
-                            final String namespace,
-                            final StateType stateType,
-                            final JsonNode state) {
-   if (state != null) {
-     final boolean hasState = ctx.selectFrom(STATE)
-         .where(
-             STATE.CONNECTION_ID.eq(connectionId),
-             PersistenceHelpers.isNullOrEquals(STATE.STREAM_NAME, streamName),
-             PersistenceHelpers.isNullOrEquals(STATE.NAMESPACE, namespace))
-         .fetch().isNotEmpty();
-
-     // NOTE: the legacy code was storing a State object instead of just the State data field. We kept
-     // the same behavior for consistency.
-     final JSONB jsonbState = JSONB.valueOf(Jsons.serialize(stateType != StateType.LEGACY ? state : new State().withState(state)));
-     final OffsetDateTime now = OffsetDateTime.now();
-
-     if (!hasState) {
-       ctx.insertInto(STATE)
-           .columns(
-               STATE.ID,
-               STATE.CREATED_AT,
-               STATE.UPDATED_AT,
-               STATE.CONNECTION_ID,
-               STATE.STREAM_NAME,
-               STATE.NAMESPACE,
-               STATE.STATE_,
-               STATE.TYPE)
-           .values(
-               UUID.randomUUID(),
-               now,
-               now,
-               connectionId,
-               streamName,
-               namespace,
-               jsonbState,
-               Enums.convertTo(stateType, io.airbyte.db.instance.configs.jooq.generated.enums.StateType.class))
-           .execute();
-
-     } else {
-       ctx.update(STATE)
-           .set(STATE.UPDATED_AT, now)
-           .set(STATE.STATE_, jsonbState)
-           .where(
-               STATE.CONNECTION_ID.eq(connectionId),
-               PersistenceHelpers.isNullOrEquals(STATE.STREAM_NAME, streamName),
-               PersistenceHelpers.isNullOrEquals(STATE.NAMESPACE, namespace))
-           .execute();
-     }
-
-   } else {
-     // If the state is null, we remove the state instead of keeping a null row
-     ctx.deleteFrom(STATE)
-         .where(
-             STATE.CONNECTION_ID.eq(connectionId),
-             PersistenceHelpers.isNullOrEquals(STATE.STREAM_NAME, streamName),
-             PersistenceHelpers.isNullOrEquals(STATE.NAMESPACE, namespace))
-         .execute();
-   }
- }
-
- /**
-  * Get the StateType for a given list of StateRecords
-  *
-  * @param connectionId The connectionId of the records, used to add more debugging context if an
-  *        error is detected
-  * @param records The list of StateRecords to process, must not be empty
-  * @return the StateType of the records
-  * @throws IllegalStateException If StateRecords have inconsistent types
-  */
- @SuppressWarnings("PMD.AvoidLiteralsInIfCondition")
- private static io.airbyte.db.instance.configs.jooq.generated.enums.StateType getStateType(
-     final UUID connectionId,
-     final List<StateRecord> records) {
-   final Set<io.airbyte.db.instance.configs.jooq.generated.enums.StateType> types =
-       records.stream().map(r -> r.type).collect(Collectors.toSet());
-   if (types.size() == 1) {
-     return types.stream().findFirst().get();
-   }
-
-   throw new IllegalStateException("Inconsistent StateTypes for connectionId " + connectionId +
-       " (" + String.join(", ", types.stream().map(stateType -> stateType.getLiteral()).toList()) + ")");
- }
-
- /**
-  * Get the state records from the DB
-  *
-  * @param ctx A valid DSL context to use for the query
-  * @param connectionId the ID of the connection
-  * @return The StateRecords for the connectionId
-  */
- private static List<StateRecord> getStateRecords(final DSLContext ctx, final UUID connectionId) {
-   return ctx.select(DSL.asterisk())
-       .from(STATE)
-       .where(STATE.CONNECTION_ID.eq(connectionId))
-       .fetch(getStateRecordMapper())
-       .stream().toList();
- }
-
- /**
-  * Build Global state
-  *
-  * The list of records can contain one global shared state that is the state without streamName and
-  * without namespace. The other records should be translated into AirbyteStreamState.
-  */
- private static StateWrapper buildGlobalState(final List<StateRecord> records) {
-   // Split the global shared state from the other per stream records
-   final Map<Boolean, List<StateRecord>> partitions = records.stream()
-       .collect(Collectors.partitioningBy(r -> r.streamName == null && r.namespace == null));
-
-   final AirbyteGlobalState globalState = new AirbyteGlobalState()
-       .withSharedState(partitions.get(Boolean.TRUE).stream().map(r -> r.state).findFirst().orElse(null))
-       .withStreamStates(partitions.get(Boolean.FALSE).stream().map(StatePersistence::buildAirbyteStreamState).toList());
-
-   final AirbyteStateMessage msg = new AirbyteStateMessage()
-       .withType(AirbyteStateType.GLOBAL)
-       .withGlobal(globalState);
-   return new
StateWrapper().withStateType(StateType.GLOBAL).withGlobal(msg); - } - - /** - * Build StateWrapper for a PerStream state - */ - private static StateWrapper buildStreamState(final List records) { - final List messages = records.stream().map( - record -> new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(buildAirbyteStreamState(record))) - .toList(); - return new StateWrapper().withStateType(StateType.STREAM).withStateMessages(messages); - } - - /** - * Build a StateWrapper for Legacy state - */ - private static StateWrapper buildLegacyState(final List records) { - final State legacyState = Jsons.convertValue(records.get(0).state, State.class); - return new StateWrapper() - .withStateType(StateType.LEGACY) - .withLegacyState(legacyState.getState()); - } - - /** - * Convert a StateRecord to an AirbyteStreamState - */ - private static AirbyteStreamState buildAirbyteStreamState(final StateRecord record) { - return new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(record.streamName).withNamespace(record.namespace)) - .withStreamState(record.state); - } - - private static RecordMapper getStateRecordMapper() { - return record -> new StateRecord( - record.get(STATE.TYPE, io.airbyte.db.instance.configs.jooq.generated.enums.StateType.class), - record.get(STATE.STREAM_NAME, String.class), - record.get(STATE.NAMESPACE, String.class), - Jsons.deserialize(record.get(STATE.STATE_).data())); - } - - private record StateRecord( - io.airbyte.db.instance.configs.jooq.generated.enums.StateType type, - String streamName, - String namespace, - JsonNode state) {} - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StreamResetPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StreamResetPersistence.java deleted file mode 100644 index ff058a65fdcbd..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StreamResetPersistence.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static io.airbyte.db.instance.configs.jooq.generated.Tables.STREAM_RESET; -import static org.jooq.impl.DSL.noCondition; - -import io.airbyte.config.StreamResetRecord; -import io.airbyte.db.Database; -import io.airbyte.db.ExceptionWrappingDatabase; -import io.airbyte.protocol.models.StreamDescriptor; -import java.io.IOException; -import java.time.OffsetDateTime; -import java.util.List; -import java.util.UUID; -import java.util.stream.Stream; -import org.jooq.Condition; -import org.jooq.DSLContext; -import org.jooq.Record; -import org.jooq.RecordMapper; -import org.jooq.impl.DSL; - -public class StreamResetPersistence { - - private final ExceptionWrappingDatabase database; - - public StreamResetPersistence(final Database database) { - this.database = new ExceptionWrappingDatabase(database); - } - - /* - * Get a list of StreamDescriptors for streams that have pending or running resets - */ - public List getStreamResets(final UUID connectionId) throws IOException { - return database.query(ctx -> ctx.select(DSL.asterisk()) - .from(STREAM_RESET)) - .where(STREAM_RESET.CONNECTION_ID.eq(connectionId)) - .fetch(getStreamResetRecordMapper()) - .stream() - .flatMap(row -> Stream.of(new StreamDescriptor().withName(row.streamName()).withNamespace(row.streamNamespace()))) - .toList(); - } - - /* - * Delete stream resets for a given connection. 
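Before the stream-reset delete path continues below, note the Collectors.partitioningBy trick buildGlobalState uses above: the record with neither stream name nor namespace is the global shared state, everything else is per-stream. A minimal, self-contained sketch of that split; Row is a hypothetical stand-in for StateRecord, not an Airbyte type.

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

// Hypothetical stand-in for the StateRecord rows handled by buildGlobalState.
record Row(String streamName, String namespace, String state) {}

public class PartitionDemo {
  public static void main(String[] args) {
    final List<Row> records = List.of(
        new Row(null, null, "{\"cursor\":\"shared\"}"),
        new Row("users", "public", "{\"cursor\":\"42\"}"),
        new Row("orders", "public", "{\"cursor\":\"7\"}"));

    // Partition exactly as buildGlobalState does: the TRUE bucket holds the
    // shared state, the FALSE bucket holds the per-stream records.
    final Map<Boolean, List<Row>> partitions = records.stream()
        .collect(Collectors.partitioningBy(r -> r.streamName() == null && r.namespace() == null));

    final String shared = partitions.get(Boolean.TRUE).stream().map(Row::state).findFirst().orElse(null);
    System.out.println("shared: " + shared);
    partitions.get(Boolean.FALSE).forEach(r -> System.out.println(r.streamName() + ": " + r.state()));
  }
}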
This is called to delete stream reset records for
- * resets that are successfully completed.
- */
- public void deleteStreamResets(final UUID connectionId, final List<StreamDescriptor> streamsToDelete) throws IOException {
-   Condition condition = noCondition();
-   for (final StreamDescriptor streamDescriptor : streamsToDelete) {
-     condition = condition.or(
-         STREAM_RESET.CONNECTION_ID.eq(connectionId)
-             .and(STREAM_RESET.STREAM_NAME.eq(streamDescriptor.getName()))
-             .and(PersistenceHelpers.isNullOrEquals(STREAM_RESET.STREAM_NAMESPACE, streamDescriptor.getNamespace())));
-   }
-
-   database.query(ctx -> ctx.deleteFrom(STREAM_RESET)).where(condition).execute();
- }
-
- /**
-  * Create stream resets for a given connection. This is called to create stream reset records for
-  * resets that are going to be run.
-  *
-  * It will not attempt to create entries for any stream that already exists in the stream_reset
-  * table.
-  */
- public void createStreamResets(final UUID connectionId, final List<StreamDescriptor> streamsToCreate) throws IOException {
-   final OffsetDateTime timestamp = OffsetDateTime.now();
-   database.transaction(ctx -> {
-     createStreamResets(ctx, connectionId, streamsToCreate, timestamp);
-     return null;
-   });
- }
-
- private void createStreamResets(final DSLContext ctx,
-                                 final UUID connectionId,
-                                 final List<StreamDescriptor> streamsToCreate,
-                                 final OffsetDateTime timestamp) {
-   for (final StreamDescriptor streamDescriptor : streamsToCreate) {
-     final boolean streamExists = ctx.fetchExists(
-         STREAM_RESET,
-         STREAM_RESET.CONNECTION_ID.eq(connectionId),
-         STREAM_RESET.STREAM_NAME.eq(streamDescriptor.getName()),
-         PersistenceHelpers.isNullOrEquals(STREAM_RESET.STREAM_NAMESPACE, streamDescriptor.getNamespace()));
-
-     if (!streamExists) {
-       ctx.insertInto(STREAM_RESET)
-           .set(STREAM_RESET.ID, UUID.randomUUID())
-           .set(STREAM_RESET.CONNECTION_ID, connectionId)
-           .set(STREAM_RESET.STREAM_NAME, streamDescriptor.getName())
-           .set(STREAM_RESET.STREAM_NAMESPACE, streamDescriptor.getNamespace())
-           .set(STREAM_RESET.CREATED_AT, timestamp)
-           .set(STREAM_RESET.UPDATED_AT, timestamp)
-           .execute();
-     }
-   }
- }
-
- private static RecordMapper<Record, StreamResetRecord> getStreamResetRecordMapper() {
-   return record -> new StreamResetRecord(
-       UUID.fromString(record.get(STREAM_RESET.CONNECTION_ID, String.class)),
-       record.get(STREAM_RESET.STREAM_NAME, String.class),
-       record.get(STREAM_RESET.STREAM_NAMESPACE, String.class));
- }
-
-}
diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ValidatingConfigPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ValidatingConfigPersistence.java
deleted file mode 100644
index 21b5225ede1cc..0000000000000
--- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ValidatingConfigPersistence.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.config.persistence;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import io.airbyte.commons.json.Jsons;
-import io.airbyte.config.AirbyteConfig;
-import io.airbyte.config.ConfigWithMetadata;
-import io.airbyte.validation.json.JsonSchemaValidator;
-import io.airbyte.validation.json.JsonValidationException;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Validates json inputs and outputs for the ConfigPersistence against their schemas.
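A quick illustration of the condition-building pattern deleteStreamResets uses above: start from jOOQ's neutral noCondition() and OR on one predicate per stream. This sketch assumes jOOQ on the classpath; the plain table and column names are illustrative, not the generated STREAM_RESET schema.

import static org.jooq.impl.DSL.field;
import static org.jooq.impl.DSL.noCondition;

import java.util.List;
import org.jooq.Condition;
import org.jooq.SQLDialect;
import org.jooq.impl.DSL;

public class ConditionDemo {
  public static void main(String[] args) {
    final List<String> streams = List.of("users", "orders");

    // Accumulate one OR'd predicate per stream, the same shape
    // deleteStreamResets builds per StreamDescriptor.
    Condition condition = noCondition();
    for (final String name : streams) {
      condition = condition.or(field("stream_name", String.class).eq(name));
    }

    // Render the accumulated predicate to inspect the generated SQL.
    System.out.println(DSL.using(SQLDialect.POSTGRES).renderInlined(condition));
  }
}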
- */ -@SuppressWarnings("PMD.AvoidThrowingRawExceptionTypes") -@Deprecated(forRemoval = true) -public class ValidatingConfigPersistence implements ConfigPersistence { - - private final JsonSchemaValidator schemaValidator; - private final ConfigPersistence decoratedPersistence; - - public ValidatingConfigPersistence(final ConfigPersistence decoratedPersistence) { - this(decoratedPersistence, new JsonSchemaValidator()); - } - - public ValidatingConfigPersistence(final ConfigPersistence decoratedPersistence, final JsonSchemaValidator schemaValidator) { - this.decoratedPersistence = decoratedPersistence; - this.schemaValidator = schemaValidator; - } - - @Override - public T getConfig(final AirbyteConfig configType, final String configId, final Class clazz) - throws ConfigNotFoundException, JsonValidationException, IOException { - final T config = decoratedPersistence.getConfig(configType, configId, clazz); - validateJson(config, configType); - return config; - } - - @Override - public List listConfigs(final AirbyteConfig configType, final Class clazz) throws JsonValidationException, IOException { - final List configs = decoratedPersistence.listConfigs(configType, clazz); - for (final T config : configs) { - validateJson(config, configType); - } - return configs; - } - - @Override - public ConfigWithMetadata getConfigWithMetadata(final AirbyteConfig configType, final String configId, final Class clazz) - throws ConfigNotFoundException, JsonValidationException, IOException { - final ConfigWithMetadata config = decoratedPersistence.getConfigWithMetadata(configType, configId, clazz); - validateJson(config.getConfig(), configType); - return config; - } - - @Override - public List> listConfigsWithMetadata(final AirbyteConfig configType, final Class clazz) - throws JsonValidationException, IOException { - final List> configs = decoratedPersistence.listConfigsWithMetadata(configType, clazz); - for (final ConfigWithMetadata config : configs) { - validateJson(config.getConfig(), configType); - } - return configs; - } - - @Override - public void writeConfig(final AirbyteConfig configType, final String configId, final T config) throws JsonValidationException, IOException { - - final Map configIdToConfig = new HashMap<>(); - configIdToConfig.put(configId, config); - - writeConfigs(configType, configIdToConfig); - } - - @Override - public void writeConfigs(final AirbyteConfig configType, final Map configs) - throws IOException, JsonValidationException { - for (final Map.Entry config : configs.entrySet()) { - validateJson(Jsons.jsonNode(config.getValue()), configType); - } - decoratedPersistence.writeConfigs(configType, configs); - } - - @Override - public void deleteConfig(final AirbyteConfig configType, final String configId) throws ConfigNotFoundException, IOException { - decoratedPersistence.deleteConfig(configType, configId); - } - - private void validateJson(final T config, final AirbyteConfig configType) throws JsonValidationException { - final JsonNode schema = JsonSchemaValidator.getSchema(configType.getConfigSchemaFile()); - schemaValidator.ensure(schema, Jsons.jsonNode(config)); - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/AWSSecretManagerPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/AWSSecretManagerPersistence.java deleted file mode 100644 index be259aac0f101..0000000000000 --- 
a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/AWSSecretManagerPersistence.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.config.persistence.split_secrets;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import com.amazonaws.auth.AWSStaticCredentialsProvider;
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.secretsmanager.caching.SecretCache;
-import com.amazonaws.services.secretsmanager.AWSSecretsManager;
-import com.amazonaws.services.secretsmanager.AWSSecretsManagerClientBuilder;
-import com.amazonaws.services.secretsmanager.model.CreateSecretRequest;
-import com.amazonaws.services.secretsmanager.model.DeleteSecretRequest;
-import com.amazonaws.services.secretsmanager.model.ResourceNotFoundException;
-import com.amazonaws.services.secretsmanager.model.UpdateSecretRequest;
-import com.google.common.annotations.VisibleForTesting;
-import java.util.Optional;
-import lombok.extern.slf4j.Slf4j;
-
-/**
- * SecretPersistence implementation for AWS Secrets Manager using the Java SDK. The current
- * implementation doesn't make use of `SecretCoordinate#getVersion` as this versioning scheme is
- * not compatible with how AWS Secrets Manager deals with versions: in AWS, a version is an
- * internal identifier that is accessible, but it is a UUID plus a staging tag (see the AWS
- * documentation for more details).
- */
-@Slf4j
-public class AWSSecretManagerPersistence implements SecretPersistence {
-
-  private final AWSSecretsManager client;
-
-  @VisibleForTesting
-  protected final SecretCache cache;
-
-  /**
-   * Creates an AWSSecretManagerPersistence using the default client and region from the current AWS
-   * credentials, which is the approach recommended by the AWS documentation. This implementation
-   * makes use of SecretCache as an optimization to access secrets.
-   *
-   * @see SecretCache
-   */
-  public AWSSecretManagerPersistence() {
-    this.client = AWSSecretsManagerClientBuilder.defaultClient();
-    this.cache = new SecretCache(this.client);
-  }
-
-  /**
-   * Creates an AWSSecretManagerPersistence overriding the current region. This implementation makes
-   * use of SecretCache as an optimization to access secrets.
-   *
-   * @param region AWS region to use.
-   * @see SecretCache
-   */
-  public AWSSecretManagerPersistence(final String region) {
-    checkNotNull(region, "Region cannot be null, to use a default region call AWSSecretManagerPersistence.AWSSecretManagerPersistence()");
-    checkArgument(!region.isEmpty(), "Region can't be empty, to use a default region call AWSSecretManagerPersistence.AWSSecretManagerPersistence()");
-    this.client = AWSSecretsManagerClientBuilder
-        .standard()
-        .withRegion(region)
-        .build();
-    this.cache = new SecretCache(this.client);
-  }
-
-  /**
-   * Creates a new AWSSecretManagerPersistence using the provided explicitly passed credentials.
-   *
-   * @param awsAccessKey The AWS access key.
-   * @param awsSecretAccessKey The AWS secret access key.
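The javadoc and constructor continue below. For context on the caching the constructors above set up, this is roughly what wiring a cached client looks like with the AWS SDK v1 and the aws-secretsmanager-caching library; the region and secret name are placeholders, and the call requires real AWS credentials and permissions at runtime.

import com.amazonaws.secretsmanager.caching.SecretCache;
import com.amazonaws.services.secretsmanager.AWSSecretsManager;
import com.amazonaws.services.secretsmanager.AWSSecretsManagerClientBuilder;

public class SecretCacheDemo {
  public static void main(String[] args) {
    // Placeholder region; credentials come from the default provider chain,
    // as in the no-arg constructor above.
    final AWSSecretsManager client = AWSSecretsManagerClientBuilder
        .standard()
        .withRegion("us-east-1")
        .build();

    // SecretCache fronts GetSecretValue calls with an in-memory cache,
    // which is the optimization the constructors above rely on.
    final SecretCache cache = new SecretCache(client);
    System.out.println(cache.getSecretString("airbyte_workspace_..._secret_...")); // placeholder name
  }
}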
- */ - public AWSSecretManagerPersistence(final String awsAccessKey, final String awsSecretAccessKey) { - checkNotNull(awsAccessKey, "awsAccessKey cannot be null, to use a default region call AWSSecretManagerPersistence.AWSSecretManagerPersistence()"); - checkNotNull(awsSecretAccessKey, - "awsSecretAccessKey cannot be null, to use a default region call AWSSecretManagerPersistence.AWSSecretManagerPersistence()"); - checkArgument(!awsAccessKey.isEmpty(), - "awsAccessKey cannot be empty, to use a default region call AWSSecretManagerPersistence.AWSSecretManagerPersistence()"); - checkArgument(!awsSecretAccessKey.isEmpty(), - "awsSecretAccessKey cannot be empty, to use a default region call AWSSecretManagerPersistence.AWSSecretManagerPersistence()"); - BasicAWSCredentials credentials = new BasicAWSCredentials(awsAccessKey, awsSecretAccessKey); - this.client = AWSSecretsManagerClientBuilder - .standard() - .withCredentials(new AWSStaticCredentialsProvider(credentials)) - .build(); - this.cache = new SecretCache(this.client); - } - - @Override - public Optional read(final SecretCoordinate coordinate) { - // fail fast, return an empty - if (coordinate == null) - return Optional.empty(); - - String secretString = null; - try { - log.debug("Reading secret {}", coordinate.getCoordinateBase()); - secretString = cache.getSecretString(coordinate.getCoordinateBase()); - } catch (ResourceNotFoundException e) { - log.warn("Secret {} not found", coordinate.getCoordinateBase()); - } - return Optional.ofNullable(secretString); - } - - @Override - public void write(final SecretCoordinate coordinate, final String payload) { - checkNotNull(coordinate, "SecretCoordinate cannot be null"); - checkNotNull(payload, "Payload cannot be null"); - checkArgument(!payload.isEmpty(), "Payload shouldn't be empty"); - - if (read(coordinate).isPresent()) { - log.debug("Secret {} found updating payload.", coordinate.getCoordinateBase()); - final UpdateSecretRequest request = new UpdateSecretRequest() - .withSecretId(coordinate.getCoordinateBase()) - .withSecretString(payload) - .withDescription("Airbyte secret."); - client.updateSecret(request); - } else { - log.debug("Secret {} not found, creating a new one.", coordinate.getCoordinateBase()); - final CreateSecretRequest secretRequest = new CreateSecretRequest() - .withName(coordinate.getCoordinateBase()) - .withSecretString(payload) - .withDescription("Airbyte secret."); - client.createSecret(secretRequest); - } - - } - - /** - * Utility to clean up after integration tests. - * - * @param coordinate SecretCoordinate to delete. - */ - @VisibleForTesting - protected void deleteSecret(final SecretCoordinate coordinate) { - client.deleteSecret(new DeleteSecretRequest() - .withSecretId(coordinate.getCoordinateBase()) - .withForceDeleteWithoutRecovery(true)); - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/GoogleSecretManagerPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/GoogleSecretManagerPersistence.java deleted file mode 100644 index d099ec635e5ec..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/GoogleSecretManagerPersistence.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence.split_secrets; - -import com.google.api.gax.core.FixedCredentialsProvider; -import com.google.api.gax.rpc.NotFoundException; -import com.google.auth.oauth2.ServiceAccountCredentials; -import com.google.cloud.secretmanager.v1.ProjectName; -import com.google.cloud.secretmanager.v1.Replication; -import com.google.cloud.secretmanager.v1.Secret; -import com.google.cloud.secretmanager.v1.SecretManagerServiceClient; -import com.google.cloud.secretmanager.v1.SecretManagerServiceSettings; -import com.google.cloud.secretmanager.v1.SecretName; -import com.google.cloud.secretmanager.v1.SecretPayload; -import com.google.cloud.secretmanager.v1.SecretVersionName; -import com.google.protobuf.ByteString; -import com.google.protobuf.Duration; -import io.airbyte.commons.lang.Exceptions; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.Optional; -import java.util.function.Supplier; -import javax.annotation.Nullable; -import org.joda.time.Days; - -/** - * Uses Google Secret Manager (https://cloud.google.com/secret-manager) as a K/V store to access - * secrets. In the future we will likely want to introduce more granular permission handling here. - * - * It's important to note that we are not making use of the versioning feature of Google Secret - * Manager. This is for a few reasons: 1. There isn't a clean interface for getting the most recent - * version. 2. Version writes must be sequential. This means that if we wanted to move between - * secrets management platforms such as Hashicorp Vault and GSM, we would need to create secrets in - * order (or depending on our retention for the secrets pretend to insert earlier versions). - */ -final public class GoogleSecretManagerPersistence implements SecretPersistence { - - /** - * The "latest" alias is a magic string that gives you access to the latest secret without - * explicitly specifying the version. For more info see: - * https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets#access - */ - private static final String LATEST = "latest"; - - private static final Duration EPHEMERAL_TTL = Duration.newBuilder() - .setSeconds(Days.days(5).toStandardSeconds().getSeconds()) - .build(); - - private static final Replication REPLICATION_POLICY = Replication.newBuilder() - .setAutomatic(Replication.Automatic.newBuilder().build()) - .build(); - - private final String gcpProjectId; - private final Supplier clientSupplier; - - private final @Nullable Duration ttl; - - /** - * Creates a persistence with an infinite TTL for stored secrets. Used for source/destination config - * secret storage. - */ - public static GoogleSecretManagerPersistence getLongLived(final String gcpProjectId, final String gcpCredentialsJson) { - return new GoogleSecretManagerPersistence(gcpProjectId, gcpCredentialsJson, null); - } - - /** - * Creates a persistence with a relatively short TTL for stored secrets. Used for temporary - * operations such as check/discover operations where we need to use secret storage to communicate - * from the server to Temporal, but where we don't want to maintain the secrets indefinitely. 
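A usage sketch for the two factory methods above, assuming the GoogleSecretManagerPersistence class itself is on the classpath; the project id and credentials JSON are hypothetical placeholders.

public class GsmFactoryDemo {
  public static void main(String[] args) {
    // Hypothetical deployment-provided values.
    final String gcpProjectId = "my-gcp-project";
    final String gcpCredentialsJson = "{ ... service account key ... }";

    // No TTL: secrets live until explicitly deleted.
    final GoogleSecretManagerPersistence longLived =
        GoogleSecretManagerPersistence.getLongLived(gcpProjectId, gcpCredentialsJson);

    // Five-day TTL (EPHEMERAL_TTL above), suited to short-lived check/discover hand-offs.
    final GoogleSecretManagerPersistence ephemeral =
        GoogleSecretManagerPersistence.getEphemeral(gcpProjectId, gcpCredentialsJson);
  }
}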
- */ - public static GoogleSecretManagerPersistence getEphemeral(final String gcpProjectId, final String gcpCredentialsJson) { - return new GoogleSecretManagerPersistence(gcpProjectId, gcpCredentialsJson, EPHEMERAL_TTL); - } - - private GoogleSecretManagerPersistence(final String gcpProjectId, final String gcpCredentialsJson, final @Nullable Duration ttl) { - this.gcpProjectId = gcpProjectId; - this.clientSupplier = () -> Exceptions.toRuntime(() -> getSecretManagerServiceClient(gcpCredentialsJson)); - this.ttl = ttl; - } - - @Override - public Optional read(final SecretCoordinate coordinate) { - try (final var client = clientSupplier.get()) { - final var secretVersionName = SecretVersionName.of(gcpProjectId, coordinate.getFullCoordinate(), LATEST); - final var response = client.accessSecretVersion(secretVersionName); - return Optional.of(response.getPayload().getData().toStringUtf8()); - } catch (final NotFoundException e) { - return Optional.empty(); - } - } - - @Override - public void write(final SecretCoordinate coordinate, final String payload) { - try (final var client = clientSupplier.get()) { - if (read(coordinate).isEmpty()) { - final var secretBuilder = Secret.newBuilder().setReplication(REPLICATION_POLICY); - - if (ttl != null) { - secretBuilder.setTtl(ttl); - } - - client.createSecret(ProjectName.of(gcpProjectId), coordinate.getFullCoordinate(), secretBuilder.build()); - } - - final var name = SecretName.of(gcpProjectId, coordinate.getFullCoordinate()); - final var secretPayload = SecretPayload.newBuilder() - .setData(ByteString.copyFromUtf8(payload)) - .build(); - - client.addSecretVersion(name, secretPayload); - } - } - - public static SecretManagerServiceClient getSecretManagerServiceClient(final String credentialsJson) throws IOException { - final var credentialsByteStream = new ByteArrayInputStream(credentialsJson.getBytes(StandardCharsets.UTF_8)); - final var credentials = ServiceAccountCredentials.fromStream(credentialsByteStream); - final var clientSettings = SecretManagerServiceSettings.newBuilder() - .setCredentialsProvider(FixedCredentialsProvider.create(credentials)) - .build(); - - return SecretManagerServiceClient.create(clientSettings); - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java deleted file mode 100644 index 7ac84faf622d8..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence.split_secrets; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import io.airbyte.commons.constants.AirbyteSecretConstants; -import io.airbyte.commons.json.JsonPaths; -import io.airbyte.commons.json.JsonSchemas; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.util.MoreIterators; -import io.airbyte.validation.json.JsonSchemaValidator; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; -import lombok.Builder; -import lombok.extern.slf4j.Slf4j; - -@Builder -@SuppressWarnings({"PMD.CognitiveComplexity", "PMD.CyclomaticComplexity"}) -@Slf4j -public class JsonSecretsProcessor { - - @Builder.Default - final private Boolean copySecrets = false; - - protected static final JsonSchemaValidator VALIDATOR = new JsonSchemaValidator(); - - static final String PROPERTIES_FIELD = "properties"; - static final String TYPE_FIELD = "type"; - static final String ARRAY_TYPE_FIELD = "array"; - static final String ITEMS_FIELD = "items"; - static final String ONE_OF_FIELD = "oneOf"; - - /** - * Returns a copy of the input object wherein any fields annotated with "airbyte_secret" in the - * input schema are masked. - *

    - * This method masks secrets both at the top level of the configuration object and in nested - * properties in a oneOf. - * - * @param schema Schema containing secret annotations - * @param obj Object containing potentially secret fields - */ - public JsonNode prepareSecretsForOutput(final JsonNode obj, final JsonNode schema) { - // todo (cgardens) this is not safe. should throw. - // if schema is an object and has a properties field - if (!isValidJsonSchema(schema)) { - log.error("The schema is not valid, the secret can't be hidden"); - return obj; - } - - return maskAllSecrets(obj, schema); - } - - /** - * Given a JSONSchema object and an object that conforms to that schema, obfuscate all fields in the - * object that are a secret. - * - * @param json - json object that conforms to the schema - * @param schema - jsonschema object - * @return json object with all secrets masked. - */ - public static JsonNode maskAllSecrets(final JsonNode json, final JsonNode schema) { - final Set pathsWithSecrets = JsonSchemas.collectPathsThatMeetCondition( - schema, - node -> MoreIterators.toList(node.fields()) - .stream() - .anyMatch(field -> AirbyteSecretConstants.AIRBYTE_SECRET_FIELD.equals(field.getKey()))) - .stream() - .map(JsonPaths::mapJsonSchemaPathToJsonPath) - .collect(Collectors.toSet()); - - JsonNode copy = Jsons.clone(json); - for (final String path : pathsWithSecrets) { - copy = JsonPaths.replaceAtString(copy, path, AirbyteSecretConstants.SECRETS_MASK); - } - - return copy; - } - - /** - * Returns a copy of the destination object in which any secret fields (as denoted by the input - * schema) found in the source object are added. - *
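The copySecrets javadoc continues below; first, a minimal sketch of the masking flow just described. It uses plain Jackson, handles only top-level properties (no nesting or oneOf, unlike the JsonPaths-based traversal above), and MASK is a stand-in for AirbyteSecretConstants.SECRETS_MASK.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class MaskDemo {
  private static final String MASK = "**********"; // stand-in for SECRETS_MASK

  // Walk top-level schema properties and mask any config field whose
  // schema entry carries airbyte_secret: true.
  static JsonNode mask(final JsonNode config, final JsonNode schema) {
    final ObjectNode copy = config.deepCopy();
    schema.path("properties").fields().forEachRemaining(entry -> {
      if (entry.getValue().path("airbyte_secret").asBoolean(false) && copy.has(entry.getKey())) {
        copy.put(entry.getKey(), MASK);
      }
    });
    return copy;
  }

  public static void main(String[] args) throws Exception {
    final ObjectMapper mapper = new ObjectMapper();
    final JsonNode schema = mapper.readTree(
        "{\"properties\":{\"host\":{\"type\":\"string\"},\"password\":{\"type\":\"string\",\"airbyte_secret\":true}}}");
    final JsonNode config = mapper.readTree("{\"host\":\"db.local\",\"password\":\"hunter2\"}");
    System.out.println(mask(config, schema)); // {"host":"db.local","password":"**********"}
  }
}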

- * This method absorbs secrets both at the top level of the configuration object and in nested
- * properties in a oneOf.
- *
- * @param src The object potentially containing secrets
- * @param dst The object to absorb secrets into
- * @param schema Schema of objects
- * @return dst object with secrets absorbed from src object
- */
- // todo (cgardens) - figure out how to reuse JsonSchemas and JsonPaths for this traversal as well.
- public JsonNode copySecrets(final JsonNode src, final JsonNode dst, final JsonNode schema) {
-   if (copySecrets) {
-     // todo (cgardens) this is not safe. should throw.
-     if (!isValidJsonSchema(schema)) {
-       return dst;
-     }
-
-     Preconditions.checkArgument(dst.isObject());
-     Preconditions.checkArgument(src.isObject());
-
-     final ObjectNode dstCopy = dst.deepCopy();
-     return copySecretsRecursive(src, dstCopy, schema);
-   }
-
-   return src;
- }
-
- // This function modifies dstCopy in place.
- private JsonNode copySecretsRecursive(final JsonNode src, JsonNode dstCopy, final JsonNode schema) {
-   // todo (cgardens) this is not safe. should throw.
-   if (!isValidJsonSchema(schema)) {
-     return dstCopy;
-   }
-
-   Preconditions.checkArgument(dstCopy.isObject());
-   Preconditions.checkArgument(src.isObject());
-
-   final Optional<String> combinationKey = findJsonCombinationNode(schema);
-   if (combinationKey.isPresent()) {
-     final var arrayNode = (ArrayNode) schema.get(combinationKey.get());
-     for (int i = 0; i < arrayNode.size(); i++) {
-       final JsonNode childSchema = arrayNode.get(i);
-       /*
-        * When traversing a oneOf or anyOf, if multiple schemas in the oneOf or anyOf have the SAME key
-        * but a different type then, without this test, we can try to apply the wrong schema to the
-        * object, resulting in errors because of type mismatches.
-        */
-       if (VALIDATOR.test(childSchema, dstCopy)) {
-         // Absorb field values if any of the combination options declares it as a secret
-         copySecretsRecursive(src, dstCopy, childSchema);
-       }
-     }
-   } else {
-     final ObjectNode properties = (ObjectNode) schema.get(PROPERTIES_FIELD);
-     for (final String key : Jsons.keys(properties)) {
-       // If the source or destination doesn't have this key then we have nothing to copy, so we should
-       // skip to the next key.
-       if (!src.has(key) || !dstCopy.has(key)) {
-         continue;
-       }
-
-       final JsonNode fieldSchema = properties.get(key);
-       // We only copy the original secret if the destination object isn't attempting to overwrite it,
-       // i.e. if the destination object's value is set to the mask, then we can copy the original secret.
-       if (JsonSecretsProcessor.isSecret(fieldSchema) && AirbyteSecretConstants.SECRETS_MASK.equals(dstCopy.get(key).asText())) {
-         ((ObjectNode) dstCopy).set(key, src.get(key));
-
-       } else {
-         // Otherwise, this is just a plain old json node; recurse into it. If it's not actually an object,
-         // the recursive call will exit immediately.
- final JsonNode copiedField = copySecretsRecursive(src.get(key), dstCopy.get(key), fieldSchema); - ((ObjectNode) dstCopy).set(key, copiedField); - } - } - } - - return dstCopy; - } - - static boolean isSecret(final JsonNode obj) { - return obj.isObject() && obj.has(AirbyteSecretConstants.AIRBYTE_SECRET_FIELD) && obj.get(AirbyteSecretConstants.AIRBYTE_SECRET_FIELD).asBoolean(); - } - - private static Optional findJsonCombinationNode(final JsonNode node) { - for (final String combinationNode : List.of("allOf", "anyOf", "oneOf")) { - if (node.has(combinationNode) && node.get(combinationNode).isArray()) { - return Optional.of(combinationNode); - } - } - return Optional.empty(); - } - - @SuppressWarnings("BooleanMethodIsAlwaysInverted") - @VisibleForTesting - public static boolean isValidJsonSchema(final JsonNode schema) { - return schema.isObject() && ((schema.has(PROPERTIES_FIELD) && schema.get(PROPERTIES_FIELD).isObject()) - || (schema.has(ONE_OF_FIELD) && schema.get(ONE_OF_FIELD).isArray())); - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/LocalTestingSecretPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/LocalTestingSecretPersistence.java deleted file mode 100644 index e8c28fb7557df..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/LocalTestingSecretPersistence.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence.split_secrets; - -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.db.Database; -import java.sql.SQLException; -import java.util.Optional; - -/** - * Secrets persistence intended only for local development. - */ -public class LocalTestingSecretPersistence implements SecretPersistence { - - private final Database configDatabase; - - private boolean initialized = false; - - public LocalTestingSecretPersistence(final Database configDatabase) { - this.configDatabase = configDatabase; - } - - @Override - public void initialize() throws SQLException { - if (!initialized) { - this.configDatabase.query(ctx -> { - ctx.execute("CREATE TABLE IF NOT EXISTS secrets ( coordinate TEXT PRIMARY KEY, payload TEXT);"); - return null; - }); - initialized = true; - } - } - - @Override - public Optional read(final SecretCoordinate coordinate) { - return Exceptions.toRuntime(() -> this.configDatabase.query(ctx -> { - initialize(); - final var result = ctx.fetch("SELECT payload FROM secrets WHERE coordinate = ?;", coordinate.getFullCoordinate()); - if (result.size() == 0) { - return Optional.empty(); - } else { - return Optional.of(result.get(0).getValue(0, String.class)); - } - })); - } - - @Override - public void write(final SecretCoordinate coordinate, final String payload) { - Exceptions.toRuntime(() -> this.configDatabase.query(ctx -> { - initialize(); - ctx.query("INSERT INTO secrets(coordinate,payload) VALUES(?, ?) 
ON CONFLICT (coordinate) DO UPDATE SET payload = ?;",
-         coordinate.getFullCoordinate(), payload, payload).execute();
-     return null;
-   }));
- }
-
-}
diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/MemorySecretPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/MemorySecretPersistence.java
deleted file mode 100644
index e3f5c72652b2a..0000000000000
--- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/MemorySecretPersistence.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.config.persistence.split_secrets;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Optional;
-
-/**
- * Map-based implementation of a {@link SecretPersistence} used for unit testing.
- */
-public class MemorySecretPersistence implements SecretPersistence {
-
-  final Map<SecretCoordinate, String> secretMap = new HashMap<>();
-
-  @Override
-  public Optional<String> read(final SecretCoordinate coordinate) {
-    return Optional.ofNullable(secretMap.get(coordinate));
-  }
-
-  @Override
-  public void write(final SecretCoordinate coordinate, final String payload) {
-    secretMap.put(coordinate, payload);
-  }
-
-  public Map<SecretCoordinate, String> getMap() {
-    return new HashMap<>(secretMap);
-  }
-
-}
diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/NoOpSecretsHydrator.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/NoOpSecretsHydrator.java
deleted file mode 100644
index 7b859e28f48ed..0000000000000
--- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/NoOpSecretsHydrator.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.config.persistence.split_secrets;
-
-import com.fasterxml.jackson.databind.JsonNode;
-
-/**
- * No-op hydrator. Used if there is no secrets persistence configured for this Airbyte instance.
- */
-public class NoOpSecretsHydrator implements SecretsHydrator {
-
-  @Override
-  public JsonNode hydrate(final JsonNode partialConfig) {
-    return partialConfig;
-  }
-
-  @Override
-  public JsonNode hydrateSecretCoordinate(final JsonNode secretCoordinate) {
-    return secretCoordinate;
-  }
-
-}
diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/ReadOnlySecretPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/ReadOnlySecretPersistence.java
deleted file mode 100644
index b4fb2040443d7..0000000000000
--- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/ReadOnlySecretPersistence.java
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.config.persistence.split_secrets;
-
-import java.util.Optional;
-
-/**
- * Provides a read-only interface to a backing secrets store similar to {@link SecretPersistence}.
- * In practice, the functionality should be provided by a {@link SecretPersistence#read} function.
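Because ReadOnlySecretPersistence (declared just below) is a single-method interface, any map-backed lambda satisfies it, which is essentially what MemorySecretPersistence above does. A self-contained sketch with simplified, hypothetical types standing in for the real ones:

import java.util.Map;
import java.util.Optional;

public class ReadOnlyDemo {
  // Simplified coordinate; the real SecretCoordinate appears later in this diff.
  record Coordinate(String fullCoordinate) {}

  // Same shape as ReadOnlySecretPersistence: a single read method,
  // so a lambda over any backing map implements it.
  @FunctionalInterface
  interface ReadOnly {
    Optional<String> read(Coordinate coordinate);
  }

  public static void main(String[] args) {
    final Map<String, String> store = Map.of("airbyte_..._v1", "s3cret");
    final ReadOnly readOnly = c -> Optional.ofNullable(store.get(c.fullCoordinate()));
    System.out.println(readOnly.read(new Coordinate("airbyte_..._v1"))); // Optional[s3cret]
  }
}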
- */ -@FunctionalInterface -public interface ReadOnlySecretPersistence { - - Optional read(SecretCoordinate coordinate); - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/RealSecretsHydrator.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/RealSecretsHydrator.java deleted file mode 100644 index 572f07abcbe29..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/RealSecretsHydrator.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence.split_secrets; - -import com.fasterxml.jackson.databind.JsonNode; - -/** - * Adds secrets to a partial config based off a persistence. - */ -public class RealSecretsHydrator implements SecretsHydrator { - - private final ReadOnlySecretPersistence readOnlySecretPersistence; - - public RealSecretsHydrator(final ReadOnlySecretPersistence readOnlySecretPersistence) { - this.readOnlySecretPersistence = readOnlySecretPersistence; - } - - @Override - public JsonNode hydrate(final JsonNode partialConfig) { - return SecretsHelpers.combineConfig(partialConfig, readOnlySecretPersistence); - } - - @Override - public JsonNode hydrateSecretCoordinate(final JsonNode secretCoordinate) { - return SecretsHelpers.hydrateSecretCoordinate(secretCoordinate, readOnlySecretPersistence); - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretCoordinate.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretCoordinate.java deleted file mode 100644 index 47b6f253fcf5f..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretCoordinate.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence.split_secrets; - -import com.google.api.client.util.Preconditions; -import java.util.Objects; -import lombok.ToString; - -/** - * A secret coordinate represents a specific secret at a specific version stored within a - * {@link SecretPersistence}. - * - * We use "coordinate base" to refer to a string reference to a secret without versioning - * information. We use "full coordinate" to refer to a string reference that includes both the - * coordinate base and version-specific information. You should be able to go from a "full - * coordinate" to a coordinate object and back without loss of information. - * - * Example coordinate base: - * airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22 - * - * Example full coordinate: - * airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v1 - * - * This coordinate system was designed to work well with Google Secrets Manager but should work with - * other secret storage backends as well. - */ -@ToString -@SuppressWarnings("PMD.ShortVariable") -public class SecretCoordinate { - - private final String coordinateBase; - private final long version; - - public SecretCoordinate(final String coordinateBase, final long version) { - this.coordinateBase = coordinateBase; - this.version = version; - } - - /** - * Used to turn a full string coordinate into a coordinate object using a full coordinate generated - * by {@link SecretsHelpers#getCoordinate}. 
- * - * This will likely need refactoring if we end up using a secret store that doesn't allow the same - * format of full coordinate. - * - * @param fullCoordinate coordinate with version - * @return secret coordinate object - */ - public static SecretCoordinate fromFullCoordinate(final String fullCoordinate) { - final var splits = fullCoordinate.split("_v"); - Preconditions.checkArgument(splits.length == 2); - - return new SecretCoordinate(splits[0], Long.parseLong(splits[1])); - } - - public String getCoordinateBase() { - return coordinateBase; - } - - public long getVersion() { - return version; - } - - public String getFullCoordinate() { - return coordinateBase + "_v" + version; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final SecretCoordinate that = (SecretCoordinate) o; - return toString().equals(that.toString()); - } - - /** - * The hash code is computed using the {@link SecretCoordinate#getFullCoordinate} because the full - * secret coordinate should be a valid unique representation of the secret coordinate. - */ - @Override - public int hashCode() { - return Objects.hash(getFullCoordinate()); - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretCoordinateToPayload.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretCoordinateToPayload.java deleted file mode 100644 index a10bded4e1f52..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretCoordinateToPayload.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence.split_secrets; - -import com.fasterxml.jackson.databind.JsonNode; - -public record SecretCoordinateToPayload(SecretCoordinate secretCoordinate, - String payload, - JsonNode secretCoordinateForDB) { - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretPersistence.java deleted file mode 100644 index e065f3939d5cb..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretPersistence.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence.split_secrets; - -import io.airbyte.config.Configs; -import io.airbyte.db.Database; -import java.util.Optional; -import javax.annotation.Nullable; -import org.jooq.DSLContext; - -/** - * Provides the ability to read and write secrets to a backing store. Assumes that secret payloads - * are always strings. See {@link SecretCoordinate} for more information on how secrets are - * identified. - */ -@SuppressWarnings("PMD.MissingOverride") -public interface SecretPersistence extends ReadOnlySecretPersistence { - - /** - * Performs any initialization prior to utilization of the persistence object. This exists to make - * it possible to create instances within a dependency management framework, where any - * initialization logic should not be present in a constructor. - * - * @throws Exception if unable to perform the initialization. 
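The coordinate format documented above (base + "_v" + version) makes the parse/format round trip easy to check. A small sketch using the example coordinate from the javadoc; note it shares the same splits.length == 2 caveat as fromFullCoordinate.

public class CoordinateDemo {
  public static void main(String[] args) {
    final String base =
        "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22";
    final String full = base + "_v1";

    // fromFullCoordinate splits on the "_v" marker; parsing then
    // re-formatting must be lossless.
    final String[] splits = full.split("_v");
    final String parsedBase = splits[0];
    final long version = Long.parseLong(splits[1]);
    System.out.println(parsedBase.equals(base) && (parsedBase + "_v" + version).equals(full)); // true
  }
}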
- */ - default void initialize() throws Exception {} - - Optional read(final SecretCoordinate coordinate); - - void write(final SecretCoordinate coordinate, final String payload); - - static Optional getLongLived(final @Nullable DSLContext dslContext, final Configs configs) { - switch (configs.getSecretPersistenceType()) { - case TESTING_CONFIG_DB_TABLE -> { - final Database configDatabase = new Database(dslContext); - return Optional.of(new LocalTestingSecretPersistence(configDatabase)); - } - case GOOGLE_SECRET_MANAGER -> { - return Optional.of(GoogleSecretManagerPersistence.getLongLived(configs.getSecretStoreGcpProjectId(), configs.getSecretStoreGcpCredentials())); - } - case VAULT -> { - return Optional.of(new VaultSecretPersistence(configs.getVaultAddress(), configs.getVaultPrefix(), configs.getVaultToken())); - } - case AWS_SECRET_MANAGER -> { - return Optional.of(new AWSSecretManagerPersistence(configs.getAwsAccessKey(), configs.getAwsSecretAccessKey())); - } - default -> { - return Optional.empty(); - } - } - } - - static SecretsHydrator getSecretsHydrator(final @Nullable DSLContext dslContext, final Configs configs) { - final var persistence = getLongLived(dslContext, configs); - - if (persistence.isPresent()) { - return new RealSecretsHydrator(persistence.get()); - } else { - return new NoOpSecretsHydrator(); - } - } - - static Optional getEphemeral(final DSLContext dslContext, final Configs configs) { - switch (configs.getSecretPersistenceType()) { - case TESTING_CONFIG_DB_TABLE -> { - final Database configDatabase = new Database(dslContext); - return Optional.of(new LocalTestingSecretPersistence(configDatabase)); - } - case GOOGLE_SECRET_MANAGER -> { - return Optional.of(GoogleSecretManagerPersistence.getEphemeral(configs.getSecretStoreGcpProjectId(), configs.getSecretStoreGcpCredentials())); - } - case VAULT -> { - return Optional.of(new VaultSecretPersistence(configs.getVaultAddress(), configs.getVaultPrefix(), configs.getVaultToken())); - } - case AWS_SECRET_MANAGER -> { - return Optional.of(new AWSSecretManagerPersistence(configs.getAwsAccessKey(), configs.getAwsSecretAccessKey())); - } - default -> { - return Optional.empty(); - } - } - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java deleted file mode 100644 index 829803dad23cf..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java +++ /dev/null @@ -1,395 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence.split_secrets; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.fasterxml.jackson.databind.node.TextNode; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.constants.AirbyteSecretConstants; -import io.airbyte.commons.json.JsonPaths; -import io.airbyte.commons.json.JsonSchemas; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.util.MoreIterators; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.function.Supplier; -import javax.annotation.Nullable; - -/** - * Contains most of the core logic surrounding secret coordinate extraction and insertion. - * - * These are the three main helpers provided by this class: - * {@link SecretsHelpers#splitConfig(UUID, JsonNode, JsonNode)} - * {@link SecretsHelpers#splitAndUpdateConfig(UUID, JsonNode, JsonNode, JsonNode, ReadOnlySecretPersistence)} - * {@link SecretsHelpers#combineConfig(JsonNode, ReadOnlySecretPersistence)} - * - * Here's an overview on some terminology used in this class: - * - * A "full config" represents an entire config as specified by an end user. - * - * A "partial config" represents a config where any string that was specified as an airbyte_secret - * in the specification is replaced by a JSON object {"_secret": "secret coordinate"} that can later - * be used to reconstruct the "full config". - * - * A {@link SecretPersistence} provides the ability to read and write secrets to a backing store - * such as Google Secrets Manager. - * - * A {@link SecretCoordinate} is a reference to a specific secret at a specific version in a - * {@link SecretPersistence}. - */ -@SuppressWarnings("PMD.AvoidThrowingRawExceptionTypes") -public class SecretsHelpers { - - public static final String COORDINATE_FIELD = "_secret"; - - /** - * Used to separate secrets out of some configuration. This will output a partial config that - * includes pointers to secrets instead of actual secret values and a map that can be used to update - * a {@link SecretPersistence} at coordinates with values from the full config. - * - * @param workspaceId workspace used for this config - * @param fullConfig config including secrets - * @param spec specification for the config - * @return a partial config + a map of coordinates to secret payloads - */ - public static SplitSecretConfig splitConfig(final UUID workspaceId, - final JsonNode fullConfig, - final JsonNode spec) { - return internalSplitAndUpdateConfig( - UUID::randomUUID, - workspaceId, - (coordinate) -> Optional.empty(), - Jsons.emptyObject(), - fullConfig, - spec); - } - - /** - * Used to separate secrets out of a configuration and output a partial config that includes - * pointers to secrets instead of actual secret values and a map that can be used to update a - * {@link SecretPersistence} at coordinates with values from the full config. If a previous config - * for this configuration is provided, this method attempts to use the same base coordinates to - * refer to the same secret and increment the version of the coordinate used to reference a secret. 
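To make the "full config" vs. "partial config" terminology concrete before the update variant below, here is a hypothetical before/after for a config with one airbyte_secret field (the coordinate string is invented):

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical before/after of a split; values are invented for illustration.
public final class SplitBeforeAfterExample {

  public static void main(final String[] args) throws Exception {
    final ObjectMapper mapper = new ObjectMapper();

    // Full config, as entered by the user:
    final JsonNode full = mapper.readTree(
        "{\"host\": \"db.example.com\", \"password\": \"hunter2\"}");

    // Partial config, as persisted after the split: the secret string is
    // replaced by a {"_secret": <full coordinate>} pointer object.
    final JsonNode partial = mapper.readTree(
        "{\"host\": \"db.example.com\", \"password\": "
            + "{\"_secret\": \"airbyte_workspace_WS-UUID_secret_S-UUID_v1\"}}");

    // The SplitSecretConfig returned alongside the partial config maps that
    // coordinate to "hunter2" so the caller can update the SecretPersistence.
    System.out.println(full.get("password").isTextual());       // true
    System.out.println(partial.get("password").has("_secret")); // true
  }
}
```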
- * - * @param workspaceId workspace used for this config - * @param oldPartialConfig previous partial config for this specific configuration - * @param newFullConfig new config containing secrets that will be used to update the partial config - * @param spec specification that should match both the previous partial config after filling in - * coordinates and the new full config. - * @param secretReader provides a way to determine if a secret is the same or updated at a specific - * location in a config - * @return a partial config + a map of coordinates to secret payloads - */ - public static SplitSecretConfig splitAndUpdateConfig(final UUID workspaceId, - final JsonNode oldPartialConfig, - final JsonNode newFullConfig, - final JsonNode spec, - final ReadOnlySecretPersistence secretReader) { - return internalSplitAndUpdateConfig( - UUID::randomUUID, - workspaceId, - secretReader, - oldPartialConfig, - newFullConfig, - spec); - } - - /** - * Replaces {"_secret": "full_coordinate"} objects in the partial config with the string secret - * payloads loaded from the secret persistence at those coordinates. - * - * @param partialConfig configuration containing secret coordinates (references to secrets) - * @param secretPersistence secret storage mechanism - * @return full config including actual secret values - */ - public static JsonNode combineConfig(final JsonNode partialConfig, final ReadOnlySecretPersistence secretPersistence) { - final var config = partialConfig.deepCopy(); - - // if the entire config is a secret coordinate object - if (config.has(COORDINATE_FIELD)) { - final var coordinateNode = config.get(COORDINATE_FIELD); - final var coordinate = getCoordinateFromTextNode(coordinateNode); - return new TextNode(getOrThrowSecretValue(secretPersistence, coordinate)); - } - - // otherwise iterate through all object fields - config.fields().forEachRemaining(field -> { - final var fieldName = field.getKey(); - final var fieldNode = field.getValue(); - - if (fieldNode instanceof ArrayNode) { - for (int i = 0; i < fieldNode.size(); i++) { - ((ArrayNode) fieldNode).set(i, combineConfig(fieldNode.get(i), secretPersistence)); - } - } else if (fieldNode instanceof ObjectNode) { - ((ObjectNode) config).replace(fieldName, combineConfig(fieldNode, secretPersistence)); - } - }); - - return config; - } - - /** - * @param uuidSupplier provided to allow a test case to produce known UUIDs in order for easy - * fixture creation. - */ - @VisibleForTesting - public static SplitSecretConfig splitConfig(final Supplier uuidSupplier, - final UUID workspaceId, - final JsonNode fullConfig, - final JsonNode spec) { - return internalSplitAndUpdateConfig(uuidSupplier, workspaceId, (coordinate) -> Optional.empty(), Jsons.emptyObject(), fullConfig, - spec); - } - - /** - * @param uuidSupplier provided to allow a test case to produce known UUIDs in order for easy - * fixture creation. - */ - @VisibleForTesting - public static SplitSecretConfig splitAndUpdateConfig(final Supplier uuidSupplier, - final UUID workspaceId, - final JsonNode oldPartialConfig, - final JsonNode newFullConfig, - final JsonNode spec, - final ReadOnlySecretPersistence secretReader) { - return internalSplitAndUpdateConfig(uuidSupplier, workspaceId, secretReader, oldPartialConfig, newFullConfig, spec); - } - - /** - * This returns all the unique path to the airbyte secrets based on a schema spec. The path will be - * return in an ascending alphabetical order. 
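For intuition about what those JSONPath results look like, here is a simplified, self-contained sketch of the collection step (it walks only nested object properties and ignores arrays and oneOf branches, unlike the real JsonSchemas-based helper whose implementation follows):

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Simplified sketch: record a JSONPath for every field marked airbyte_secret.
public final class SecretPathsSketch {

  static void collect(final JsonNode schema, final String path, final List<String> out) {
    if (schema.path("airbyte_secret").asBoolean(false)) {
      out.add(path);
      return;
    }
    schema.path("properties").fields()
        .forEachRemaining(f -> collect(f.getValue(), path + "." + f.getKey(), out));
  }

  public static void main(final String[] args) throws Exception {
    final JsonNode spec = new ObjectMapper().readTree(
        "{\"properties\":{\"password\":{\"type\":\"string\",\"airbyte_secret\":true},"
            + "\"tunnel\":{\"properties\":{\"ssh_key\":{\"type\":\"string\",\"airbyte_secret\":true}}}}}");

    final List<String> paths = new ArrayList<>();
    collect(spec, "$", paths);
    Collections.sort(paths);
    System.out.println(paths); // [$.password, $.tunnel.ssh_key]
  }
}
```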
- */ - public static List getSortedSecretPaths(final JsonNode spec) { - return JsonSchemas.collectPathsThatMeetCondition( - spec, - node -> MoreIterators.toList(node.fields()) - .stream() - .anyMatch(field -> AirbyteSecretConstants.AIRBYTE_SECRET_FIELD.equals(field.getKey()))) - .stream() - .map(JsonPaths::mapJsonSchemaPathToJsonPath) - .distinct() - .sorted() - .toList(); - } - - private static Optional getExistingCoordinateIfExists(final JsonNode json) { - if (json != null && json.has(COORDINATE_FIELD)) { - return Optional.ofNullable(json.get(COORDINATE_FIELD).asText()); - } else { - return Optional.empty(); - } - } - - private static SecretCoordinate getOrCreateCoordinate(final ReadOnlySecretPersistence secretReader, - final UUID workspaceId, - final Supplier uuidSupplier, - final JsonNode newJson, - final JsonNode persistedJson) { - - final Optional existingCoordinateOptional = getExistingCoordinateIfExists(persistedJson); - return getCoordinate(newJson.asText(), secretReader, workspaceId, uuidSupplier, existingCoordinateOptional.orElse(null)); - } - - /** - * Internal method used to support both "split config" and "split and update config" operations. - * - * For splits that don't have a prior partial config (such as when a connector is created for a - * source or destination for the first time), the secret reader and old partial config can be set to - * empty (see {@link SecretsHelpers#splitConfig(UUID, JsonNode, JsonNode)}). - * - * IMPORTANT: This is used recursively. In the process, the partial config, full config, and spec - * inputs will represent internal json structures, not the entire configs/specs. - * - * @param uuidSupplier provided to allow a test case to produce known UUIDs in order for easy - * fixture creation - * @param workspaceId workspace that will contain the source or destination this config will be - * stored for - * @param secretReader provides a way to determine if a secret is the same or updated at a specific - * location in a config - * @param persistedPartialConfig previous partial config for this specific configuration - * @param newFullConfig new config containing secrets that will be used to update the partial config - * @param spec config specification - * @return a partial config + a map of coordinates to secret payloads - */ - @VisibleForTesting - static SplitSecretConfig internalSplitAndUpdateConfig(final Supplier uuidSupplier, - final UUID workspaceId, - final ReadOnlySecretPersistence secretReader, - final JsonNode persistedPartialConfig, - final JsonNode newFullConfig, - final JsonNode spec) { - var fullConfigCopy = newFullConfig.deepCopy(); - final var secretMap = new HashMap(); - - final List paths = getSortedSecretPaths(spec); - - for (final String path : paths) { - fullConfigCopy = JsonPaths.replaceAt(fullConfigCopy, path, (json, pathOfNode) -> { - final Optional persistedNode = JsonPaths.getSingleValue(persistedPartialConfig, pathOfNode); - final SecretCoordinate coordinate = getOrCreateCoordinate( - secretReader, - workspaceId, - uuidSupplier, - json, - persistedNode.orElse(null)); - - secretMap.put(coordinate, json.asText()); - - return Jsons.jsonNode(Map.of(COORDINATE_FIELD, coordinate.getFullCoordinate())); - }); - } - - return new SplitSecretConfig(fullConfigCopy, secretMap); - } - - /** - * Extracts a secret value from the persistence and throws an exception if the secret is not - * available. 
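The decision that getOrCreateCoordinate above delegates to (the full implementation, getSecretCoordinate, appears further down) reduces to a small rule: reuse the base, and advance the version only when the payload actually changed. A hypothetical worked example, not original code:

```java
// Hypothetical sketch of the version-bump rule applied during an update.
public final class VersionRuleSketch {

  static long nextVersion(final long oldVersion, final String oldPayload, final String newPayload) {
    return oldPayload.equals(newPayload) ? oldVersion : oldVersion + 1;
  }

  public static void main(final String[] args) {
    System.out.println(nextVersion(3, "hunter2", "hunter2")); // 3 -> coordinate unchanged
    System.out.println(nextVersion(3, "hunter2", "hunter3")); // 4 -> new secret version
  }
}
```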
- * - * @param secretPersistence storage layer for secrets - * @param coordinate reference to a secret in the persistence - * @throws RuntimeException when a secret at that coordinate is not available in the persistence - * @return a json string containing the secret value or a JSON - */ - private static String getOrThrowSecretValue(final ReadOnlySecretPersistence secretPersistence, - final SecretCoordinate coordinate) { - final var secretValue = secretPersistence.read(coordinate); - - if (secretValue.isEmpty()) { - throw new RuntimeException(String.format("That secret was not found in the store! Coordinate: %s", coordinate.getFullCoordinate())); - } - return secretValue.get(); - } - - private static SecretCoordinate getCoordinateFromTextNode(final JsonNode node) { - return SecretCoordinate.fromFullCoordinate(node.asText()); - } - - /** - * Determines which coordinate base and version to use based off of an old version that may exist in - * the secret persistence. - * - * If the secret does not exist in the persistence, version 1 will be used. - * - * If the new secret value is the same as the old version stored in the persistence, the returned - * coordinate will be the same as the previous version. - * - * If the new secret value is different from the old version stored in the persistence, the returned - * coordinate will increase the version. - * - * @param newSecret new value of a secret provides a way to determine if a secret is the same or - * updated at a specific location in a config - * @param workspaceId workspace used for this config - * @param uuidSupplier provided to allow a test case to produce known UUIDs in order for easy - * fixture creation. - * @param oldSecretFullCoordinate a nullable full coordinate (base+version) retrieved from the - * previous config - * @return a coordinate (versioned reference to where the secret is stored in the persistence) - */ - protected static SecretCoordinate getCoordinate( - final String newSecret, - final ReadOnlySecretPersistence secretReader, - final UUID workspaceId, - final Supplier uuidSupplier, - final @Nullable String oldSecretFullCoordinate) { - return getSecretCoordinate("airbyte_workspace_", newSecret, secretReader, workspaceId, uuidSupplier, oldSecretFullCoordinate); - } - - public static String getCoordinatorBase(final String secretBasePrefix, final UUID secretBaseId, final Supplier uuidSupplier) { - return secretBasePrefix + secretBaseId + "_secret_" + uuidSupplier.get(); - } - - private static SecretCoordinate getSecretCoordinate(final String secretBasePrefix, - final String newSecret, - final ReadOnlySecretPersistence secretReader, - final UUID secretBaseId, - final Supplier uuidSupplier, - final @Nullable String oldSecretFullCoordinate) { - String coordinateBase = null; - Long version = null; - - if (oldSecretFullCoordinate != null) { - final var oldCoordinate = SecretCoordinate.fromFullCoordinate(oldSecretFullCoordinate); - coordinateBase = oldCoordinate.getCoordinateBase(); - final var oldSecretValue = secretReader.read(oldCoordinate); - if (oldSecretValue.isPresent()) { - if (oldSecretValue.get().equals(newSecret)) { - version = oldCoordinate.getVersion(); - } else { - version = oldCoordinate.getVersion() + 1; - } - } - } - - if (coordinateBase == null) { - // IMPORTANT: format of this cannot be changed without introducing migrations for secrets - // persistences - coordinateBase = getCoordinatorBase(secretBasePrefix, secretBaseId, uuidSupplier); - } - - if (version == null) { - version = 1L; - } - - return new 
SecretCoordinate(coordinateBase, version); - } - - /** - * This method takes in the key (JSON key or HMAC key) of a workspace service account as a secret - * and generates a co-ordinate for the secret so that the secret can be written in secret - * persistence at the generated co-ordinate - * - * @param newSecret The JSON key or HMAC key value - * @param secretReader To read the value from secret persistence for comparison with the new value - * @param workspaceId of the service account - * @param uuidSupplier provided to allow a test case to produce known UUIDs in order for easy * - * fixture creation. - * @param oldSecretCoordinate a nullable full coordinate (base+version) retrieved from the * - * previous config - * @param keyType HMAC ot JSON key - * @return a coordinate (versioned reference to where the secret is stored in the persistence) - */ - public static SecretCoordinateToPayload convertServiceAccountCredsToSecret(final String newSecret, - final ReadOnlySecretPersistence secretReader, - final UUID workspaceId, - final Supplier uuidSupplier, - final @Nullable JsonNode oldSecretCoordinate, - final String keyType) { - final String oldSecretFullCoordinate = - (oldSecretCoordinate != null && oldSecretCoordinate.has(COORDINATE_FIELD)) ? oldSecretCoordinate.get(COORDINATE_FIELD).asText() - : null; - final SecretCoordinate coordinateForStagingConfig = getSecretCoordinate("service_account_" + keyType + "_", - newSecret, - secretReader, - workspaceId, - uuidSupplier, - oldSecretFullCoordinate); - return new SecretCoordinateToPayload(coordinateForStagingConfig, - newSecret, - Jsons.jsonNode(Map.of(COORDINATE_FIELD, - coordinateForStagingConfig.getFullCoordinate()))); - } - - /** - * Takes in the secret coordinate in form of a JSON and fetches the secret from the store - * - * @param secretCoordinateAsJson The co-ordinate at which we expect the secret value to be present - * in the secret persistence - * @param readOnlySecretPersistence The secret persistence - * @return Original secret value as JsonNode - */ - public static JsonNode hydrateSecretCoordinate(final JsonNode secretCoordinateAsJson, - final ReadOnlySecretPersistence readOnlySecretPersistence) { - final var secretCoordinate = getCoordinateFromTextNode(secretCoordinateAsJson.get(COORDINATE_FIELD)); - return Jsons.deserialize(getOrThrowSecretValue(readOnlySecretPersistence, secretCoordinate)); - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHydrator.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHydrator.java deleted file mode 100644 index 1cf943c5de41a..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHydrator.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence.split_secrets; - -import com.fasterxml.jackson.databind.JsonNode; - -/** - * Adds secrets to a partial config. - */ -public interface SecretsHydrator { - - /** - * Adds secrets to a partial config. 
- * - * @param partialConfig partial config (without secrets) - * @return full config with secrets - */ - JsonNode hydrate(JsonNode partialConfig); - - /** - * Takes in the secret coordinate in form of a JSON and fetches the secret from the store - * - * @param secretCoordinate The co-ordinate of the secret in the store in JSON format - * @return original secret value - */ - JsonNode hydrateSecretCoordinate(final JsonNode secretCoordinate); - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SplitSecretConfig.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SplitSecretConfig.java deleted file mode 100644 index 84c01c0d08663..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SplitSecretConfig.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence.split_secrets; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.Map; - -/** - * Data class that provides a way to store the output of a {@link SecretsHelpers} "split" operation - * which takes a "full config" (including secrets) and creates a "partial config" (secrets removed - * and has coordinate pointers to a persistence layer). - * - * The split methods don't actually update the persistence layer itself. The coordinate to secret - * payload map in this class allows the system calling "split" to update the persistence with those - * new coordinate values. - */ -public class SplitSecretConfig { - - private final JsonNode partialConfig; - private final Map coordinateToPayload; - - public SplitSecretConfig(final JsonNode partialConfig, final Map coordinateToPayload) { - this.partialConfig = partialConfig; - this.coordinateToPayload = coordinateToPayload; - } - - public JsonNode getPartialConfig() { - return partialConfig; - } - - public Map getCoordinateToPayload() { - return coordinateToPayload; - } - -} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/VaultSecretPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/VaultSecretPersistence.java deleted file mode 100644 index 7609a336d96f5..0000000000000 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/VaultSecretPersistence.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
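The diff above references RealSecretsHydrator and NoOpSecretsHydrator without showing them. A plausible minimal implementation of the interface just removed would simply delegate to the SecretsHelpers methods deleted earlier in this diff (a sketch; the original wiring may differ):

```java
import com.fasterxml.jackson.databind.JsonNode;

// Sketch of a persistence-backed hydrator, assuming the same package as the
// deleted SecretsHelpers and ReadOnlySecretPersistence.
public class PersistenceBackedHydrator implements SecretsHydrator {

  private final ReadOnlySecretPersistence persistence;

  public PersistenceBackedHydrator(final ReadOnlySecretPersistence persistence) {
    this.persistence = persistence;
  }

  @Override
  public JsonNode hydrate(final JsonNode partialConfig) {
    // Delegates to the recursive combine shown earlier in this diff.
    return SecretsHelpers.combineConfig(partialConfig, persistence);
  }

  @Override
  public JsonNode hydrateSecretCoordinate(final JsonNode secretCoordinate) {
    return SecretsHelpers.hydrateSecretCoordinate(secretCoordinate, persistence);
  }

}
```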
- */ - -package io.airbyte.config.persistence.split_secrets; - -import com.bettercloud.vault.Vault; -import com.bettercloud.vault.VaultConfig; -import com.bettercloud.vault.VaultException; -import io.airbyte.commons.lang.Exceptions; -import java.util.HashMap; -import java.util.Optional; -import lombok.extern.slf4j.Slf4j; -import lombok.val; - -@Slf4j -final public class VaultSecretPersistence implements SecretPersistence { - - private final String SECRET_KEY = "value"; - private final Vault vault; - private final String pathPrefix; - - public VaultSecretPersistence(final String address, final String prefix, final String token) { - this.vault = Exceptions.toRuntime(() -> getVaultClient(address, token)); - this.pathPrefix = prefix; - } - - @Override - public Optional read(final SecretCoordinate coordinate) { - try { - val response = vault.logical().read(pathPrefix + coordinate.getFullCoordinate()); - val restResponse = response.getRestResponse(); - val responseCode = restResponse.getStatus(); - final Boolean isErrorResponse = responseCode / 100 != 2; - - if (isErrorResponse) { - log.error("Vault failed on read. Response code: " + responseCode); - return Optional.empty(); - } - val data = response.getData(); - return Optional.of(data.get(SECRET_KEY)); - } catch (final VaultException e) { - return Optional.empty(); - } - } - - @Override - public void write(final SecretCoordinate coordinate, final String payload) { - try { - val newSecret = new HashMap(); - newSecret.put(SECRET_KEY, payload); - vault.logical().write(pathPrefix + coordinate.getFullCoordinate(), newSecret); - } catch (final VaultException e) { - log.error("Vault failed on write", e); - } - } - - private static Vault getVaultClient(final String address, final String token) throws VaultException { - val config = new VaultConfig() - .address(address) - .token(token) - .engineVersion(2) - .build(); - return new Vault(config); - } - -} diff --git a/airbyte-config/config-persistence/src/test-integration/java/io/airbyte/config/persistence/GoogleSecretManagerPersistenceIntegrationTest.java b/airbyte-config/config-persistence/src/test-integration/java/io/airbyte/config/persistence/GoogleSecretManagerPersistenceIntegrationTest.java deleted file mode 100644 index 4602452403035..0000000000000 --- a/airbyte-config/config-persistence/src/test-integration/java/io/airbyte/config/persistence/GoogleSecretManagerPersistenceIntegrationTest.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.google.api.gax.rpc.NotFoundException; -import com.google.cloud.secretmanager.v1.SecretName; -import io.airbyte.config.Configs; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.persistence.split_secrets.GoogleSecretManagerPersistence; -import io.airbyte.config.persistence.split_secrets.SecretCoordinate; -import java.io.IOException; -import org.apache.commons.lang3.RandomUtils; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -/** - * Triggered as part of integration tests in CI. It uses credentials in Github to connect to the - * integration testing GCP project. 
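Hypothetical usage of the Vault persistence just removed (the address, prefix, and token here are invented; per the class above, reads and writes go to prefix + full coordinate under a single "value" key in a KV v2 engine):

```java
// Sketch only: exercising the deleted VaultSecretPersistence end to end.
public final class VaultUsageExample {

  public static void main(final String[] args) {
    final VaultSecretPersistence persistence =
        new VaultSecretPersistence("http://127.0.0.1:8200", "airbyte/", "dev-only-token");

    final SecretCoordinate coordinate = new SecretCoordinate("airbyte_workspace_example", 1);
    persistence.write(coordinate, "hunter2");
    System.out.println(persistence.read(coordinate).orElse("<missing>"));
  }
}
```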
- */ -@SuppressWarnings("PMD.EmptyCatchBlock") -class GoogleSecretManagerPersistenceIntegrationTest { - - private GoogleSecretManagerPersistence persistence; - private String baseCoordinate; - private final Configs configs = new EnvConfigs(); - - @BeforeEach - void setUp() { - persistence = GoogleSecretManagerPersistence.getEphemeral( - configs.getSecretStoreGcpProjectId(), - configs.getSecretStoreGcpCredentials()); - baseCoordinate = "GoogleSecretManagerPersistenceIntegrationTest_coordinate_" + RandomUtils.nextInt() % 20000; - } - - @AfterEach - void tearDown() throws IOException { - try (final var client = GoogleSecretManagerPersistence.getSecretManagerServiceClient(configs.getSecretStoreGcpCredentials())) { - // try to delete this so we aren't charged for the secret - // if this is missed due to some sort of failure the secret will be deleted after the ttl - try { - client.deleteSecret(SecretName.of( - configs.getSecretStoreGcpProjectId(), - baseCoordinate)); - } catch (final NotFoundException nfe) { - // do nothing - } - } - } - - @Test - void testReadWriteUpdate() { - final var coordinate1 = new SecretCoordinate(baseCoordinate, 1); - - // try reading non-existent value - final var firstRead = persistence.read(coordinate1); - assertTrue(firstRead.isEmpty()); - - // write - final var firstPayload = "abc"; - persistence.write(coordinate1, firstPayload); - final var secondRead = persistence.read(coordinate1); - assertTrue(secondRead.isPresent()); - assertEquals(firstPayload, secondRead.get()); - - // update - final var secondPayload = "def"; - final var coordinate2 = new SecretCoordinate(baseCoordinate, 2); - persistence.write(coordinate2, secondPayload); - final var thirdRead = persistence.read(coordinate2); - assertTrue(thirdRead.isPresent()); - assertEquals(secondPayload, thirdRead.get()); - } - -} diff --git a/airbyte-config/config-persistence/src/test-integration/java/io/airbyte/config/persistence/split_secrets/AWSSecretManagerPersistenceIntegrationTest.java b/airbyte-config/config-persistence/src/test-integration/java/io/airbyte/config/persistence/split_secrets/AWSSecretManagerPersistenceIntegrationTest.java deleted file mode 100644 index 2667aa1f33280..0000000000000 --- a/airbyte-config/config-persistence/src/test-integration/java/io/airbyte/config/persistence/split_secrets/AWSSecretManagerPersistenceIntegrationTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence.split_secrets; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.config.Configs; -import io.airbyte.config.EnvConfigs; -import java.util.Optional; -import org.apache.commons.lang3.RandomUtils; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class AWSSecretManagerPersistenceIntegrationTest { - - public String coordinate_base; - private AWSSecretManagerPersistence persistence; - private final Configs configs = new EnvConfigs(); - - @BeforeEach - void setup() { - persistence = new AWSSecretManagerPersistence(configs.getAwsAccessKey(), configs.getAwsSecretAccessKey()); - coordinate_base = "aws/airbyte/secret/integration/" + RandomUtils.nextInt() % 20000; - } - - @Test - void testReadWriteUpdate() throws InterruptedException { - SecretCoordinate secretCoordinate = new SecretCoordinate(coordinate_base, 1); - - // try reading a non-existent secret - Optional firstRead = persistence.read(secretCoordinate); - assertTrue(firstRead.isEmpty()); - - // write it - String payload = "foo-secret"; - persistence.write(secretCoordinate, payload); - persistence.cache.refreshNow(secretCoordinate.getCoordinateBase()); - Optional read2 = persistence.read(secretCoordinate); - assertTrue(read2.isPresent()); - assertEquals(payload, read2.get()); - - // update it - final var secondPayload = "bar-secret"; - final var coordinate2 = new SecretCoordinate(coordinate_base, 2); - persistence.write(coordinate2, secondPayload); - persistence.cache.refreshNow(secretCoordinate.getCoordinateBase()); - final var thirdRead = persistence.read(coordinate2); - assertTrue(thirdRead.isPresent()); - assertEquals(secondPayload, thirdRead.get()); - } - - @AfterEach - void tearDown() { - persistence.deleteSecret(new SecretCoordinate(coordinate_base, 1)); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionMigratorTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionMigratorTest.java deleted file mode 100644 index 3ca455ea40a11..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionMigratorTest.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence; - -import static io.airbyte.config.ConfigSchema.STANDARD_DESTINATION_DEFINITION; -import static io.airbyte.config.ConfigSchema.STANDARD_SOURCE_DEFINITION; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSourceDefinition.ReleaseStage; -import io.airbyte.config.StandardSourceDefinition.SourceType; -import io.airbyte.config.persistence.ActorDefinitionMigrator.ConnectorInfo; -import io.airbyte.db.ExceptionWrappingDatabase; -import java.sql.SQLException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class ActorDefinitionMigratorTest extends BaseConfigDatabaseTest { - - public static final String DEFAULT_PROTOCOL_VERSION = "0.2.0"; - protected static final StandardSourceDefinition SOURCE_POSTGRES = new StandardSourceDefinition() - .withName("Postgres") - .withSourceDefinitionId(UUID.fromString("decd338e-5647-4c0b-adf4-da0e75f5a750")) - .withDockerRepository("airbyte/source-postgres") - .withDockerImageTag("0.3.11") - .withDocumentationUrl("https://docs.airbyte.io/integrations/sources/postgres") - .withIcon("postgresql.svg") - .withSourceType(SourceType.DATABASE) - .withTombstone(false); - protected static final StandardSourceDefinition SOURCE_CUSTOM = new StandardSourceDefinition() - .withName("Custom") - .withSourceDefinitionId(UUID.fromString("baba338e-5647-4c0b-adf4-da0e75f5a750")) - .withDockerRepository("airbyte/custom") - .withDockerImageTag("0.3.11") - .withDocumentationUrl("https://docs.airbyte.io/integrations/sources/postgres") - .withIcon("postgresql.svg") - .withSourceType(SourceType.DATABASE) - .withCustom(true) - .withReleaseStage(ReleaseStage.CUSTOM) - .withTombstone(false); - protected static final StandardDestinationDefinition DESTINATION_S3 = new StandardDestinationDefinition() - .withName("S3") - .withDestinationDefinitionId(UUID.fromString("4816b78f-1489-44c1-9060-4b19d5fa9362")) - .withDockerRepository("airbyte/destination-s3") - .withDockerImageTag("0.1.12") - .withDocumentationUrl("https://docs.airbyte.io/integrations/destinations/s3") - .withProtocolVersion(DEFAULT_PROTOCOL_VERSION) - .withTombstone(false); - protected static final StandardDestinationDefinition DESTINATION_CUSTOM = new StandardDestinationDefinition() - .withName("Custom") - .withDestinationDefinitionId(UUID.fromString("baba338e-5647-4c0b-adf4-da0e75f5a750")) - .withDockerRepository("airbyte/custom") - .withDockerImageTag("0.3.11") - .withDocumentationUrl("https://docs.airbyte.io/integrations/sources/postgres") - .withIcon("postgresql.svg") - .withCustom(true) - .withReleaseStage(StandardDestinationDefinition.ReleaseStage.CUSTOM) - .withTombstone(false); - - private ActorDefinitionMigrator migrator; - private ConfigRepository configRepository; - - @BeforeEach - void setup() throws SQLException { - truncateAllTables(); - - migrator = new ActorDefinitionMigrator(new ExceptionWrappingDatabase(database)); - configRepository = new ConfigRepository(database, migrator, null); - } - - 
private void writeSource(final StandardSourceDefinition source) throws Exception { - configRepository.writeStandardSourceDefinition(source); - } - - @Test - void testGetConnectorRepositoryToInfoMap() throws Exception { - final String connectorRepository = "airbyte/duplicated-connector"; - final String oldVersion = "0.1.10"; - final String newVersion = DEFAULT_PROTOCOL_VERSION; - final StandardSourceDefinition source1 = new StandardSourceDefinition() - .withSourceDefinitionId(UUID.randomUUID()) - .withName("source-1") - .withDockerRepository(connectorRepository) - .withDockerImageTag(oldVersion); - final StandardSourceDefinition source2 = new StandardSourceDefinition() - .withSourceDefinitionId(UUID.randomUUID()) - .withName("source-2") - .withDockerRepository(connectorRepository) - .withDockerImageTag(newVersion); - writeSource(source1); - writeSource(source2); - final Map result = database.query(ctx -> migrator.getConnectorRepositoryToInfoMap(ctx)); - // when there are duplicated connector definitions, the one with the latest version should be - // retrieved - assertEquals(newVersion, result.get(connectorRepository).dockerImageTag); - } - - @Test - void testHasNewVersion() { - assertTrue(ActorDefinitionMigrator.hasNewVersion("0.1.99", DEFAULT_PROTOCOL_VERSION)); - assertFalse(ActorDefinitionMigrator.hasNewVersion("invalid_version", "0.1.2")); - } - - @Test - void testHasNewPatchVersion() { - assertFalse(ActorDefinitionMigrator.hasNewPatchVersion("0.1.99", DEFAULT_PROTOCOL_VERSION)); - assertFalse(ActorDefinitionMigrator.hasNewPatchVersion("invalid_version", "0.3.1")); - assertTrue(ActorDefinitionMigrator.hasNewPatchVersion("0.1.0", "0.1.3")); - } - - @Test - void testGetNewFields() { - final JsonNode o1 = Jsons.deserialize("{ \"field1\": 1, \"field2\": 2 }"); - final JsonNode o2 = Jsons.deserialize("{ \"field1\": 1, \"field3\": 3 }"); - assertEquals(Collections.emptySet(), ActorDefinitionMigrator.getNewFields(o1, o1)); - assertEquals(Collections.singleton("field3"), ActorDefinitionMigrator.getNewFields(o1, o2)); - assertEquals(Collections.singleton("field2"), ActorDefinitionMigrator.getNewFields(o2, o1)); - } - - @Test - void testGetDefinitionWithNewFields() { - final JsonNode current = Jsons.deserialize("{ \"field1\": 1, \"field2\": 2 }"); - final JsonNode latest = Jsons.deserialize("{ \"field1\": 1, \"field3\": 3, \"field4\": 4 }"); - final Set newFields = Set.of("field3"); - - assertEquals(current, ActorDefinitionMigrator.getDefinitionWithNewFields(current, latest, Collections.emptySet())); - - final JsonNode currentWithNewFields = Jsons.deserialize("{ \"field1\": 1, \"field2\": 2, \"field3\": 3 }"); - assertEquals(currentWithNewFields, ActorDefinitionMigrator.getDefinitionWithNewFields(current, latest, newFields)); - } - - @Test - void testActorDefinitionReleaseDate() throws Exception { - final UUID definitionId = UUID.randomUUID(); - final String connectorRepository = "airbyte/test-connector"; - - // when the record does not exist, it is inserted - final StandardSourceDefinition sourceDef = new StandardSourceDefinition() - .withSourceDefinitionId(definitionId) - .withDockerRepository(connectorRepository) - .withDockerImageTag("0.1.2") - .withName("random-name") - .withProtocolVersion(DEFAULT_PROTOCOL_VERSION) - .withTombstone(false); - writeSource(sourceDef); - assertEquals(sourceDef, configRepository.getStandardSourceDefinition(sourceDef.getSourceDefinitionId())); - } - - @Test - void filterCustomSource() { - final Map sourceMap = new HashMap<>(); - final String nonCustomKey = 
"non-custom"; - final String customKey = "custom"; - sourceMap.put(nonCustomKey, new ConnectorInfo("id", Jsons.jsonNode(SOURCE_POSTGRES))); - sourceMap.put(customKey, new ConnectorInfo("id", Jsons.jsonNode(SOURCE_CUSTOM))); - - final Map filteredSrcMap = migrator.filterCustomConnector(sourceMap, STANDARD_SOURCE_DEFINITION); - - assertThat(filteredSrcMap).containsOnlyKeys(nonCustomKey); - } - - @Test - void filterCustomDestination() { - final Map sourceMap = new HashMap<>(); - final String nonCustomKey = "non-custom"; - final String customKey = "custom"; - sourceMap.put(nonCustomKey, new ConnectorInfo("id", Jsons.jsonNode(DESTINATION_S3))); - sourceMap.put(customKey, new ConnectorInfo("id", Jsons.jsonNode(DESTINATION_CUSTOM))); - - final Map filteredDestMap = migrator.filterCustomConnector(sourceMap, STANDARD_DESTINATION_DEFINITION); - - assertThat(filteredDestMap).containsOnlyKeys(nonCustomKey); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java deleted file mode 100644 index 2392cbf549347..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java +++ /dev/null @@ -1,300 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; - -import io.airbyte.commons.version.AirbyteProtocolVersion; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.Geography; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.db.ExceptionWrappingDatabase; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; - -class ActorDefinitionPersistenceTest extends BaseConfigDatabaseTest { - - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - - private ConfigRepository configRepository; - - @BeforeEach - void setup() throws SQLException { - truncateAllTables(); - - configRepository = spy(new ConfigRepository( - database, - new ActorDefinitionMigrator(new ExceptionWrappingDatabase(database)), - mock(StandardSyncPersistence.class))); - } - - @Test - void testSourceDefinitionWithNullTombstone() throws JsonValidationException, ConfigNotFoundException, IOException { - assertReturnsSrcDef(createBaseSourceDef()); - } - - @Test - void testSourceDefinitionWithTrueTombstone() throws JsonValidationException, ConfigNotFoundException, IOException { - assertReturnsSrcDef(createBaseSourceDef().withTombstone(true)); - } - - @Test - void testSourceDefinitionWithFalseTombstone() throws JsonValidationException, ConfigNotFoundException, IOException { - 
assertReturnsSrcDef(createBaseSourceDef().withTombstone(false)); - } - - private void assertReturnsSrcDef(final StandardSourceDefinition srcDef) throws ConfigNotFoundException, IOException, JsonValidationException { - configRepository.writeStandardSourceDefinition(srcDef); - assertEquals(srcDef, configRepository.getStandardSourceDefinition(srcDef.getSourceDefinitionId())); - } - - @Test - void testSourceDefinitionFromSource() throws JsonValidationException, IOException { - final StandardWorkspace workspace = createBaseStandardWorkspace(); - final StandardSourceDefinition srcDef = createBaseSourceDef().withTombstone(false); - final SourceConnection source = createSource(srcDef.getSourceDefinitionId(), workspace.getWorkspaceId()); - configRepository.writeStandardWorkspaceNoSecrets(workspace); - configRepository.writeStandardSourceDefinition(srcDef); - configRepository.writeSourceConnectionNoSecrets(source); - - assertEquals(srcDef, configRepository.getSourceDefinitionFromSource(source.getSourceId())); - } - - @Test - void testSourceDefinitionsFromConnection() throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardWorkspace workspace = createBaseStandardWorkspace(); - final StandardSourceDefinition srcDef = createBaseSourceDef().withTombstone(false); - final SourceConnection source = createSource(srcDef.getSourceDefinitionId(), workspace.getWorkspaceId()); - configRepository.writeStandardWorkspaceNoSecrets(workspace); - configRepository.writeStandardSourceDefinition(srcDef); - configRepository.writeSourceConnectionNoSecrets(source); - - final UUID connectionId = UUID.randomUUID(); - final StandardSync connection = new StandardSync() - .withSourceId(source.getSourceId()) - .withConnectionId(connectionId); - - // todo (cgardens) - remove this mock and replace with record in db - doReturn(connection) - .when(configRepository) - .getStandardSync(connectionId); - - assertEquals(srcDef, configRepository.getSourceDefinitionFromConnection(connectionId)); - } - - @ParameterizedTest - @ValueSource(ints = {0, 1, 2, 10}) - void testListStandardSourceDefsHandlesTombstoneSourceDefs(final int numSrcDefs) throws JsonValidationException, IOException { - final List allSourceDefinitions = new ArrayList<>(); - final List notTombstoneSourceDefinitions = new ArrayList<>(); - for (int i = 0; i < numSrcDefs; i++) { - final boolean isTombstone = i % 2 == 0; // every other is tombstone - final StandardSourceDefinition sourceDefinition = createBaseSourceDef().withTombstone(isTombstone); - allSourceDefinitions.add(sourceDefinition); - if (!isTombstone) { - notTombstoneSourceDefinitions.add(sourceDefinition); - } - configRepository.writeStandardSourceDefinition(sourceDefinition); - } - - final List returnedSrcDefsWithoutTombstone = configRepository.listStandardSourceDefinitions(false); - assertEquals(notTombstoneSourceDefinitions, returnedSrcDefsWithoutTombstone); - - final List returnedSrcDefsWithTombstone = configRepository.listStandardSourceDefinitions(true); - assertEquals(allSourceDefinitions, returnedSrcDefsWithTombstone); - } - - // todo add test for protocol version behavior - @Test - void testListDestinationDefinitionsWithVersion() throws JsonValidationException, IOException { - final List allDestDefs = List.of( - createBaseDestDef().withProtocolVersion(null), - createBaseDestDef().withProtocolVersion(null).withSpec(new ConnectorSpecification().withProtocolVersion("0.3.1")), - createBaseDestDef().withProtocolVersion("0.4.0").withSpec(new 
ConnectorSpecification().withProtocolVersion("0.4.1")), - createBaseDestDef().withProtocolVersion("0.5.0").withSpec(new ConnectorSpecification())); - - for (final StandardDestinationDefinition destDef : allDestDefs) { - configRepository.writeStandardDestinationDefinition(destDef); - } - - final List destinationDefinitions = configRepository.listStandardDestinationDefinitions(false); - final List protocolVersions = destinationDefinitions.stream().map(StandardDestinationDefinition::getProtocolVersion).toList(); - assertEquals( - List.of( - AirbyteProtocolVersion.DEFAULT_AIRBYTE_PROTOCOL_VERSION.serialize(), - AirbyteProtocolVersion.DEFAULT_AIRBYTE_PROTOCOL_VERSION.serialize(), - "0.4.0", - "0.5.0"), - protocolVersions); - } - - @Test - void testListSourceDefinitionsWithVersion() throws JsonValidationException, IOException { - final List allSrcDefs = List.of( - createBaseSourceDef().withProtocolVersion(null), - createBaseSourceDef().withProtocolVersion(null).withSpec(new ConnectorSpecification().withProtocolVersion("0.6.0")), - createBaseSourceDef().withProtocolVersion("0.7.0").withSpec(new ConnectorSpecification().withProtocolVersion("0.7.1")), - createBaseSourceDef().withProtocolVersion("0.8.0").withSpec(new ConnectorSpecification())); - - for (final StandardSourceDefinition srcDef : allSrcDefs) { - configRepository.writeStandardSourceDefinition(srcDef); - } - - final List sourceDefinitions = configRepository.listStandardSourceDefinitions(false); - final List protocolVersions = sourceDefinitions.stream().map(StandardSourceDefinition::getProtocolVersion).toList(); - assertEquals( - List.of( - AirbyteProtocolVersion.DEFAULT_AIRBYTE_PROTOCOL_VERSION.serialize(), - AirbyteProtocolVersion.DEFAULT_AIRBYTE_PROTOCOL_VERSION.serialize(), - "0.7.0", - "0.8.0"), - protocolVersions); - } - - @Test - void testDestinationDefinitionWithNullTombstone() throws JsonValidationException, ConfigNotFoundException, IOException { - assertReturnsDestDef(createBaseDestDef()); - } - - @Test - void testDestinationDefinitionWithTrueTombstone() throws JsonValidationException, ConfigNotFoundException, IOException { - assertReturnsDestDef(createBaseDestDef().withTombstone(true)); - } - - @Test - void testDestinationDefinitionWithFalseTombstone() throws JsonValidationException, ConfigNotFoundException, IOException { - assertReturnsDestDef(createBaseDestDef().withTombstone(false)); - } - - void assertReturnsDestDef(final StandardDestinationDefinition destDef) throws ConfigNotFoundException, IOException, JsonValidationException { - configRepository.writeStandardDestinationDefinition(destDef); - assertEquals(destDef, configRepository.getStandardDestinationDefinition(destDef.getDestinationDefinitionId())); - } - - @Test - void testDestinationDefinitionFromDestination() throws JsonValidationException, IOException { - final StandardWorkspace workspace = createBaseStandardWorkspace(); - final StandardDestinationDefinition destDef = createBaseDestDef().withTombstone(false); - final DestinationConnection dest = createDest(destDef.getDestinationDefinitionId(), workspace.getWorkspaceId()); - configRepository.writeStandardWorkspaceNoSecrets(workspace); - configRepository.writeStandardDestinationDefinition(destDef); - configRepository.writeDestinationConnectionNoSecrets(dest); - - assertEquals(destDef, configRepository.getDestinationDefinitionFromDestination(dest.getDestinationId())); - } - - @Test - void testDestinationDefinitionsFromConnection() throws JsonValidationException, ConfigNotFoundException, IOException { - final 
StandardWorkspace workspace = createBaseStandardWorkspace(); - final StandardDestinationDefinition destDef = createBaseDestDef().withTombstone(false); - final DestinationConnection dest = createDest(destDef.getDestinationDefinitionId(), workspace.getWorkspaceId()); - configRepository.writeStandardWorkspaceNoSecrets(workspace); - configRepository.writeStandardDestinationDefinition(destDef); - configRepository.writeDestinationConnectionNoSecrets(dest); - - final UUID connectionId = UUID.randomUUID(); - final StandardSync connection = new StandardSync() - .withDestinationId(dest.getDestinationId()) - .withConnectionId(connectionId); - - // todo (cgardens) - remove this mock and replace with record in db - doReturn(connection) - .when(configRepository) - .getStandardSync(connectionId); - - assertEquals(destDef, configRepository.getDestinationDefinitionFromConnection(connectionId)); - } - - @ParameterizedTest - @ValueSource(ints = {0, 1, 2, 10}) - void testListStandardDestDefsHandlesTombstoneDestDefs(final int numDestinationDefinitions) throws JsonValidationException, IOException { - final List allDestinationDefinitions = new ArrayList<>(); - final List notTombstoneDestinationDefinitions = new ArrayList<>(); - for (int i = 0; i < numDestinationDefinitions; i++) { - final boolean isTombstone = i % 2 == 0; // every other is tombstone - final StandardDestinationDefinition destinationDefinition = createBaseDestDef().withTombstone(isTombstone); - allDestinationDefinitions.add(destinationDefinition); - if (!isTombstone) { - notTombstoneDestinationDefinitions.add(destinationDefinition); - } - configRepository.writeStandardDestinationDefinition(destinationDefinition); - } - - final List returnedDestDefsWithoutTombstone = configRepository.listStandardDestinationDefinitions(false); - assertEquals(notTombstoneDestinationDefinitions, returnedDestDefsWithoutTombstone); - - final List returnedDestDefsWithTombstone = configRepository.listStandardDestinationDefinitions(true); - assertEquals(allDestinationDefinitions, returnedDestDefsWithTombstone); - } - - @SuppressWarnings("SameParameterValue") - private static SourceConnection createSource(final UUID sourceDefId, final UUID workspaceId) { - return new SourceConnection() - .withSourceId(UUID.randomUUID()) - .withSourceDefinitionId(sourceDefId) - .withWorkspaceId(workspaceId) - .withName("source"); - } - - @SuppressWarnings("SameParameterValue") - private static DestinationConnection createDest(final UUID destDefId, final UUID workspaceId) { - return new DestinationConnection() - .withDestinationId(UUID.randomUUID()) - .withDestinationDefinitionId(destDefId) - .withWorkspaceId(workspaceId) - .withName("dest"); - } - - private static StandardSourceDefinition createBaseSourceDef() { - final UUID id = UUID.randomUUID(); - - return new StandardSourceDefinition() - .withName("source-def-" + id) - .withDockerRepository("source-image-" + id) - .withDockerImageTag("0.0.1") - .withSourceDefinitionId(id) - .withProtocolVersion("0.2.0") - .withTombstone(false); - } - - private static StandardDestinationDefinition createBaseDestDef() { - final UUID id = UUID.randomUUID(); - - return new StandardDestinationDefinition() - .withName("source-def-" + id) - .withDockerRepository("source-image-" + id) - .withDockerImageTag("0.0.1") - .withDestinationDefinitionId(id) - .withProtocolVersion("0.2.0") - .withTombstone(false); - } - - private static StandardWorkspace createBaseStandardWorkspace() { - return new StandardWorkspace() - .withWorkspaceId(WORKSPACE_ID) - 
.withName("workspace-a") - .withSlug("workspace-a-slug") - .withInitialSetupComplete(false) - .withTombstone(false) - .withDefaultGeography(Geography.AUTO); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/BaseConfigDatabaseTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/BaseConfigDatabaseTest.java deleted file mode 100644 index 09dc11bba8a65..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/BaseConfigDatabaseTest.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import io.airbyte.db.Database; -import io.airbyte.db.factory.DSLContextFactory; -import io.airbyte.db.factory.DataSourceFactory; -import io.airbyte.db.factory.FlywayFactory; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; -import io.airbyte.db.instance.configs.ConfigsDatabaseTestProvider; -import io.airbyte.db.instance.test.TestDatabaseProviders; -import io.airbyte.test.utils.DatabaseConnectionHelper; -import java.io.IOException; -import java.sql.SQLException; -import javax.sql.DataSource; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.testcontainers.containers.PostgreSQLContainer; - -/** - * This class exists to abstract away the lifecycle of the test container database and the config - * database schema. This is ALL it intends to do. Any additional functionality belongs somewhere - * else. It is useful for test suites that need to interact directly with the database. - * - * This class sets up a test container database and runs the config database migrations against it - * to provide the most up-to-date schema. - * - * What this class is NOT designed to do: - *
- * <ul>
- * <li>test migration behavior; it should only be used to test query behavior against the current
- * schema.</li>
- * <li>expose database details -- if you are attempting to expose container, dataSource, dslContext,
- * something is wrong.</li>
- * <li>add test fixtures or helpers -- do NOT put "generic" resource helper methods (e.g.
- * createTestSource()) here.</li>
- * </ul>
- *
- * This comment is emphatically worded, because it is tempting to add things to this class. It has
- * already happened in 3 previous iterations, and each time it takes multiple engineering days to
- * fix it.
- *
- * Usage:
- * <ul>
- * <li>Extend: Extend this class. By doing so, it will automatically create the test container db
- * and run migrations against it at the start of the test suite (@BeforeAll).</li>
- * <li>Use database: As part of the @BeforeAll, the database field is set. This is the only field
- * that the extending class can access. Its lifecycle is fully managed by this class.</li>
- * <li>Reset schema: To reset the database in between tests, call truncateAllTables() as part
- * of @BeforeEach. This is the only method that this class exposes externally. It is exposed this
- * way because most test suites need to declare their own @BeforeEach, so it is easier for them
- * to simply call this method there than to apply a more complex inheritance scheme.</li>
- * </ul>
    - * - * Note: truncateAllTables() works by truncating each table in the db, if you add a new table, you - * will need to add it to that method for it work as expected. - */ -@SuppressWarnings({"PMD.MutableStaticState", "PMD.SignatureDeclareThrowsException"}) -class BaseConfigDatabaseTest { - - static Database database; - - // keep these private, do not expose outside this class! - private static PostgreSQLContainer container; - private static DataSource dataSource; - private static DSLContext dslContext; - - /** - * Create db test container, sets up java database resources, and runs migrations. Should not be - * called externally. It is not private because junit cannot access private methods. - * - * @throws DatabaseInitializationException - db fails to initialize - * @throws IOException - failure when interacting with db. - */ - @BeforeAll - static void dbSetup() throws DatabaseInitializationException, IOException { - createDbContainer(); - setDb(); - migrateDb(); - } - - /** - * Close all resources (container, data source, dsl context, database). Should not be called - * externally. It is not private because junit cannot access private methods. - * - * @throws Exception - exception while closing resources - */ - @AfterAll - static void dbDown() throws Exception { - dslContext.close(); - DataSourceFactory.close(dataSource); - container.close(); - } - - /** - * Truncates tables to reset them. Designed to be used in between tests. - * - * Note: NEW TABLES -- When a new table is added to the db, it will need to be added here. - * - * @throws SQLException - failure in truncate query. - */ - static void truncateAllTables() throws SQLException { - database.query(ctx -> ctx - .execute( - """ - TRUNCATE TABLE - actor, - actor_catalog, - actor_catalog_fetch_event, - actor_definition, - actor_definition_workspace_grant, - actor_oauth_parameter, - connection, - connection_operation, - operation, - state, - stream_reset, - workspace, - workspace_service_account - """)); - } - - private static void createDbContainer() { - container = new PostgreSQLContainer<>("postgres:13-alpine") - .withDatabaseName("airbyte") - .withUsername("docker") - .withPassword("docker"); - container.start(); - } - - private static void setDb() throws DatabaseInitializationException, IOException { - dataSource = DatabaseConnectionHelper.createDataSource(container); - dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); - final TestDatabaseProviders databaseProviders = new TestDatabaseProviders(dataSource, dslContext); - database = databaseProviders.createNewConfigsDatabase(); - databaseProviders.createNewJobsDatabase(); - } - - private static void migrateDb() throws IOException, DatabaseInitializationException { - final Flyway flyway = FlywayFactory.create( - dataSource, - StreamResetPersistenceTest.class.getName(), - ConfigsDatabaseMigrator.DB_IDENTIFIER, - ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); - new ConfigsDatabaseTestProvider(dslContext, flyway).create(true); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java deleted file mode 100644 index d488c2b90a7b8..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java +++ /dev/null @@ -1,741 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
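Tying the BaseConfigDatabaseTest usage notes together, a typical extending suite looks roughly like this (a hypothetical test; the "workspace" table name is taken from the truncate list above):

```java
import static org.junit.jupiter.api.Assertions.assertEquals;

import java.sql.SQLException;
import org.jooq.impl.DSL;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

// Hypothetical suite following the usage notes: extend the base class, reset
// tables in @BeforeEach, and touch only the inherited `database` field.
class WorkspacePersistenceExampleTest extends BaseConfigDatabaseTest {

  @BeforeEach
  void setup() throws SQLException {
    truncateAllTables();
  }

  @Test
  void queriesRunAgainstTheMigratedSchema() throws Exception {
    // "workspace" is one of the tables truncated between tests.
    final int count = database.query(ctx -> ctx.fetchCount(DSL.table("workspace")));
    assertEquals(0, count);
  }

}
```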
- */ - -package io.airbyte.config.persistence; - -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_CATALOG; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_CATALOG_FETCH_EVENT; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION_WORKSPACE_GRANT; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION_OPERATION; -import static org.assertj.core.api.Assertions.assertThat; -import static org.jooq.impl.DSL.select; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.spy; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.ActorCatalog; -import io.airbyte.config.ActorCatalogFetchEvent; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.Geography; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSourceDefinition.SourceType; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ConfigRepository.DestinationAndDefinition; -import io.airbyte.config.persistence.ConfigRepository.SourceAndDefinition; -import io.airbyte.config.persistence.ConfigRepository.StandardSyncQuery; -import io.airbyte.db.Database; -import io.airbyte.db.ExceptionWrappingDatabase; -import io.airbyte.protocol.models.AirbyteCatalog; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.sql.SQLException; -import java.time.OffsetDateTime; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -/** - * The tests in this class should be moved into separate test suites grouped by resource. Do NOT add - * new tests here. Add them to resource based test suites (e.g. WorkspacePersistenceTest). If one - * does not exist yet for that resource yet, create one and follow the pattern. 
- */ -@Deprecated -@SuppressWarnings({"PMD.CyclomaticComplexity", "PMD.NPathComplexity"}) -class ConfigRepositoryE2EReadWriteTest extends BaseConfigDatabaseTest { - - private final static String DOCKER_IMAGE_TAG = "1.2.0"; - private final static String CONFIG_HASH = "ConfigHash"; - - private ConfigRepository configRepository; - - @BeforeEach - void setup() throws IOException, JsonValidationException, SQLException { - configRepository = spy(new ConfigRepository( - database, - new ActorDefinitionMigrator(new ExceptionWrappingDatabase(database)), - new StandardSyncPersistence(database))); - for (final StandardWorkspace workspace : MockData.standardWorkspaces()) { - configRepository.writeStandardWorkspaceNoSecrets(workspace); - } - for (final StandardSourceDefinition sourceDefinition : MockData.standardSourceDefinitions()) { - configRepository.writeStandardSourceDefinition(sourceDefinition); - } - for (final StandardDestinationDefinition destinationDefinition : MockData.standardDestinationDefinitions()) { - configRepository.writeStandardDestinationDefinition(destinationDefinition); - } - for (final SourceConnection source : MockData.sourceConnections()) { - configRepository.writeSourceConnectionNoSecrets(source); - } - for (final DestinationConnection destination : MockData.destinationConnections()) { - configRepository.writeDestinationConnectionNoSecrets(destination); - } - for (final StandardSyncOperation operation : MockData.standardSyncOperations()) { - configRepository.writeStandardSyncOperation(operation); - } - for (final StandardSync sync : MockData.standardSyncs()) { - configRepository.writeStandardSync(sync); - } - - for (final SourceOAuthParameter oAuthParameter : MockData.sourceOauthParameters()) { - configRepository.writeSourceOAuthParam(oAuthParameter); - } - for (final DestinationOAuthParameter oAuthParameter : MockData.destinationOauthParameters()) { - configRepository.writeDestinationOAuthParam(oAuthParameter); - } - - database.transaction(ctx -> ctx.truncate(ACTOR_DEFINITION_WORKSPACE_GRANT).execute()); - } - - @Test - void testWorkspaceCountConnections() throws IOException { - final UUID workspaceId = MockData.standardWorkspaces().get(0).getWorkspaceId(); - assertEquals(3, configRepository.countConnectionsForWorkspace(workspaceId)); - assertEquals(2, configRepository.countDestinationsForWorkspace(workspaceId)); - assertEquals(2, configRepository.countSourcesForWorkspace(workspaceId)); - } - - @Test - void testWorkspaceCountConnectionsDeprecated() throws IOException { - final UUID workspaceId = MockData.standardWorkspaces().get(1).getWorkspaceId(); - assertEquals(1, configRepository.countConnectionsForWorkspace(workspaceId)); - } - - @Test - void testFetchActorsUsingDefinition() throws IOException { - final UUID destinationDefinitionId = MockData.publicDestinationDefinition().getDestinationDefinitionId(); - final UUID sourceDefinitionId = MockData.publicSourceDefinition().getSourceDefinitionId(); - final List destinationConnections = configRepository.listDestinationsForDefinition( - destinationDefinitionId); - final List sourceConnections = configRepository.listSourcesForDefinition( - sourceDefinitionId); - - assertThat(destinationConnections) - .containsExactlyElementsOf(MockData.destinationConnections().stream().filter(d -> d.getDestinationDefinitionId().equals( - destinationDefinitionId) && !d.getTombstone()).collect(Collectors.toList())); - assertThat(sourceConnections).containsExactlyElementsOf(MockData.sourceConnections().stream().filter(d -> 
d.getSourceDefinitionId().equals( - sourceDefinitionId) && !d.getTombstone()).collect(Collectors.toList())); - } - - @Test - void testSimpleInsertActorCatalog() throws IOException, JsonValidationException, SQLException { - final String otherConfigHash = "OtherConfigHash"; - final StandardWorkspace workspace = MockData.standardWorkspaces().get(0); - - final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() - .withSourceDefinitionId(UUID.randomUUID()) - .withSourceType(SourceType.DATABASE) - .withDockerRepository("docker-repo") - .withDockerImageTag(DOCKER_IMAGE_TAG) - .withName("sourceDefinition"); - configRepository.writeStandardSourceDefinition(sourceDefinition); - - final SourceConnection source = new SourceConnection() - .withSourceDefinitionId(sourceDefinition.getSourceDefinitionId()) - .withSourceId(UUID.randomUUID()) - .withName("SomeConnector") - .withWorkspaceId(workspace.getWorkspaceId()) - .withConfiguration(Jsons.deserialize("{}")); - configRepository.writeSourceConnectionNoSecrets(source); - - final AirbyteCatalog actorCatalog = CatalogHelpers.createAirbyteCatalog("clothes", Field.of("name", JsonSchemaType.STRING)); - final AirbyteCatalog expectedActorCatalog = CatalogHelpers.createAirbyteCatalog("clothes", Field.of("name", JsonSchemaType.STRING)); - configRepository.writeActorCatalogFetchEvent( - actorCatalog, source.getSourceId(), DOCKER_IMAGE_TAG, CONFIG_HASH); - - final Optional catalog = - configRepository.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, CONFIG_HASH); - assertTrue(catalog.isPresent()); - assertEquals(expectedActorCatalog, Jsons.object(catalog.get().getCatalog(), AirbyteCatalog.class)); - assertFalse(configRepository.getActorCatalog(source.getSourceId(), "1.3.0", CONFIG_HASH).isPresent()); - assertFalse(configRepository.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, otherConfigHash).isPresent()); - - configRepository.writeActorCatalogFetchEvent(actorCatalog, source.getSourceId(), "1.3.0", CONFIG_HASH); - final Optional catalogNewConnectorVersion = - configRepository.getActorCatalog(source.getSourceId(), "1.3.0", CONFIG_HASH); - assertTrue(catalogNewConnectorVersion.isPresent()); - assertEquals(expectedActorCatalog, Jsons.object(catalogNewConnectorVersion.get().getCatalog(), AirbyteCatalog.class)); - - configRepository.writeActorCatalogFetchEvent(actorCatalog, source.getSourceId(), "1.2.0", otherConfigHash); - final Optional catalogNewConfig = - configRepository.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, otherConfigHash); - assertTrue(catalogNewConfig.isPresent()); - assertEquals(expectedActorCatalog, Jsons.object(catalogNewConfig.get().getCatalog(), AirbyteCatalog.class)); - - final int catalogDbEntry = database.query(ctx -> ctx.selectCount().from(ACTOR_CATALOG)).fetchOne().into(int.class); - assertEquals(1, catalogDbEntry); - - // Writing the previous catalog with v1 data types - configRepository.writeActorCatalogFetchEvent(expectedActorCatalog, source.getSourceId(), "1.2.0", otherConfigHash); - final Optional catalogV1NewConfig = - configRepository.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, otherConfigHash); - assertTrue(catalogV1NewConfig.isPresent()); - assertEquals(expectedActorCatalog, Jsons.object(catalogV1NewConfig.get().getCatalog(), AirbyteCatalog.class)); - - configRepository.writeActorCatalogFetchEvent(expectedActorCatalog, source.getSourceId(), "1.4.0", otherConfigHash); - final Optional catalogV1again = - configRepository.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG,
otherConfigHash); - assertTrue(catalogV1again.isPresent()); - assertEquals(expectedActorCatalog, Jsons.object(catalogV1again.get().getCatalog(), AirbyteCatalog.class)); - - final int catalogDbEntry2 = database.query(ctx -> ctx.selectCount().from(ACTOR_CATALOG)).fetchOne().into(int.class); - // TODO this should be 2 once we re-enable datatypes v1 - assertEquals(1, catalogDbEntry2); - } - - @Test - void testListWorkspaceStandardSyncAll() throws IOException { - final List expectedSyncs = copyWithV1Types(MockData.standardSyncs().subList(0, 4)); - final List actualSyncs = configRepository.listWorkspaceStandardSyncs( - MockData.standardWorkspaces().get(0).getWorkspaceId(), true); - - assertSyncsMatch(expectedSyncs, actualSyncs); - } - - @Test - void testListWorkspaceStandardSyncWithAllFiltering() throws IOException { - final UUID workspaceId = MockData.standardWorkspaces().get(0).getWorkspaceId(); - final StandardSyncQuery query = new StandardSyncQuery(workspaceId, List.of(MockData.SOURCE_ID_1), List.of(MockData.DESTINATION_ID_1), false); - final List expectedSyncs = copyWithV1Types( - MockData.standardSyncs().subList(0, 3).stream() - .filter(sync -> query.destinationId().contains(sync.getDestinationId())) - .filter(sync -> query.sourceId().contains(sync.getSourceId())) - .toList()); - final List actualSyncs = configRepository.listWorkspaceStandardSyncs(query); - - assertSyncsMatch(expectedSyncs, actualSyncs); - } - - @Test - void testListWorkspaceStandardSyncDestinationFiltering() throws IOException { - final UUID workspaceId = MockData.standardWorkspaces().get(0).getWorkspaceId(); - final StandardSyncQuery query = new StandardSyncQuery(workspaceId, null, List.of(MockData.DESTINATION_ID_1), false); - final List expectedSyncs = copyWithV1Types( - MockData.standardSyncs().subList(0, 3).stream() - .filter(sync -> query.destinationId().contains(sync.getDestinationId())) - .toList()); - final List actualSyncs = configRepository.listWorkspaceStandardSyncs(query); - - assertSyncsMatch(expectedSyncs, actualSyncs); - } - - @Test - void testListWorkspaceStandardSyncSourceFiltering() throws IOException { - final UUID workspaceId = MockData.standardWorkspaces().get(0).getWorkspaceId(); - final StandardSyncQuery query = new StandardSyncQuery(workspaceId, List.of(MockData.SOURCE_ID_2), null, false); - final List expectedSyncs = copyWithV1Types( - MockData.standardSyncs().subList(0, 3).stream() - .filter(sync -> query.sourceId().contains(sync.getSourceId())) - .toList()); - final List actualSyncs = configRepository.listWorkspaceStandardSyncs(query); - - assertSyncsMatch(expectedSyncs, actualSyncs); - } - - @Test - void testListWorkspaceStandardSyncExcludeDeleted() throws IOException { - final List expectedSyncs = copyWithV1Types(MockData.standardSyncs().subList(0, 3)); - final List actualSyncs = configRepository.listWorkspaceStandardSyncs(MockData.standardWorkspaces().get(0).getWorkspaceId(), false); - - assertSyncsMatch(expectedSyncs, actualSyncs); - } - - @Test - void testGetWorkspaceBySlug() throws IOException { - final StandardWorkspace workspace = MockData.standardWorkspaces().get(0); - final StandardWorkspace tombstonedWorkspace = MockData.standardWorkspaces().get(2); - final Optional retrievedWorkspace = configRepository.getWorkspaceBySlugOptional(workspace.getSlug(), false); - final Optional retrievedTombstonedWorkspaceNoTombstone = - configRepository.getWorkspaceBySlugOptional(tombstonedWorkspace.getSlug(), false); - final Optional retrievedTombstonedWorkspace =
configRepository.getWorkspaceBySlugOptional(tombstonedWorkspace.getSlug(), true); - - assertTrue(retrievedWorkspace.isPresent()); - assertEquals(workspace, retrievedWorkspace.get()); - - assertFalse(retrievedTombstonedWorkspaceNoTombstone.isPresent()); - assertTrue(retrievedTombstonedWorkspace.isPresent()); - - assertEquals(tombstonedWorkspace, retrievedTombstonedWorkspace.get()); - } - - @Test - void testUpdateConnectionOperationIds() throws Exception { - final StandardSync sync = MockData.standardSyncs().get(0); - final List existingOperationIds = sync.getOperationIds(); - final UUID connectionId = sync.getConnectionId(); - - // this test only works as intended when there are multiple operationIds - assertTrue(existingOperationIds.size() > 1); - - // first, remove all associated operations - Set expectedOperationIds = Collections.emptySet(); - configRepository.updateConnectionOperationIds(connectionId, expectedOperationIds); - Set actualOperationIds = fetchOperationIdsForConnectionId(connectionId); - assertEquals(expectedOperationIds, actualOperationIds); - - // now, add back one operation - expectedOperationIds = Collections.singleton(existingOperationIds.get(0)); - configRepository.updateConnectionOperationIds(connectionId, expectedOperationIds); - actualOperationIds = fetchOperationIdsForConnectionId(connectionId); - assertEquals(expectedOperationIds, actualOperationIds); - - // finally, remove the first operation while adding back in the rest - expectedOperationIds = existingOperationIds.stream().skip(1).collect(Collectors.toSet()); - configRepository.updateConnectionOperationIds(connectionId, expectedOperationIds); - actualOperationIds = fetchOperationIdsForConnectionId(connectionId); - assertEquals(expectedOperationIds, actualOperationIds); - } - - private Set fetchOperationIdsForConnectionId(final UUID connectionId) throws SQLException { - return database.query(ctx -> ctx - .selectFrom(CONNECTION_OPERATION) - .where(CONNECTION_OPERATION.CONNECTION_ID.eq(connectionId)) - .fetchSet(CONNECTION_OPERATION.OPERATION_ID)); - } - - @Test - void testActorDefinitionWorkspaceGrantExists() throws IOException { - final UUID workspaceId = MockData.standardWorkspaces().get(0).getWorkspaceId(); - final UUID definitionId = MockData.standardSourceDefinitions().get(0).getSourceDefinitionId(); - - assertFalse(configRepository.actorDefinitionWorkspaceGrantExists(definitionId, workspaceId)); - - configRepository.writeActorDefinitionWorkspaceGrant(definitionId, workspaceId); - assertTrue(configRepository.actorDefinitionWorkspaceGrantExists(definitionId, workspaceId)); - - configRepository.deleteActorDefinitionWorkspaceGrant(definitionId, workspaceId); - assertFalse(configRepository.actorDefinitionWorkspaceGrantExists(definitionId, workspaceId)); - } - - @Test - void testListPublicSourceDefinitions() throws IOException { - final List actualDefinitions = configRepository.listPublicSourceDefinitions(false); - assertEquals(List.of(MockData.publicSourceDefinition()), actualDefinitions); - } - - @Test - void testListWorkspaceSources() throws IOException { - final UUID workspaceId = MockData.standardWorkspaces().get(1).getWorkspaceId(); - final List expectedSources = MockData.sourceConnections().stream() - .filter(source -> source.getWorkspaceId().equals(workspaceId)).collect(Collectors.toList()); - final List sources = configRepository.listWorkspaceSourceConnection(workspaceId); - assertThat(sources).hasSameElementsAs(expectedSources); - } - - @Test - void testListWorkspaceDestinations() throws IOException { 
- final UUID workspaceId = MockData.standardWorkspaces().get(0).getWorkspaceId(); - final List expectedDestinations = MockData.destinationConnections().stream() - .filter(destination -> destination.getWorkspaceId().equals(workspaceId)).collect(Collectors.toList()); - final List destinations = configRepository.listWorkspaceDestinationConnection(workspaceId); - assertThat(destinations).hasSameElementsAs(expectedDestinations); - } - - @Test - void testSourceDefinitionGrants() throws IOException { - final UUID workspaceId = MockData.standardWorkspaces().get(0).getWorkspaceId(); - final StandardSourceDefinition grantableDefinition1 = MockData.grantableSourceDefinition1(); - final StandardSourceDefinition grantableDefinition2 = MockData.grantableSourceDefinition2(); - final StandardSourceDefinition customDefinition = MockData.customSourceDefinition(); - - configRepository.writeActorDefinitionWorkspaceGrant(customDefinition.getSourceDefinitionId(), workspaceId); - configRepository.writeActorDefinitionWorkspaceGrant(grantableDefinition1.getSourceDefinitionId(), workspaceId); - final List actualGrantedDefinitions = configRepository - .listGrantedSourceDefinitions(workspaceId, false); - assertThat(actualGrantedDefinitions).hasSameElementsAs(List.of(grantableDefinition1, customDefinition)); - - final List> actualGrantableDefinitions = configRepository - .listGrantableSourceDefinitions(workspaceId, false); - assertThat(actualGrantableDefinitions).hasSameElementsAs(List.of( - Map.entry(grantableDefinition1, true), - Map.entry(grantableDefinition2, false))); - } - - @Test - void testListPublicDestinationDefinitions() throws IOException { - final List actualDefinitions = configRepository.listPublicDestinationDefinitions(false); - assertEquals(List.of(MockData.publicDestinationDefinition()), actualDefinitions); - } - - @Test - void testDestinationDefinitionGrants() throws IOException { - final UUID workspaceId = MockData.standardWorkspaces().get(0).getWorkspaceId(); - final StandardDestinationDefinition grantableDefinition1 = MockData.grantableDestinationDefinition1(); - final StandardDestinationDefinition grantableDefinition2 = MockData.grantableDestinationDefinition2(); - final StandardDestinationDefinition customDefinition = MockData.cusstomDestinationDefinition(); - - configRepository.writeActorDefinitionWorkspaceGrant(customDefinition.getDestinationDefinitionId(), workspaceId); - configRepository.writeActorDefinitionWorkspaceGrant(grantableDefinition1.getDestinationDefinitionId(), workspaceId); - final List actualGrantedDefinitions = configRepository - .listGrantedDestinationDefinitions(workspaceId, false); - assertThat(actualGrantedDefinitions).hasSameElementsAs(List.of(grantableDefinition1, customDefinition)); - - final List> actualGrantableDefinitions = configRepository - .listGrantableDestinationDefinitions(workspaceId, false); - assertThat(actualGrantableDefinitions).hasSameElementsAs(List.of( - Map.entry(grantableDefinition1, true), - Map.entry(grantableDefinition2, false))); - } - - @Test - void testWorkspaceCanUseDefinition() throws IOException { - final UUID workspaceId = MockData.standardWorkspaces().get(0).getWorkspaceId(); - final UUID otherWorkspaceId = MockData.standardWorkspaces().get(1).getWorkspaceId(); - final UUID publicDefinitionId = MockData.publicSourceDefinition().getSourceDefinitionId(); - final UUID grantableDefinition1Id = MockData.grantableSourceDefinition1().getSourceDefinitionId(); - final UUID grantableDefinition2Id = 
MockData.grantableSourceDefinition2().getSourceDefinitionId(); - final UUID customDefinitionId = MockData.customSourceDefinition().getSourceDefinitionId(); - - // Can use public definitions - assertTrue(configRepository.workspaceCanUseDefinition(publicDefinitionId, workspaceId)); - - // Can use granted definitions - configRepository.writeActorDefinitionWorkspaceGrant(grantableDefinition1Id, workspaceId); - assertTrue(configRepository.workspaceCanUseDefinition(grantableDefinition1Id, workspaceId)); - configRepository.writeActorDefinitionWorkspaceGrant(customDefinitionId, workspaceId); - assertTrue(configRepository.workspaceCanUseDefinition(customDefinitionId, workspaceId)); - - // Cannot use private definitions without grant - assertFalse(configRepository.workspaceCanUseDefinition(grantableDefinition2Id, workspaceId)); - - // Cannot use other workspace's grants - configRepository.writeActorDefinitionWorkspaceGrant(grantableDefinition2Id, otherWorkspaceId); - assertFalse(configRepository.workspaceCanUseDefinition(grantableDefinition2Id, workspaceId)); - - // Passing invalid IDs returns false - assertFalse(configRepository.workspaceCanUseDefinition(new UUID(0L, 0L), workspaceId)); - - // workspaceCanUseCustomDefinition can only be true for custom definitions - assertTrue(configRepository.workspaceCanUseCustomDefinition(customDefinitionId, workspaceId)); - assertFalse(configRepository.workspaceCanUseCustomDefinition(grantableDefinition1Id, workspaceId)); - } - - @Test - void testGetDestinationOAuthByDefinitionId() throws IOException { - - final DestinationOAuthParameter destinationOAuthParameter = MockData.destinationOauthParameters().get(0); - final Optional result = configRepository.getDestinationOAuthParamByDefinitionIdOptional( - destinationOAuthParameter.getWorkspaceId(), destinationOAuthParameter.getDestinationDefinitionId()); - assertTrue(result.isPresent()); - assertEquals(destinationOAuthParameter, result.get()); - } - - @Test - void testMissingDestinationOAuthByDefinitionId() throws IOException { - final UUID missingId = UUID.fromString("fc59cfa0-06de-4c8b-850b-46d4cfb65629"); - final DestinationOAuthParameter destinationOAuthParameter = MockData.destinationOauthParameters().get(0); - Optional result = - configRepository.getDestinationOAuthParamByDefinitionIdOptional(destinationOAuthParameter.getWorkspaceId(), missingId); - assertFalse(result.isPresent()); - - result = configRepository.getDestinationOAuthParamByDefinitionIdOptional(missingId, destinationOAuthParameter.getDestinationDefinitionId()); - assertFalse(result.isPresent()); - } - - @Test - void testGetSourceOAuthByDefinitionId() throws IOException { - final SourceOAuthParameter sourceOAuthParameter = MockData.sourceOauthParameters().get(0); - final Optional result = configRepository.getSourceOAuthParamByDefinitionIdOptional(sourceOAuthParameter.getWorkspaceId(), - sourceOAuthParameter.getSourceDefinitionId()); - assertTrue(result.isPresent()); - assertEquals(sourceOAuthParameter, result.get()); - } - - @Test - void testMissingSourceOAuthByDefinitionId() throws IOException { - final UUID missingId = UUID.fromString("fc59cfa0-06de-4c8b-850b-46d4cfb65629"); - final SourceOAuthParameter sourceOAuthParameter = MockData.sourceOauthParameters().get(0); - Optional result = - configRepository.getSourceOAuthParamByDefinitionIdOptional(sourceOAuthParameter.getWorkspaceId(), missingId); - assertFalse(result.isPresent()); - - result = configRepository.getSourceOAuthParamByDefinitionIdOptional(missingId, 
sourceOAuthParameter.getSourceDefinitionId()); - assertFalse(result.isPresent()); - } - - @Test - void testGetStandardSyncUsingOperation() throws IOException { - final UUID operationId = MockData.standardSyncOperations().get(0).getOperationId(); - final List expectedSyncs = copyWithV1Types(MockData.standardSyncs().subList(0, 3)); - final List actualSyncs = configRepository.listStandardSyncsUsingOperation(operationId); - - assertSyncsMatch(expectedSyncs, actualSyncs); - } - - private List copyWithV1Types(final List syncs) { - return syncs; - // TODO adjust with data types feature flag testing - // return syncs.stream() - // .map(standardSync -> { - // final StandardSync copiedStandardSync = Jsons.deserialize(Jsons.serialize(standardSync), - // StandardSync.class); - // copiedStandardSync.setCatalog(MockData.getConfiguredCatalogWithV1DataTypes()); - // return copiedStandardSync; - // }) - // .toList(); - } - - private void assertSyncsMatch(final List expectedSyncs, final List actualSyncs) { - assertEquals(expectedSyncs.size(), actualSyncs.size()); - - for (final StandardSync expected : expectedSyncs) { - - final Optional maybeActual = actualSyncs.stream().filter(s -> s.getConnectionId().equals(expected.getConnectionId())).findFirst(); - if (maybeActual.isEmpty()) { - Assertions.fail(String.format("Expected to find connectionId %s in result, but actual connectionIds are %s", - expected.getConnectionId(), - actualSyncs.stream().map(StandardSync::getConnectionId).collect(Collectors.toList()))); - } - final StandardSync actual = maybeActual.get(); - - // operationIds can be ordered differently in the query result than in the mock data, so they need - // to be verified separately - // from the rest of the sync. - assertThat(actual.getOperationIds()).hasSameElementsAs(expected.getOperationIds()); - - // now, clear operationIds so the rest of the sync can be compared - expected.setOperationIds(null); - actual.setOperationIds(null); - assertEquals(expected, actual); - } - } - - @Test - void testDeleteStandardSyncOperation() - throws IOException, JsonValidationException, ConfigNotFoundException { - final UUID deletedOperationId = MockData.standardSyncOperations().get(0).getOperationId(); - final List syncs = MockData.standardSyncs(); - configRepository.deleteStandardSyncOperation(deletedOperationId); - - for (final StandardSync sync : syncs) { - final StandardSync retrievedSync = configRepository.getStandardSync(sync.getConnectionId()); - for (final UUID operationId : sync.getOperationIds()) { - if (operationId.equals(deletedOperationId)) { - assertThat(retrievedSync.getOperationIds()).doesNotContain(deletedOperationId); - } else { - assertThat(retrievedSync.getOperationIds()).contains(operationId); - } - } - } - } - - @Test - void testGetSourceAndDefinitionsFromSourceIds() throws IOException { - final List sourceIds = MockData.sourceConnections().subList(0, 2).stream().map(SourceConnection::getSourceId).toList(); - - final List expected = List.of( - new SourceAndDefinition(MockData.sourceConnections().get(0), MockData.standardSourceDefinitions().get(0)), - new SourceAndDefinition(MockData.sourceConnections().get(1), MockData.standardSourceDefinitions().get(1))); - - final List actual = configRepository.getSourceAndDefinitionsFromSourceIds(sourceIds); - assertThat(actual).hasSameElementsAs(expected); - } - - @Test - void testGetDestinationAndDefinitionsFromDestinationIds() throws IOException { - final List destinationIds = MockData.destinationConnections().subList(0, 
2).stream().map(DestinationConnection::getDestinationId).toList(); - - final List expected = List.of( - new DestinationAndDefinition(MockData.destinationConnections().get(0), MockData.standardDestinationDefinitions().get(0)), - new DestinationAndDefinition(MockData.destinationConnections().get(1), MockData.standardDestinationDefinitions().get(1))); - - final List actual = configRepository.getDestinationAndDefinitionsFromDestinationIds(destinationIds); - assertThat(actual).hasSameElementsAs(expected); - } - - @Test - void testGetGeographyForConnection() throws IOException { - final StandardSync sync = MockData.standardSyncs().get(0); - final Geography expected = sync.getGeography(); - final Geography actual = configRepository.getGeographyForConnection(sync.getConnectionId()); - - assertEquals(expected, actual); - } - - @Test - void testGetGeographyForWorkspace() throws IOException { - final StandardWorkspace workspace = MockData.standardWorkspaces().get(0); - final Geography expected = workspace.getDefaultGeography(); - final Geography actual = configRepository.getGeographyForWorkspace(workspace.getWorkspaceId()); - - assertEquals(expected, actual); - } - - @SuppressWarnings("OptionalGetWithoutIsPresent") - @Test - void testGetMostRecentActorCatalogFetchEventForSource() throws SQLException, IOException { - for (final ActorCatalog actorCatalog : MockData.actorCatalogs()) { - writeActorCatalog(database, Collections.singletonList(actorCatalog)); - } - - final OffsetDateTime now = OffsetDateTime.now(); - final OffsetDateTime yesterday = now.minusDays(1L); - - final List fetchEvents = MockData.actorCatalogFetchEventsSameSource(); - final ActorCatalogFetchEvent fetchEvent1 = fetchEvents.get(0); - final ActorCatalogFetchEvent fetchEvent2 = fetchEvents.get(1); - - database.transaction(ctx -> { - insertCatalogFetchEvent( - ctx, - fetchEvent1.getActorId(), - fetchEvent1.getActorCatalogId(), - yesterday); - insertCatalogFetchEvent( - ctx, - fetchEvent2.getActorId(), - fetchEvent2.getActorCatalogId(), - now); - // Insert a second identical copy to verify that the query can handle duplicates since the records - // are not guaranteed to be unique. 
- insertCatalogFetchEvent( - ctx, - fetchEvent2.getActorId(), - fetchEvent2.getActorCatalogId(), - now); - - return null; - }); - - final Optional result = - configRepository.getMostRecentActorCatalogFetchEventForSource(fetchEvent1.getActorId()); - - assertEquals(fetchEvent2.getActorCatalogId(), result.get().getActorCatalogId()); - } - - @Test - void testGetMostRecentActorCatalogFetchEventForSources() throws SQLException, IOException { - for (final ActorCatalog actorCatalog : MockData.actorCatalogs()) { - writeActorCatalog(database, Collections.singletonList(actorCatalog)); - } - - database.transaction(ctx -> { - MockData.actorCatalogFetchEventsForAggregationTest().forEach(actorCatalogFetchEvent -> insertCatalogFetchEvent( - ctx, - actorCatalogFetchEvent.getActorCatalogFetchEvent().getActorId(), - actorCatalogFetchEvent.getActorCatalogFetchEvent().getActorCatalogId(), - actorCatalogFetchEvent.getCreatedAt())); - - return null; - }); - - final Map result = - configRepository.getMostRecentActorCatalogFetchEventForSources(List.of(MockData.SOURCE_ID_1, - MockData.SOURCE_ID_2)); - - assertEquals(MockData.ACTOR_CATALOG_ID_1, result.get(MockData.SOURCE_ID_1).getActorCatalogId()); - assertEquals(MockData.ACTOR_CATALOG_ID_3, result.get(MockData.SOURCE_ID_2).getActorCatalogId()); - assertEquals(0, configRepository.getMostRecentActorCatalogFetchEventForSources(Collections.emptyList()).size()); - } - - @Test - void testGetMostRecentActorCatalogFetchEventWithDuplicates() throws SQLException, IOException { - // Tests that we can handle two fetch events in the db with the same actor id, actor catalog id, and - // timestamp e.g., from duplicate discoveries. - for (final ActorCatalog actorCatalog : MockData.actorCatalogs()) { - writeActorCatalog(database, Collections.singletonList(actorCatalog)); - } - - database.transaction(ctx -> { - // Insert the fetch events twice. 
- MockData.actorCatalogFetchEventsForAggregationTest().forEach(actorCatalogFetchEvent -> { - insertCatalogFetchEvent( - ctx, - actorCatalogFetchEvent.getActorCatalogFetchEvent().getActorId(), - actorCatalogFetchEvent.getActorCatalogFetchEvent().getActorCatalogId(), - actorCatalogFetchEvent.getCreatedAt()); - insertCatalogFetchEvent( - ctx, - actorCatalogFetchEvent.getActorCatalogFetchEvent().getActorId(), - actorCatalogFetchEvent.getActorCatalogFetchEvent().getActorCatalogId(), - actorCatalogFetchEvent.getCreatedAt()); - }); - return null; - }); - - final Map result = - configRepository.getMostRecentActorCatalogFetchEventForSources(List.of(MockData.SOURCE_ID_1, - MockData.SOURCE_ID_2)); - - assertEquals(MockData.ACTOR_CATALOG_ID_1, result.get(MockData.SOURCE_ID_1).getActorCatalogId()); - assertEquals(MockData.ACTOR_CATALOG_ID_3, result.get(MockData.SOURCE_ID_2).getActorCatalogId()); - } - - @Test - void testGetActorDefinitionsInUseToProtocolVersion() throws IOException { - final Set actorDefinitionIds = new HashSet<>(); - actorDefinitionIds.addAll(MockData.sourceConnections().stream().map(SourceConnection::getSourceDefinitionId).toList()); - actorDefinitionIds.addAll(MockData.destinationConnections().stream().map(DestinationConnection::getDestinationDefinitionId).toList()); - assertEquals(actorDefinitionIds, configRepository.getActorDefinitionToProtocolVersionMap().keySet()); - } - - private void insertCatalogFetchEvent(final DSLContext ctx, final UUID sourceId, final UUID catalogId, final OffsetDateTime creationDate) { - ctx.insertInto(ACTOR_CATALOG_FETCH_EVENT) - .columns( - ACTOR_CATALOG_FETCH_EVENT.ID, - ACTOR_CATALOG_FETCH_EVENT.ACTOR_ID, - ACTOR_CATALOG_FETCH_EVENT.ACTOR_CATALOG_ID, - ACTOR_CATALOG_FETCH_EVENT.CONFIG_HASH, - ACTOR_CATALOG_FETCH_EVENT.ACTOR_VERSION, - ACTOR_CATALOG_FETCH_EVENT.CREATED_AT, - ACTOR_CATALOG_FETCH_EVENT.MODIFIED_AT) - .values(UUID.randomUUID(), sourceId, catalogId, "", "", creationDate, creationDate) - .execute(); - } - - private static void writeActorCatalog(final Database database, final List configs) throws SQLException { - database.transaction(ctx -> { - writeActorCatalog(configs, ctx); - return null; - }); - } - - private static void writeActorCatalog(final List configs, final DSLContext ctx) { - final OffsetDateTime timestamp = OffsetDateTime.now(); - configs.forEach((actorCatalog) -> { - final boolean isExistingConfig = ctx.fetchExists(select() - .from(ACTOR_CATALOG) - .where(ACTOR_CATALOG.ID.eq(actorCatalog.getId()))); - - if (isExistingConfig) { - ctx.update(ACTOR_CATALOG) - .set(ACTOR_CATALOG.CATALOG, JSONB.valueOf(Jsons.serialize(actorCatalog.getCatalog()))) - .set(ACTOR_CATALOG.CATALOG_HASH, actorCatalog.getCatalogHash()) - .set(ACTOR_CATALOG.MODIFIED_AT, timestamp) - .where(ACTOR_CATALOG.ID.eq(actorCatalog.getId())) - .execute(); - } else { - ctx.insertInto(ACTOR_CATALOG) - .set(ACTOR_CATALOG.ID, actorCatalog.getId()) - .set(ACTOR_CATALOG.CATALOG, JSONB.valueOf(Jsons.serialize(actorCatalog.getCatalog()))) - .set(ACTOR_CATALOG.CATALOG_HASH, actorCatalog.getCatalogHash()) - .set(ACTOR_CATALOG.CREATED_AT, timestamp) - .set(ACTOR_CATALOG.MODIFIED_AT, timestamp) - .execute(); - } - }); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/HealthCheckPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/HealthCheckPersistenceTest.java deleted file mode 100644 index 25e00b533b609..0000000000000 --- 
a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/HealthCheckPersistenceTest.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.db.Database; -import java.sql.SQLException; -import org.jooq.Result; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class HealthCheckPersistenceTest { - - private Database database; - private ConfigRepository configRepository; - - @BeforeEach - void beforeEach() throws Exception { - - database = mock(Database.class); - configRepository = new ConfigRepository(database); - } - - @Test - void testHealthCheckSuccess() throws SQLException { - final var mResult = mock(Result.class); - when(database.query(any())).thenReturn(mResult); - - final var check = configRepository.healthCheck(); - assertTrue(check); - } - - @Test - void testHealthCheckFailure() throws SQLException { - when(database.query(any())).thenThrow(RuntimeException.class); - - final var check = configRepository.healthCheck(); - assertFalse(check); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/MockData.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/MockData.java deleted file mode 100644 index 5982e3daa8f2f..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/MockData.java +++ /dev/null @@ -1,742 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.ActorCatalog; -import io.airbyte.config.ActorCatalogFetchEvent; -import io.airbyte.config.ActorDefinitionResourceRequirements; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.FieldSelectionData; -import io.airbyte.config.Geography; -import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.config.Notification; -import io.airbyte.config.Notification.NotificationType; -import io.airbyte.config.OperatorDbt; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.OperatorNormalization.Option; -import io.airbyte.config.OperatorWebhook; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.Schedule; -import io.airbyte.config.Schedule.TimeUnit; -import io.airbyte.config.SlackNotificationConfiguration; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSourceDefinition.SourceType; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.NonBreakingChangesPreference; -import io.airbyte.config.StandardSync.Status; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncOperation.OperatorType; -import io.airbyte.config.StandardSyncState; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.State; -import io.airbyte.config.WebhookConfig; -import io.airbyte.config.WebhookOperationConfigs; -import io.airbyte.config.WorkspaceServiceAccount; -import io.airbyte.protocol.models.AirbyteCatalog; -import io.airbyte.protocol.models.AuthSpecification; -import io.airbyte.protocol.models.AuthSpecification.AuthType; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.protocol.models.DestinationSyncMode; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.SyncMode; -import java.net.URI; -import java.time.Instant; -import java.time.OffsetDateTime; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; -import java.util.UUID; -import java.util.stream.Collectors; -import lombok.Data; - -public class MockData { - - public static final UUID WORKSPACE_ID_1 = UUID.randomUUID(); - private static final UUID WORKSPACE_ID_2 = UUID.randomUUID(); - private static final UUID WORKSPACE_ID_3 = UUID.randomUUID(); - private static final UUID WORKSPACE_CUSTOMER_ID = UUID.randomUUID(); - private static final UUID SOURCE_DEFINITION_ID_1 = UUID.randomUUID(); - private static final UUID SOURCE_DEFINITION_ID_2 = UUID.randomUUID(); - private static final UUID SOURCE_DEFINITION_ID_3 = UUID.randomUUID(); - private static final UUID SOURCE_DEFINITION_ID_4 = UUID.randomUUID(); - private static final UUID DESTINATION_DEFINITION_ID_1 = UUID.randomUUID(); - private static final UUID DESTINATION_DEFINITION_ID_2 = UUID.randomUUID(); - private static final UUID DESTINATION_DEFINITION_ID_3 = UUID.randomUUID(); - private static final UUID DESTINATION_DEFINITION_ID_4 = UUID.randomUUID(); - public static final UUID 
SOURCE_ID_1 = UUID.randomUUID(); - public static final UUID SOURCE_ID_2 = UUID.randomUUID(); - private static final UUID SOURCE_ID_3 = UUID.randomUUID(); - public static final UUID DESTINATION_ID_1 = UUID.randomUUID(); - public static final UUID DESTINATION_ID_2 = UUID.randomUUID(); - public static final UUID DESTINATION_ID_3 = UUID.randomUUID(); - private static final UUID OPERATION_ID_1 = UUID.randomUUID(); - private static final UUID OPERATION_ID_2 = UUID.randomUUID(); - private static final UUID OPERATION_ID_3 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_1 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_2 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_3 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_4 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_5 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_6 = UUID.randomUUID(); - private static final UUID SOURCE_OAUTH_PARAMETER_ID_1 = UUID.randomUUID(); - private static final UUID SOURCE_OAUTH_PARAMETER_ID_2 = UUID.randomUUID(); - private static final UUID DESTINATION_OAUTH_PARAMETER_ID_1 = UUID.randomUUID(); - private static final UUID DESTINATION_OAUTH_PARAMETER_ID_2 = UUID.randomUUID(); - public static final UUID ACTOR_CATALOG_ID_1 = UUID.randomUUID(); - private static final UUID ACTOR_CATALOG_ID_2 = UUID.randomUUID(); - public static final UUID ACTOR_CATALOG_ID_3 = UUID.randomUUID(); - private static final UUID ACTOR_CATALOG_FETCH_EVENT_ID_1 = UUID.randomUUID(); - private static final UUID ACTOR_CATALOG_FETCH_EVENT_ID_2 = UUID.randomUUID(); - private static final UUID ACTOR_CATALOG_FETCH_EVENT_ID_3 = UUID.randomUUID(); - - public static final String MOCK_SERVICE_ACCOUNT_1 = "{\n" - + " \"type\" : \"service_account\",\n" - + " \"project_id\" : \"random-gcp-project\",\n" - + " \"private_key_id\" : \"123a1234ab1a123ab12345678a1234ab1abc1a12\",\n" - + " \"private_key\" : \"-----BEGIN RSA PRIVATE 
KEY-----\\nMIIEoQIBAAKCAQBtkKBs9oe9pFhEWjBls9OrY0PXE/QN6nL4Bfw4+UqcBpTyItXo\\n3aBXuVqDIZ377zjbJUcYuc4NzAsLImy7VVT1XrdAkkCKQEMoA9pQgONA/3kD8Xff\\nSUGfdup8UJg925paaRhM7u81e3XKGwGyL/qcxpuHtfqimeWWfSPy5AawyOFl+l25\\nOqbm8PK4/QVqk4pcorQuISUkrehY0Ji0gVQF+ZeBvg7lvBtjNEl//eysGtcZvk7X\\nHqg+EIBqRjVNDsViHj0xeoDFcFgXDeWzxeQ0c7gMsDthfm4SjgaVFdQwsJUeoC6X\\nlwUoBbFIVVKW0n+SH+kxLc7mhaGjyRYJLS6tAgMBAAECggEAaowetlf4IR/VBoN+\\nVSjPSvg5XMr2pyG7tB597RngyGJOLjpaMx5zc1u4/ZSPghRdAh/6R71I+HnYs3dC\\nrdqJyCPXqV+Qi+F6bUtx3p+4X9kQ4hjMLcOboWuPFF1774vDSvCwxQAGd8gb//LL\\nb3DhEdzCGvOJTN7EOdhwQSAmsXsfj0qKlmm8vv0HBQDvjYYWhy/UcPry5sAGQ8KU\\nnUPTkz/OMS56nBIgKXgZtGRTP1Q7Q9a6oLmlvbDxuKGUByUPNlveZplzyWDO3RUN\\nNPt9dwgGk6rZK0umunGr0lq+WOK33Ue1RJy2VIvvV6dt32x20ehfVKND8N8q+wJ3\\neJQggQKBgQC//dOX8RwkmIloRzzmbu+qY8o44/F5gtxj8maR+OJhvbpFEID49bBr\\nzYqcMKfcgHJr6638CXVGSO66IiKtQcTMJ/Vd8TQVPcNPI1h/RD+wT/nkWX6R/0YH\\njwwNmikeUDH2/hLQlRZ8O45hc4frDGRMeHn3MSS2YsBDSl6YL/zHpQKBgQCSF9Ka\\nyCZmw5eS63G5/X9SVXbLRPuc6Fus+IbRPttOzSRviUXHaBjwwVEJgIKODx/eVXgD\\nA/OvFUmwIn73uZD/XgJrhkwAendaa+yhWKAkO5pO/EdAslxRmgxqTXfRcyslKBbo\\ns4YAgeYUgzOaMH4UxY4pJ7H6BLsFlboL+8BcaQKBgDSCM1Cm/M91eH8wnJNZW+r6\\nB+CvVueoxqX/MdZSf3fD8CHbdaqhZ3LUcEhvdjl0V9b0Sk1YON7UK5Z0p49DIZPE\\nifL7eQcmMTh/rkCAZfrOpMWzRE6hxoFiuiUuOHi17jRjILozTEcF8tbsRgwfA392\\no8Tbh/Lp5zOAL4bn+PaRAoGAZ2AgEJJsSe9BRB8CPF+aRoJfKvrHKIJqzHyXuVzH\\nBn22uI3kKHQKoeHJG/Ypa6hcHpFP+KJFPrDLkaz3NwfCCFFXWQqQoQ4Hgp43tPvn\\nZXwfdqChMrCDDuL4wgfLLxRVhVdWzpapzZYdXopwazzBGqWoMIr8LzRFum/2VCBy\\nP3ECgYBGqjuYud6gtrzaQwmMfcA0pSYsii96d2LKwWzjgcMzLxge59PIWXeQJqOb\\nh97m3qCkkPzbceD6Id8m/EyrNb04V8Zr0ERlcK/a4nRSHoIWQZY01lDSGhneRKn1\\nncBvRqCfz6ajf+zBg3zK0af98IHL0FI2NsNJLPrOBFMcthjx/g==\\n-----END RSA PRIVATE KEY-----\",\n" - + " \"client_email\" : \"a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com\",\n" - + " \"client_id\" : \"123456789012345678901\",\n" - + " \"auth_uri\" : \"https://blah.blah.com/x/blah1/blah\",\n" - + " \"token_uri\" : \"https://blah.blah.com/blah\",\n" - + " \"auth_provider_x509_cert_url\" : \"https://www.blah.com/blah/v1/blah\",\n" - + " \"client_x509_cert_url\" : \"https://www.blah.com/blah/v1/blah/a123/a1e5ac98-7531-48e1-943b-b46636%40random-gcp-project.abc.abcdefghijklmno.com\"\n" - + "}"; - - public static final String MOCK_SERVICE_ACCOUNT_2 = "{\n" - + " \"type\" : \"service_account-2\",\n" - + " \"project_id\" : \"random-gcp-project\",\n" - + " \"private_key_id\" : \"123a1234ab1a123ab12345678a1234ab1abc1a12\",\n" - + " \"private_key\" : \"-----BEGIN RSA PRIVATE 
KEY-----\\nMIIEoQIBAAKCAQBtkKBs9oe9pFhEWjBls9OrY0PXE/QN6nL4Bfw4+UqcBpTyItXo\\n3aBXuVqDIZ377zjbJUcYuc4NzAsLImy7VVT1XrdAkkCKQEMoA9pQgONA/3kD8Xff\\nSUGfdup8UJg925paaRhM7u81e3XKGwGyL/qcxpuHtfqimeWWfSPy5AawyOFl+l25\\nOqbm8PK4/QVqk4pcorQuISUkrehY0Ji0gVQF+ZeBvg7lvBtjNEl//eysGtcZvk7X\\nHqg+EIBqRjVNDsViHj0xeoDFcFgXDeWzxeQ0c7gMsDthfm4SjgaVFdQwsJUeoC6X\\nlwUoBbFIVVKW0n+SH+kxLc7mhaGjyRYJLS6tAgMBAAECggEAaowetlf4IR/VBoN+\\nVSjPSvg5XMr2pyG7tB597RngyGJOLjpaMx5zc1u4/ZSPghRdAh/6R71I+HnYs3dC\\nrdqJyCPXqV+Qi+F6bUtx3p+4X9kQ4hjMLcOboWuPFF1774vDSvCwxQAGd8gb//LL\\nb3DhEdzCGvOJTN7EOdhwQSAmsXsfj0qKlmm8vv0HBQDvjYYWhy/UcPry5sAGQ8KU\\nnUPTkz/OMS56nBIgKXgZtGRTP1Q7Q9a6oLmlvbDxuKGUByUPNlveZplzyWDO3RUN\\nNPt9dwgGk6rZK0umunGr0lq+WOK33Ue1RJy2VIvvV6dt32x20ehfVKND8N8q+wJ3\\neJQggQKBgQC//dOX8RwkmIloRzzmbu+qY8o44/F5gtxj8maR+OJhvbpFEID49bBr\\nzYqcMKfcgHJr6638CXVGSO66IiKtQcTMJ/Vd8TQVPcNPI1h/RD+wT/nkWX6R/0YH\\njwwNmikeUDH2/hLQlRZ8O45hc4frDGRMeHn3MSS2YsBDSl6YL/zHpQKBgQCSF9Ka\\nyCZmw5eS63G5/X9SVXbLRPuc6Fus+IbRPttOzSRviUXHaBjwwVEJgIKODx/eVXgD\\nA/OvFUmwIn73uZD/XgJrhkwAendaa+yhWKAkO5pO/EdAslxRmgxqTXfRcyslKBbo\\ns4YAgeYUgzOaMH4UxY4pJ7H6BLsFlboL+8BcaQKBgDSCM1Cm/M91eH8wnJNZW+r6\\nB+CvVueoxqX/MdZSf3fD8CHbdaqhZ3LUcEhvdjl0V9b0Sk1YON7UK5Z0p49DIZPE\\nifL7eQcmMTh/rkCAZfrOpMWzRE6hxoFiuiUuOHi17jRjILozTEcF8tbsRgwfA392\\no8Tbh/Lp5zOAL4bn+PaRAoGAZ2AgEJJsSe9BRB8CPF+aRoJfKvrHKIJqzHyXuVzH\\nBn22uI3kKHQKoeHJG/Ypa6hcHpFP+KJFPrDLkaz3NwfCCFFXWQqQoQ4Hgp43tPvn\\nZXwfdqChMrCDDuL4wgfLLxRVhVdWzpapzZYdXopwazzBGqWoMIr8LzRFum/2VCBy\\nP3ECgYBGqjuYud6gtrzaQwmMfcA0pSYsii96d2LKwWzjgcMzLxge59PIWXeQJqOb\\nh97m3qCkkPzbceD6Id8m/EyrNb04V8Zr0ERlcK/a4nRSHoIWQZY01lDSGhneRKn1\\nncBvRqCfz6ajf+zBg3zK0af98IHL0FI2NsNJLPrOBFMcthjx/g==\\n-----END RSA PRIVATE KEY-----\",\n" - + " \"client_email\" : \"a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com\",\n" - + " \"client_id\" : \"123456789012345678901\",\n" - + " \"auth_uri\" : \"https://blah.blah.com/x/blah1/blah\",\n" - + " \"token_uri\" : \"https://blah.blah.com/blah\",\n" - + " \"auth_provider_x509_cert_url\" : \"https://www.blah.com/blah/v1/blah\",\n" - + " \"client_x509_cert_url\" : \"https://www.blah.com/blah/v1/blah/a123/a1e5ac98-7531-48e1-943b-b46636%40random-gcp-project.abc.abcdefghijklmno.com\"\n" - + "}"; - - public static final JsonNode HMAC_SECRET_PAYLOAD_1 = Jsons.jsonNode(sortMap( - Map.of("access_id", "ABCD1A1ABCDEFG1ABCDEFGH1ABC12ABCDEF1ABCDE1ABCDE1ABCDE12ABCDEF", "secret", "AB1AbcDEF//ABCDeFGHijKlmNOpqR1ABC1aBCDeF"))); - public static final JsonNode HMAC_SECRET_PAYLOAD_2 = Jsons.jsonNode(sortMap( - Map.of("access_id", "ABCD1A1ABCDEFG1ABCDEFGH1ABC12ABCDEF1ABCDE1ABCDE1ABCDE12ABCDEX", "secret", "AB1AbcDEF//ABCDeFGHijKlmNOpqR1ABC1aBCDeX"))); - - private static final Instant NOW = Instant.parse("2021-12-15T20:30:40.00Z"); - - private static final String CONNECTION_SPECIFICATION = "{\"name\":\"John\", \"age\":30, \"car\":null}"; - private static final UUID OPERATION_ID_4 = UUID.randomUUID(); - private static final UUID WEBHOOK_CONFIG_ID = UUID.randomUUID(); - private static final String WEBHOOK_OPERATION_EXECUTION_URL = "test-webhook-url"; - private static final String WEBHOOK_OPERATION_EXECUTION_BODY = "test-webhook-body"; - public static final String CONFIG_HASH = "1394"; - public static final String CONNECTOR_VERSION = "1.2.0"; - - public static List standardWorkspaces() { - final Notification notification = new Notification() - .withNotificationType(NotificationType.SLACK) - .withSendOnFailure(true) - .withSendOnSuccess(true) - .withSlackConfiguration(new 
SlackNotificationConfiguration().withWebhook("webhook-url")); - - final StandardWorkspace workspace1 = new StandardWorkspace() - .withWorkspaceId(WORKSPACE_ID_1) - .withCustomerId(WORKSPACE_CUSTOMER_ID) - .withName("test-workspace") - .withSlug("random-string") - .withEmail("abc@xyz.com") - .withInitialSetupComplete(true) - .withAnonymousDataCollection(true) - .withNews(true) - .withSecurityUpdates(true) - .withDisplaySetupWizard(true) - .withTombstone(false) - .withNotifications(Collections.singletonList(notification)) - .withFirstCompletedSync(true) - .withFeedbackDone(true) - .withDefaultGeography(Geography.US) - .withWebhookOperationConfigs(Jsons.jsonNode( - new WebhookOperationConfigs().withWebhookConfigs(List.of(new WebhookConfig().withId(WEBHOOK_CONFIG_ID).withName("name"))))); - - final StandardWorkspace workspace2 = new StandardWorkspace() - .withWorkspaceId(WORKSPACE_ID_2) - .withName("Another Workspace") - .withSlug("another-workspace") - .withInitialSetupComplete(true) - .withTombstone(false) - .withDefaultGeography(Geography.AUTO); - - final StandardWorkspace workspace3 = new StandardWorkspace() - .withWorkspaceId(WORKSPACE_ID_3) - .withName("Tombstoned") - .withSlug("tombstoned") - .withInitialSetupComplete(true) - .withTombstone(true) - .withDefaultGeography(Geography.AUTO); - - return Arrays.asList(workspace1, workspace2, workspace3); - } - - public static StandardSourceDefinition publicSourceDefinition() { - return new StandardSourceDefinition() - .withSourceDefinitionId(SOURCE_DEFINITION_ID_1) - .withSourceType(SourceType.API) - .withName("random-source-1") - .withDockerImageTag("tag-1") - .withDockerRepository("repository-1") - .withDocumentationUrl("documentation-url-1") - .withIcon("icon-1") - .withSpec(connectorSpecification()) - .withProtocolVersion("0.2.1") - .withTombstone(false) - .withPublic(true) - .withCustom(false) - .withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(new ResourceRequirements().withCpuRequest("2"))); - } - - public static StandardSourceDefinition grantableSourceDefinition1() { - return new StandardSourceDefinition() - .withSourceDefinitionId(SOURCE_DEFINITION_ID_2) - .withSourceType(SourceType.DATABASE) - .withName("random-source-2") - .withDockerImageTag("tag-2") - .withDockerRepository("repository-2") - .withDocumentationUrl("documentation-url-2") - .withIcon("icon-2") - .withTombstone(false) - .withPublic(false) - .withCustom(false); - } - - public static StandardSourceDefinition grantableSourceDefinition2() { - return new StandardSourceDefinition() - .withSourceDefinitionId(SOURCE_DEFINITION_ID_3) - .withSourceType(SourceType.DATABASE) - .withName("random-source-3") - .withDockerImageTag("tag-3") - .withDockerRepository("repository-3") - .withDocumentationUrl("documentation-url-3") - .withProtocolVersion("0.2.2") - .withIcon("icon-3") - .withTombstone(false) - .withPublic(false) - .withCustom(false); - } - - public static StandardSourceDefinition customSourceDefinition() { - return new StandardSourceDefinition() - .withSourceDefinitionId(SOURCE_DEFINITION_ID_4) - .withSourceType(SourceType.DATABASE) - .withName("random-source-4") - .withDockerImageTag("tag-4") - .withDockerRepository("repository-4") - .withDocumentationUrl("documentation-url-4") - .withProtocolVersion("0.2.4") - .withIcon("icon-4") - .withTombstone(false) - .withPublic(false) - .withCustom(true); - } - - public static List standardSourceDefinitions() { - return Arrays.asList( - publicSourceDefinition(), - grantableSourceDefinition1(), - 
grantableSourceDefinition2(), - customSourceDefinition()); - } - - private static ConnectorSpecification connectorSpecification() { - return new ConnectorSpecification() - .withAuthSpecification(new AuthSpecification().withAuthType(AuthType.OAUTH_2_0)) - .withConnectionSpecification(Jsons.jsonNode(CONNECTION_SPECIFICATION)) - .withDocumentationUrl(URI.create("whatever")) - .withAdvancedAuth(null) - .withChangelogUrl(URI.create("whatever")) - .withSupportedDestinationSyncModes(Arrays.asList(DestinationSyncMode.APPEND, DestinationSyncMode.OVERWRITE, DestinationSyncMode.APPEND_DEDUP)) - .withSupportsDBT(true) - .withSupportsIncremental(true) - .withSupportsNormalization(true); - } - - public static StandardDestinationDefinition publicDestinationDefinition() { - return new StandardDestinationDefinition() - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_1) - .withName("random-destination-1") - .withDockerImageTag("tag-3") - .withDockerRepository("repository-3") - .withDocumentationUrl("documentation-url-3") - .withIcon("icon-3") - .withSpec(connectorSpecification()) - .withProtocolVersion("0.3.1") - .withTombstone(false) - .withPublic(true) - .withCustom(false) - .withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(new ResourceRequirements().withCpuRequest("2"))); - } - - public static StandardDestinationDefinition grantableDestinationDefinition1() { - return new StandardDestinationDefinition() - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_2) - .withName("random-destination-2") - .withDockerImageTag("tag-4") - .withDockerRepository("repository-4") - .withDocumentationUrl("documentation-url-4") - .withIcon("icon-4") - .withSpec(connectorSpecification()) - .withTombstone(false) - .withPublic(false) - .withCustom(false); - } - - public static StandardDestinationDefinition grantableDestinationDefinition2() { - return new StandardDestinationDefinition() - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_3) - .withName("random-destination-3") - .withDockerImageTag("tag-33") - .withDockerRepository("repository-33") - .withDocumentationUrl("documentation-url-33") - .withIcon("icon-3") - .withSpec(connectorSpecification()) - .withTombstone(false) - .withPublic(false) - .withCustom(false); - } - - public static StandardDestinationDefinition cusstomDestinationDefinition() { - return new StandardDestinationDefinition() - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_4) - .withName("random-destination-4") - .withDockerImageTag("tag-44") - .withDockerRepository("repository-44") - .withDocumentationUrl("documentation-url-44") - .withIcon("icon-4") - .withSpec(connectorSpecification()) - .withProtocolVersion("0.3.2") - .withTombstone(false) - .withPublic(false) - .withCustom(true); - } - - public static List standardDestinationDefinitions() { - return Arrays.asList( - publicDestinationDefinition(), - grantableDestinationDefinition1(), - grantableDestinationDefinition2(), - cusstomDestinationDefinition()); - } - - public static List sourceConnections() { - final SourceConnection sourceConnection1 = new SourceConnection() - .withName("source-1") - .withTombstone(false) - .withSourceDefinitionId(SOURCE_DEFINITION_ID_1) - .withWorkspaceId(WORKSPACE_ID_1) - .withConfiguration(Jsons.deserialize(CONNECTION_SPECIFICATION)) - .withSourceId(SOURCE_ID_1); - final SourceConnection sourceConnection2 = new SourceConnection() - .withName("source-2") - .withTombstone(false) - .withSourceDefinitionId(SOURCE_DEFINITION_ID_2) - .withWorkspaceId(WORKSPACE_ID_1) - 
.withConfiguration(Jsons.deserialize(CONNECTION_SPECIFICATION)) - .withSourceId(SOURCE_ID_2); - final SourceConnection sourceConnection3 = new SourceConnection() - .withName("source-3") - .withTombstone(false) - .withSourceDefinitionId(SOURCE_DEFINITION_ID_1) - .withWorkspaceId(WORKSPACE_ID_2) - .withConfiguration(Jsons.emptyObject()) - .withSourceId(SOURCE_ID_3); - return Arrays.asList(sourceConnection1, sourceConnection2, sourceConnection3); - } - - public static List destinationConnections() { - final DestinationConnection destinationConnection1 = new DestinationConnection() - .withName("destination-1") - .withTombstone(false) - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_1) - .withWorkspaceId(WORKSPACE_ID_1) - .withConfiguration(Jsons.deserialize(CONNECTION_SPECIFICATION)) - .withDestinationId(DESTINATION_ID_1); - final DestinationConnection destinationConnection2 = new DestinationConnection() - .withName("destination-2") - .withTombstone(false) - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_2) - .withWorkspaceId(WORKSPACE_ID_1) - .withConfiguration(Jsons.deserialize(CONNECTION_SPECIFICATION)) - .withDestinationId(DESTINATION_ID_2); - final DestinationConnection destinationConnection3 = new DestinationConnection() - .withName("destination-3") - .withTombstone(true) - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_2) - .withWorkspaceId(WORKSPACE_ID_2) - .withConfiguration(Jsons.emptyObject()) - .withDestinationId(DESTINATION_ID_3); - return Arrays.asList(destinationConnection1, destinationConnection2, destinationConnection3); - } - - public static List sourceOauthParameters() { - final SourceOAuthParameter sourceOAuthParameter1 = new SourceOAuthParameter() - .withConfiguration(Jsons.jsonNode(CONNECTION_SPECIFICATION)) - .withWorkspaceId(WORKSPACE_ID_1) - .withSourceDefinitionId(SOURCE_DEFINITION_ID_1) - .withOauthParameterId(SOURCE_OAUTH_PARAMETER_ID_1); - final SourceOAuthParameter sourceOAuthParameter2 = new SourceOAuthParameter() - .withConfiguration(Jsons.jsonNode(CONNECTION_SPECIFICATION)) - .withWorkspaceId(WORKSPACE_ID_1) - .withSourceDefinitionId(SOURCE_DEFINITION_ID_2) - .withOauthParameterId(SOURCE_OAUTH_PARAMETER_ID_2); - return Arrays.asList(sourceOAuthParameter1, sourceOAuthParameter2); - } - - public static List destinationOauthParameters() { - final DestinationOAuthParameter destinationOAuthParameter1 = new DestinationOAuthParameter() - .withConfiguration(Jsons.jsonNode(CONNECTION_SPECIFICATION)) - .withWorkspaceId(WORKSPACE_ID_1) - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_1) - .withOauthParameterId(DESTINATION_OAUTH_PARAMETER_ID_1); - final DestinationOAuthParameter destinationOAuthParameter2 = new DestinationOAuthParameter() - .withConfiguration(Jsons.jsonNode(CONNECTION_SPECIFICATION)) - .withWorkspaceId(WORKSPACE_ID_1) - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_2) - .withOauthParameterId(DESTINATION_OAUTH_PARAMETER_ID_2); - return Arrays.asList(destinationOAuthParameter1, destinationOAuthParameter2); - } - - public static List standardSyncOperations() { - final OperatorDbt operatorDbt = new OperatorDbt() - .withDbtArguments("dbt-arguments") - .withDockerImage("image-tag") - .withGitRepoBranch("git-repo-branch") - .withGitRepoUrl("git-repo-url"); - final StandardSyncOperation standardSyncOperation1 = new StandardSyncOperation() - .withName("operation-1") - .withTombstone(false) - .withOperationId(OPERATION_ID_1) - .withWorkspaceId(WORKSPACE_ID_1) - .withOperatorDbt(operatorDbt) - .withOperatorNormalization(null) 
- .withOperatorType(OperatorType.DBT); - final StandardSyncOperation standardSyncOperation2 = new StandardSyncOperation() - .withName("operation-1") - .withTombstone(false) - .withOperationId(OPERATION_ID_2) - .withWorkspaceId(WORKSPACE_ID_1) - .withOperatorDbt(null) - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)) - .withOperatorType(OperatorType.NORMALIZATION); - final StandardSyncOperation standardSyncOperation3 = new StandardSyncOperation() - .withName("operation-3") - .withTombstone(false) - .withOperationId(OPERATION_ID_3) - .withWorkspaceId(WORKSPACE_ID_2) - .withOperatorDbt(null) - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)) - .withOperatorType(OperatorType.NORMALIZATION); - final StandardSyncOperation standardSyncOperation4 = new StandardSyncOperation() - .withName("webhook-operation") - .withTombstone(false) - .withOperationId(OPERATION_ID_4) - .withWorkspaceId(WORKSPACE_ID_1) - .withOperatorType(OperatorType.WEBHOOK) - .withOperatorDbt(null) - .withOperatorNormalization(null) - .withOperatorWebhook( - new OperatorWebhook() - .withWebhookConfigId(WEBHOOK_CONFIG_ID) - .withExecutionUrl(WEBHOOK_OPERATION_EXECUTION_URL) - .withExecutionBody(WEBHOOK_OPERATION_EXECUTION_BODY)); - return Arrays.asList(standardSyncOperation1, standardSyncOperation2, standardSyncOperation3, standardSyncOperation4); - } - - public static List standardSyncs() { - final ResourceRequirements resourceRequirements = new ResourceRequirements() - .withCpuRequest("1") - .withCpuLimit("1") - .withMemoryRequest("1") - .withMemoryLimit("1"); - final Schedule schedule = new Schedule().withTimeUnit(TimeUnit.DAYS).withUnits(1L); - final StandardSync standardSync1 = new StandardSync() - .withOperationIds(Arrays.asList(OPERATION_ID_1, OPERATION_ID_2)) - .withConnectionId(CONNECTION_ID_1) - .withSourceId(SOURCE_ID_1) - .withDestinationId(DESTINATION_ID_1) - .withCatalog(getConfiguredCatalog()) - .withFieldSelectionData(new FieldSelectionData().withAdditionalProperty("foo", true)) - .withName("standard-sync-1") - .withManual(true) - .withNamespaceDefinition(NamespaceDefinitionType.CUSTOMFORMAT) - .withNamespaceFormat("") - .withPrefix("") - .withResourceRequirements(resourceRequirements) - .withStatus(Status.ACTIVE) - .withSchedule(schedule) - .withGeography(Geography.AUTO) - .withBreakingChange(false) - .withNonBreakingChangesPreference(NonBreakingChangesPreference.IGNORE) - .withNotifySchemaChanges(true); - - final StandardSync standardSync2 = new StandardSync() - .withOperationIds(Arrays.asList(OPERATION_ID_1, OPERATION_ID_2)) - .withConnectionId(CONNECTION_ID_2) - .withSourceId(SOURCE_ID_1) - .withDestinationId(DESTINATION_ID_2) - .withCatalog(getConfiguredCatalog()) - .withName("standard-sync-2") - .withManual(true) - .withNamespaceDefinition(NamespaceDefinitionType.SOURCE) - .withNamespaceFormat("") - .withPrefix("") - .withResourceRequirements(resourceRequirements) - .withStatus(Status.ACTIVE) - .withSchedule(schedule) - .withGeography(Geography.AUTO) - .withBreakingChange(false) - .withNonBreakingChangesPreference(NonBreakingChangesPreference.IGNORE) - .withNotifySchemaChanges(true); - - final StandardSync standardSync3 = new StandardSync() - .withOperationIds(Arrays.asList(OPERATION_ID_1, OPERATION_ID_2)) - .withConnectionId(CONNECTION_ID_3) - .withSourceId(SOURCE_ID_2) - .withDestinationId(DESTINATION_ID_1) - .withCatalog(getConfiguredCatalog()) - .withName("standard-sync-3") - .withManual(true) - 
.withNamespaceDefinition(NamespaceDefinitionType.DESTINATION) - .withNamespaceFormat("") - .withPrefix("") - .withResourceRequirements(resourceRequirements) - .withStatus(Status.ACTIVE) - .withSchedule(schedule) - .withGeography(Geography.AUTO) - .withBreakingChange(false) - .withNonBreakingChangesPreference(NonBreakingChangesPreference.IGNORE) - .withNotifySchemaChanges(true); - - final StandardSync standardSync4 = new StandardSync() - .withOperationIds(Collections.emptyList()) - .withConnectionId(CONNECTION_ID_4) - .withSourceId(SOURCE_ID_2) - .withDestinationId(DESTINATION_ID_2) - .withCatalog(getConfiguredCatalog()) - .withName("standard-sync-4") - .withManual(true) - .withNamespaceDefinition(NamespaceDefinitionType.CUSTOMFORMAT) - .withNamespaceFormat("") - .withPrefix("") - .withResourceRequirements(resourceRequirements) - .withStatus(Status.DEPRECATED) - .withSchedule(schedule) - .withGeography(Geography.AUTO) - .withBreakingChange(false) - .withNonBreakingChangesPreference(NonBreakingChangesPreference.IGNORE) - .withNotifySchemaChanges(true); - - final StandardSync standardSync5 = new StandardSync() - .withOperationIds(Arrays.asList(OPERATION_ID_3)) - .withConnectionId(CONNECTION_ID_5) - .withSourceId(SOURCE_ID_3) - .withDestinationId(DESTINATION_ID_3) - .withCatalog(getConfiguredCatalog()) - .withName("standard-sync-5") - .withManual(true) - .withNamespaceDefinition(NamespaceDefinitionType.CUSTOMFORMAT) - .withNamespaceFormat("") - .withPrefix("") - .withResourceRequirements(resourceRequirements) - .withStatus(Status.ACTIVE) - .withSchedule(schedule) - .withGeography(Geography.AUTO) - .withBreakingChange(false) - .withNonBreakingChangesPreference(NonBreakingChangesPreference.IGNORE) - .withNotifySchemaChanges(true); - - final StandardSync standardSync6 = new StandardSync() - .withOperationIds(Arrays.asList()) - .withConnectionId(CONNECTION_ID_6) - .withSourceId(SOURCE_ID_3) - .withDestinationId(DESTINATION_ID_3) - .withCatalog(getConfiguredCatalog()) - .withName("standard-sync-6") - .withManual(true) - .withNamespaceDefinition(NamespaceDefinitionType.CUSTOMFORMAT) - .withNamespaceFormat("") - .withPrefix("") - .withResourceRequirements(resourceRequirements) - .withStatus(Status.DEPRECATED) - .withSchedule(schedule) - .withGeography(Geography.AUTO) - .withBreakingChange(false) - .withNonBreakingChangesPreference(NonBreakingChangesPreference.IGNORE) - .withNotifySchemaChanges(true); - - return Arrays.asList(standardSync1, standardSync2, standardSync3, standardSync4, standardSync5, standardSync6); - } - - private static ConfiguredAirbyteCatalog getConfiguredCatalog() { - final AirbyteCatalog catalog = new AirbyteCatalog().withStreams(List.of( - CatalogHelpers.createAirbyteStream( - "models", - "models_schema", - io.airbyte.protocol.models.Field.of("id", JsonSchemaType.NUMBER), - io.airbyte.protocol.models.Field.of("make_id", JsonSchemaType.NUMBER), - io.airbyte.protocol.models.Field.of("model", JsonSchemaType.STRING)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(List.of(List.of("id"))))); - return CatalogHelpers.toDefaultConfiguredCatalog(catalog); - } - - public static ConfiguredAirbyteCatalog getConfiguredCatalogWithV1DataTypes() { - final AirbyteCatalog catalog = new AirbyteCatalog().withStreams(List.of( - CatalogHelpers.createAirbyteStream( - "models", - "models_schema", - io.airbyte.protocol.models.Field.of("id", JsonSchemaType.NUMBER_V1), - io.airbyte.protocol.models.Field.of("make_id", 
-            JsonSchemaType.NUMBER_V1),
-        io.airbyte.protocol.models.Field.of("model", JsonSchemaType.STRING_V1))
-        .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL))
-        .withSourceDefinedPrimaryKey(List.of(List.of("id")))));
-    return CatalogHelpers.toDefaultConfiguredCatalog(catalog);
-  }
-
-  public static List<StandardSyncState> standardSyncStates() {
-    final StandardSyncState standardSyncState1 = new StandardSyncState()
-        .withConnectionId(CONNECTION_ID_1)
-        .withState(new State().withState(Jsons.jsonNode(CONNECTION_SPECIFICATION)));
-    final StandardSyncState standardSyncState2 = new StandardSyncState()
-        .withConnectionId(CONNECTION_ID_2)
-        .withState(new State().withState(Jsons.jsonNode(CONNECTION_SPECIFICATION)));
-    final StandardSyncState standardSyncState3 = new StandardSyncState()
-        .withConnectionId(CONNECTION_ID_3)
-        .withState(new State().withState(Jsons.jsonNode(CONNECTION_SPECIFICATION)));
-    final StandardSyncState standardSyncState4 = new StandardSyncState()
-        .withConnectionId(CONNECTION_ID_4)
-        .withState(new State().withState(Jsons.jsonNode(CONNECTION_SPECIFICATION)));
-    return Arrays.asList(standardSyncState1, standardSyncState2, standardSyncState3, standardSyncState4);
-  }
-
-  public static List<ActorCatalog> actorCatalogs() {
-    final ActorCatalog actorCatalog1 = new ActorCatalog()
-        .withId(ACTOR_CATALOG_ID_1)
-        .withCatalog(Jsons.deserialize("{}"))
-        .withCatalogHash("TESTHASH");
-    final ActorCatalog actorCatalog2 = new ActorCatalog()
-        .withId(ACTOR_CATALOG_ID_2)
-        .withCatalog(Jsons.deserialize("{}"))
-        .withCatalogHash("12345");
-    final ActorCatalog actorCatalog3 = new ActorCatalog()
-        .withId(ACTOR_CATALOG_ID_3)
-        .withCatalog(Jsons.deserialize("{}"))
-        .withCatalogHash("SomeOtherHash");
-    return Arrays.asList(actorCatalog1, actorCatalog2, actorCatalog3);
-  }
-
-  public static List<ActorCatalogFetchEvent> actorCatalogFetchEvents() {
-    final ActorCatalogFetchEvent actorCatalogFetchEvent1 = new ActorCatalogFetchEvent()
-        .withId(ACTOR_CATALOG_FETCH_EVENT_ID_1)
-        .withActorCatalogId(ACTOR_CATALOG_ID_1)
-        .withActorId(SOURCE_ID_1)
-        .withConfigHash("CONFIG_HASH")
-        .withConnectorVersion("1.0.0");
-    final ActorCatalogFetchEvent actorCatalogFetchEvent2 = new ActorCatalogFetchEvent()
-        .withId(ACTOR_CATALOG_FETCH_EVENT_ID_2)
-        .withActorCatalogId(ACTOR_CATALOG_ID_2)
-        .withActorId(SOURCE_ID_2)
-        .withConfigHash("1395")
-        .withConnectorVersion("1.42.0");
-    return Arrays.asList(actorCatalogFetchEvent1, actorCatalogFetchEvent2);
-  }
-
-  public static List<ActorCatalogFetchEvent> actorCatalogFetchEventsSameSource() {
-    final ActorCatalogFetchEvent actorCatalogFetchEvent1 = new ActorCatalogFetchEvent()
-        .withId(ACTOR_CATALOG_FETCH_EVENT_ID_1)
-        .withActorCatalogId(ACTOR_CATALOG_ID_1)
-        .withActorId(SOURCE_ID_1)
-        .withConfigHash("CONFIG_HASH")
-        .withConnectorVersion("1.0.0");
-    final ActorCatalogFetchEvent actorCatalogFetchEvent2 = new ActorCatalogFetchEvent()
-        .withId(ACTOR_CATALOG_FETCH_EVENT_ID_2)
-        .withActorCatalogId(ACTOR_CATALOG_ID_2)
-        .withActorId(SOURCE_ID_1)
-        .withConfigHash(CONFIG_HASH)
-        .withConnectorVersion(CONNECTOR_VERSION);
-    return Arrays.asList(actorCatalogFetchEvent1, actorCatalogFetchEvent2);
-  }
-
-  @Data
-  public static class ActorCatalogFetchEventWithCreationDate {
-
-    private final ActorCatalogFetchEvent actorCatalogFetchEvent;
-    private final OffsetDateTime createdAt;
-
-  }
-
-  public static List<ActorCatalogFetchEventWithCreationDate> actorCatalogFetchEventsForAggregationTest() {
-    final OffsetDateTime now = OffsetDateTime.now();
-    final OffsetDateTime yesterday = OffsetDateTime.now().minusDays(1L);
-
-    final ActorCatalogFetchEvent
actorCatalogFetchEvent1 = new ActorCatalogFetchEvent() - .withId(ACTOR_CATALOG_FETCH_EVENT_ID_1) - .withActorCatalogId(ACTOR_CATALOG_ID_1) - .withActorId(SOURCE_ID_1) - .withConfigHash("CONFIG_HASH") - .withConnectorVersion("1.0.0"); - final ActorCatalogFetchEvent actorCatalogFetchEvent2 = new ActorCatalogFetchEvent() - .withId(ACTOR_CATALOG_FETCH_EVENT_ID_2) - .withActorCatalogId(ACTOR_CATALOG_ID_2) - .withActorId(SOURCE_ID_2) - .withConfigHash(CONFIG_HASH) - .withConnectorVersion(CONNECTOR_VERSION); - final ActorCatalogFetchEvent actorCatalogFetchEvent3 = new ActorCatalogFetchEvent() - .withId(ACTOR_CATALOG_FETCH_EVENT_ID_3) - .withActorCatalogId(ACTOR_CATALOG_ID_3) - .withActorId(SOURCE_ID_2) - .withConfigHash(CONFIG_HASH) - .withConnectorVersion(CONNECTOR_VERSION); - final ActorCatalogFetchEvent actorCatalogFetchEvent4 = new ActorCatalogFetchEvent() - .withId(ACTOR_CATALOG_FETCH_EVENT_ID_3) - .withActorCatalogId(ACTOR_CATALOG_ID_3) - .withActorId(SOURCE_ID_3) - .withConfigHash(CONFIG_HASH) - .withConnectorVersion(CONNECTOR_VERSION); - return Arrays.asList( - new ActorCatalogFetchEventWithCreationDate(actorCatalogFetchEvent1, now), - new ActorCatalogFetchEventWithCreationDate(actorCatalogFetchEvent2, yesterday), - new ActorCatalogFetchEventWithCreationDate(actorCatalogFetchEvent3, now), - new ActorCatalogFetchEventWithCreationDate(actorCatalogFetchEvent4, now)); - } - - public static List workspaceServiceAccounts() { - final WorkspaceServiceAccount workspaceServiceAccount = new WorkspaceServiceAccount() - .withWorkspaceId(WORKSPACE_ID_1) - .withHmacKey(HMAC_SECRET_PAYLOAD_1) - .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636") - .withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com") - .withJsonCredential(Jsons.deserialize(MOCK_SERVICE_ACCOUNT_1)); - - return Arrays.asList(workspaceServiceAccount); - } - - private static Map sortMap(final Map originalMap) { - return originalMap.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (oldValue, newValue) -> newValue, TreeMap::new)); - } - - public static Instant now() { - return NOW; - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryReaderTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryReaderTest.java deleted file mode 100644 index ae7fc05e14376..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryReaderTest.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence; - -import static io.airbyte.config.persistence.MockData.HMAC_SECRET_PAYLOAD_1; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.WorkspaceServiceAccount; -import io.airbyte.config.persistence.split_secrets.MemorySecretPersistence; -import io.airbyte.config.persistence.split_secrets.RealSecretsHydrator; -import io.airbyte.config.persistence.split_secrets.SecretCoordinate; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHydrator; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class SecretsRepositoryReaderTest { - - private static final UUID UUID1 = UUID.randomUUID(); - - private static final SecretCoordinate COORDINATE = new SecretCoordinate("pointer", 2); - private static final String SECRET = "abc"; - private static final JsonNode PARTIAL_CONFIG = - Jsons.deserialize(String.format("{ \"username\": \"airbyte\", \"password\": { \"_secret\": \"%s\" } }", COORDINATE.getFullCoordinate())); - private static final JsonNode FULL_CONFIG = Jsons.deserialize(String.format("{ \"username\": \"airbyte\", \"password\": \"%s\"}", SECRET)); - private static final String KEY = "_secret"; - private static final String SERVICE_ACCT_EMAIL = "a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com"; - private static final String SERVICE_ACCT_ID = "a1e5ac98-7531-48e1-943b-b46636"; - - private static final SourceConnection SOURCE_WITH_PARTIAL_CONFIG = new SourceConnection() - .withSourceId(UUID1) - .withConfiguration(PARTIAL_CONFIG); - private static final SourceConnection SOURCE_WITH_FULL_CONFIG = Jsons.clone(SOURCE_WITH_PARTIAL_CONFIG) - .withConfiguration(FULL_CONFIG); - - private static final DestinationConnection DESTINATION_WITH_PARTIAL_CONFIG = new DestinationConnection() - .withDestinationId(UUID1) - .withConfiguration(PARTIAL_CONFIG); - private static final DestinationConnection DESTINATION_WITH_FULL_CONFIG = Jsons.clone(DESTINATION_WITH_PARTIAL_CONFIG) - .withConfiguration(FULL_CONFIG); - - private ConfigRepository configRepository; - private SecretsRepositoryReader secretsRepositoryReader; - private MemorySecretPersistence secretPersistence; - - @BeforeEach - void setup() { - configRepository = mock(ConfigRepository.class); - secretPersistence = new MemorySecretPersistence(); - final SecretsHydrator secretsHydrator = new RealSecretsHydrator(secretPersistence); - secretsRepositoryReader = new SecretsRepositoryReader(configRepository, secretsHydrator); - } - - @Test - void testGetSourceWithSecrets() throws JsonValidationException, ConfigNotFoundException, IOException { - secretPersistence.write(COORDINATE, SECRET); - when(configRepository.getSourceConnection(UUID1)).thenReturn(SOURCE_WITH_PARTIAL_CONFIG); - assertEquals(SOURCE_WITH_FULL_CONFIG, secretsRepositoryReader.getSourceConnectionWithSecrets(UUID1)); - } - - @Test - void testListSourcesWithSecrets() 
throws JsonValidationException, IOException { - secretPersistence.write(COORDINATE, SECRET); - when(configRepository.listSourceConnection()).thenReturn(List.of(SOURCE_WITH_PARTIAL_CONFIG)); - assertEquals(List.of(SOURCE_WITH_FULL_CONFIG), secretsRepositoryReader.listSourceConnectionWithSecrets()); - } - - @Test - void testGetDestinationWithSecrets() throws JsonValidationException, ConfigNotFoundException, IOException { - secretPersistence.write(COORDINATE, SECRET); - when(configRepository.getDestinationConnection(UUID1)).thenReturn(DESTINATION_WITH_PARTIAL_CONFIG); - assertEquals(DESTINATION_WITH_FULL_CONFIG, secretsRepositoryReader.getDestinationConnectionWithSecrets(UUID1)); - } - - @Test - void testListDestinationsWithSecrets() throws JsonValidationException, IOException { - secretPersistence.write(COORDINATE, SECRET); - when(configRepository.listDestinationConnection()).thenReturn(List.of(DESTINATION_WITH_PARTIAL_CONFIG)); - assertEquals(List.of(DESTINATION_WITH_FULL_CONFIG), secretsRepositoryReader.listDestinationConnectionWithSecrets()); - } - - @Test - void testReadingServiceAccount() throws JsonValidationException, ConfigNotFoundException, IOException { - final ConfigRepository configRepository = mock(ConfigRepository.class); - final SecretPersistence secretPersistence = mock(SecretPersistence.class); - final RealSecretsHydrator realSecretsHydrator = new RealSecretsHydrator(secretPersistence); - final SecretsRepositoryReader secretsRepositoryReader = - spy(new SecretsRepositoryReader(configRepository, realSecretsHydrator)); - - final UUID workspaceId = UUID.fromString("13fb9a84-6bfa-4801-8f5e-ce717677babf"); - - final String jsonSecretPayload = MockData.MOCK_SERVICE_ACCOUNT_1; - - final SecretCoordinate secretCoordinateHmac = new SecretCoordinate( - "service_account_hmac_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 1); - - final SecretCoordinate secretCoordinateJson = new SecretCoordinate( - "service_account_json_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_6b894c2b-71dc-4481-bd9f-572402643cf9", 1); - - doReturn(new WorkspaceServiceAccount().withWorkspaceId(workspaceId).withHmacKey(Jsons.jsonNode( - Map.of(KEY, secretCoordinateHmac.getFullCoordinate()))).withJsonCredential(Jsons.jsonNode( - Map.of(KEY, secretCoordinateJson.getFullCoordinate()))) - .withServiceAccountEmail(SERVICE_ACCT_EMAIL) - .withServiceAccountId(SERVICE_ACCT_ID)) - .when(configRepository).getWorkspaceServiceAccountNoSecrets(workspaceId); - - doReturn(Optional.of(HMAC_SECRET_PAYLOAD_1.toString())).when(secretPersistence).read(secretCoordinateHmac); - - doReturn(Optional.of(jsonSecretPayload)).when(secretPersistence).read(secretCoordinateJson); - - final WorkspaceServiceAccount actual = secretsRepositoryReader.getWorkspaceServiceAccountWithSecrets(workspaceId); - final WorkspaceServiceAccount expected = new WorkspaceServiceAccount().withWorkspaceId(workspaceId) - .withJsonCredential(Jsons.deserialize(jsonSecretPayload)).withHmacKey(HMAC_SECRET_PAYLOAD_1) - .withServiceAccountId(SERVICE_ACCT_ID) - .withServiceAccountEmail(SERVICE_ACCT_EMAIL); - assertEquals(expected, actual); - } - - @Test - void testReadingServiceAccountWithJsonNull() throws JsonValidationException, ConfigNotFoundException, IOException { - final ConfigRepository configRepository = mock(ConfigRepository.class); - final SecretPersistence secretPersistence = mock(SecretPersistence.class); - final RealSecretsHydrator realSecretsHydrator = new RealSecretsHydrator(secretPersistence); - final SecretsRepositoryReader 
secretsRepositoryReader = - spy(new SecretsRepositoryReader(configRepository, realSecretsHydrator)); - - final UUID workspaceId = UUID.fromString("13fb9a84-6bfa-4801-8f5e-ce717677babf"); - - final SecretCoordinate secretCoordinateHmac = new SecretCoordinate( - "service_account_hmac_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 1); - - doReturn(new WorkspaceServiceAccount().withWorkspaceId(workspaceId).withHmacKey(Jsons.jsonNode( - Map.of(KEY, secretCoordinateHmac.getFullCoordinate()))) - .withServiceAccountEmail(SERVICE_ACCT_EMAIL) - .withServiceAccountId(SERVICE_ACCT_ID)) - .when(configRepository).getWorkspaceServiceAccountNoSecrets(workspaceId); - - doReturn(Optional.of(HMAC_SECRET_PAYLOAD_1.toString())).when(secretPersistence).read(secretCoordinateHmac); - - final WorkspaceServiceAccount actual = secretsRepositoryReader.getWorkspaceServiceAccountWithSecrets(workspaceId); - final WorkspaceServiceAccount expected = new WorkspaceServiceAccount().withWorkspaceId(workspaceId) - .withHmacKey(HMAC_SECRET_PAYLOAD_1) - .withServiceAccountId(SERVICE_ACCT_ID) - .withServiceAccountEmail(SERVICE_ACCT_EMAIL); - assertEquals(expected, actual); - } - - @Test - void testReadingServiceAccountWithHmacNull() throws JsonValidationException, ConfigNotFoundException, IOException { - final ConfigRepository configRepository = mock(ConfigRepository.class); - final SecretPersistence secretPersistence = mock(SecretPersistence.class); - final RealSecretsHydrator realSecretsHydrator = new RealSecretsHydrator(secretPersistence); - final SecretsRepositoryReader secretsRepositoryReader = - spy(new SecretsRepositoryReader(configRepository, realSecretsHydrator)); - - final UUID workspaceId = UUID.fromString("13fb9a84-6bfa-4801-8f5e-ce717677babf"); - - final String jsonSecretPayload = MockData.MOCK_SERVICE_ACCOUNT_1; - - final SecretCoordinate secretCoordinateJson = new SecretCoordinate( - "service_account_json_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_6b894c2b-71dc-4481-bd9f-572402643cf9", 1); - - doReturn(new WorkspaceServiceAccount().withWorkspaceId(workspaceId).withJsonCredential(Jsons.jsonNode( - Map.of(KEY, secretCoordinateJson.getFullCoordinate()))) - .withServiceAccountEmail(SERVICE_ACCT_EMAIL) - .withServiceAccountId(SERVICE_ACCT_ID)) - .when(configRepository).getWorkspaceServiceAccountNoSecrets(workspaceId); - - doReturn(Optional.of(jsonSecretPayload)).when(secretPersistence).read(secretCoordinateJson); - - final WorkspaceServiceAccount actual = secretsRepositoryReader.getWorkspaceServiceAccountWithSecrets(workspaceId); - final WorkspaceServiceAccount expected = new WorkspaceServiceAccount().withWorkspaceId(workspaceId) - .withJsonCredential(Jsons.deserialize(jsonSecretPayload)) - .withServiceAccountId(SERVICE_ACCT_ID) - .withServiceAccountEmail(SERVICE_ACCT_EMAIL); - assertEquals(expected, actual); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryWriterTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryWriterTest.java deleted file mode 100644 index 271e2db8ba160..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryWriterTest.java +++ /dev/null @@ -1,438 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence; - -import static io.airbyte.config.persistence.MockData.HMAC_SECRET_PAYLOAD_1; -import static io.airbyte.config.persistence.MockData.HMAC_SECRET_PAYLOAD_2; -import static io.airbyte.config.persistence.MockData.MOCK_SERVICE_ACCOUNT_1; -import static io.airbyte.config.persistence.MockData.MOCK_SERVICE_ACCOUNT_2; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.Geography; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.WebhookConfig; -import io.airbyte.config.WebhookOperationConfigs; -import io.airbyte.config.WorkspaceServiceAccount; -import io.airbyte.config.persistence.split_secrets.MemorySecretPersistence; -import io.airbyte.config.persistence.split_secrets.RealSecretsHydrator; -import io.airbyte.config.persistence.split_secrets.SecretCoordinate; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; - -@SuppressWarnings({"PMD.AvoidThrowingRawExceptionTypes", "PMD.UnusedPrivateField"}) -class SecretsRepositoryWriterTest { - - private static final UUID UUID1 = UUID.randomUUID(); - - private static final ConnectorSpecification SPEC = new ConnectorSpecification() - .withConnectionSpecification( - Jsons.deserialize(""" - { "properties": { "username": { "type": "string" }, "password": { "type": "string", "airbyte_secret": true } } } - """)); - - private static final String SECRET = "abc"; - private static final JsonNode FULL_CONFIG = Jsons.deserialize(String.format(""" - { "username": "airbyte", "password": "%s"}""", SECRET)); - - private static final SourceConnection SOURCE_WITH_FULL_CONFIG = new SourceConnection() - .withSourceId(UUID1) - .withSourceDefinitionId(UUID.randomUUID()) - .withConfiguration(FULL_CONFIG); - - private static final DestinationConnection DESTINATION_WITH_FULL_CONFIG = new DestinationConnection() - .withDestinationId(UUID1) - .withConfiguration(FULL_CONFIG); - - private static final StandardSourceDefinition SOURCE_DEF = new StandardSourceDefinition() - 
.withSourceDefinitionId(SOURCE_WITH_FULL_CONFIG.getSourceDefinitionId()) - .withSpec(SPEC); - - private static final StandardDestinationDefinition DEST_DEF = new StandardDestinationDefinition() - .withDestinationDefinitionId(DESTINATION_WITH_FULL_CONFIG.getDestinationDefinitionId()) - .withSpec(SPEC); - - private static final String PASSWORD_PROPERTY_NAME = "password"; - private static final String PASSWORD_FIELD_NAME = "_secret"; - private static final String TEST_EMAIL = "test-email"; - private static final String TEST_WORKSPACE_NAME = "test-workspace-name"; - private static final String TEST_WORKSPACE_SLUG = "test-workspace-slug"; - private static final String TEST_WEBHOOK_NAME = "test-webhook-name"; - private static final String TEST_AUTH_TOKEN = "test-auth-token"; - - private ConfigRepository configRepository; - private MemorySecretPersistence longLivedSecretPersistence; - private MemorySecretPersistence ephemeralSecretPersistence; - private SecretsRepositoryWriter secretsRepositoryWriter; - - private RealSecretsHydrator longLivedSecretsHydrator; - private SecretsRepositoryReader longLivedSecretsRepositoryReader; - private RealSecretsHydrator ephemeralSecretsHydrator; - private JsonSchemaValidator jsonSchemaValidator; - - @BeforeEach - void setup() { - configRepository = spy(mock(ConfigRepository.class)); - longLivedSecretPersistence = new MemorySecretPersistence(); - ephemeralSecretPersistence = new MemorySecretPersistence(); - jsonSchemaValidator = mock(JsonSchemaValidator.class); - - secretsRepositoryWriter = new SecretsRepositoryWriter( - configRepository, - jsonSchemaValidator, - Optional.of(longLivedSecretPersistence), - Optional.of(ephemeralSecretPersistence)); - - longLivedSecretsHydrator = new RealSecretsHydrator(longLivedSecretPersistence); - longLivedSecretsRepositoryReader = new SecretsRepositoryReader(configRepository, longLivedSecretsHydrator); - - ephemeralSecretsHydrator = new RealSecretsHydrator(ephemeralSecretPersistence); - } - - @Test - void testWriteSourceConnection() throws JsonValidationException, IOException, ConfigNotFoundException { - doThrow(ConfigNotFoundException.class).when(configRepository).getSourceConnection(UUID1); - - secretsRepositoryWriter.writeSourceConnection(SOURCE_WITH_FULL_CONFIG, SPEC); - final SecretCoordinate coordinate = getCoordinateFromSecretsStore(longLivedSecretPersistence); - - assertNotNull(coordinate); - final var partialSource = Jsons.clone(SOURCE_WITH_FULL_CONFIG).withConfiguration(injectCoordinate(coordinate.getFullCoordinate())); - verify(configRepository).writeSourceConnectionNoSecrets(partialSource); - verify(jsonSchemaValidator, times(1)).ensure(any(), any()); - final Optional persistedSecret = longLivedSecretPersistence.read(coordinate); - assertTrue(persistedSecret.isPresent()); - assertEquals(SECRET, persistedSecret.get()); - - // verify that the round trip works. 
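// A minimal sketch of the split/hydrate round trip these assertions exercise,
// illustrative only and built from the fixtures already defined in this file.
// Writing replaces the raw secret with a {"_secret": <coordinate>} pointer and
// stores the payload separately; hydrating resolves the pointer back into the
// full config:
//
//   final MemorySecretPersistence store = new MemorySecretPersistence();
//   final SecretCoordinate coordinate = new SecretCoordinate("pointer", 1);
//   store.write(coordinate, SECRET);
//   final JsonNode partial = Jsons.deserialize(String.format(
//       "{ \"username\": \"airbyte\", \"password\": { \"_secret\": \"%s\" } }",
//       coordinate.getFullCoordinate()));
//   final JsonNode full = new RealSecretsHydrator(store).hydrate(partial);
//   // full now equals { "username": "airbyte", "password": SECRET }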
- reset(configRepository); - when(configRepository.getSourceConnection(UUID1)).thenReturn(partialSource); - assertEquals(SOURCE_WITH_FULL_CONFIG, longLivedSecretsRepositoryReader.getSourceConnectionWithSecrets(UUID1)); - } - - @Test - void testWriteDestinationConnection() throws JsonValidationException, IOException, ConfigNotFoundException { - doThrow(ConfigNotFoundException.class).when(configRepository).getDestinationConnection(UUID1); - - secretsRepositoryWriter.writeDestinationConnection(DESTINATION_WITH_FULL_CONFIG, SPEC); - final SecretCoordinate coordinate = getCoordinateFromSecretsStore(longLivedSecretPersistence); - - assertNotNull(coordinate); - final var partialDestination = Jsons.clone(DESTINATION_WITH_FULL_CONFIG).withConfiguration(injectCoordinate(coordinate.getFullCoordinate())); - verify(configRepository).writeDestinationConnectionNoSecrets(partialDestination); - verify(jsonSchemaValidator, times(1)).ensure(any(), any()); - final var persistedSecret = longLivedSecretPersistence.read(coordinate); - assertTrue(persistedSecret.isPresent()); - assertEquals(SECRET, persistedSecret.get()); - - // verify that the round trip works. - reset(configRepository); - when(configRepository.getDestinationConnection(UUID1)).thenReturn(partialDestination); - assertEquals(DESTINATION_WITH_FULL_CONFIG, longLivedSecretsRepositoryReader.getDestinationConnectionWithSecrets(UUID1)); - } - - @Test - void testWriteSourceConnectionWithTombstone() throws JsonValidationException, IOException, ConfigNotFoundException { - doThrow(ConfigNotFoundException.class).when(configRepository).getSourceConnection(UUID1); - final var sourceWithTombstone = new SourceConnection() - .withSourceId(UUID1) - .withSourceDefinitionId(UUID.randomUUID()) - .withConfiguration(FULL_CONFIG) - .withTombstone(true); - - secretsRepositoryWriter.writeSourceConnection(sourceWithTombstone, SPEC); - final SecretCoordinate coordinate = getCoordinateFromSecretsStore(longLivedSecretPersistence); - - assertNotNull(coordinate); - final var partialSource = Jsons.clone(sourceWithTombstone).withConfiguration(injectCoordinate(coordinate.getFullCoordinate())); - verify(configRepository).writeSourceConnectionNoSecrets(partialSource); - verify(jsonSchemaValidator, times(0)).ensure(any(), any()); - final var persistedSecret = longLivedSecretPersistence.read(coordinate); - assertTrue(persistedSecret.isPresent()); - assertEquals(SECRET, persistedSecret.get()); - - // verify that the round trip works. 
- reset(configRepository); - when(configRepository.getSourceConnection(UUID1)).thenReturn(partialSource); - assertEquals(sourceWithTombstone, longLivedSecretsRepositoryReader.getSourceConnectionWithSecrets(UUID1)); - } - - @Test - void testWriteDestinationConnectionWithTombstone() throws JsonValidationException, IOException, ConfigNotFoundException { - doThrow(ConfigNotFoundException.class).when(configRepository).getDestinationConnection(UUID1); - final var destinationWithTombstone = new DestinationConnection() - .withDestinationId(UUID1) - .withConfiguration(FULL_CONFIG) - .withTombstone(true); - - secretsRepositoryWriter.writeDestinationConnection(destinationWithTombstone, SPEC); - final SecretCoordinate coordinate = getCoordinateFromSecretsStore(longLivedSecretPersistence); - - assertNotNull(coordinate); - final var partialDestination = Jsons.clone(destinationWithTombstone).withConfiguration(injectCoordinate(coordinate.getFullCoordinate())); - verify(configRepository).writeDestinationConnectionNoSecrets(partialDestination); - verify(jsonSchemaValidator, times(0)).ensure(any(), any()); - final Optional persistedSecret = longLivedSecretPersistence.read(coordinate); - assertTrue(persistedSecret.isPresent()); - assertEquals(SECRET, persistedSecret.get()); - - // verify that the round trip works. - reset(configRepository); - when(configRepository.getDestinationConnection(UUID1)).thenReturn(partialDestination); - assertEquals(destinationWithTombstone, longLivedSecretsRepositoryReader.getDestinationConnectionWithSecrets(UUID1)); - } - - @Test - void testStatefulSplitEphemeralSecrets() throws JsonValidationException, IOException, ConfigNotFoundException { - final JsonNode split = secretsRepositoryWriter.statefulSplitEphemeralSecrets( - SOURCE_WITH_FULL_CONFIG.getConfiguration(), - SPEC); - final SecretCoordinate coordinate = getCoordinateFromSecretsStore(ephemeralSecretPersistence); - - assertNotNull(coordinate); - final Optional persistedSecret = ephemeralSecretPersistence.read(coordinate); - assertTrue(persistedSecret.isPresent()); - assertEquals(SECRET, persistedSecret.get()); - - // verify that the round trip works. - assertEquals(SOURCE_WITH_FULL_CONFIG.getConfiguration(), ephemeralSecretsHydrator.hydrate(split)); - } - - // this only works if the secrets store has one secret. 
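// Coordinate naming, for reference (inferred from the fixtures in this file;
// the authoritative format lives in SecretCoordinate): a full coordinate is the
// base name plus a version suffix, and rewriting a secret bumps the version
// rather than overwriting the old payload in place:
//
//   final SecretCoordinate v1 = new SecretCoordinate("service_account_json_<workspace>_secret_<uuid>", 1);
//   final SecretCoordinate v2 = new SecretCoordinate("service_account_json_<workspace>_secret_<uuid>", 2);
//   // v1.getFullCoordinate() and v2.getFullCoordinate() differ only in the
//   // trailing version, which is how testWriteDifferentStagingConfiguration
//   // below observes the bump from version 1 to version 2.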
- private SecretCoordinate getCoordinateFromSecretsStore(final MemorySecretPersistence secretPersistence) { - return secretPersistence.getMap() - .keySet() - .stream() - .findFirst() - .orElse(null); - } - - private static JsonNode injectCoordinate(final String coordinate) { - return Jsons.deserialize(String.format("{ \"username\": \"airbyte\", \"password\": { \"_secret\": \"%s\" } }", coordinate)); - } - - @Test - void testWriteWorkspaceServiceAccount() throws JsonValidationException, ConfigNotFoundException, IOException { - final UUID workspaceId = UUID.randomUUID(); - - final String jsonSecretPayload = MOCK_SERVICE_ACCOUNT_1; - final WorkspaceServiceAccount workspaceServiceAccount = new WorkspaceServiceAccount() - .withWorkspaceId(workspaceId) - .withHmacKey(HMAC_SECRET_PAYLOAD_1) - .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636") - .withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com") - .withJsonCredential(Jsons.deserialize(jsonSecretPayload)); - - doThrow(new ConfigNotFoundException(ConfigSchema.WORKSPACE_SERVICE_ACCOUNT, workspaceId.toString())) - .when(configRepository).getWorkspaceServiceAccountNoSecrets(workspaceId); - secretsRepositoryWriter.writeServiceAccountJsonCredentials(workspaceServiceAccount); - - assertEquals(2, longLivedSecretPersistence.getMap().size()); - - String jsonPayloadInPersistence = null; - String hmacPayloadInPersistence = null; - - SecretCoordinate jsonSecretCoordinate = null; - SecretCoordinate hmacSecretCoordinate = null; - for (final Map.Entry entry : longLivedSecretPersistence.getMap().entrySet()) { - if (entry.getKey().getFullCoordinate().contains("json")) { - jsonSecretCoordinate = entry.getKey(); - jsonPayloadInPersistence = entry.getValue(); - } else if (entry.getKey().getFullCoordinate().contains("hmac")) { - hmacSecretCoordinate = entry.getKey(); - hmacPayloadInPersistence = entry.getValue(); - } else { - throw new RuntimeException(""); - } - } - - assertNotNull(jsonPayloadInPersistence); - assertNotNull(hmacPayloadInPersistence); - assertNotNull(jsonSecretCoordinate); - assertNotNull(hmacSecretCoordinate); - - assertEquals(jsonSecretPayload, jsonPayloadInPersistence); - assertEquals(HMAC_SECRET_PAYLOAD_1.toString(), hmacPayloadInPersistence); - - verify(configRepository).writeWorkspaceServiceAccountNoSecrets( - Jsons.clone(workspaceServiceAccount.withJsonCredential(Jsons.jsonNode(Map.of(PASSWORD_FIELD_NAME, jsonSecretCoordinate.getFullCoordinate()))) - .withHmacKey(Jsons.jsonNode(Map.of(PASSWORD_FIELD_NAME, hmacSecretCoordinate.getFullCoordinate()))))); - } - - @Test - void testWriteSameStagingConfiguration() throws JsonValidationException, ConfigNotFoundException, IOException { - final ConfigRepository configRepository = mock(ConfigRepository.class); - final SecretPersistence secretPersistence = mock(SecretPersistence.class); - final SecretsRepositoryWriter secretsRepositoryWriter = spy( - new SecretsRepositoryWriter(configRepository, Optional.of(secretPersistence), Optional.of(secretPersistence))); - - final UUID workspaceId = UUID.fromString("13fb9a84-6bfa-4801-8f5e-ce717677babf"); - - final String jsonSecretPayload = MOCK_SERVICE_ACCOUNT_1; - final WorkspaceServiceAccount workspaceServiceAccount = new WorkspaceServiceAccount().withWorkspaceId(workspaceId).withHmacKey( - HMAC_SECRET_PAYLOAD_1) - .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636") - .withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com") - 
.withJsonCredential(Jsons.deserialize(jsonSecretPayload)); - - final SecretCoordinate jsonSecretCoordinate = new SecretCoordinate( - "service_account_json_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 1); - - final SecretCoordinate hmacSecretCoordinate = new SecretCoordinate( - "service_account_hmac_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 1); - - final WorkspaceServiceAccount cloned = Jsons.clone(workspaceServiceAccount) - .withJsonCredential(Jsons.jsonNode(Map.of(PASSWORD_FIELD_NAME, jsonSecretCoordinate.getFullCoordinate()))) - .withHmacKey(Jsons.jsonNode(Map.of(PASSWORD_FIELD_NAME, hmacSecretCoordinate.getFullCoordinate()))); - - doReturn(cloned).when(configRepository).getWorkspaceServiceAccountNoSecrets(workspaceId); - - doReturn(Optional.of(jsonSecretPayload)).when(secretPersistence).read(jsonSecretCoordinate); - doReturn(Optional.of(HMAC_SECRET_PAYLOAD_1.toString())).when(secretPersistence).read(hmacSecretCoordinate); - secretsRepositoryWriter.writeServiceAccountJsonCredentials(workspaceServiceAccount); - - final ArgumentCaptor coordinates = ArgumentCaptor.forClass(SecretCoordinate.class); - final ArgumentCaptor payloads = ArgumentCaptor.forClass(String.class); - - verify(secretPersistence, times(2)).write(coordinates.capture(), payloads.capture()); - final List actualCoordinates = coordinates.getAllValues(); - assertEquals(2, actualCoordinates.size()); - assertThat(actualCoordinates, containsInAnyOrder(jsonSecretCoordinate, hmacSecretCoordinate)); - - final List actualPayload = payloads.getAllValues(); - assertEquals(2, actualPayload.size()); - assertThat(actualPayload, containsInAnyOrder(jsonSecretPayload, HMAC_SECRET_PAYLOAD_1.toString())); - - verify(secretPersistence).write(hmacSecretCoordinate, HMAC_SECRET_PAYLOAD_1.toString()); - verify(configRepository).writeWorkspaceServiceAccountNoSecrets( - cloned); - } - - @Test - void testWriteDifferentStagingConfiguration() throws JsonValidationException, ConfigNotFoundException, IOException { - final ConfigRepository configRepository = mock(ConfigRepository.class); - final SecretPersistence secretPersistence = mock(SecretPersistence.class); - final SecretsRepositoryWriter secretsRepositoryWriter = - spy(new SecretsRepositoryWriter(configRepository, Optional.of(secretPersistence), Optional.of(secretPersistence))); - - final UUID workspaceId = UUID.fromString("13fb9a84-6bfa-4801-8f5e-ce717677babf"); - - final String jsonSecretOldPayload = MOCK_SERVICE_ACCOUNT_1; - final String jsonSecretNewPayload = MOCK_SERVICE_ACCOUNT_2; - - final WorkspaceServiceAccount workspaceServiceAccount = new WorkspaceServiceAccount() - .withWorkspaceId(workspaceId) - .withHmacKey(HMAC_SECRET_PAYLOAD_2) - .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636") - .withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com") - .withJsonCredential(Jsons.deserialize(jsonSecretNewPayload)); - - final SecretCoordinate jsonSecretOldCoordinate = new SecretCoordinate( - "service_account_json_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 1); - - final SecretCoordinate hmacSecretOldCoordinate = new SecretCoordinate( - "service_account_hmac_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 1); - - final WorkspaceServiceAccount cloned = Jsons.clone(workspaceServiceAccount) - .withJsonCredential(Jsons.jsonNode(Map.of(PASSWORD_FIELD_NAME, jsonSecretOldCoordinate.getFullCoordinate()))) - 
.withHmacKey(Jsons.jsonNode(Map.of(PASSWORD_FIELD_NAME, hmacSecretOldCoordinate.getFullCoordinate()))); - - doReturn(cloned).when(configRepository).getWorkspaceServiceAccountNoSecrets(workspaceId); - - doReturn(Optional.of(HMAC_SECRET_PAYLOAD_1.toString())).when(secretPersistence).read(hmacSecretOldCoordinate); - doReturn(Optional.of(jsonSecretOldPayload)).when(secretPersistence).read(jsonSecretOldCoordinate); - - secretsRepositoryWriter.writeServiceAccountJsonCredentials(workspaceServiceAccount); - - final SecretCoordinate jsonSecretNewCoordinate = new SecretCoordinate( - "service_account_json_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 2); - - final SecretCoordinate hmacSecretNewCoordinate = new SecretCoordinate( - "service_account_hmac_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 2); - - final ArgumentCaptor coordinates = ArgumentCaptor.forClass(SecretCoordinate.class); - final ArgumentCaptor payloads = ArgumentCaptor.forClass(String.class); - - verify(secretPersistence, times(2)).write(coordinates.capture(), payloads.capture()); - final List actualCoordinates = coordinates.getAllValues(); - assertEquals(2, actualCoordinates.size()); - assertThat(actualCoordinates, containsInAnyOrder(jsonSecretNewCoordinate, hmacSecretNewCoordinate)); - - final List actualPayload = payloads.getAllValues(); - assertEquals(2, actualPayload.size()); - assertThat(actualPayload, containsInAnyOrder(jsonSecretNewPayload, HMAC_SECRET_PAYLOAD_2.toString())); - - verify(configRepository).writeWorkspaceServiceAccountNoSecrets(Jsons.clone(workspaceServiceAccount).withJsonCredential(Jsons.jsonNode( - Map.of(PASSWORD_FIELD_NAME, jsonSecretNewCoordinate.getFullCoordinate()))).withHmacKey(Jsons.jsonNode( - Map.of(PASSWORD_FIELD_NAME, hmacSecretNewCoordinate.getFullCoordinate())))); - } - - @Test - @DisplayName("writeWorkspace should ensure that secret fields are replaced") - void testWriteWorkspaceSplitsAuthTokens() throws JsonValidationException, IOException { - final ConfigRepository configRepository = mock(ConfigRepository.class); - final SecretPersistence secretPersistence = mock(SecretPersistence.class); - final SecretsRepositoryWriter secretsRepositoryWriter = - spy(new SecretsRepositoryWriter(configRepository, jsonSchemaValidator, Optional.of(secretPersistence), Optional.of(secretPersistence))); - final var webhookConfigs = new WebhookOperationConfigs().withWebhookConfigs(List.of( - new WebhookConfig() - .withName(TEST_WEBHOOK_NAME) - .withAuthToken(TEST_AUTH_TOKEN) - .withId(UUID.randomUUID()))); - final var workspace = new StandardWorkspace() - .withWorkspaceId(UUID.randomUUID()) - .withCustomerId(UUID.randomUUID()) - .withEmail(TEST_EMAIL) - .withName(TEST_WORKSPACE_NAME) - .withSlug(TEST_WORKSPACE_SLUG) - .withInitialSetupComplete(false) - .withDisplaySetupWizard(true) - .withNews(false) - .withAnonymousDataCollection(false) - .withSecurityUpdates(false) - .withTombstone(false) - .withNotifications(Collections.emptyList()) - .withDefaultGeography(Geography.AUTO) - // Serialize it to a string, then deserialize it to a JsonNode. 
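// Serializing to a JsonNode (rather than keeping the typed POJO) is what lets
// the secrets writer walk the JSON tree and replace the authToken value with a
// {"_secret": <coordinate>} pointer before the workspace is persisted; the
// final assertion in this test checks exactly that the serialized workspace no
// longer contains TEST_AUTH_TOKEN.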
- .withWebhookOperationConfigs(Jsons.jsonNode(webhookConfigs)); - secretsRepositoryWriter.writeWorkspace(workspace); - final var workspaceArgumentCaptor = ArgumentCaptor.forClass(StandardWorkspace.class); - verify(configRepository, times(1)).writeStandardWorkspaceNoSecrets(workspaceArgumentCaptor.capture()); - assertFalse(Jsons.serialize(workspaceArgumentCaptor.getValue().getWebhookOperationConfigs()).contains(TEST_AUTH_TOKEN)); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java deleted file mode 100644 index 89eb464d32fce..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java +++ /dev/null @@ -1,412 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorDefinitionResourceRequirements; -import io.airbyte.config.ActorType; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.Geography; -import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSourceDefinition.ReleaseStage; -import io.airbyte.config.StandardSourceDefinition.SourceType; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.NonBreakingChangesPreference; -import io.airbyte.config.StandardSync.Status; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.protocol.models.AirbyteStream; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.protocol.models.StreamDescriptor; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.sql.SQLException; -import java.time.OffsetDateTime; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.commons.lang3.tuple.Pair; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class StandardSyncPersistenceTest extends BaseConfigDatabaseTest { - - record StandardSyncProtocolVersionFlag(UUID standardSyncId, boolean unsupportedProtocolVersion) {} - - private static final AirbyteProtocolVersionRange protocolRange_0_0 = new AirbyteProtocolVersionRange(new Version("0.0.0"), new Version("0.1.0")); - private static final AirbyteProtocolVersionRange protocolRange_0_1 = new 
AirbyteProtocolVersionRange(new Version("0.0.1"), new Version("1.0.0")); - private static final AirbyteProtocolVersionRange protocolRange_1_1 = new AirbyteProtocolVersionRange(new Version("1.0.0"), new Version("1.10.0")); - - private ConfigRepository configRepository; - private StandardSyncPersistence standardSyncPersistence; - - private StandardSourceDefinition sourceDef1; - private StandardSourceDefinition sourceDefAlpha; - private SourceConnection source1; - private SourceConnection source2; - private SourceConnection sourceAlpha; - private StandardDestinationDefinition destDef1; - private StandardDestinationDefinition destDef2; - private StandardDestinationDefinition destDefBeta; - private DestinationConnection destination1; - private DestinationConnection destination2; - private DestinationConnection destinationBeta; - - @BeforeEach - void beforeEach() throws Exception { - truncateAllTables(); - - standardSyncPersistence = new StandardSyncPersistence(database); - - // only used for creating records that sync depends on. - configRepository = new ConfigRepository(database); - } - - @Test - void testReadWrite() throws IOException, ConfigNotFoundException, JsonValidationException { - createBaseObjects(); - final StandardSync sync = createStandardSync(source1, destination1); - standardSyncPersistence.writeStandardSync(sync); - - final StandardSync expectedSync = Jsons.clone(sync) - .withNotifySchemaChanges(true) - .withNonBreakingChangesPreference(NonBreakingChangesPreference.IGNORE); - assertEquals(expectedSync, standardSyncPersistence.getStandardSync(sync.getConnectionId())); - } - - @Test - void testReadNotExists() { - assertThrows(ConfigNotFoundException.class, () -> standardSyncPersistence.getStandardSync(UUID.randomUUID())); - } - - @Test - void testList() throws IOException, JsonValidationException { - createBaseObjects(); - final StandardSync sync1 = createStandardSync(source1, destination1); - final StandardSync sync2 = createStandardSync(source1, destination2); - standardSyncPersistence.writeStandardSync(sync1); - standardSyncPersistence.writeStandardSync(sync2); - - final List expected = List.of( - Jsons.clone(sync1) - .withNotifySchemaChanges(true) - .withNonBreakingChangesPreference(NonBreakingChangesPreference.IGNORE), - Jsons.clone(sync2) - .withNotifySchemaChanges(true) - .withNonBreakingChangesPreference(NonBreakingChangesPreference.IGNORE)); - - assertEquals(expected, standardSyncPersistence.listStandardSync()); - } - - @Test - void testDelete() throws IOException, ConfigNotFoundException, JsonValidationException { - createBaseObjects(); - - final StandardSync sync1 = createStandardSync(source1, destination1); - final StandardSync sync2 = createStandardSync(source1, destination2); - - assertNotNull(standardSyncPersistence.getStandardSync(sync1.getConnectionId())); - assertNotNull(standardSyncPersistence.getStandardSync(sync2.getConnectionId())); - - standardSyncPersistence.deleteStandardSync(sync1.getConnectionId()); - - assertThrows(ConfigNotFoundException.class, () -> standardSyncPersistence.getStandardSync(sync1.getConnectionId())); - assertNotNull(standardSyncPersistence.getStandardSync(sync2.getConnectionId())); - } - - @Test - void testClearUnsupportedProtocolVersionFlagFromSource() throws IOException, JsonValidationException, SQLException { - createBaseObjects(); - - final StandardSync sync1 = createStandardSync(source1, destination1); - final StandardSync sync2 = createStandardSync(source1, destination2); - final List syncs = List.of(sync1, sync2); - - 
-    setProtocolVersionFlagForSyncs(List.of(
-        new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), true),
-        new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), true)));
-
-    // Only sync1 should be flipped since sync2 has dest2 with protocol v1
-    standardSyncPersistence.clearUnsupportedProtocolVersionFlag(sourceDef1.getSourceDefinitionId(), ActorType.SOURCE, protocolRange_0_0);
-    assertEquals(Set.of(
-        new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), false),
-        new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), true)), getProtocolVersionFlagForSyncs(syncs));
-
-    standardSyncPersistence.clearUnsupportedProtocolVersionFlag(sourceDef1.getSourceDefinitionId(), ActorType.SOURCE, protocolRange_0_1);
-    assertEquals(Set.of(
-        new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), false),
-        new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), false)), getProtocolVersionFlagForSyncs(syncs));
-
-    // Making sure we updated the updated_at timestamp
-    final Optional<Pair<OffsetDateTime, OffsetDateTime>> datetimes = database.query(ctx -> ctx
-        .select(CONNECTION.CREATED_AT, CONNECTION.UPDATED_AT).from(CONNECTION).where(CONNECTION.ID.eq(sync2.getConnectionId()))
-        .stream().findFirst()
-        .map(r -> new ImmutablePair<>(r.get(CONNECTION.CREATED_AT), r.get(CONNECTION.UPDATED_AT))));
-    assertTrue(datetimes.isPresent());
-    assertNotEquals(datetimes.get().getLeft(), datetimes.get().getRight());
-  }
-
-  @Test
-  void testClearUnsupportedProtocolVersionFlagFromSourceMultiFlipAtOnce() throws IOException, JsonValidationException, SQLException {
-    createBaseObjects();
-
-    final StandardSync sync1 = createStandardSync(source1, destination1);
-    final StandardSync sync2 = createStandardSync(source1, destination2);
-    final List<StandardSync> syncs = List.of(sync1, sync2);
-
-    setProtocolVersionFlagForSyncs(List.of(
-        new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), true),
-        new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), true)));
-
-    // Making sure we flip all the connections if more than one is impacted
-    standardSyncPersistence.clearUnsupportedProtocolVersionFlag(sourceDef1.getSourceDefinitionId(), ActorType.SOURCE, protocolRange_0_1);
-    assertEquals(Set.of(
-        new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), false),
-        new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), false)), getProtocolVersionFlagForSyncs(syncs));
-  }
-
-  @Test
-  void testClearUnsupportedProtocolVersionFlagFromDest() throws IOException, JsonValidationException, SQLException {
-    createBaseObjects();
-
-    final StandardSync sync1 = createStandardSync(source1, destination2);
-    final StandardSync sync2 = createStandardSync(source2, destination2);
-    final List<StandardSync> syncs = List.of(sync1, sync2);
-
-    setProtocolVersionFlagForSyncs(List.of(
-        new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), true),
-        new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), true)));
-
-    // destDef1 is not tied to anything, there should be no change
-    standardSyncPersistence.clearUnsupportedProtocolVersionFlag(destDef1.getDestinationDefinitionId(), ActorType.DESTINATION, protocolRange_0_1);
-    assertEquals(Set.of(
-        new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), true),
-        new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), true)), getProtocolVersionFlagForSyncs(syncs));
-
-    // Only sync2 should be flipped since sync1 has source1 with protocol v0
-    standardSyncPersistence.clearUnsupportedProtocolVersionFlag(destDef2.getDestinationDefinitionId(), ActorType.DESTINATION, protocolRange_1_1);
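// The range check behaves like major-version containment (inferred from these
// assertions: 0.2.3 clears against [0.0.0, 0.1.0], while 0.2.2 does not clear
// against [1.0.0, 1.10.0]); a simplified stand-in, not the real query:
//
//   static boolean majorInRange(final String v, final String min, final String max) {
//     // compare only the major component of each "major.minor.patch" string
//     final int major = Integer.parseInt(v.split("\\.")[0]);
//     return major >= Integer.parseInt(min.split("\\.")[0])
//         && major <= Integer.parseInt(max.split("\\.")[0]);
//   }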
assertEquals(Set.of( - new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), true), - new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), false)), getProtocolVersionFlagForSyncs(syncs)); - - standardSyncPersistence.clearUnsupportedProtocolVersionFlag(destDef2.getDestinationDefinitionId(), ActorType.DESTINATION, protocolRange_0_1); - assertEquals(Set.of( - new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), false), - new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), false)), getProtocolVersionFlagForSyncs(syncs)); - } - - @Test - void testGetAllStreamsForConnection() throws Exception { - createBaseObjects(); - final AirbyteStream airbyteStream = new AirbyteStream().withName("stream1").withNamespace("namespace1"); - final ConfiguredAirbyteStream configuredStream = new ConfiguredAirbyteStream().withStream(airbyteStream); - final AirbyteStream airbyteStream2 = new AirbyteStream().withName("stream2"); - final ConfiguredAirbyteStream configuredStream2 = new ConfiguredAirbyteStream().withStream(airbyteStream2); - final ConfiguredAirbyteCatalog configuredCatalog = new ConfiguredAirbyteCatalog().withStreams(List.of(configuredStream, configuredStream2)); - final StandardSync sync = createStandardSync(source1, destination1).withCatalog(configuredCatalog); - - standardSyncPersistence.writeStandardSync(sync); - - final List result = standardSyncPersistence.getAllStreamsForConnection(sync.getConnectionId()); - assertEquals(2, result.size()); - - assertTrue( - result.stream().anyMatch( - streamDescriptor -> "stream1".equals(streamDescriptor.getName()) && "namespace1".equals(streamDescriptor.getNamespace()))); - assertTrue( - result.stream().anyMatch( - streamDescriptor -> "stream2".equals(streamDescriptor.getName()) && streamDescriptor.getNamespace() == null)); - } - - @Test - void testConnectionHasAlphaOrBetaConnector() throws JsonValidationException, IOException { - createBaseObjects(); - - final StandardSync syncGa = createStandardSync(source1, destination1); - standardSyncPersistence.writeStandardSync(syncGa); - assertFalse(configRepository.getConnectionHasAlphaOrBetaConnector(syncGa.getConnectionId())); - - final StandardSync syncAlpha = createStandardSync(sourceAlpha, destination1); - standardSyncPersistence.writeStandardSync(syncAlpha); - assertTrue(configRepository.getConnectionHasAlphaOrBetaConnector(syncAlpha.getConnectionId())); - - final StandardSync syncBeta = createStandardSync(source1, destinationBeta); - standardSyncPersistence.writeStandardSync(syncBeta); - assertTrue(configRepository.getConnectionHasAlphaOrBetaConnector(syncBeta.getConnectionId())); - } - - private Set getProtocolVersionFlagForSyncs(final List standardSync) throws SQLException { - return database.query(ctx -> ctx - .select(CONNECTION.ID, CONNECTION.UNSUPPORTED_PROTOCOL_VERSION) - .from(CONNECTION) - .where(CONNECTION.ID.in(standardSync.stream().map(StandardSync::getConnectionId).toList())) - .fetch()) - .stream() - .map(r -> new StandardSyncProtocolVersionFlag(r.get(CONNECTION.ID), r.get(CONNECTION.UNSUPPORTED_PROTOCOL_VERSION))) - .collect(Collectors.toSet()); - } - - private void setProtocolVersionFlagForSyncs(final List updates) throws SQLException { - final List setToTrue = - updates.stream().filter(s -> s.unsupportedProtocolVersion).map(StandardSyncProtocolVersionFlag::standardSyncId).toList(); - final List setToFalse = - updates.stream().filter(s -> !s.unsupportedProtocolVersion).map(StandardSyncProtocolVersionFlag::standardSyncId).toList(); - database.query(ctx -> { - if 
(!setToTrue.isEmpty()) { - ctx.update(CONNECTION) - .set(CONNECTION.UNSUPPORTED_PROTOCOL_VERSION, true) - .where(CONNECTION.ID.in(setToTrue)) - .execute(); - } - if (!setToFalse.isEmpty()) { - ctx.update(CONNECTION) - .set(CONNECTION.UNSUPPORTED_PROTOCOL_VERSION, false) - .where(CONNECTION.ID.in(setToFalse)) - .execute(); - } - return null; - }); - } - - private void createBaseObjects() throws IOException, JsonValidationException { - final UUID workspaceId = UUID.randomUUID(); - final StandardWorkspace workspace = new StandardWorkspace() - .withWorkspaceId(workspaceId) - .withName("Another Workspace") - .withSlug("another-workspace") - .withInitialSetupComplete(true) - .withTombstone(false) - .withDefaultGeography(Geography.AUTO); - configRepository.writeStandardWorkspaceNoSecrets(workspace); - - sourceDef1 = createStandardSourceDefinition("0.2.2", ReleaseStage.GENERALLY_AVAILABLE); - source1 = createSourceConnection(workspaceId, sourceDef1); - - final StandardSourceDefinition sourceDef2 = createStandardSourceDefinition("1.1.0", ReleaseStage.GENERALLY_AVAILABLE); - source2 = createSourceConnection(workspaceId, sourceDef2); - - sourceDefAlpha = createStandardSourceDefinition("1.0.0", ReleaseStage.ALPHA); - sourceAlpha = createSourceConnection(workspaceId, sourceDefAlpha); - - destDef1 = createStandardDestDefinition("0.2.3", StandardDestinationDefinition.ReleaseStage.GENERALLY_AVAILABLE); - destination1 = createDestinationConnection(workspaceId, destDef1); - - destDef2 = createStandardDestDefinition("1.3.0", StandardDestinationDefinition.ReleaseStage.GENERALLY_AVAILABLE); - destination2 = createDestinationConnection(workspaceId, destDef2); - - destDefBeta = createStandardDestDefinition("1.3.0", StandardDestinationDefinition.ReleaseStage.BETA); - destinationBeta = createDestinationConnection(workspaceId, destDefBeta); - } - - private StandardSourceDefinition createStandardSourceDefinition(final String protocolVersion, final ReleaseStage releaseStage) - throws JsonValidationException, IOException { - final UUID sourceDefId = UUID.randomUUID(); - final StandardSourceDefinition sourceDef = new StandardSourceDefinition() - .withSourceDefinitionId(sourceDefId) - .withSourceType(SourceType.API) - .withName("random-source-" + sourceDefId) - .withDockerImageTag("tag-1") - .withDockerRepository("repository-1") - .withDocumentationUrl("documentation-url-1") - .withIcon("icon-1") - .withSpec(new ConnectorSpecification()) - .withProtocolVersion(protocolVersion) - .withReleaseStage(releaseStage) - .withTombstone(false) - .withPublic(true) - .withCustom(false) - .withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(new ResourceRequirements().withCpuRequest("2"))); - configRepository.writeStandardSourceDefinition(sourceDef); - return sourceDef; - } - - private StandardDestinationDefinition createStandardDestDefinition(final String protocolVersion, - final StandardDestinationDefinition.ReleaseStage releaseStage) - throws JsonValidationException, IOException { - final UUID destDefId = UUID.randomUUID(); - final StandardDestinationDefinition destDef = new StandardDestinationDefinition() - .withDestinationDefinitionId(destDefId) - .withName("random-destination-" + destDefId) - .withDockerImageTag("tag-3") - .withDockerRepository("repository-3") - .withDocumentationUrl("documentation-url-3") - .withIcon("icon-3") - .withSpec(new ConnectorSpecification()) - .withProtocolVersion(protocolVersion) - .withReleaseStage(releaseStage) - .withTombstone(false) - .withPublic(true) - 
.withCustom(false) - .withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(new ResourceRequirements().withCpuRequest("2"))); - configRepository.writeStandardDestinationDefinition(destDef); - return destDef; - } - - private SourceConnection createSourceConnection(final UUID workspaceId, final StandardSourceDefinition sourceDef) throws IOException { - final UUID sourceId = UUID.randomUUID(); - final SourceConnection source = new SourceConnection() - .withName("source-" + sourceId) - .withTombstone(false) - .withConfiguration(Jsons.deserialize("{}")) - .withSourceDefinitionId(sourceDef.getSourceDefinitionId()) - .withWorkspaceId(workspaceId) - .withSourceId(sourceId); - configRepository.writeSourceConnectionNoSecrets(source); - return source; - } - - private DestinationConnection createDestinationConnection(final UUID workspaceId, final StandardDestinationDefinition destDef) - throws IOException { - final UUID destinationId = UUID.randomUUID(); - final DestinationConnection dest = new DestinationConnection() - .withName("source-" + destinationId) - .withTombstone(false) - .withConfiguration(Jsons.deserialize("{}")) - .withDestinationDefinitionId(destDef.getDestinationDefinitionId()) - .withWorkspaceId(workspaceId) - .withDestinationId(destinationId); - configRepository.writeDestinationConnectionNoSecrets(dest); - return dest; - } - - private StandardSync createStandardSync(final SourceConnection source, final DestinationConnection dest) throws IOException { - final UUID connectionId = UUID.randomUUID(); - final StandardSync sync = new StandardSync() - .withConnectionId(connectionId) - .withSourceId(source.getSourceId()) - .withDestinationId(dest.getDestinationId()) - .withName("standard-sync-" + connectionId) - .withManual(true) - .withNamespaceDefinition(NamespaceDefinitionType.CUSTOMFORMAT) - .withNamespaceFormat("") - .withPrefix("") - .withStatus(Status.ACTIVE) - .withGeography(Geography.AUTO) - .withNonBreakingChangesPreference(NonBreakingChangesPreference.IGNORE) - .withNotifySchemaChanges(true) - .withBreakingChange(false); - standardSyncPersistence.writeStandardSync(sync); - return sync; - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java deleted file mode 100644 index 797ff3cbd247d..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java +++ /dev/null @@ -1,563 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
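For orientation on the suite that just closed: the deleted StandardSyncPersistenceTest fixture builders compose into a compact write/read round trip. A minimal sketch using only calls exercised in that suite (fixtures as defined there; note createStandardSync already persists an initial copy of the sync):

    final StandardSync sync = createStandardSync(source1, destination1)
        .withCatalog(new ConfiguredAirbyteCatalog().withStreams(List.of(
            new ConfiguredAirbyteStream().withStream(
                new AirbyteStream().withName("stream1").withNamespace("namespace1")))));
    standardSyncPersistence.writeStandardSync(sync); // re-persist with the catalog attached
    // getAllStreamsForConnection flattens the configured catalog back into stream descriptors.
    final List<StreamDescriptor> streams =
        standardSyncPersistence.getAllStreamsForConnection(sync.getConnectionId());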
- */ - -package io.airbyte.config.persistence; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.State; -import io.airbyte.config.StateType; -import io.airbyte.config.StateWrapper; -import io.airbyte.db.ExceptionWrappingDatabase; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.protocol.models.AirbyteGlobalState; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.AirbyteStreamState; -import io.airbyte.protocol.models.StreamDescriptor; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.sql.SQLException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import org.jooq.JSONB; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class StatePersistenceTest extends BaseConfigDatabaseTest { - - private StatePersistence statePersistence; - private UUID connectionId; - private static final String STATE_ONE = "\"state1\""; - private static final String STATE_TWO = "\"state2\""; - private static final String STATE_WITH_NAMESPACE = "\"state s1.n1\""; - private static final String STREAM_STATE_2 = "\"state s2\""; - private static final String GLOBAL_STATE = "\"my global state\""; - private static final String STATE = "state"; - - @BeforeEach - void beforeEach() throws DatabaseInitializationException, IOException, JsonValidationException, SQLException { - truncateAllTables(); - - setupTestData(); - statePersistence = new StatePersistence(database); - } - - private void setupTestData() throws JsonValidationException, IOException { - final ConfigRepository configRepository = new ConfigRepository( - database, - new ActorDefinitionMigrator(new ExceptionWrappingDatabase(database)), - new StandardSyncPersistence(database)); - - final StandardWorkspace workspace = MockData.standardWorkspaces().get(0); - final StandardSourceDefinition sourceDefinition = MockData.publicSourceDefinition(); - final SourceConnection sourceConnection = MockData.sourceConnections().get(0); - final StandardDestinationDefinition destinationDefinition = MockData.publicDestinationDefinition(); - final DestinationConnection destinationConnection = MockData.destinationConnections().get(0); - // we don't need sync operations in this test suite, zero them out. 
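- // (Inference, not from the deleted source: a non-empty operationIds list would require persisting the referenced sync operations first; emptying it keeps this fixture self-contained.)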
- final StandardSync sync = Jsons.clone(MockData.standardSyncs().get(0)).withOperationIds(Collections.emptyList()); - - configRepository.writeStandardWorkspaceNoSecrets(workspace); - configRepository.writeStandardSourceDefinition(sourceDefinition); - configRepository.writeSourceConnectionNoSecrets(sourceConnection); - configRepository.writeStandardDestinationDefinition(destinationDefinition); - configRepository.writeDestinationConnectionNoSecrets(destinationConnection); - configRepository.writeStandardSync(sync); - - connectionId = sync.getConnectionId(); - } - - @Test - void testReadingNonExistingState() throws IOException { - Assertions.assertTrue(statePersistence.getCurrentState(UUID.randomUUID()).isEmpty()); - } - - @Test - void testLegacyReadWrite() throws IOException { - final StateWrapper state0 = new StateWrapper() - .withStateType(StateType.LEGACY) - .withLegacyState(Jsons.deserialize("{\"woot\": \"legacy states is passthrough\"}")); - - // Initial write/read loop, making sure we read what we wrote - statePersistence.updateOrCreateState(connectionId, state0); - final Optional state1 = statePersistence.getCurrentState(connectionId); - - Assertions.assertTrue(state1.isPresent()); - Assertions.assertEquals(StateType.LEGACY, state1.get().getStateType()); - Assertions.assertEquals(state0.getLegacyState(), state1.get().getLegacyState()); - - // Updating a state - final JsonNode newStateJson = Jsons.deserialize("{\"woot\": \"new state\"}"); - final StateWrapper state2 = clone(state1.get()).withLegacyState(newStateJson); - statePersistence.updateOrCreateState(connectionId, state2); - final Optional state3 = statePersistence.getCurrentState(connectionId); - - Assertions.assertTrue(state3.isPresent()); - Assertions.assertEquals(StateType.LEGACY, state3.get().getStateType()); - Assertions.assertEquals(newStateJson, state3.get().getLegacyState()); - - // Deleting a state - final StateWrapper state4 = clone(state3.get()).withLegacyState(null); - statePersistence.updateOrCreateState(connectionId, state4); - Assertions.assertTrue(statePersistence.getCurrentState(connectionId).isEmpty()); - } - - @Test - void testLegacyMigrationToGlobal() throws IOException { - final StateWrapper state0 = new StateWrapper() - .withStateType(StateType.LEGACY) - .withLegacyState(Jsons.deserialize("{\"woot\": \"legacy states is passthrough\"}")); - - statePersistence.updateOrCreateState(connectionId, state0); - - final StateWrapper newGlobalState = new StateWrapper() - .withStateType(StateType.GLOBAL) - .withGlobal(new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal(new AirbyteGlobalState() - .withSharedState(Jsons.deserialize("\"woot\"")) - .withStreamStates(Arrays.asList( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n2")) - .withStreamState(Jsons.deserialize(STATE_ONE)), - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1")) - .withStreamState(Jsons.deserialize(STATE_TWO)))))); - statePersistence.updateOrCreateState(connectionId, newGlobalState); - final StateWrapper storedGlobalState = statePersistence.getCurrentState(connectionId).orElseThrow(); - assertEquals(newGlobalState, storedGlobalState); - } - - @Test - void testLegacyMigrationToStream() throws IOException { - final StateWrapper state0 = new StateWrapper() - .withStateType(StateType.LEGACY) - .withLegacyState(Jsons.deserialize("{\"woot\": \"legacy states is passthrough\"}")); - - statePersistence.updateOrCreateState(connectionId, 
state0); - - final StateWrapper newStreamState = new StateWrapper() - .withStateType(StateType.STREAM) - .withStateMessages(Arrays.asList( - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) - .withStreamState(Jsons.deserialize(STATE_WITH_NAMESPACE))), - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s2")) - .withStreamState(Jsons.deserialize(STREAM_STATE_2))))); - statePersistence.updateOrCreateState(connectionId, newStreamState); - final StateWrapper storedStreamState = statePersistence.getCurrentState(connectionId).orElseThrow(); - assertEquals(newStreamState, storedStreamState); - } - - @Test - void testGlobalReadWrite() throws IOException { - final StateWrapper state0 = new StateWrapper() - .withStateType(StateType.GLOBAL) - .withGlobal(new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal(new AirbyteGlobalState() - .withSharedState(Jsons.deserialize(GLOBAL_STATE)) - .withStreamStates(Arrays.asList( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n2")) - .withStreamState(Jsons.deserialize(STATE_ONE)), - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1")) - .withStreamState(Jsons.deserialize(STATE_TWO)))))); - - // Initial write/read loop, making sure we read what we wrote - statePersistence.updateOrCreateState(connectionId, state0); - final Optional state1 = statePersistence.getCurrentState(connectionId); - Assertions.assertTrue(state1.isPresent()); - assertEquals(state0, state1.get()); - - // Updating a state - final StateWrapper state2 = clone(state1.get()); - state2.getGlobal() - .getGlobal().withSharedState(Jsons.deserialize("\"updated shared state\"")) - .getStreamStates().get(1).withStreamState(Jsons.deserialize("\"updated state2\"")); - statePersistence.updateOrCreateState(connectionId, state2); - final Optional state3 = statePersistence.getCurrentState(connectionId); - - Assertions.assertTrue(state3.isPresent()); - assertEquals(state2, state3.get()); - - // Updating a state with name and namespace - final StateWrapper state4 = clone(state1.get()); - state4.getGlobal().getGlobal() - .getStreamStates().get(0).withStreamState(Jsons.deserialize("\"updated state1\"")); - statePersistence.updateOrCreateState(connectionId, state4); - final Optional state5 = statePersistence.getCurrentState(connectionId); - - Assertions.assertTrue(state5.isPresent()); - assertEquals(state4, state5.get()); - } - - @Test - void testGlobalPartialReset() throws IOException { - final StateWrapper state0 = new StateWrapper() - .withStateType(StateType.GLOBAL) - .withGlobal(new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal(new AirbyteGlobalState() - .withSharedState(Jsons.deserialize(GLOBAL_STATE)) - .withStreamStates(Arrays.asList( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n2")) - .withStreamState(Jsons.deserialize(STATE_ONE)), - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1")) - .withStreamState(Jsons.deserialize(STATE_TWO)))))); - - // Set the initial state - statePersistence.updateOrCreateState(connectionId, state0); - - // incomplete reset does not remove the state - final StateWrapper incompletePartialReset = new 
StateWrapper() - .withStateType(StateType.GLOBAL) - .withGlobal(new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal(new AirbyteGlobalState() - .withSharedState(Jsons.deserialize(GLOBAL_STATE)) - .withStreamStates(List.of( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1")) - .withStreamState(Jsons.deserialize(STATE_TWO)))))); - statePersistence.updateOrCreateState(connectionId, incompletePartialReset); - final StateWrapper incompletePartialResetResult = statePersistence.getCurrentState(connectionId).orElseThrow(); - Assertions.assertEquals(state0, incompletePartialResetResult); - - // The good partial reset - final StateWrapper partialReset = new StateWrapper() - .withStateType(StateType.GLOBAL) - .withGlobal(new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal(new AirbyteGlobalState() - .withSharedState(Jsons.deserialize(GLOBAL_STATE)) - .withStreamStates(Arrays.asList( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n2")) - .withStreamState(Jsons.deserialize(STATE_ONE)), - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1")) - .withStreamState(null))))); - statePersistence.updateOrCreateState(connectionId, partialReset); - final StateWrapper partialResetResult = statePersistence.getCurrentState(connectionId).orElseThrow(); - - Assertions.assertEquals(partialReset.getGlobal().getGlobal().getSharedState(), - partialResetResult.getGlobal().getGlobal().getSharedState()); - // {"name": "s1"} should have been removed from the stream states - Assertions.assertEquals(1, partialResetResult.getGlobal().getGlobal().getStreamStates().size()); - Assertions.assertEquals(partialReset.getGlobal().getGlobal().getStreamStates().get(0), - partialResetResult.getGlobal().getGlobal().getStreamStates().get(0)); - } - - @Test - void testGlobalFullReset() throws IOException { - final StateWrapper state0 = new StateWrapper() - .withStateType(StateType.GLOBAL) - .withGlobal(new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal(new AirbyteGlobalState() - .withSharedState(Jsons.deserialize(GLOBAL_STATE)) - .withStreamStates(Arrays.asList( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n2")) - .withStreamState(Jsons.deserialize(STATE_ONE)), - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1")) - .withStreamState(Jsons.deserialize(STATE_TWO)))))); - - final StateWrapper fullReset = new StateWrapper() - .withStateType(StateType.GLOBAL) - .withGlobal(new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal(new AirbyteGlobalState() - .withSharedState(null) - .withStreamStates(Arrays.asList( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n2")) - .withStreamState(null), - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1")) - .withStreamState(null))))); - - statePersistence.updateOrCreateState(connectionId, state0); - statePersistence.updateOrCreateState(connectionId, fullReset); - final Optional fullResetResult = statePersistence.getCurrentState(connectionId); - Assertions.assertTrue(fullResetResult.isEmpty()); - } - - @Test - void testGlobalStateAllowsEmptyNameAndNamespace() throws IOException { - final StateWrapper state0 = new StateWrapper() - .withStateType(StateType.GLOBAL) - .withGlobal(new AirbyteStateMessage() - 
.withType(AirbyteStateType.GLOBAL) - .withGlobal(new AirbyteGlobalState() - .withSharedState(Jsons.deserialize(GLOBAL_STATE)) - .withStreamStates(Arrays.asList( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("")) - .withStreamState(Jsons.deserialize("\"empty name state\"")), - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("").withNamespace("")) - .withStreamState(Jsons.deserialize("\"empty name and namespace state\"")))))); - - statePersistence.updateOrCreateState(connectionId, state0); - final StateWrapper state1 = statePersistence.getCurrentState(connectionId).orElseThrow(); - assertEquals(state0, state1); - } - - @Test - void testStreamReadWrite() throws IOException { - final StateWrapper state0 = new StateWrapper() - .withStateType(StateType.STREAM) - .withStateMessages(Arrays.asList( - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) - .withStreamState(Jsons.deserialize(STATE_WITH_NAMESPACE))), - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s2")) - .withStreamState(Jsons.deserialize(STREAM_STATE_2))))); - - // Initial write/read loop, making sure we read what we wrote - statePersistence.updateOrCreateState(connectionId, state0); - final StateWrapper state1 = statePersistence.getCurrentState(connectionId).orElseThrow(); - assertEquals(state0, state1); - - // Updating a state - final StateWrapper state2 = clone(state1); - state2.getStateMessages().get(1).getStream().withStreamState(Jsons.deserialize("\"updated state s2\"")); - statePersistence.updateOrCreateState(connectionId, state2); - final StateWrapper state3 = statePersistence.getCurrentState(connectionId).orElseThrow(); - assertEquals(state2, state3); - - // Updating a state with name and namespace - final StateWrapper state4 = clone(state1); - state4.getStateMessages().get(0).getStream().withStreamState(Jsons.deserialize("\"updated state s1\"")); - statePersistence.updateOrCreateState(connectionId, state4); - final StateWrapper state5 = statePersistence.getCurrentState(connectionId).orElseThrow(); - assertEquals(state4, state5); - } - - @Test - void testStreamPartialUpdates() throws IOException { - final StateWrapper state0 = new StateWrapper() - .withStateType(StateType.STREAM) - .withStateMessages(Arrays.asList( - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) - .withStreamState(Jsons.deserialize(STATE_WITH_NAMESPACE))), - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s2")) - .withStreamState(Jsons.deserialize(STREAM_STATE_2))))); - - statePersistence.updateOrCreateState(connectionId, state0); - - // Partial update - final StateWrapper partialUpdate = new StateWrapper() - .withStateType(StateType.STREAM) - .withStateMessages(Collections.singletonList( - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) - .withStreamState(Jsons.deserialize("\"updated\""))))); - statePersistence.updateOrCreateState(connectionId, partialUpdate); - final 
StateWrapper partialUpdateResult = statePersistence.getCurrentState(connectionId).orElseThrow(); - assertEquals( - new StateWrapper() - .withStateType(StateType.STREAM) - .withStateMessages(Arrays.asList( - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) - .withStreamState(Jsons.deserialize("\"updated\""))), - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s2")) - .withStreamState(Jsons.deserialize(STREAM_STATE_2))))), - partialUpdateResult); - - // Partial Reset - final StateWrapper partialReset = new StateWrapper() - .withStateType(StateType.STREAM) - .withStateMessages(Collections.singletonList( - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s2")) - .withStreamState(null)))); - statePersistence.updateOrCreateState(connectionId, partialReset); - final StateWrapper partialResetResult = statePersistence.getCurrentState(connectionId).orElseThrow(); - assertEquals( - new StateWrapper() - .withStateType(StateType.STREAM) - .withStateMessages(List.of( - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) - .withStreamState(Jsons.deserialize("\"updated\""))))), - partialResetResult); - } - - @Test - void testStreamFullReset() throws IOException { - final StateWrapper state0 = new StateWrapper() - .withStateType(StateType.STREAM) - .withStateMessages(Arrays.asList( - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) - .withStreamState(Jsons.deserialize(STATE_WITH_NAMESPACE))), - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s2")) - .withStreamState(Jsons.deserialize(STREAM_STATE_2))))); - - statePersistence.updateOrCreateState(connectionId, state0); - - // Partial update - final StateWrapper fullReset = new StateWrapper() - .withStateType(StateType.STREAM) - .withStateMessages(Arrays.asList( - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) - .withStreamState(null)), - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s2")) - .withStreamState(null)))); - statePersistence.updateOrCreateState(connectionId, fullReset); - final Optional fullResetResult = statePersistence.getCurrentState(connectionId); - Assertions.assertTrue(fullResetResult.isEmpty()); - } - - @Test - void testInconsistentTypeUpdates() throws IOException, SQLException { - final StateWrapper streamState = new StateWrapper() - .withStateType(StateType.STREAM) - .withStateMessages(Arrays.asList( - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) - .withStreamState(Jsons.deserialize(STATE_WITH_NAMESPACE))), - new 
AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("s2")) - .withStreamState(Jsons.deserialize(STREAM_STATE_2))))); - statePersistence.updateOrCreateState(connectionId, streamState); - - Assertions.assertThrows(IllegalStateException.class, () -> { - final StateWrapper globalState = new StateWrapper() - .withStateType(StateType.GLOBAL) - .withGlobal(new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal(new AirbyteGlobalState() - .withSharedState(Jsons.deserialize(GLOBAL_STATE)) - .withStreamStates(Arrays.asList( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("")) - .withStreamState(Jsons.deserialize("\"empty name state\"")), - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName("").withNamespace("")) - .withStreamState(Jsons.deserialize("\"empty name and namespace state\"")))))); - statePersistence.updateOrCreateState(connectionId, globalState); - }); - - // We should be guarded against those cases let's make sure we don't make things worse if we're in - // an inconsistent state - database.transaction(ctx -> { - ctx.insertInto(DSL.table(STATE)) - .columns(DSL.field("id"), DSL.field("connection_id"), DSL.field("type"), DSL.field(STATE)) - .values(UUID.randomUUID(), connectionId, io.airbyte.db.instance.configs.jooq.generated.enums.StateType.GLOBAL, JSONB.valueOf("{}")) - .execute(); - return null; - }); - Assertions.assertThrows(IllegalStateException.class, () -> statePersistence.updateOrCreateState(connectionId, streamState)); - Assertions.assertThrows(IllegalStateException.class, () -> statePersistence.getCurrentState(connectionId)); - } - - @Test - void testEnumsConversion() { - // Making sure StateType we write to the DB and the StateType from the protocols are aligned. - // Otherwise, we'll have to dig through runtime errors. 
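- // (Enums.isCompatible is understood here to compare the two enum classes by their constant names, so the assertion below fails fast if the jOOQ-generated StateType and the config model StateType ever drift apart; description inferred from this usage.)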
- Assertions.assertTrue(Enums.isCompatible( - io.airbyte.db.instance.configs.jooq.generated.enums.StateType.class, - io.airbyte.config.StateType.class)); - } - - @Test - void testStatePersistenceLegacyWriteConsistency() throws IOException, SQLException { - final JsonNode jsonState = Jsons.deserialize("{\"my\": \"state\"}"); - final StateWrapper stateWrapper = new StateWrapper().withStateType(StateType.LEGACY).withLegacyState(jsonState); - statePersistence.updateOrCreateState(connectionId, stateWrapper); - - // Making sure we still follow the legacy format - final List readStates = database.transaction(ctx -> ctx.selectFrom(STATE) - .where(DSL.field("connection_id").eq(connectionId)) - .fetch().map(r -> Jsons.deserialize(r.get(DSL.field(STATE, JSONB.class)).data(), State.class)) - .stream() - .toList()); - Assertions.assertEquals(1, readStates.size()); - - Assertions.assertEquals(readStates.get(0).getState(), stateWrapper.getLegacyState()); - } - - private StateWrapper clone(final StateWrapper state) { - return switch (state.getStateType()) { - case LEGACY -> new StateWrapper() - .withLegacyState(Jsons.deserialize(Jsons.serialize(state.getLegacyState()))) - .withStateType(state.getStateType()); - case STREAM -> new StateWrapper() - .withStateMessages( - state.getStateMessages().stream().map(msg -> Jsons.deserialize(Jsons.serialize(msg), AirbyteStateMessage.class)).toList()) - .withStateType(state.getStateType()); - case GLOBAL -> new StateWrapper() - .withGlobal(Jsons.deserialize(Jsons.serialize(state.getGlobal()), AirbyteStateMessage.class)) - .withStateType(state.getStateType()); - }; - } - - private void assertEquals(final StateWrapper lhs, final StateWrapper rhs) { - Assertions.assertEquals(Jsons.serialize(lhs), Jsons.serialize(rhs)); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StreamResetPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StreamResetPersistenceTest.java deleted file mode 100644 index 62944ea326b57..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StreamResetPersistenceTest.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
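A side note on the StatePersistenceTest that just closed: its equality checks all funnel through a serialize-then-compare helper. A minimal generic form of that idiom, assuming only the Jsons utility used throughout this diff:

    // Deep-compare two objects via their canonical JSON form, as the deleted
    // assertEquals(StateWrapper, StateWrapper) helper did. Sketch only.
    private static <T> void assertJsonEquals(final T expected, final T actual) {
      Assertions.assertEquals(Jsons.serialize(expected), Jsons.serialize(actual));
    }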
- */ - -package io.airbyte.config.persistence; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.spy; - -import io.airbyte.protocol.models.StreamDescriptor; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -class StreamResetPersistenceTest extends BaseConfigDatabaseTest { - - static StreamResetPersistence streamResetPersistence; - private static final Logger LOGGER = LoggerFactory.getLogger(StreamResetPersistenceTest.class); - - @BeforeEach - public void setup() throws Exception { - truncateAllTables(); - - streamResetPersistence = spy(new StreamResetPersistence(database)); - } - - @Test - void testCreateSameResetTwiceOnlyCreateItOnce() throws Exception { - final UUID connectionId = UUID.randomUUID(); - final StreamDescriptor streamDescriptor1 = new StreamDescriptor().withName("n1").withNamespace("ns2"); - final StreamDescriptor streamDescriptor2 = new StreamDescriptor().withName("n2"); - - streamResetPersistence.createStreamResets(connectionId, List.of(streamDescriptor1, streamDescriptor2)); - - final List result = streamResetPersistence.getStreamResets(connectionId); - LOGGER.info(database.query(ctx -> ctx.selectFrom("stream_reset").fetch().toString())); - assertEquals(2, result.size()); - - streamResetPersistence.createStreamResets(connectionId, List.of(streamDescriptor1)); - LOGGER.info(database.query(ctx -> ctx.selectFrom("stream_reset").fetch().toString())); - assertEquals(2, streamResetPersistence.getStreamResets(connectionId).size()); - - streamResetPersistence.createStreamResets(connectionId, List.of(streamDescriptor2)); - LOGGER.info(database.query(ctx -> ctx.selectFrom("stream_reset").fetch().toString())); - assertEquals(2, streamResetPersistence.getStreamResets(connectionId).size()); - } - - @Test - void testCreateAndGetAndDeleteStreamResets() throws Exception { - final List streamResetList = new ArrayList<>(); - final StreamDescriptor streamDescriptor1 = new StreamDescriptor().withName("stream_name_1").withNamespace("stream_namespace_1"); - final StreamDescriptor streamDescriptor2 = new StreamDescriptor().withName("stream_name_2"); - streamResetList.add(streamDescriptor1); - streamResetList.add(streamDescriptor2); - final UUID uuid = UUID.randomUUID(); - streamResetPersistence.createStreamResets(uuid, streamResetList); - - final List result = streamResetPersistence.getStreamResets(uuid); - assertEquals(2, result.size()); - assertTrue( - result.stream().anyMatch( - streamDescriptor -> "stream_name_1".equals(streamDescriptor.getName()) && "stream_namespace_1".equals(streamDescriptor.getNamespace()))); - assertTrue( - result.stream().anyMatch(streamDescriptor -> "stream_name_2".equals(streamDescriptor.getName()) && streamDescriptor.getNamespace() == null)); - - streamResetPersistence.createStreamResets(uuid, List.of(new StreamDescriptor().withName("stream_name_3").withNamespace("stream_namespace_2"))); - streamResetPersistence.deleteStreamResets(uuid, result); - - final List resultAfterDeleting = streamResetPersistence.getStreamResets(uuid); - assertEquals(1, resultAfterDeleting.size()); - - assertTrue( - resultAfterDeleting.stream().anyMatch( - streamDescriptor -> "stream_name_3".equals(streamDescriptor.getName()) && "stream_namespace_2".equals(streamDescriptor.getNamespace()))); - } - -} diff --git 
a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java deleted file mode 100644 index 63fcda9fe04f2..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import io.airbyte.config.Geography; -import io.airbyte.config.OperatorDbt; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.OperatorNormalization.Option; -import io.airbyte.config.OperatorWebhook; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncOperation.OperatorType; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.List; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class SyncOperationPersistenceTest extends BaseConfigDatabaseTest { - - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - private static final UUID WEBHOOK_CONFIG_ID = UUID.randomUUID(); - private static final String WEBHOOK_OPERATION_EXECUTION_URL = "test-webhook-url"; - private static final String WEBHOOK_OPERATION_EXECUTION_BODY = "test-webhook-body"; - - private ConfigRepository configRepository; - - private static final StandardSyncOperation DBT_OP = new StandardSyncOperation() - .withName("operation-1") - .withTombstone(false) - .withOperationId(UUID.randomUUID()) - .withWorkspaceId(WORKSPACE_ID) - .withOperatorDbt(new OperatorDbt() - .withDbtArguments("dbt-arguments") - .withDockerImage("image-tag") - .withGitRepoBranch("git-repo-branch") - .withGitRepoUrl("git-repo-url")) - .withOperatorNormalization(null) - .withOperatorType(OperatorType.DBT); - private static final StandardSyncOperation NORMALIZATION_OP = new StandardSyncOperation() - .withName("operation-1") - .withTombstone(false) - .withOperationId(UUID.randomUUID()) - .withWorkspaceId(WORKSPACE_ID) - .withOperatorDbt(null) - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)) - .withOperatorType(OperatorType.NORMALIZATION); - private static final StandardSyncOperation WEBHOOK_OP = new StandardSyncOperation() - .withName("webhook-operation") - .withTombstone(false) - .withOperationId(UUID.randomUUID()) - .withWorkspaceId(WORKSPACE_ID) - .withOperatorType(OperatorType.WEBHOOK) - .withOperatorDbt(null) - .withOperatorNormalization(null) - .withOperatorWebhook( - new OperatorWebhook() - .withWebhookConfigId(WEBHOOK_CONFIG_ID) - .withExecutionUrl(WEBHOOK_OPERATION_EXECUTION_URL) - .withExecutionBody(WEBHOOK_OPERATION_EXECUTION_BODY)); - private static final List OPS = List.of(DBT_OP, NORMALIZATION_OP, WEBHOOK_OP); - - @BeforeEach - void beforeEach() throws Exception { - truncateAllTables(); - - configRepository = new ConfigRepository(database); - createWorkspace(); - - for (final StandardSyncOperation op : OPS) { - configRepository.writeStandardSyncOperation(op); - } - } - - @Test - void testReadWrite() throws IOException, ConfigNotFoundException, JsonValidationException { - for (final StandardSyncOperation op : OPS) { - assertEquals(op, 
configRepository.getStandardSyncOperation(op.getOperationId())); - } - } - - @Test - void testReadNotExists() { - assertThrows(ConfigNotFoundException.class, () -> configRepository.getStandardSyncOperation(UUID.randomUUID())); - } - - @Test - void testList() throws IOException, JsonValidationException { - assertEquals(OPS, configRepository.listStandardSyncOperations()); - } - - @Test - void testDelete() throws IOException, ConfigNotFoundException, JsonValidationException { - for (final StandardSyncOperation op : OPS) { - assertEquals(op, configRepository.getStandardSyncOperation(op.getOperationId())); - configRepository.deleteStandardSyncOperation(op.getOperationId()); - assertThrows(ConfigNotFoundException.class, () -> configRepository.getStandardSyncOperation(op.getOperationId())); - } - } - - private void createWorkspace() throws IOException, JsonValidationException { - final StandardWorkspace workspace = new StandardWorkspace() - .withWorkspaceId(WORKSPACE_ID) - .withName("Another Workspace") - .withSlug("another-workspace") - .withInitialSetupComplete(true) - .withTombstone(false) - .withDefaultGeography(Geography.AUTO); - configRepository.writeStandardWorkspaceNoSecrets(workspace); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ValidatingConfigPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ValidatingConfigPersistenceTest.java deleted file mode 100644 index 048e5aa86654e..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ValidatingConfigPersistenceTest.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.when; - -import com.google.common.collect.Sets; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.ConfigWithMetadata; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.time.Instant; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class ValidatingConfigPersistenceTest { - - public static final UUID UUID_1 = new UUID(0, 1); - public static final Instant INSTANT = Instant.now(); - public static final StandardSourceDefinition SOURCE_1 = new StandardSourceDefinition(); - - static { - SOURCE_1.withSourceDefinitionId(UUID_1).withName("apache storm"); - } - - public static final UUID UUID_2 = new UUID(0, 2); - public static final StandardSourceDefinition SOURCE_2 = new StandardSourceDefinition(); - - static { - SOURCE_2.withSourceDefinitionId(UUID_2).withName("apache storm"); - } - - private JsonSchemaValidator schemaValidator; - - private ValidatingConfigPersistence configPersistence; - private ConfigPersistence decoratedConfigPersistence; - private static final String ERROR_MESSAGE = "error"; - - @BeforeEach - void setUp() { - schemaValidator =
mock(JsonSchemaValidator.class); - - decoratedConfigPersistence = mock(ConfigPersistence.class); - configPersistence = new ValidatingConfigPersistence(decoratedConfigPersistence, schemaValidator); - } - - @Test - void testWriteConfigSuccess() throws IOException, JsonValidationException { - configPersistence.writeConfig(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), SOURCE_1); - final Map<String, StandardSourceDefinition> aggregatedSource = new HashMap<>(); - aggregatedSource.put(UUID_1.toString(), SOURCE_1); - verify(decoratedConfigPersistence).writeConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, aggregatedSource); - } - - @Test - void testWriteConfigsSuccess() throws IOException, JsonValidationException { - final Map<String, StandardSourceDefinition> sourceDefinitionById = new HashMap<>(); - sourceDefinitionById.put(UUID_1.toString(), SOURCE_1); - sourceDefinitionById.put(UUID_2.toString(), SOURCE_2); - - configPersistence.writeConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, sourceDefinitionById); - verify(decoratedConfigPersistence).writeConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, sourceDefinitionById); - } - - @Test - void testWriteConfigFailure() throws JsonValidationException { - doThrow(new JsonValidationException(ERROR_MESSAGE)).when(schemaValidator).ensure(any(), any()); - assertThrows(JsonValidationException.class, - () -> configPersistence.writeConfig(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), SOURCE_1)); - - verifyNoInteractions(decoratedConfigPersistence); - } - - @Test - void testWriteConfigsFailure() throws JsonValidationException { - doThrow(new JsonValidationException(ERROR_MESSAGE)).when(schemaValidator).ensure(any(), any()); - - final Map<String, StandardSourceDefinition> sourceDefinitionById = new HashMap<>(); - sourceDefinitionById.put(UUID_1.toString(), SOURCE_1); - sourceDefinitionById.put(UUID_2.toString(), SOURCE_2); - - assertThrows(JsonValidationException.class, - () -> configPersistence.writeConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, sourceDefinitionById)); - - verifyNoInteractions(decoratedConfigPersistence); - } - - @Test - void testGetConfigSuccess() throws IOException, JsonValidationException, ConfigNotFoundException { - when(decoratedConfigPersistence.getConfig(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class)) - .thenReturn(SOURCE_1); - final StandardSourceDefinition actualConfig = configPersistence - .getConfig(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class); - - assertEquals(SOURCE_1, actualConfig); - } - - @Test - void testGetConfigFailure() throws IOException, JsonValidationException, ConfigNotFoundException { - doThrow(new JsonValidationException(ERROR_MESSAGE)).when(schemaValidator).ensure(any(), any()); - when(decoratedConfigPersistence.getConfig(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class)) - .thenReturn(SOURCE_1); - - assertThrows( - JsonValidationException.class, - () -> configPersistence.getConfig(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class)); - } - - @Test - void testListConfigsSuccess() throws JsonValidationException, IOException { - when(decoratedConfigPersistence.listConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class)) - .thenReturn(List.of(SOURCE_1, SOURCE_2)); - - final List<StandardSourceDefinition> actualConfigs = configPersistence - .listConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class); - - assertEquals( - Sets.newHashSet(SOURCE_1, SOURCE_2), - Sets.newHashSet(actualConfigs)); - } - -
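The write tests above pin down the decorator contract: validate first, delegate after, and never touch the delegate on failure. A rough sketch of the write path they imply (illustrative only, not the deleted implementation; jsonSchemaFor is a hypothetical helper standing in for however the schema is actually looked up):

    <T> void writeConfig(final ConfigSchema configType, final String configId, final T config)
        throws JsonValidationException, IOException {
      // Validate before delegating: if ensure() throws, the decorated
      // persistence is never invoked (see verifyNoInteractions above).
      schemaValidator.ensure(jsonSchemaFor(configType), Jsons.jsonNode(config));
      // A single-config write funnels into the bulk writeConfigs call,
      // matching the verify(...) expectation in testWriteConfigSuccess.
      decoratedConfigPersistence.writeConfigs(configType, Map.of(configId, config));
    }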
@Test - void testListConfigsFailure() throws JsonValidationException, IOException { - doThrow(new JsonValidationException(ERROR_MESSAGE)).when(schemaValidator).ensure(any(), any()); - when(decoratedConfigPersistence.listConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class)) - .thenReturn(List.of(SOURCE_1, SOURCE_2)); - - assertThrows(JsonValidationException.class, () -> configPersistence - .listConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class)); - } - - @Test - void testGetConfigWithMetadataSuccess() throws IOException, JsonValidationException, ConfigNotFoundException { - when(decoratedConfigPersistence.getConfigWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class)) - .thenReturn(withMetadata(SOURCE_1)); - final ConfigWithMetadata<StandardSourceDefinition> actualConfig = configPersistence - .getConfigWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class); - - assertEquals(withMetadata(SOURCE_1), actualConfig); - } - - @Test - void testGetConfigWithMetadataFailure() throws IOException, JsonValidationException, ConfigNotFoundException { - doThrow(new JsonValidationException(ERROR_MESSAGE)).when(schemaValidator).ensure(any(), any()); - when(decoratedConfigPersistence.getConfigWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class)) - .thenReturn(withMetadata(SOURCE_1)); - - assertThrows( - JsonValidationException.class, - () -> configPersistence.getConfigWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class)); - } - - @Test - void testListConfigsWithMetadataSuccess() throws JsonValidationException, IOException { - when(decoratedConfigPersistence.listConfigsWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class)) - .thenReturn(List.of(withMetadata(SOURCE_1), withMetadata(SOURCE_2))); - - final List<ConfigWithMetadata<StandardSourceDefinition>> actualConfigs = configPersistence - .listConfigsWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class); - - // noinspection unchecked - assertEquals( - Sets.newHashSet(withMetadata(SOURCE_1), withMetadata(SOURCE_2)), - Sets.newHashSet(actualConfigs)); - } - - @Test - void testListConfigsWithMetadataFailure() throws JsonValidationException, IOException { - doThrow(new JsonValidationException(ERROR_MESSAGE)).when(schemaValidator).ensure(any(), any()); - when(decoratedConfigPersistence.listConfigsWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class)) - .thenReturn(List.of(withMetadata(SOURCE_1), withMetadata(SOURCE_2))); - - assertThrows(JsonValidationException.class, () -> configPersistence - .listConfigsWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class)); - } - - private static ConfigWithMetadata<StandardSourceDefinition> withMetadata(final StandardSourceDefinition sourceDef) { - return new ConfigWithMetadata<>(sourceDef.getSourceDefinitionId().toString(), - ConfigSchema.STANDARD_SOURCE_DEFINITION.name(), - INSTANT, - INSTANT, - sourceDef); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java deleted file mode 100644 index bd954c6cda388..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - *
Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.WORKSPACE; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.JOBS; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.db.ExceptionWrappingDatabase; -import io.airbyte.db.instance.configs.jooq.generated.enums.ActorType; -import io.airbyte.db.instance.configs.jooq.generated.enums.NamespaceDefinitionType; -import io.airbyte.db.instance.configs.jooq.generated.enums.ReleaseStage; -import java.io.IOException; -import java.sql.SQLException; -import java.time.OffsetDateTime; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; -import org.jooq.JSONB; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -class WorkspaceFilterTest extends BaseConfigDatabaseTest { - - private static final UUID SRC_DEF_ID = UUID.randomUUID(); - private static final UUID DST_DEF_ID = UUID.randomUUID(); - private static final UUID ACTOR_ID_0 = UUID.randomUUID(); - private static final UUID ACTOR_ID_1 = UUID.randomUUID(); - private static final UUID ACTOR_ID_2 = UUID.randomUUID(); - private static final UUID ACTOR_ID_3 = UUID.randomUUID(); - private static final UUID CONN_ID_0 = UUID.randomUUID(); - private static final UUID CONN_ID_1 = UUID.randomUUID(); - private static final UUID CONN_ID_2 = UUID.randomUUID(); - private static final UUID CONN_ID_3 = UUID.randomUUID(); - private static final UUID CONN_ID_4 = UUID.randomUUID(); - private static final UUID CONN_ID_5 = UUID.randomUUID(); - private static final UUID WORKSPACE_ID_0 = UUID.randomUUID(); - private static final UUID WORKSPACE_ID_1 = UUID.randomUUID(); - private static final UUID WORKSPACE_ID_2 = UUID.randomUUID(); - private static final UUID WORKSPACE_ID_3 = UUID.randomUUID(); - private ConfigRepository configRepository; - - @BeforeAll - static void setUpAll() throws SQLException { - // create actor_definition - database.transaction(ctx -> ctx.insertInto(ACTOR_DEFINITION, ACTOR_DEFINITION.ID, ACTOR_DEFINITION.NAME, ACTOR_DEFINITION.DOCKER_REPOSITORY, - ACTOR_DEFINITION.DOCKER_IMAGE_TAG, ACTOR_DEFINITION.SPEC, ACTOR_DEFINITION.ACTOR_TYPE, ACTOR_DEFINITION.RELEASE_STAGE) - .values(SRC_DEF_ID, "srcDef", "repository", "tag", JSONB.valueOf("{}"), ActorType.source, ReleaseStage.beta) - .values(DST_DEF_ID, "dstDef", "repository", "tag", JSONB.valueOf("{}"), ActorType.destination, ReleaseStage.generally_available) - .values(UUID.randomUUID(), "dstDef", "repository", "tag", JSONB.valueOf("{}"), ActorType.destination, ReleaseStage.alpha) - .execute()); - - // create workspace - database.transaction(ctx -> ctx.insertInto(WORKSPACE, WORKSPACE.ID, WORKSPACE.NAME, WORKSPACE.SLUG, WORKSPACE.INITIAL_SETUP_COMPLETE) - .values(WORKSPACE_ID_0, "ws-0", "ws-0", true) - .values(WORKSPACE_ID_1, "ws-1", "ws-1", true) - .values(WORKSPACE_ID_2, "ws-2", "ws-2", true) - .values(WORKSPACE_ID_3, "ws-3", "ws-3", true) - .execute()); - // create actors - database.transaction( - ctx -> ctx.insertInto(ACTOR, ACTOR.WORKSPACE_ID, ACTOR.ID, ACTOR.ACTOR_DEFINITION_ID, ACTOR.NAME, ACTOR.CONFIGURATION, 
ACTOR.ACTOR_TYPE) - .values(WORKSPACE_ID_0, ACTOR_ID_0, SRC_DEF_ID, "ACTOR-0", JSONB.valueOf("{}"), ActorType.source) - .values(WORKSPACE_ID_1, ACTOR_ID_1, SRC_DEF_ID, "ACTOR-1", JSONB.valueOf("{}"), ActorType.source) - .values(WORKSPACE_ID_2, ACTOR_ID_2, DST_DEF_ID, "ACTOR-2", JSONB.valueOf("{}"), ActorType.source) - .values(WORKSPACE_ID_3, ACTOR_ID_3, DST_DEF_ID, "ACTOR-3", JSONB.valueOf("{}"), ActorType.source) - .execute()); - // create connections - database.transaction( - ctx -> ctx.insertInto(CONNECTION, CONNECTION.SOURCE_ID, CONNECTION.DESTINATION_ID, CONNECTION.ID, CONNECTION.NAMESPACE_DEFINITION, - CONNECTION.NAME, CONNECTION.CATALOG, CONNECTION.MANUAL) - .values(ACTOR_ID_0, ACTOR_ID_1, CONN_ID_0, NamespaceDefinitionType.source, "CONN-0", JSONB.valueOf("{}"), true) - .values(ACTOR_ID_0, ACTOR_ID_2, CONN_ID_1, NamespaceDefinitionType.source, "CONN-1", JSONB.valueOf("{}"), true) - .values(ACTOR_ID_1, ACTOR_ID_2, CONN_ID_2, NamespaceDefinitionType.source, "CONN-2", JSONB.valueOf("{}"), true) - .values(ACTOR_ID_1, ACTOR_ID_2, CONN_ID_3, NamespaceDefinitionType.source, "CONN-3", JSONB.valueOf("{}"), true) - .values(ACTOR_ID_2, ACTOR_ID_3, CONN_ID_4, NamespaceDefinitionType.source, "CONN-4", JSONB.valueOf("{}"), true) - .values(ACTOR_ID_3, ACTOR_ID_1, CONN_ID_5, NamespaceDefinitionType.source, "CONN-5", JSONB.valueOf("{}"), true) - .execute()); - // create jobs - final OffsetDateTime currentTs = OffsetDateTime.now(); - database.transaction(ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.UPDATED_AT, JOBS.SCOPE) - .values(0L, currentTs.minusHours(0), CONN_ID_0.toString()) - .values(1L, currentTs.minusHours(5), CONN_ID_0.toString()) - .values(2L, currentTs.minusHours(10), CONN_ID_1.toString()) - .values(3L, currentTs.minusHours(15), CONN_ID_1.toString()) - .values(4L, currentTs.minusHours(20), CONN_ID_2.toString()) - .values(5L, currentTs.minusHours(30), CONN_ID_3.toString()) - .values(6L, currentTs.minusHours(40), CONN_ID_4.toString()) - .values(7L, currentTs.minusHours(50), CONN_ID_4.toString()) - .values(8L, currentTs.minusHours(70), CONN_ID_5.toString()) - .execute()); - } - - @BeforeEach - void beforeEach() { - configRepository = new ConfigRepository(database, new ActorDefinitionMigrator(new ExceptionWrappingDatabase(database)), null); - } - - @Test - @DisplayName("Should return a list of workspace IDs with most recently running jobs") - void testListWorkspacesByMostRecentlyRunningJobs() throws IOException { - final int timeWindowInHours = 48; - /* - * The function under test filters workspace IDs with the most recently running jobs within a given - * time window. Step 1: filter table JOBS on rows whose UPDATED_AT timestamp falls within the given - * time window. Step 2: trace back via the CONNECTION and ACTOR tables. Step 3: return workspace - * IDs from the ACTOR table. - */ - final List<UUID> actualResult = configRepository.listWorkspacesByMostRecentlyRunningJobs(timeWindowInHours); - /* - * With the test data provided above, expected outputs for each step: Step 1: `jobs` (IDs) 0L, 1L, - * 2L, 3L, 4L, 5L and 6L. Step 2: `connections` (IDs) CONN_ID_0, CONN_ID_1, CONN_ID_2, CONN_ID_3, - * and CONN_ID_4; `actors` (IDs) ACTOR_ID_0, ACTOR_ID_1, and ACTOR_ID_2. Step 3: `workspaces` (IDs) - * WORKSPACE_ID_0, WORKSPACE_ID_1 and WORKSPACE_ID_2.
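- * (Jobs 7L and 8L are dropped in Step 1: at 50 and 70 hours old they fall outside the 48-hour window, which is why CONN_ID_5, and with it WORKSPACE_ID_3, never appears in the result.)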
- */ - final List expectedResult = new ArrayList<>(); - expectedResult.add(WORKSPACE_ID_0); - expectedResult.add(WORKSPACE_ID_1); - expectedResult.add(WORKSPACE_ID_2); - assertTrue(expectedResult.size() == actualResult.size() && expectedResult.containsAll(actualResult) && actualResult.containsAll(expectedResult)); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java deleted file mode 100644 index 41e1f60fed6a6..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java +++ /dev/null @@ -1,209 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.MoreBooleans; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.Geography; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSourceDefinition.ReleaseStage; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.db.ExceptionWrappingDatabase; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; -import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; - -@SuppressWarnings({"PMD.LongVariable", "PMD.AvoidInstantiatingObjectsInLoops"}) -class WorkspacePersistenceTest extends BaseConfigDatabaseTest { - - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - private static final UUID SOURCE_DEFINITION_ID = UUID.randomUUID(); - private static final UUID SOURCE_ID = UUID.randomUUID(); - private static final UUID DESTINATION_DEFINITION_ID = UUID.randomUUID(); - private static final UUID DESTINATION_ID = UUID.randomUUID(); - private static final JsonNode CONFIG = Jsons.jsonNode(ImmutableMap.of("key-a", "value-a")); - - private ConfigRepository configRepository; - - @BeforeEach - void setup() { - configRepository = spy(new ConfigRepository( - database, - new ActorDefinitionMigrator(new ExceptionWrappingDatabase(database)), - null)); - } - - @Test - void testGetWorkspace() throws ConfigNotFoundException, IOException, JsonValidationException { - configRepository.writeStandardWorkspaceNoSecrets(createBaseStandardWorkspace().withWorkspaceId(UUID.randomUUID())); - assertReturnsWorkspace(createBaseStandardWorkspace()); - } - - @Test - void testWorkspaceWithNullTombstone() throws ConfigNotFoundException, IOException, JsonValidationException { - assertReturnsWorkspace(createBaseStandardWorkspace()); - } - - @Test - void testWorkspaceWithFalseTombstone() throws ConfigNotFoundException, IOException, JsonValidationException { - 
assertReturnsWorkspace(createBaseStandardWorkspace().withTombstone(false)); - } - - @Test - void testWorkspaceWithTrueTombstone() throws ConfigNotFoundException, IOException, JsonValidationException { - assertReturnsWorkspace(createBaseStandardWorkspace().withTombstone(true)); - } - - private static StandardWorkspace createBaseStandardWorkspace() { - return new StandardWorkspace() - .withWorkspaceId(WORKSPACE_ID) - .withName("workspace-a") - .withSlug("workspace-a-slug") - .withInitialSetupComplete(false) - .withTombstone(false) - .withDefaultGeography(Geography.AUTO); - } - - private static SourceConnection createBaseSource() { - return new SourceConnection() - .withSourceId(SOURCE_ID) - .withSourceDefinitionId(SOURCE_DEFINITION_ID) - .withName("source-a") - .withTombstone(false) - .withConfiguration(CONFIG) - .withWorkspaceId(WORKSPACE_ID); - } - - private static DestinationConnection createBaseDestination() { - return new DestinationConnection() - .withDestinationId(DESTINATION_ID) - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID) - .withName("destination-a") - .withTombstone(false) - .withConfiguration(CONFIG) - .withWorkspaceId(WORKSPACE_ID); - } - - private static StandardSourceDefinition createSourceDefinition(final ReleaseStage releaseStage) { - return new StandardSourceDefinition() - .withSourceDefinitionId(SOURCE_DEFINITION_ID) - .withTombstone(false) - .withName("source-definition-a") - .withDockerRepository("dockerhub") - .withDockerImageTag("some-tag") - .withReleaseStage(releaseStage); - } - - private static StandardDestinationDefinition createDestinationDefinition(final StandardDestinationDefinition.ReleaseStage releaseStage) { - return new StandardDestinationDefinition() - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID) - .withTombstone(false) - .withName("destination-definition-a") - .withDockerRepository("dockerhub") - .withDockerImageTag("some-tag") - .withReleaseStage(releaseStage); - } - - private void persistConnectorsWithReleaseStages( - final ReleaseStage sourceReleaseStage, - final StandardDestinationDefinition.ReleaseStage destinationReleaseStage) - throws JsonValidationException, IOException { - - configRepository.writeStandardSourceDefinition(createSourceDefinition(sourceReleaseStage)); - configRepository.writeStandardDestinationDefinition(createDestinationDefinition(destinationReleaseStage)); - configRepository.writeSourceConnectionNoSecrets(createBaseSource()); - configRepository.writeDestinationConnectionNoSecrets(createBaseDestination()); - } - - void assertReturnsWorkspace(final StandardWorkspace workspace) throws ConfigNotFoundException, IOException, JsonValidationException { - configRepository.writeStandardWorkspaceNoSecrets(workspace); - - final StandardWorkspace expectedWorkspace = Jsons.clone(workspace); - /* - * tombstone defaults to false in the db, so if the passed in workspace does not have it set, we - * expected the workspace returned from the db to have it set to false. 
- */ - if (workspace.getTombstone() == null) { - expectedWorkspace.withTombstone(false); - } - - assertEquals(expectedWorkspace, configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testWorkspaceByConnectionId(final boolean isTombstone) throws ConfigNotFoundException, IOException, JsonValidationException { - final UUID connectionId = UUID.randomUUID(); - final UUID sourceId = UUID.randomUUID(); - final StandardSync mSync = new StandardSync() - .withSourceId(sourceId); - final SourceConnection mSourceConnection = new SourceConnection() - .withWorkspaceId(WORKSPACE_ID); - final StandardWorkspace mWorkflow = new StandardWorkspace() - .withWorkspaceId(WORKSPACE_ID); - - doReturn(mSync) - .when(configRepository) - .getStandardSync(connectionId); - doReturn(mSourceConnection) - .when(configRepository) - .getSourceConnection(sourceId); - doReturn(mWorkflow) - .when(configRepository) - .getStandardWorkspaceNoSecrets(WORKSPACE_ID, isTombstone); - - configRepository.getStandardWorkspaceFromConnection(connectionId, isTombstone); - - verify(configRepository).getStandardWorkspaceNoSecrets(WORKSPACE_ID, isTombstone); - } - - @Test - void testUpdateFeedback() throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardWorkspace workspace = createBaseStandardWorkspace(); - - configRepository.writeStandardWorkspaceNoSecrets(workspace); - - assertFalse(MoreBooleans.isTruthy(configRepository.getStandardWorkspaceNoSecrets(workspace.getWorkspaceId(), false).getFeedbackDone())); - configRepository.setFeedback(workspace.getWorkspaceId()); - assertTrue(configRepository.getStandardWorkspaceNoSecrets(workspace.getWorkspaceId(), false).getFeedbackDone()); - } - - @Test - void testWorkspaceHasAlphaOrBetaConnector() throws JsonValidationException, IOException { - final StandardWorkspace workspace = createBaseStandardWorkspace(); - - configRepository.writeStandardWorkspaceNoSecrets(workspace); - - persistConnectorsWithReleaseStages(ReleaseStage.GENERALLY_AVAILABLE, StandardDestinationDefinition.ReleaseStage.GENERALLY_AVAILABLE); - assertFalse(configRepository.getWorkspaceHasAlphaOrBetaConnector(WORKSPACE_ID)); - - persistConnectorsWithReleaseStages(ReleaseStage.ALPHA, StandardDestinationDefinition.ReleaseStage.GENERALLY_AVAILABLE); - assertTrue(configRepository.getWorkspaceHasAlphaOrBetaConnector(WORKSPACE_ID)); - - persistConnectorsWithReleaseStages(ReleaseStage.GENERALLY_AVAILABLE, StandardDestinationDefinition.ReleaseStage.BETA); - assertTrue(configRepository.getWorkspaceHasAlphaOrBetaConnector(WORKSPACE_ID)); - - persistConnectorsWithReleaseStages(ReleaseStage.CUSTOM, StandardDestinationDefinition.ReleaseStage.CUSTOM); - assertFalse(configRepository.getWorkspaceHasAlphaOrBetaConnector(WORKSPACE_ID)); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessorTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessorTest.java deleted file mode 100644 index 40c6799788485..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessorTest.java +++ /dev/null @@ -1,643 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.config.persistence.split_secrets; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.constants.AirbyteSecretConstants; -import io.airbyte.commons.json.Jsons; -import java.io.IOException; -import java.io.InputStream; -import java.util.stream.Stream; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -@SuppressWarnings({"PMD.CloseResource", "PMD.UseProperClassLoader", "PMD.JUnitTestsShouldIncludeAssert"}) -class JsonSecretsProcessorTest { - - private static final JsonNode SCHEMA_ONE_LAYER = Jsons.deserialize( - """ - { - "type": "object", "properties": { - "secret1": { - "type": "string", - "airbyte_secret": true - }, - "secret2": { - "type": "string", - "airbyte_secret": "true" - }, - "field1": { - "type": "string" - }, - "field2": { - "type": "number" - } - } - } - """); - - private static final JsonNode SCHEMA_INNER_OBJECT = Jsons.deserialize( - """ - { - "type": "object", - "properties": { - "warehouse": { - "type": "string" - }, - "loading_method": { - "type": "object", - "oneOf": [ - { - "properties": {} - }, - { - "properties": { - "s3_bucket_name": { - "type": "string" - }, - "secret_access_key": { - "type": "string", - "airbyte_secret": true - } - } - } - ] - } - } - }"""); - - private static final JsonNode ONE_OF_WITH_SAME_KEY_IN_SUB_SCHEMAS = Jsons.deserialize( - """ - { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "S3 Destination Spec", - "type": "object", - "required": [ - "client_id", - "format" - ], - "additionalProperties": false, - "properties": { - "client_id": { - "title": "client it", - "type": "string", - "default": "" - }, - "format": { - "title": "Output Format", - "type": "object", - "description": "Output data format", - "oneOf": [ - { - "title": "Avro: Apache Avro", - "required": ["format_type", "compression_codec"], - "properties": { - "format_type": { - "type": "string", - "enum": ["Avro"], - "default": "Avro" - }, - "compression_codec": { - "title": "Compression Codec", - "description": "The compression algorithm used to compress data. 
Default to no compression.", - "type": "object", - "oneOf": [ - { - "title": "no compression", - "required": ["codec"], - "properties": { - "codec": { - "type": "string", - "enum": ["no compression"], - "default": "no compression" - } - } - }, - { - "title": "Deflate", - "required": ["codec", "compression_level"], - "properties": { - "codec": { - "type": "string", - "enum": ["Deflate"], - "default": "Deflate" - }, - "compression_level": { - "type": "integer", - "default": 0, - "minimum": 0, - "maximum": 9 - } - } - } - ] - } - } - }, - { - "title": "Parquet: Columnar Storage", - "required": ["format_type"], - "properties": { - "format_type": { - "type": "string", - "enum": ["Parquet"], - "default": "Parquet" - }, - "compression_codec": { - "type": "string", - "enum": [ - "UNCOMPRESSED", - "GZIP" - ], - "default": "UNCOMPRESSED" - } - } - } - ] - } - } - }"""); - - private static final String FIELD_1 = "field1"; - private static final String VALUE_1 = "value1"; - private static final String FIELD_2 = "field2"; - private static final String ADDITIONAL_FIELD = "additional_field"; - private static final String DONT_COPY_ME = "dont_copy_me"; - private static final String DONT_TELL_ANYONE = "donttellanyone"; - private static final String SECRET_1 = "secret1"; - private static final String SECRET_2 = "secret2"; - private static final String NAME = "name"; - private static final String S3_BUCKET_NAME = "s3_bucket_name"; - private static final String SECRET_ACCESS_KEY = "secret_access_key"; - private static final String HOUSE = "house"; - private static final String WAREHOUSE = "warehouse"; - private static final String LOADING_METHOD = "loading_method"; - private static final String ARRAY = "array"; - private static final String ARRAY_OF_ONEOF = "array_of_oneof"; - private static final String NESTED_OBJECT = "nested_object"; - private static final String NESTED_ONEOF = "nested_oneof"; - private static final String ONE_OF_SECRET = "oneof_secret"; - private static final String ONE_OF = "oneof"; - private static final String OPTIONAL_PASSWORD = "optional_password"; - private static final String POSTGRES_SSH_KEY = "postgres_ssh_key"; - private static final String SIMPLE = "simple"; - - private JsonSecretsProcessor processor; - - @BeforeEach - public void setup() { - processor = JsonSecretsProcessor.builder() - .copySecrets(true) - .build(); - } - - @Test - void testCopySecrets() { - final JsonNode src = Jsons.jsonNode(ImmutableMap.builder() - .put(FIELD_1, VALUE_1) - .put(FIELD_2, 2) - .put(ADDITIONAL_FIELD, DONT_COPY_ME) - .put(SECRET_1, DONT_TELL_ANYONE) - .put(SECRET_2, "updateme") - .build()); - - final JsonNode dst = Jsons.jsonNode(ImmutableMap.builder() - .put(FIELD_1, VALUE_1) - .put(FIELD_2, 2) - .put(SECRET_1, AirbyteSecretConstants.SECRETS_MASK) - .put(SECRET_2, "newvalue") - .build()); - - final JsonNode actual = processor.copySecrets(src, dst, SCHEMA_ONE_LAYER); - - final JsonNode expected = Jsons.jsonNode(ImmutableMap.builder() - .put(FIELD_1, VALUE_1) - .put(FIELD_2, 2) - .put(SECRET_1, DONT_TELL_ANYONE) - .put(SECRET_2, "newvalue") - .build()); - - assertEquals(expected, actual); - } - - @Test - void testCopySecretsNotInSrc() { - final JsonNode src = Jsons.jsonNode(ImmutableMap.builder() - .put(FIELD_1, VALUE_1) - .put(FIELD_2, 2) - .put(ADDITIONAL_FIELD, DONT_COPY_ME) - .build()); - - final JsonNode dst = Jsons.jsonNode(ImmutableMap.builder() - .put(FIELD_1, VALUE_1) - .put(FIELD_2, 2) - .put(SECRET_1, AirbyteSecretConstants.SECRETS_MASK) - .build()); - - final JsonNode expected = 
dst.deepCopy(); - final JsonNode actual = processor.copySecrets(src, dst, SCHEMA_ONE_LAYER); - - assertEquals(expected, actual); - } - - @Test - void testCopySecretInnerObject() { - final JsonNode srcOneOf = Jsons.jsonNode(ImmutableMap.builder() - .put(S3_BUCKET_NAME, NAME) - .put(SECRET_ACCESS_KEY, "secret") - .put(ADDITIONAL_FIELD, DONT_COPY_ME) - .build()); - final JsonNode src = Jsons.jsonNode(ImmutableMap.builder() - .put(WAREHOUSE, HOUSE) - .put("loading_method", srcOneOf).build()); - - final JsonNode dstOneOf = Jsons.jsonNode(ImmutableMap.builder() - .put(S3_BUCKET_NAME, NAME) - .put(SECRET_ACCESS_KEY, AirbyteSecretConstants.SECRETS_MASK) - .build()); - final JsonNode dst = Jsons.jsonNode(ImmutableMap.builder() - .put(WAREHOUSE, HOUSE) - .put(LOADING_METHOD, dstOneOf).build()); - - final JsonNode actual = processor.copySecrets(src, dst, SCHEMA_INNER_OBJECT); - - final JsonNode expectedOneOf = Jsons.jsonNode(ImmutableMap.builder() - .put(S3_BUCKET_NAME, NAME) - .put(SECRET_ACCESS_KEY, "secret").build()); - final JsonNode expected = Jsons.jsonNode(ImmutableMap.builder() - .put(WAREHOUSE, HOUSE) - .put(LOADING_METHOD, expectedOneOf).build()); - - assertEquals(expected, actual); - } - - @Test - void testCopySecretNotInSrcInnerObject() { - final JsonNode src = Jsons.jsonNode(ImmutableMap.builder() - .put(WAREHOUSE, HOUSE).build()); - - final JsonNode dstOneOf = Jsons.jsonNode(ImmutableMap.builder() - .put(S3_BUCKET_NAME, NAME) - .put(SECRET_ACCESS_KEY, AirbyteSecretConstants.SECRETS_MASK) - .build()); - final JsonNode dst = Jsons.jsonNode(ImmutableMap.builder() - .put(WAREHOUSE, HOUSE) - .put(LOADING_METHOD, dstOneOf).build()); - - final JsonNode actual = processor.copySecrets(src, dst, SCHEMA_INNER_OBJECT); - final JsonNode expected = dst.deepCopy(); - - assertEquals(expected, actual); - } - - // test the case where multiple sub schemas of a oneOf contain the same key but a different type. 
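- // e.g. in the spec above, "compression_codec" is an object under the Avro option but a plain string - // under the Parquet option; the call below only verifies that copySecrets tolerates the mismatch without throwing.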
- @Test - void testHandlesSameKeyInOneOf() { - final JsonNode compressionCodecObject = Jsons.jsonNode(ImmutableMap.of( - "codec", "no compression")); - final JsonNode avroConfig = Jsons.jsonNode(ImmutableMap.of( - "format_type", "Avro", - "compression_codec", compressionCodecObject)); - final JsonNode src = Jsons.jsonNode(ImmutableMap.of( - "client_id", "whatever", - "format", avroConfig)); - - final JsonNode parquetConfig = Jsons.jsonNode(ImmutableMap.of( - "format_type", "Parquet", - "compression_codec", "GZIP")); - final JsonNode dst = Jsons.jsonNode(ImmutableMap.of( - "client_id", "whatever", - "format", parquetConfig)); - - processor.copySecrets(src, dst, ONE_OF_WITH_SAME_KEY_IN_SUB_SCHEMAS); - } - - private static Stream scenarioProvider() { - return Stream.of( - Arguments.of(ARRAY, true), - Arguments.of(ARRAY, false), - Arguments.of(ARRAY_OF_ONEOF, true), - Arguments.of(ARRAY_OF_ONEOF, false), - Arguments.of(NESTED_OBJECT, true), - Arguments.of(NESTED_OBJECT, false), - Arguments.of(NESTED_ONEOF, true), - Arguments.of(NESTED_ONEOF, false), - Arguments.of(ONE_OF, true), - Arguments.of(ONE_OF, false), - Arguments.of(ONE_OF_SECRET, true), - Arguments.of(ONE_OF_SECRET, false), - Arguments.of(OPTIONAL_PASSWORD, true), - Arguments.of(OPTIONAL_PASSWORD, false), - Arguments.of(POSTGRES_SSH_KEY, true), - Arguments.of(POSTGRES_SSH_KEY, false), - Arguments.of(SIMPLE, true), - Arguments.of(SIMPLE, false), - Arguments.of("enum", false)); - } - - @ParameterizedTest - @MethodSource("scenarioProvider") - void testSecretScenario(final String folder, final boolean partial) throws IOException { - final ObjectMapper objectMapper = new ObjectMapper(); - - final InputStream specIs = getClass().getClassLoader().getResourceAsStream(folder + "/spec.json"); - final JsonNode specs = objectMapper.readTree(specIs); - - final String inputFilePath = folder + (partial ? "/partial_config.json" : "/full_config.json"); - final InputStream inputIs = getClass().getClassLoader().getResourceAsStream(inputFilePath); - final JsonNode input = objectMapper.readTree(inputIs); - - final String expectedFilePath = folder + "/expected.json"; - final InputStream expectedIs = getClass().getClassLoader().getResourceAsStream(expectedFilePath); - final JsonNode expected = objectMapper.readTree(expectedIs); - - final JsonNode actual = processor.prepareSecretsForOutput(input, specs); - assertEquals(expected, actual); - } - - // todo (cgardens) - example of a case that is not properly handled. we should explicitly call out - // that this type of jsonschema object is not allowed to do secrets. - // private static Stream scenarioProvider2() { - // return Stream.of( - // Arguments.of("array2", true), - // Arguments.of("array2", false)); - // } - // - // @ParameterizedTest - // @MethodSource("scenarioProvider2") - // void testSecretScenario2(final String folder, final boolean partial) throws IOException { - // final ObjectMapper objectMapper = new ObjectMapper(); - // - // final InputStream specIs = getClass().getClassLoader().getResourceAsStream(folder + - // "/spec.json"); - // final JsonNode specs = objectMapper.readTree(specIs); - // - // final String inputFilePath = folder + (partial ? 
"/partial_config.json" : "/full_config.json"); - // final InputStream inputIs = getClass().getClassLoader().getResourceAsStream(inputFilePath); - // final JsonNode input = objectMapper.readTree(inputIs); - // - // final String expectedFilePath = folder + "/expected.json"; - // final InputStream expectedIs = getClass().getClassLoader().getResourceAsStream(expectedFilePath); - // final JsonNode expected = objectMapper.readTree(expectedIs); - // - // final JsonNode actual = Secrets.maskAllSecrets(input, specs); - // assertEquals(expected, actual); - // } - - @Test - void copiesSecretsInNestedNonCombinationNode() throws JsonProcessingException { - final ObjectMapper objectMapper = new ObjectMapper(); - - final JsonNode source = objectMapper.readTree( - """ - { - "top_level": { - "a_secret": "hunter2" - } - } - """); - final JsonNode dest = objectMapper.readTree( - """ - { - "top_level": { - "a_secret": "**********" - } - } - """); - final JsonNode schema = objectMapper.readTree( - """ - { - "type": "object", - "properties": { - "top_level": { - "type": "object", - "properties": { - "a_secret": { - "type": "string", - "airbyte_secret": true - } - } - } - } - } - """); - - final JsonNode copied = processor.copySecrets(source, dest, schema); - - final JsonNode expected = objectMapper.readTree( - """ - { - "top_level": { - "a_secret": "hunter2" - } - } - """); - assertEquals(expected, copied); - } - - @Test - void doesNotCopySecretsInNestedNonCombinationNodeWhenDestinationMissing() throws JsonProcessingException { - final ObjectMapper objectMapper = new ObjectMapper(); - - final JsonNode source = objectMapper.readTree( - """ - { - "top_level": { - "a_secret": "hunter2" - } - } - """); - final JsonNode dest = objectMapper.readTree( - """ - { - "top_level": { - } - } - """); - final JsonNode schema = objectMapper.readTree( - """ - { - "type": "object", - "properties": { - "top_level": { - "type": "object", - "properties": { - "a_secret": { - "type": "string", - "airbyte_secret": true - } - } - } - } - } - """); - - final JsonNode copied = processor.copySecrets(source, dest, schema); - - final JsonNode expected = objectMapper.readTree( - """ - { - "top_level": { - } - } - """); - assertEquals(expected, copied); - } - - @Test - void testCopySecretsWithTopLevelOneOf() { - final JsonNode schema = Jsons.deserialize(""" - { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "E2E Test Destination Spec", - "type": "object", - "oneOf": [ - { - "title": "Silent", - "required": ["type"], - "properties": { - "a_secret": { - "type": "string", - "airbyte_secret": true - } - } - }, - { - "title": "Throttled", - "required": ["type", "millis_per_record"], - "properties": { - "type": { - "type": "string", - "const": "THROTTLED", - "default": "THROTTLED" - }, - "millis_per_record": { - "description": "Number of milli-second to pause in between records.", - "type": "integer" - } - } - } - ] - } - """); - - final JsonNode source = Jsons.deserialize(""" - { - "type": "THROTTLED", - "a_secret": "woot" - } - """); - - final JsonNode destination = Jsons.deserialize(""" - { - "type": "THROTTLED", - "a_secret": "**********" - } - """); - - final JsonNode result = processor.copySecrets(source, destination, schema); - final JsonNode expected = Jsons.deserialize(""" - { - "type": "THROTTLED", - "a_secret": "woot" - } - """); - - assertEquals(expected, result); - } - - @Nested - class NoOpTest { - - @BeforeEach - public void setup() { - processor = JsonSecretsProcessor.builder() - .copySecrets(false) - .build(); - 
} - - @Test - void testCopySecrets() { - final JsonNode src = Jsons.jsonNode(ImmutableMap.builder() - .put(FIELD_1, VALUE_1) - .put(FIELD_2, 2) - .put(ADDITIONAL_FIELD, DONT_COPY_ME) - .put(SECRET_1, DONT_TELL_ANYONE) - .put(SECRET_2, "updateme") - .build()); - - final JsonNode dst = Jsons.jsonNode(ImmutableMap.builder() - .put(FIELD_1, VALUE_1) - .put(FIELD_2, 2) - .put(SECRET_1, AirbyteSecretConstants.SECRETS_MASK) - .put(SECRET_2, "newvalue") - .build()); - - final JsonNode actual = processor.copySecrets(src, dst, SCHEMA_ONE_LAYER); - - final JsonNode expected = Jsons.jsonNode(ImmutableMap.builder() - .put(FIELD_1, VALUE_1) - .put(FIELD_2, 2) - .put(ADDITIONAL_FIELD, DONT_COPY_ME) - .put(SECRET_1, DONT_TELL_ANYONE) - .put(SECRET_2, "updateme") - .build()); - - assertEquals(expected, actual); - } - - private static Stream scenarioProvider() { - return Stream.of( - Arguments.of(ARRAY, true), - Arguments.of(ARRAY, false), - Arguments.of(ARRAY_OF_ONEOF, true), - Arguments.of(ARRAY_OF_ONEOF, false), - Arguments.of(NESTED_OBJECT, true), - Arguments.of(NESTED_OBJECT, false), - Arguments.of(NESTED_ONEOF, true), - Arguments.of(NESTED_ONEOF, false), - Arguments.of(ONE_OF, true), - Arguments.of(ONE_OF, false), - Arguments.of(OPTIONAL_PASSWORD, true), - Arguments.of(OPTIONAL_PASSWORD, false), - Arguments.of(POSTGRES_SSH_KEY, true), - Arguments.of(POSTGRES_SSH_KEY, false), - Arguments.of(SIMPLE, true), - Arguments.of(SIMPLE, false)); - } - - @ParameterizedTest - @MethodSource("scenarioProvider") - void testSecretScenario(final String folder, final boolean partial) throws IOException { - final ObjectMapper objectMapper = new ObjectMapper(); - - final InputStream specIs = getClass().getClassLoader().getResourceAsStream(folder + "/spec.json"); - final JsonNode specs = objectMapper.readTree(specIs); - - final String inputFilePath = folder + (partial ? "/partial_config.json" : "/full_config.json"); - final InputStream inputIs = getClass().getClassLoader().getResourceAsStream(inputFilePath); - final JsonNode input = objectMapper.readTree(inputIs); - - final String expectedFilePath = folder + "/expected.json"; - final InputStream expectedIs = getClass().getClassLoader().getResourceAsStream(expectedFilePath); - final JsonNode expected = objectMapper.readTree(expectedIs); - - final JsonNode actual = processor.prepareSecretsForOutput(input, specs); - - assertEquals(expected, actual); - } - - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/SecretCoordinateTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/SecretCoordinateTest.java deleted file mode 100644 index 8f27fae17de28..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/SecretCoordinateTest.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence.split_secrets; - -import static org.junit.jupiter.api.Assertions.*; - -import org.junit.jupiter.api.Test; - -class SecretCoordinateTest { - - @Test - void testGetFullCoordinate() { - final var coordinate = new SecretCoordinate("some_base", 1); - assertEquals("some_base_v1", coordinate.getFullCoordinate()); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/SecretsHelpersTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/SecretsHelpersTest.java deleted file mode 100644 index dee22d2aec664..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/SecretsHelpersTest.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence.split_secrets; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.config.persistence.split_secrets.test_cases.ArrayOneOfTestCase; -import io.airbyte.config.persistence.split_secrets.test_cases.ArrayTestCase; -import io.airbyte.config.persistence.split_secrets.test_cases.NestedObjectTestCase; -import io.airbyte.config.persistence.split_secrets.test_cases.NestedOneOfTestCase; -import io.airbyte.config.persistence.split_secrets.test_cases.OneOfSecretTestCase; -import io.airbyte.config.persistence.split_secrets.test_cases.OneOfTestCase; -import io.airbyte.config.persistence.split_secrets.test_cases.OptionalPasswordTestCase; -import io.airbyte.config.persistence.split_secrets.test_cases.PostgresSshKeyTestCase; -import io.airbyte.config.persistence.split_secrets.test_cases.SimpleTestCase; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.regex.Pattern; -import java.util.stream.Stream; -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -@SuppressWarnings({"PMD.JUnit5TestShouldBePackagePrivate", "PMD.UnusedPrivateMethod"}) -public class SecretsHelpersTest { - - public static final UUID WORKSPACE_ID = UUID.fromString("e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2"); - - // use a fixed sequence of UUIDs so it's easier to have static files for the test cases - public static final List UUIDS = List.of( - UUID.fromString("9eba44d8-51e7-48f1-bde2-619af0e42c22"), - UUID.fromString("2c2ef2b3-259a-4e73-96d1-f56dacee2e5e"), - UUID.fromString("1206db5b-b968-4df1-9a76-f3fcdae7e307"), - UUID.fromString("c03ef566-79a7-4e77-b6f3-d23d2528f25a"), - UUID.fromString("35f08b15-bfd9-44fe-a8c7-5aa9e156c0f5"), - UUID.fromString("159c0b6f-f9ae-48b4-b7f3-bcac4ba15743"), - UUID.fromString("71af9b74-4e61-4cff-830e-3bf1ec18fbc0"), - UUID.fromString("067a62fc-d007-44dd-a8f6-0fd10823713d"), - UUID.fromString("c4967ac9-0856-4733-a21e-1d51ca8f254d")); - - private static final String PROVIDE_TEST_CASES = "provideTestCases"; - - /** - * This is a bit of a non-standard way of specifying test case 
parameterization for JUnit, but it's - * intended to let you treat most of the JSON involved in the tests as static files. - */ - private static Stream<Arguments> provideTestCases() { - return Stream.of( - new OptionalPasswordTestCase(), - new SimpleTestCase(), - new NestedObjectTestCase(), - new OneOfTestCase(), - new OneOfSecretTestCase(), - new ArrayTestCase(), - new ArrayOneOfTestCase(), - new NestedOneOfTestCase(), - new PostgresSshKeyTestCase()).map(Arguments::of); - } - - @ParameterizedTest - @MethodSource(PROVIDE_TEST_CASES) - @SuppressWarnings({"PMD.JUnitTestsShouldIncludeAssert"}) - public void validateTestCases(final SecretsTestCase testCase) throws JsonValidationException { - final var validator = new JsonSchemaValidator(); - final var spec = testCase.getSpec().getConnectionSpecification(); - validator.ensure(spec, testCase.getFullConfig()); - validator.ensure(spec, testCase.getUpdateConfig()); - } - - @ParameterizedTest - @MethodSource(PROVIDE_TEST_CASES) - void testSplit(final SecretsTestCase testCase) { - final var uuidIterator = UUIDS.iterator(); - final var inputConfig = testCase.getFullConfig(); - final var inputConfigCopy = inputConfig.deepCopy(); - final var splitConfig = SecretsHelpers.splitConfig( - uuidIterator::next, - WORKSPACE_ID, - inputConfig, - testCase.getSpec().getConnectionSpecification()); - - assertEquals(testCase.getPartialConfig(), splitConfig.getPartialConfig()); - assertEquals(testCase.getFirstSecretMap(), splitConfig.getCoordinateToPayload()); - - // check that we didn't mutate the input configs - assertEquals(inputConfigCopy, inputConfig); - - // check that keys for Google Secret Manager fit the requirements: - // A secret ID is a string with a maximum length of 255 characters and can contain - // uppercase and lowercase letters, numerals, and the hyphen (-) and underscore (_) characters.
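- // e.g. the coordinates generated below take the form "airbyte_workspace_<workspace uuid>_secret_<secret uuid>_v1", - // which satisfies both constraints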
- // https://cloud.google.com/secret-manager/docs/reference/rpc/google.cloud.secretmanager.v1#createsecretrequest - final var gsmKeyCharacterPattern = Pattern.compile("^[a-zA-Z0-9_-]+$"); - - // sanity check pattern with a character that isn't allowed - assertFalse(gsmKeyCharacterPattern.matcher("/").matches()); - - // check every key for the pattern and max length - splitConfig.getCoordinateToPayload().keySet().forEach(key -> { - assertTrue(gsmKeyCharacterPattern.matcher(key.getFullCoordinate()).matches(), "Invalid character in key: " + key); - assertTrue(key.toString().length() <= 255, "Key is too long: " + key.toString().length()); - }); - } - - @ParameterizedTest - @MethodSource(PROVIDE_TEST_CASES) - void testSplitUpdate(final SecretsTestCase testCase) { - final var uuidIterator = UUIDS.iterator(); - final var inputPartialConfig = testCase.getPartialConfig(); - final var inputUpdateConfig = testCase.getUpdateConfig(); - final var inputPartialConfigCopy = inputPartialConfig.deepCopy(); - final var inputUpdateConfigCopy = inputUpdateConfig.deepCopy(); - final var secretPersistence = new MemorySecretPersistence(); - - for (final Map.Entry entry : testCase.getFirstSecretMap().entrySet()) { - secretPersistence.write(entry.getKey(), entry.getValue()); - } - - final var updatedSplit = SecretsHelpers.splitAndUpdateConfig( - uuidIterator::next, - WORKSPACE_ID, - inputPartialConfig, - inputUpdateConfig, - testCase.getSpec().getConnectionSpecification(), - secretPersistence::read); - - assertEquals(testCase.getUpdatedPartialConfig(), updatedSplit.getPartialConfig()); - assertEquals(testCase.getSecondSecretMap(), updatedSplit.getCoordinateToPayload()); - - // check that we didn't mutate the input configs - assertEquals(inputPartialConfigCopy, inputPartialConfig); - assertEquals(inputUpdateConfigCopy, inputUpdateConfig); - } - - @ParameterizedTest - @MethodSource(PROVIDE_TEST_CASES) - void testCombine(final SecretsTestCase testCase) { - final var secretPersistence = new MemorySecretPersistence(); - testCase.getPersistenceUpdater().accept(secretPersistence); - - final var inputPartialConfig = testCase.getPartialConfig(); - final var inputPartialConfigCopy = inputPartialConfig.deepCopy(); - final var actualCombinedConfig = SecretsHelpers.combineConfig(testCase.getPartialConfig(), secretPersistence); - - assertEquals(testCase.getFullConfig(), actualCombinedConfig); - - // check that we didn't mutate the input configs - assertEquals(inputPartialConfigCopy, inputPartialConfig); - } - - @Test - void testMissingSecretShouldThrowException() { - final var testCase = new SimpleTestCase(); - final var secretPersistence = new MemorySecretPersistence(); - - // intentionally do not seed the persistence with - // testCase.getPersistenceUpdater().accept(secretPersistence); - - assertThrows(RuntimeException.class, () -> SecretsHelpers.combineConfig(testCase.getPartialConfig(), secretPersistence)); - } - - @Test - void testUpdatingSecretsOneAtATime() { - final var uuidIterator = UUIDS.iterator(); - final var secretPersistence = new MemorySecretPersistence(); - final var testCase = new NestedObjectTestCase(); - - final var splitConfig = SecretsHelpers.splitConfig( - uuidIterator::next, - WORKSPACE_ID, - testCase.getFullConfig(), - testCase.getSpec().getConnectionSpecification()); - - assertEquals(testCase.getPartialConfig(), splitConfig.getPartialConfig()); - assertEquals(testCase.getFirstSecretMap(), splitConfig.getCoordinateToPayload()); - - for (final Map.Entry entry : 
splitConfig.getCoordinateToPayload().entrySet()) { - secretPersistence.write(entry.getKey(), entry.getValue()); - } - - final var updatedSplit1 = SecretsHelpers.splitAndUpdateConfig( - uuidIterator::next, - WORKSPACE_ID, - testCase.getPartialConfig(), - testCase.getFullConfigUpdate1(), - testCase.getSpec().getConnectionSpecification(), - secretPersistence::read); - - assertEquals(testCase.getUpdatedPartialConfigAfterUpdate1(), updatedSplit1.getPartialConfig()); - assertEquals(testCase.getSecretMapAfterUpdate1(), updatedSplit1.getCoordinateToPayload()); - - for (final Map.Entry entry : updatedSplit1.getCoordinateToPayload().entrySet()) { - secretPersistence.write(entry.getKey(), entry.getValue()); - } - - final var updatedSplit2 = SecretsHelpers.splitAndUpdateConfig( - uuidIterator::next, - WORKSPACE_ID, - updatedSplit1.getPartialConfig(), - testCase.getFullConfigUpdate2(), - testCase.getSpec().getConnectionSpecification(), - secretPersistence::read); - - assertEquals(testCase.getUpdatedPartialConfigAfterUpdate2(), updatedSplit2.getPartialConfig()); - assertEquals(testCase.getSecretMapAfterUpdate2(), updatedSplit2.getCoordinateToPayload()); - } - - @ParameterizedTest - @MethodSource(PROVIDE_TEST_CASES) - void testSecretPath(final SecretsTestCase testCase) throws IOException { - final JsonNode spec = testCase.getSpec().getConnectionSpecification(); - - final List secretsPaths = SecretsHelpers.getSortedSecretPaths(spec); - - Assertions.assertThat(secretsPaths).containsExactlyElementsOf(testCase.getExpectedSecretsPaths()); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/SecretsTestCase.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/SecretsTestCase.java deleted file mode 100644 index 17408242614a3..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/SecretsTestCase.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence.split_secrets; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.protocol.models.ConnectorSpecification; -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.function.Consumer; - -/** - * Provides an easy way of accessing a set of resource files in a specific directory when testing - * secrets-related helpers. 
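- * For example, a test case whose getName() returns "simple" is backed by classpath resources such - * as simple/spec.json, simple/full_config.json, simple/partial_config.json and simple/update_config.json.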
- */ -public interface SecretsTestCase { - - String PREFIX = "airbyte_workspace_"; - String SECRET = "_secret_"; - - String getName(); - - Map getFirstSecretMap(); - - Map getSecondSecretMap(); - - Consumer getPersistenceUpdater(); - - default ConnectorSpecification getSpec() { - return Exceptions.toRuntime(() -> new ConnectorSpecification().withConnectionSpecification(getNodeResource(getName(), "spec.json"))); - } - - default JsonNode getFullConfig() { - return Exceptions.toRuntime(() -> getNodeResource(getName(), "full_config.json")); - } - - default JsonNode getPartialConfig() { - return Exceptions.toRuntime(() -> getNodeResource(getName(), "partial_config.json")); - } - - default JsonNode getSortedPartialConfig() { - return Exceptions.toRuntime(() -> getNodeResource(getName(), "partial_config.json")); - } - - default JsonNode getUpdateConfig() { - return Exceptions.toRuntime(() -> getNodeResource(getName(), "update_config.json")); - } - - default JsonNode getUpdatedPartialConfig() { - return Exceptions.toRuntime(() -> getNodeResource(getName(), "updated_partial_config.json")); - } - - default JsonNode getNodeResource(final String testCase, final String fileName) throws IOException { - return Jsons.deserialize(MoreResources.readResource(testCase + "/" + fileName)); - } - - default List getExpectedSecretsPaths() throws IOException { - return Arrays.stream( - MoreResources.readResource(getName() + "/" + "expectedPaths") - .trim() - .split(";")) - .sorted() - .toList(); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/VaultSecretPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/VaultSecretPersistenceTest.java deleted file mode 100644 index 85a58895593ae..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/VaultSecretPersistenceTest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence.split_secrets; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import lombok.val; -import org.apache.commons.lang3.RandomUtils; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.testcontainers.vault.VaultContainer; - -class VaultSecretPersistenceTest { - - private VaultSecretPersistence persistence; - private String baseCoordinate; - - private VaultContainer vaultContainer; - - @BeforeEach - void setUp() { - vaultContainer = new VaultContainer("vault").withVaultToken("vault-dev-token-id"); - vaultContainer.start(); - - val vaultAddress = "http://" + vaultContainer.getHost() + ":" + vaultContainer.getFirstMappedPort(); - - persistence = new VaultSecretPersistence(vaultAddress, "secret/testing", "vault-dev-token-id"); - baseCoordinate = "VaultSecretPersistenceIntegrationTest_coordinate_" + RandomUtils.nextInt() % 20000; - } - - @AfterEach - void tearDown() { - vaultContainer.stop(); - } - - @Test - void testReadWriteUpdate() { - val coordinate1 = new SecretCoordinate(baseCoordinate, 1); - - // try reading non-existent value - val firstRead = persistence.read(coordinate1); - assertThat(firstRead.isEmpty()).isTrue(); - - // write - val firstPayload = "abc"; - persistence.write(coordinate1, firstPayload); - val secondRead = persistence.read(coordinate1); - assertThat(secondRead.isPresent()).isTrue(); - assertEquals(firstPayload, secondRead.get()); - - // update - val secondPayload = "def"; - val coordinate2 = new SecretCoordinate(baseCoordinate, 2); - persistence.write(coordinate2, secondPayload); - val thirdRead = persistence.read(coordinate2); - assertThat(thirdRead.isPresent()).isTrue(); - assertEquals(secondPayload, thirdRead.get()); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/ArrayOneOfTestCase.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/ArrayOneOfTestCase.java deleted file mode 100644 index df27aed951400..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/ArrayOneOfTestCase.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence.split_secrets.test_cases; - -import io.airbyte.config.persistence.split_secrets.SecretCoordinate; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHelpersTest; -import io.airbyte.config.persistence.split_secrets.SecretsTestCase; -import java.util.Map; -import java.util.function.Consumer; - -public class ArrayOneOfTestCase implements SecretsTestCase { - - @Override - public String getName() { - return "array_of_oneof"; - } - - @Override - public Map getFirstSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), "hunter1", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), "hunter2"); - } - - @Override - public Map getSecondSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 2), "hunter3", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 2), "hunter4"); - } - - @Override - public Consumer getPersistenceUpdater() { - return secretPersistence -> { - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), - "hunter1"); - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), - "hunter2"); - }; - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/ArrayTestCase.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/ArrayTestCase.java deleted file mode 100644 index 04e8e3a9df986..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/ArrayTestCase.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence.split_secrets.test_cases; - -import io.airbyte.config.persistence.split_secrets.SecretCoordinate; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHelpersTest; -import io.airbyte.config.persistence.split_secrets.SecretsTestCase; -import java.util.Map; -import java.util.function.Consumer; - -public class ArrayTestCase implements SecretsTestCase { - - private final static String KEY1 = "key1"; - private final static String KEY2 = "key2"; - private final static String KEY3 = "key3"; - - @Override - public String getName() { - return "array"; - } - - @Override - public Map getFirstSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), KEY1, - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), KEY2, - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(2), 1), KEY3, - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(3), 1), KEY1, - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(4), 1), KEY2, - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(5), 1), KEY3); - } - - @Override - public Map getSecondSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 2), "key8", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 2), "key9", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(2), 2), "key10", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(3), 2), "key5", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(4), 2), "key6", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(5), 2), "key7"); - } - - @Override - public Consumer getPersistenceUpdater() { - return secretPersistence -> { - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), - KEY1); - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), - KEY2); - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(2), 1), - KEY3); - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(3), 1), - KEY1); - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(4), 1), - KEY2); - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(5), 1), - KEY3); - }; - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/NestedObjectTestCase.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/NestedObjectTestCase.java deleted file mode 100644 index 
08d91f1d15506..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/NestedObjectTestCase.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence.split_secrets.test_cases; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.config.persistence.split_secrets.SecretCoordinate; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHelpersTest; -import io.airbyte.config.persistence.split_secrets.SecretsTestCase; -import java.util.Map; -import java.util.function.Consumer; - -public class NestedObjectTestCase implements SecretsTestCase { - - @Override - public String getName() { - return "nested_object"; - } - - @Override - public Map getFirstSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), "hunter1", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), "hunter2"); - } - - @Override - public Map getSecondSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 2), "hunter3", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 2), "hunter4"); - } - - @Override - public Consumer getPersistenceUpdater() { - return secretPersistence -> { - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), - "hunter1"); - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), - "hunter2"); - }; - } - - // the following helpers are for the custom test suite for evaluating updating individual secret - // versions - - public JsonNode getUpdatedPartialConfigAfterUpdate1() { - return Exceptions.toRuntime(() -> getNodeResource(getName(), "updated_partial_config_update1.json")); - } - - public JsonNode getUpdatedPartialConfigAfterUpdate2() { - return Exceptions.toRuntime(() -> getNodeResource(getName(), "updated_partial_config_update2.json")); - } - - public JsonNode getFullConfigUpdate1() { - return Exceptions.toRuntime(() -> getNodeResource(getName(), "full_config_update1.json")); - } - - public JsonNode getFullConfigUpdate2() { - return Exceptions.toRuntime(() -> getNodeResource(getName(), "full_config_update2.json")); - } - - public Map getSecretMapAfterUpdate1() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 2), "hunter3", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), "hunter2"); - } - - public Map getSecretMapAfterUpdate2() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 2), "hunter3", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 2), "hunter4"); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/NestedOneOfTestCase.java 
b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/NestedOneOfTestCase.java deleted file mode 100644 index 80a8a2bfbb860..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/NestedOneOfTestCase.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence.split_secrets.test_cases; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.config.persistence.split_secrets.SecretCoordinate; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHelpersTest; -import io.airbyte.config.persistence.split_secrets.SecretsTestCase; -import java.util.Map; -import java.util.function.Consumer; - -public class NestedOneOfTestCase implements SecretsTestCase { - - @Override - public String getName() { - return "nested_oneof"; - } - - @Override - public Map getFirstSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), "hunter1", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), "hunter2"); - } - - @Override - public Map getSecondSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 2), "hunter3", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 2), "hunter4"); - } - - @Override - public Consumer getPersistenceUpdater() { - return secretPersistence -> { - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), - "hunter1"); - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), - "hunter2"); - }; - } - - // the following helpers are for the custom test suite for evaluating updating individual secret - // versions - - public JsonNode getUpdatedPartialConfigAfterUpdate1() { - return Exceptions.toRuntime(() -> getNodeResource(getName(), "updated_partial_config_update1.json")); - } - - public JsonNode getUpdatedPartialConfigAfterUpdate2() { - return Exceptions.toRuntime(() -> getNodeResource(getName(), "updated_partial_config_update2.json")); - } - - public JsonNode getFullConfigUpdate1() { - return Exceptions.toRuntime(() -> getNodeResource(getName(), "full_config_update1.json")); - } - - public JsonNode getFullConfigUpdate2() { - return Exceptions.toRuntime(() -> getNodeResource(getName(), "full_config_update2.json")); - } - - public Map getSecretMapAfterUpdate1() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 2), "hunter3", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), "hunter2"); - } - - public Map getSecretMapAfterUpdate2() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 2), "hunter3", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 2), "hunter4"); - } - -} diff --git 
a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/OneOfSecretTestCase.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/OneOfSecretTestCase.java deleted file mode 100644 index 16a45c34d560a..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/OneOfSecretTestCase.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence.split_secrets.test_cases; - -import io.airbyte.config.persistence.split_secrets.SecretCoordinate; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHelpersTest; -import io.airbyte.config.persistence.split_secrets.SecretsTestCase; -import java.util.Map; -import java.util.function.Consumer; - -public class OneOfSecretTestCase implements SecretsTestCase { - - @Override - public String getName() { - return "oneof_secret"; - } - - @Override - public Map getFirstSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), "access_token_1", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), "clientId_1", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(2), 1), "client_secret_1", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(3), 1), "refresh_token_1"); - } - - @Override - public Map getSecondSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 2), "access_token_2"); - } - - @Override - public Consumer getPersistenceUpdater() { - return secretPersistence -> { - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), - "access_token_1"); - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), - "clientId_1"); - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(2), 1), - "client_secret_1"); - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(3), 1), - "refresh_token_1"); - }; - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/OneOfTestCase.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/OneOfTestCase.java deleted file mode 100644 index 6de295e441727..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/OneOfTestCase.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence.split_secrets.test_cases; - -import io.airbyte.config.persistence.split_secrets.SecretCoordinate; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHelpersTest; -import io.airbyte.config.persistence.split_secrets.SecretsTestCase; -import java.util.Map; -import java.util.function.Consumer; - -public class OneOfTestCase implements SecretsTestCase { - - @Override - public String getName() { - return "oneof"; - } - - @Override - public Map getFirstSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), "hunter1", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), "hunter2"); - } - - @Override - public Map getSecondSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 2), "hunter3", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 2), "hunter4"); - } - - @Override - public Consumer getPersistenceUpdater() { - return secretPersistence -> { - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), - "hunter1"); - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), - "hunter2"); - }; - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/OptionalPasswordTestCase.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/OptionalPasswordTestCase.java deleted file mode 100644 index 37a9eb4e5cea9..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/OptionalPasswordTestCase.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.persistence.split_secrets.test_cases; - -import io.airbyte.config.persistence.split_secrets.SecretCoordinate; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsTestCase; -import java.util.Map; -import java.util.function.Consumer; - -public class OptionalPasswordTestCase implements SecretsTestCase { - - @Override - public String getName() { - return "optional_password"; - } - - @Override - public Map getFirstSecretMap() { - return Map.of(); - } - - @Override - public Map getSecondSecretMap() { - return Map.of(); - } - - @Override - public Consumer getPersistenceUpdater() { - return secretPersistence -> {}; - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/PostgresSshKeyTestCase.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/PostgresSshKeyTestCase.java deleted file mode 100644 index 74a176a2d0e49..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/PostgresSshKeyTestCase.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence.split_secrets.test_cases; - -import io.airbyte.config.persistence.split_secrets.SecretCoordinate; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHelpersTest; -import io.airbyte.config.persistence.split_secrets.SecretsTestCase; -import java.util.Map; -import java.util.function.Consumer; - -public class PostgresSshKeyTestCase implements SecretsTestCase { - - @Override - public String getName() { - return "postgres_ssh_key"; - } - - @Override - public Map getFirstSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), "hunter1", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), "hunter2"); - } - - @Override - public Map getSecondSecretMap() { - return Map.of( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 2), "hunter3", - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 2), "hunter4"); - } - - @Override - public Consumer getPersistenceUpdater() { - return secretPersistence -> { - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(0), 1), - "hunter1"); - secretPersistence.write( - new SecretCoordinate(PREFIX + SecretsHelpersTest.WORKSPACE_ID + SECRET + SecretsHelpersTest.UUIDS.get(1), 1), - "hunter2"); - }; - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/SimpleTestCase.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/SimpleTestCase.java deleted file mode 100644 index 8236eec224acd..0000000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/test_cases/SimpleTestCase.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.persistence.split_secrets.test_cases; - -import io.airbyte.config.persistence.split_secrets.SecretCoordinate; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHelpersTest; -import io.airbyte.config.persistence.split_secrets.SecretsTestCase; -import java.util.Map; -import java.util.function.Consumer; - -public class SimpleTestCase implements SecretsTestCase { - - @Override - public String getName() { - return "simple"; - } - - @Override - public Map getFirstSecretMap() { - return Map.of( - new SecretCoordinate("airbyte_workspace_" + SecretsHelpersTest.WORKSPACE_ID + "_secret_" + SecretsHelpersTest.UUIDS.get(0), 1), "hunter1"); - } - - @Override - public Map getSecondSecretMap() { - return Map.of( - new SecretCoordinate("airbyte_workspace_" + SecretsHelpersTest.WORKSPACE_ID + "_secret_" + SecretsHelpersTest.UUIDS.get(0), 2), "hunter2"); - } - - @Override - public Consumer getPersistenceUpdater() { - return secretPersistence -> { - secretPersistence.write( - new SecretCoordinate("airbyte_workspace_" + SecretsHelpersTest.WORKSPACE_ID + "_secret_" + SecretsHelpersTest.UUIDS.get(0), 1), - "hunter1"); - }; - } - -} diff --git a/airbyte-config/config-persistence/src/test/resources/array/expected.json b/airbyte-config/config-persistence/src/test/resources/array/expected.json deleted file mode 100644 index 0ca4d08720b6a..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array/expected.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "username": "charles", - "rotating_key_strings": ["**********", "**********", "**********"], - "rotating_key_objects": [ - { - "a": "**********" - }, - { - "a": "**********" - }, - { - "a": "**********" - } - ] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array/expectedPaths b/airbyte-config/config-persistence/src/test/resources/array/expectedPaths deleted file mode 100644 index 61c3501c4f3f3..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array/expectedPaths +++ /dev/null @@ -1 +0,0 @@ -$.rotating_key_strings[*];$.rotating_key_objects[*].a diff --git a/airbyte-config/config-persistence/src/test/resources/array/full_config.json b/airbyte-config/config-persistence/src/test/resources/array/full_config.json deleted file mode 100644 index 9efa335afb76c..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array/full_config.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "username": "charles", - "rotating_key_strings": ["key1", "key2", "key3"], - "rotating_key_objects": [{ "a": "key1" }, { "a": "key2" }, { "a": "key3" }] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array/partial_config.json b/airbyte-config/config-persistence/src/test/resources/array/partial_config.json deleted file mode 100644 index e7a6dd7909bb1..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array/partial_config.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "username": "charles", - "rotating_key_strings": [ - { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_c03ef566-79a7-4e77-b6f3-d23d2528f25a_v1" - }, - { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_35f08b15-bfd9-44fe-a8c7-5aa9e156c0f5_v1" - }, - { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_159c0b6f-f9ae-48b4-b7f3-bcac4ba15743_v1" - } - ], - "rotating_key_objects": [ - { - "a": { - "_secret": 
"airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v1" - } - }, - { - "a": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v1" - } - }, - { - "a": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_1206db5b-b968-4df1-9a76-f3fcdae7e307_v1" - } - } - ] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array/spec.json b/airbyte-config/config-persistence/src/test/resources/array/spec.json deleted file mode 100644 index 23834b218a8c2..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array/spec.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "type": "object", - "properties": { - "username": { - "type": "string" - }, - "rotating_key_strings": { - "type": "array", - "items": { - "type": "string", - "airbyte_secret": true - } - }, - "rotating_key_objects": { - "type": "array", - "items": { - "type": "object", - "required": ["a"], - "properties": { - "a": { - "type": "string", - "airbyte_secret": true - } - } - } - } - }, - "required": ["username", "rotating_key_strings", "rotating_key_objects"] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array/update_config.json b/airbyte-config/config-persistence/src/test/resources/array/update_config.json deleted file mode 100644 index f30ff6fd6aaa7..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array/update_config.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "username": "charles", - "rotating_key_strings": ["key5", "key6", "key7"], - "rotating_key_objects": [{ "a": "key8" }, { "a": "key9" }, { "a": "key10" }] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array/updated_partial_config.json b/airbyte-config/config-persistence/src/test/resources/array/updated_partial_config.json deleted file mode 100644 index 672a7de1bd2cb..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array/updated_partial_config.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "username": "charles", - "rotating_key_strings": [ - { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_c03ef566-79a7-4e77-b6f3-d23d2528f25a_v2" - }, - { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_35f08b15-bfd9-44fe-a8c7-5aa9e156c0f5_v2" - }, - { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_159c0b6f-f9ae-48b4-b7f3-bcac4ba15743_v2" - } - ], - "rotating_key_objects": [ - { - "a": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v2" - } - }, - { - "a": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v2" - } - }, - { - "a": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_1206db5b-b968-4df1-9a76-f3fcdae7e307_v2" - } - } - ] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array2/expected.json b/airbyte-config/config-persistence/src/test/resources/array2/expected.json deleted file mode 100644 index abaa3e233eef6..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array2/expected.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "cred": ["**********"] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array2/full_config.json b/airbyte-config/config-persistence/src/test/resources/array2/full_config.json deleted file mode 100644 index 7edbb656fb36f..0000000000000 --- 
a/airbyte-config/config-persistence/src/test/resources/array2/full_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "cred": ["hunter2"] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array2/partial_config.json b/airbyte-config/config-persistence/src/test/resources/array2/partial_config.json deleted file mode 100644 index 7ba03d02f4e52..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array2/partial_config.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "cred": [ - { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_159c0b6f-f9ae-48b4-b7f3-bcac4ba15743_v1" - } - ] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array2/spec.json b/airbyte-config/config-persistence/src/test/resources/array2/spec.json deleted file mode 100644 index 05c3bbea3900e..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array2/spec.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "type": "object", - "properties": { - "cred": { - "oneOf": [ - { - "type": "string", - "airbyte_secret": true - }, - { - "type": "array", - "items": { - "type": "string", - "airbyte_secret": true - } - } - ] - } - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/array2/update_config.json b/airbyte-config/config-persistence/src/test/resources/array2/update_config.json deleted file mode 100644 index e8270b1653cfd..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array2/update_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "cred": ["hunter20"] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array2/updated_partial_config.json b/airbyte-config/config-persistence/src/test/resources/array2/updated_partial_config.json deleted file mode 100644 index aab04a199c11c..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array2/updated_partial_config.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "cred": [ - { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_159c0b6f-f9ae-48b4-b7f3-bcac4ba15743_v2" - } - ] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/expected.json b/airbyte-config/config-persistence/src/test/resources/array_of_oneof/expected.json deleted file mode 100644 index 486406fa6e77a..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/expected.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "username": "charles", - "rotating_keys": [ - { - "key1": "**********" - }, - { - "key2": "**********", - "key3": "non-secret" - }, - "str" - ] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/expectedPaths b/airbyte-config/config-persistence/src/test/resources/array_of_oneof/expectedPaths deleted file mode 100644 index 29b1c466d2d64..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/expectedPaths +++ /dev/null @@ -1 +0,0 @@ -$.rotating_keys[*].key1;$.rotating_keys[*].key2 diff --git a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/full_config.json b/airbyte-config/config-persistence/src/test/resources/array_of_oneof/full_config.json deleted file mode 100644 index 54331c9c6a914..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/full_config.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "username": "charles", - "rotating_keys": [ - { - "key1": "hunter1" - }, - { - "key2": "hunter2", - "key3": "non-secret" - }, - "str" - ] -} diff --git 
a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/partial_config.json b/airbyte-config/config-persistence/src/test/resources/array_of_oneof/partial_config.json deleted file mode 100644 index 98253c061dc6e..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/partial_config.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "username": "charles", - "rotating_keys": [ - { - "key1": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v1" - } - }, - { - "key2": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v1" - }, - "key3": "non-secret" - }, - "str" - ] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/spec.json b/airbyte-config/config-persistence/src/test/resources/array_of_oneof/spec.json deleted file mode 100644 index 297c7df148479..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/spec.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "type": "object", - "properties": { - "username": { - "type": "string" - }, - "rotating_keys": { - "type": "array", - "items": { - "oneOf": [ - { - "type": "object", - "required": ["key1"], - "properties": { - "key1": { - "type": "string", - "airbyte_secret": true - } - } - }, - { - "type": "object", - "required": ["key2", "key3"], - "properties": { - "key2": { - "type": "string", - "airbyte_secret": true - }, - "key3": { - "type": "string" - } - } - }, - { - "type": "string" - } - ] - } - } - }, - "required": ["username", "rotating_keys"] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/update_config.json b/airbyte-config/config-persistence/src/test/resources/array_of_oneof/update_config.json deleted file mode 100644 index 1771c378cc588..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/update_config.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "username": "charles", - "rotating_keys": [ - { - "key1": "hunter3" - }, - { - "key2": "hunter4", - "key3": "non-secret" - }, - "str" - ] -} diff --git a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/updated_partial_config.json b/airbyte-config/config-persistence/src/test/resources/array_of_oneof/updated_partial_config.json deleted file mode 100644 index 89317f488e655..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/array_of_oneof/updated_partial_config.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "username": "charles", - "rotating_keys": [ - { - "key1": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v2" - } - }, - { - "key2": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v2" - }, - "key3": "non-secret" - }, - "str" - ] -} diff --git a/airbyte-config/config-persistence/src/test/resources/enum/expected.json b/airbyte-config/config-persistence/src/test/resources/enum/expected.json deleted file mode 100644 index 46f6281497d6a..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/enum/expected.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "account_id": "1234567891234567", - "start_date": "2022-04-01T00:00:00Z", - "access_token": "**********", - "include_deleted": false, - "fetch_thumbnail_images": false -} diff --git a/airbyte-config/config-persistence/src/test/resources/enum/full_config.json 
b/airbyte-config/config-persistence/src/test/resources/enum/full_config.json deleted file mode 100644 index da1a8ba13e1d3..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/enum/full_config.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "account_id": "1234567891234567", - "start_date": "2022-04-01T00:00:00Z", - "access_token": { - "_secret": "it-s-hidden" - }, - "include_deleted": false, - "fetch_thumbnail_images": false -} diff --git a/airbyte-config/config-persistence/src/test/resources/enum/spec.json b/airbyte-config/config-persistence/src/test/resources/enum/spec.json deleted file mode 100644 index 5b927b33e3cfa..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/enum/spec.json +++ /dev/null @@ -1,296 +0,0 @@ -{ - "type": "object", - "title": "Source Facebook Marketing", - "required": ["account_id", "start_date", "access_token"], - "properties": { - "end_date": { - "type": "string", - "order": 2, - "title": "End Date", - "format": "date-time", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - "examples": ["2017-01-26T00:00:00Z"], - "description": "The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated between start_date and this date will be replicated. Not setting this option will result in always syncing the latest data." - }, - "account_id": { - "type": "string", - "order": 0, - "title": "Account ID", - "examples": ["111111111111111"], - "description": "The Facebook Ad account ID to use when pulling data from the Facebook Marketing API." - }, - "start_date": { - "type": "string", - "order": 1, - "title": "Start Date", - "format": "date-time", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - "examples": ["2017-01-25T00:00:00Z"], - "description": "The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated." - }, - "access_token": { - "type": "string", - "order": 3, - "title": "Access Token", - "description": "The value of the access token generated. 
See the docs for more information", - "airbyte_secret": true - }, - "custom_insights": { - "type": "array", - "items": { - "type": "object", - "title": "InsightConfig", - "required": ["name"], - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "The name value of insight" - }, - "fields": { - "type": "array", - "items": { - "enum": [ - "account_currency", - "account_id", - "account_name", - "action_values", - "actions", - "ad_bid_value", - "ad_click_actions", - "ad_id", - "ad_impression_actions", - "ad_name", - "adset_bid_value", - "adset_end", - "adset_id", - "adset_name", - "adset_start", - "age_targeting", - "attribution_setting", - "auction_bid", - "auction_competitiveness", - "auction_max_competitor_bid", - "buying_type", - "campaign_id", - "campaign_name", - "canvas_avg_view_percent", - "canvas_avg_view_time", - "catalog_segment_actions", - "catalog_segment_value", - "catalog_segment_value_mobile_purchase_roas", - "catalog_segment_value_omni_purchase_roas", - "catalog_segment_value_website_purchase_roas", - "clicks", - "conversion_rate_ranking", - "conversion_values", - "conversions", - "converted_product_quantity", - "converted_product_value", - "cost_per_15_sec_video_view", - "cost_per_2_sec_continuous_video_view", - "cost_per_action_type", - "cost_per_ad_click", - "cost_per_conversion", - "cost_per_dda_countby_convs", - "cost_per_estimated_ad_recallers", - "cost_per_inline_link_click", - "cost_per_inline_post_engagement", - "cost_per_one_thousand_ad_impression", - "cost_per_outbound_click", - "cost_per_thruplay", - "cost_per_unique_action_type", - "cost_per_unique_click", - "cost_per_unique_conversion", - "cost_per_unique_inline_link_click", - "cost_per_unique_outbound_click", - "cpc", - "cpm", - "cpp", - "created_time", - "ctr", - "date_start", - "date_stop", - "dda_countby_convs", - "dda_results", - "engagement_rate_ranking", - "estimated_ad_recall_rate", - "estimated_ad_recall_rate_lower_bound", - "estimated_ad_recall_rate_upper_bound", - "estimated_ad_recallers", - "estimated_ad_recallers_lower_bound", - "estimated_ad_recallers_upper_bound", - "frequency", - "full_view_impressions", - "full_view_reach", - "gender_targeting", - "impressions", - "inline_link_click_ctr", - "inline_link_clicks", - "inline_post_engagement", - "instant_experience_clicks_to_open", - "instant_experience_clicks_to_start", - "instant_experience_outbound_clicks", - "interactive_component_tap", - "labels", - "location", - "mobile_app_purchase_roas", - "objective", - "optimization_goal", - "outbound_clicks", - "outbound_clicks_ctr", - "place_page_name", - "purchase_roas", - "qualifying_question_qualify_answer_rate", - "quality_ranking", - "quality_score_ectr", - "quality_score_ecvr", - "quality_score_organic", - "reach", - "social_spend", - "spend", - "total_postbacks", - "unique_actions", - "unique_clicks", - "unique_conversions", - "unique_ctr", - "unique_inline_link_click_ctr", - "unique_inline_link_clicks", - "unique_link_clicks_ctr", - "unique_outbound_clicks", - "unique_outbound_clicks_ctr", - "unique_video_continuous_2_sec_watched_actions", - "unique_video_view_15_sec", - "updated_time", - "video_15_sec_watched_actions", - "video_30_sec_watched_actions", - "video_avg_time_watched_actions", - "video_continuous_2_sec_watched_actions", - "video_p100_watched_actions", - "video_p25_watched_actions", - "video_p50_watched_actions", - "video_p75_watched_actions", - "video_p95_watched_actions", - "video_play_actions", - "video_play_curve_actions", - 
"video_play_retention_0_to_15s_actions", - "video_play_retention_20_to_60s_actions", - "video_play_retention_graph_actions", - "video_thruplay_watched_actions", - "video_time_watched_actions", - "website_ctr", - "website_purchase_roas", - "wish_bid" - ], - "title": "ValidEnums", - "description": "Generic enumeration.\n\nDerive from this class to define new enumerations." - }, - "title": "Fields", - "default": [], - "description": "A list of chosen fields for fields parameter" - }, - "end_date": { - "type": "string", - "title": "End Date", - "format": "date-time", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - "examples": ["2017-01-26T00:00:00Z"], - "description": "The date until which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this date will be replicated. Not setting this option will result in always syncing the latest data." - }, - "breakdowns": { - "type": "array", - "items": { - "enum": [ - "ad_format_asset", - "age", - "app_id", - "body_asset", - "call_to_action_asset", - "country", - "description_asset", - "device_platform", - "dma", - "frequency_value", - "gender", - "hourly_stats_aggregated_by_advertiser_time_zone", - "hourly_stats_aggregated_by_audience_time_zone", - "image_asset", - "impression_device", - "link_url_asset", - "place_page_id", - "platform_position", - "product_id", - "publisher_platform", - "region", - "skan_conversion_id", - "title_asset", - "video_asset" - ], - "title": "ValidBreakdowns", - "description": "Generic enumeration.\n\nDerive from this class to define new enumerations." - }, - "title": "Breakdowns", - "default": [], - "description": "A list of chosen breakdowns for breakdowns" - }, - "start_date": { - "type": "string", - "title": "Start Date", - "format": "date-time", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - "examples": ["2017-01-25T00:00:00Z"], - "description": "The date from which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z." - }, - "time_increment": { - "type": "integer", - "title": "Time Increment", - "default": 1, - "description": "Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only).", - "exclusiveMaximum": 90, - "exclusiveMinimum": 0 - }, - "action_breakdowns": { - "type": "array", - "items": { - "enum": [ - "action_canvas_component_name", - "action_carousel_card_id", - "action_carousel_card_name", - "action_destination", - "action_device", - "action_reaction", - "action_target_id", - "action_type", - "action_video_sound", - "action_video_type" - ], - "title": "ValidActionBreakdowns", - "description": "Generic enumeration.\n\nDerive from this class to define new enumerations." 
- }, - "title": "Action Breakdowns", - "default": [], - "description": "A list of chosen action_breakdowns for action_breakdowns" - } - }, - "description": "Config for custom insights" - }, - "order": 6, - "title": "Custom Insights", - "description": "A list which contains insights entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns)" - }, - "include_deleted": { - "type": "boolean", - "order": 4, - "title": "Include Deleted", - "default": false, - "description": "Include data from deleted Campaigns, Ads, and AdSets" - }, - "fetch_thumbnail_images": { - "type": "boolean", - "order": 5, - "title": "Fetch Thumbnail Images", - "default": false, - "description": "In each Ad Creative, fetch the thumbnail_url and store the result in thumbnail_data_url" - } - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_object/expected.json b/airbyte-config/config-persistence/src/test/resources/nested_object/expected.json deleted file mode 100644 index 73267f6d0797a..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_object/expected.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "username": "charles", - "password": "**********", - "nested": { - "password": "**********", - "not_password": 13 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_object/expectedPaths b/airbyte-config/config-persistence/src/test/resources/nested_object/expectedPaths deleted file mode 100644 index 4160e14762291..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_object/expectedPaths +++ /dev/null @@ -1 +0,0 @@ -$.password;$.nested.password diff --git a/airbyte-config/config-persistence/src/test/resources/nested_object/full_config.json b/airbyte-config/config-persistence/src/test/resources/nested_object/full_config.json deleted file mode 100644 index 197612ecea523..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_object/full_config.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "username": "charles", - "password": "hunter1", - "nested": { - "password": "hunter2", - "not_password": 13 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_object/full_config_update1.json b/airbyte-config/config-persistence/src/test/resources/nested_object/full_config_update1.json deleted file mode 100644 index 33bce99e34611..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_object/full_config_update1.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "username": "charles", - "password": "hunter3", - "nested": { - "password": "hunter2", - "not_password": 13 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_object/full_config_update2.json b/airbyte-config/config-persistence/src/test/resources/nested_object/full_config_update2.json deleted file mode 100644 index bcf3cdc83296a..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_object/full_config_update2.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "username": "charles", - "password": "hunter3", - "nested": { - "password": "hunter4", - "not_password": 13 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_object/partial_config.json b/airbyte-config/config-persistence/src/test/resources/nested_object/partial_config.json deleted file mode 100644 index e9e44d7ce52ce..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_object/partial_config.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - 
"username": "charles", - "password": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v1" - }, - "nested": { - "password": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v1" - }, - "not_password": 13 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_object/spec.json b/airbyte-config/config-persistence/src/test/resources/nested_object/spec.json deleted file mode 100644 index 71200431412de..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_object/spec.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "type": "object", - "properties": { - "username": { - "type": "string" - }, - "password": { - "type": "string", - "airbyte_secret": true - }, - "nested": { - "type": "object", - "properties": { - "password": { - "type": "string", - "airbyte_secret": true - }, - "not_password": { - "type": "number" - } - }, - "required": ["password", "not_password"] - } - }, - "required": ["username", "password", "nested"] -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_object/update_config.json b/airbyte-config/config-persistence/src/test/resources/nested_object/update_config.json deleted file mode 100644 index bcf3cdc83296a..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_object/update_config.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "username": "charles", - "password": "hunter3", - "nested": { - "password": "hunter4", - "not_password": 13 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_object/updated_partial_config.json b/airbyte-config/config-persistence/src/test/resources/nested_object/updated_partial_config.json deleted file mode 100644 index 24c73c6d182c9..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_object/updated_partial_config.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "username": "charles", - "password": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v2" - }, - "nested": { - "password": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v2" - }, - "not_password": 13 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_object/updated_partial_config_update1.json b/airbyte-config/config-persistence/src/test/resources/nested_object/updated_partial_config_update1.json deleted file mode 100644 index 02accccc92ee7..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_object/updated_partial_config_update1.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "username": "charles", - "password": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v2" - }, - "nested": { - "password": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v1" - }, - "not_password": 13 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_object/updated_partial_config_update2.json b/airbyte-config/config-persistence/src/test/resources/nested_object/updated_partial_config_update2.json deleted file mode 100644 index 24c73c6d182c9..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_object/updated_partial_config_update2.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "username": "charles", - 
"password": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v2" - }, - "nested": { - "password": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v2" - }, - "not_password": 13 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_oneof/expected.json b/airbyte-config/config-persistence/src/test/resources/nested_oneof/expected.json deleted file mode 100644 index 445f1ae5fe788..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_oneof/expected.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "username": "charles", - "password1": { - "key1": "**********" - }, - "password2": { - "key6": { - "key8": "**********" - } - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_oneof/expectedPaths b/airbyte-config/config-persistence/src/test/resources/nested_oneof/expectedPaths deleted file mode 100644 index 75e6804e2014b..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_oneof/expectedPaths +++ /dev/null @@ -1 +0,0 @@ -$.password1.key1;$.password1.key2.key3;$.password1.key2.key4;$.password2.key5;$.password2.key6.key7;$.password2.key6.key8 diff --git a/airbyte-config/config-persistence/src/test/resources/nested_oneof/full_config.json b/airbyte-config/config-persistence/src/test/resources/nested_oneof/full_config.json deleted file mode 100644 index 1a36cb495f867..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_oneof/full_config.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "username": "charles", - "password1": { - "key1": "hunter1" - }, - "password2": { - "key6": { - "key8": "hunter2" - } - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_oneof/partial_config.json b/airbyte-config/config-persistence/src/test/resources/nested_oneof/partial_config.json deleted file mode 100644 index 237372ee821e0..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_oneof/partial_config.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "username": "charles", - "password1": { - "key1": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v1" - } - }, - "password2": { - "key6": { - "key8": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v1" - } - } - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_oneof/spec.json b/airbyte-config/config-persistence/src/test/resources/nested_oneof/spec.json deleted file mode 100644 index fb7131ab77085..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_oneof/spec.json +++ /dev/null @@ -1,97 +0,0 @@ -{ - "type": "object", - "properties": { - "username": { - "type": "string" - }, - "password1": { - "oneOf": [ - { - "type": "object", - "required": ["key1"], - "properties": { - "key1": { - "type": "string", - "airbyte_secret": true - } - } - }, - { - "type": "object", - "required": ["key2"], - "properties": { - "key2": { - "oneOf": [ - { - "type": "object", - "required": ["key3"], - "properties": { - "key3": { - "type": "string", - "airbyte_secret": true - } - } - }, - { - "type": "object", - "required": ["key4"], - "properties": { - "key4": { - "type": "string", - "airbyte_secret": true - } - } - } - ] - } - } - } - ] - }, - "password2": { - "oneOf": [ - { - "type": "object", - "required": ["key5"], - 
"properties": { - "key5": { - "type": "string", - "airbyte_secret": true - } - } - }, - { - "type": "object", - "required": ["key6"], - "properties": { - "key6": { - "oneOf": [ - { - "type": "object", - "required": ["key7"], - "properties": { - "key7": { - "type": "string", - "airbyte_secret": true - } - } - }, - { - "type": "object", - "required": ["key8"], - "properties": { - "key8": { - "type": "string", - "airbyte_secret": true - } - } - } - ] - } - } - } - ] - } - }, - "required": ["username", "password1", "password2"] -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_oneof/update_config.json b/airbyte-config/config-persistence/src/test/resources/nested_oneof/update_config.json deleted file mode 100644 index 65cfc80f60df0..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_oneof/update_config.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "username": "charles", - "password1": { - "key1": "hunter3" - }, - "password2": { - "key6": { - "key8": "hunter4" - } - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/nested_oneof/updated_partial_config.json b/airbyte-config/config-persistence/src/test/resources/nested_oneof/updated_partial_config.json deleted file mode 100644 index eea535ddb8f38..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/nested_oneof/updated_partial_config.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "username": "charles", - "password1": { - "key1": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v2" - } - }, - "password2": { - "key6": { - "key8": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v2" - } - } - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/oneof/expected.json b/airbyte-config/config-persistence/src/test/resources/oneof/expected.json deleted file mode 100644 index f534e8fbe1315..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof/expected.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "username": "charles", - "password1": { - "key1": "**********" - }, - "password2": { - "key1": "**********" - }, - "password3": { - "non-secret": 42 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/oneof/expectedPaths b/airbyte-config/config-persistence/src/test/resources/oneof/expectedPaths deleted file mode 100644 index 7fdcd68edaeaa..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof/expectedPaths +++ /dev/null @@ -1 +0,0 @@ -$.password1.key1;$.password2.key1;$.password3.key1 diff --git a/airbyte-config/config-persistence/src/test/resources/oneof/full_config.json b/airbyte-config/config-persistence/src/test/resources/oneof/full_config.json deleted file mode 100644 index 83cdcaddbf76b..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof/full_config.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "username": "charles", - "password1": { - "key1": "hunter1" - }, - "password2": { - "key1": "hunter2" - }, - "password3": { - "non-secret": 42 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/oneof/partial_config.json b/airbyte-config/config-persistence/src/test/resources/oneof/partial_config.json deleted file mode 100644 index f4a217a706410..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof/partial_config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "username": "charles", - "password1": { - "key1": { - 
"_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v1" - } - }, - "password2": { - "key1": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v1" - } - }, - "password3": { - "non-secret": 42 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/oneof/spec.json b/airbyte-config/config-persistence/src/test/resources/oneof/spec.json deleted file mode 100644 index 3a472d3c5f02e..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof/spec.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "type": "object", - "properties": { - "username": { - "type": "string" - }, - "password1": { - "oneOf": [ - { - "type": "object", - "required": ["non-secret"], - "properties": { - "non-secret": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["key1"], - "properties": { - "key1": { - "type": "string", - "airbyte_secret": true - } - } - } - ] - }, - "password2": { - "oneOf": [ - { - "type": "object", - "required": ["key1"], - "properties": { - "key1": { - "type": "string", - "airbyte_secret": true - } - } - }, - { - "type": "object", - "required": ["non-secret"], - "properties": { - "non-secret": { - "type": "number" - } - } - } - ] - }, - "password3": { - "oneOf": [ - { - "type": "object", - "required": ["key1"], - "properties": { - "key1": { - "type": "string", - "airbyte_secret": true - } - } - }, - { - "type": "object", - "required": ["non-secret"], - "properties": { - "non-secret": { - "type": "number" - } - } - } - ] - } - }, - "required": ["username", "password1", "password2", "password3"] -} diff --git a/airbyte-config/config-persistence/src/test/resources/oneof/update_config.json b/airbyte-config/config-persistence/src/test/resources/oneof/update_config.json deleted file mode 100644 index 0ff6c5bb76f90..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof/update_config.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "username": "charles", - "password1": { - "key1": "hunter3" - }, - "password2": { - "key1": "hunter4" - }, - "password3": { - "non-secret": 42 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/oneof/updated_partial_config.json b/airbyte-config/config-persistence/src/test/resources/oneof/updated_partial_config.json deleted file mode 100644 index 8c9d671038e6c..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof/updated_partial_config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "username": "charles", - "password1": { - "key1": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v2" - } - }, - "password2": { - "key1": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v2" - } - }, - "password3": { - "non-secret": 42 - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/oneof_secret/expected.json b/airbyte-config/config-persistence/src/test/resources/oneof_secret/expected.json deleted file mode 100644 index 54f692e42a9e9..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof_secret/expected.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "credentials": { - "auth_method": "oauth2.0", - "client_id": "**********", - "client_secret": "**********", - "access_token": "**********", - "refresh_token": "**********" - } -} diff --git 
a/airbyte-config/config-persistence/src/test/resources/oneof_secret/expectedPaths b/airbyte-config/config-persistence/src/test/resources/oneof_secret/expectedPaths deleted file mode 100644 index 2e8f07aa898a8..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof_secret/expectedPaths +++ /dev/null @@ -1 +0,0 @@ -$.credentials.access_token;$.credentials.client_id;$.credentials.client_secret;$.credentials.refresh_token; diff --git a/airbyte-config/config-persistence/src/test/resources/oneof_secret/full_config.json b/airbyte-config/config-persistence/src/test/resources/oneof_secret/full_config.json deleted file mode 100644 index a8793dc672a5c..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof_secret/full_config.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "credentials": { - "auth_method": "oauth2.0", - "client_id": "clientId_1", - "client_secret": "client_secret_1", - "access_token": "access_token_1", - "refresh_token": "refresh_token_1" - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/oneof_secret/partial_config.json b/airbyte-config/config-persistence/src/test/resources/oneof_secret/partial_config.json deleted file mode 100644 index 0eeaac9caca7d..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof_secret/partial_config.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "credentials": { - "auth_method": "oauth2.0", - "client_id": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v1" - }, - "client_secret": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_1206db5b-b968-4df1-9a76-f3fcdae7e307_v1" - }, - "access_token": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v1" - }, - "refresh_token": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_c03ef566-79a7-4e77-b6f3-d23d2528f25a_v1" - } - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/oneof_secret/spec.json b/airbyte-config/config-persistence/src/test/resources/oneof_secret/spec.json deleted file mode 100644 index d8a54edba8e08..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof_secret/spec.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "type": "object", - "properties": { - "credentials": { - "title": "Authorization Method", - "type": "object", - "oneOf": [ - { - "type": "object", - "title": "OAuth2.0", - "required": [ - "client_id", - "client_secret", - "access_token", - "refresh_token" - ], - "properties": { - "auth_method": { - "type": "string", - "const": "oauth2.0", - "enum": ["oauth2.0"], - "default": "oauth2.0", - "order": 0 - }, - "client_id": { - "type": "string", - "title": "Client ID", - "description": "The Client ID of your application.", - "airbyte_secret": true - }, - "client_secret": { - "type": "string", - "title": "Client Secret", - "description": "The Client Secret of your application.", - "airbyte_secret": true - }, - "access_token": { - "type": "string", - "title": "Access Token", - "description": "Access Token for making authenticated requests.", - "airbyte_secret": true - }, - "refresh_token": { - "type": "string", - "title": "Refresh Token", - "description": "Refresh Token to renew the expired Access Token.", - "default": "", - "airbyte_secret": true - } - } - }, - { - "title": "Access Token", - "type": "object", - "required": ["access_token"], - "properties": { - "auth_method": { - "type": "string", 
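One behavior the oneof_secret fixtures capture: update_config.json switches auth_method from "oauth2.0" to "access_token", and the updated partial config keeps only the access_token coordinate (same base UUID, bumped to _v2) while the client_id, client_secret, and refresh_token coordinates simply drop out. A hypothetical sketch of that reconciliation (illustrative names, not the deleted module's API):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

final class BranchSwitchSketch {

  public static void main(String[] args) {
    final Map<String, Long> oldVersions = new LinkedHashMap<>();
    oldVersions.put("$.credentials.client_id", 1L);
    oldVersions.put("$.credentials.access_token", 1L);
    final Set<String> pathsInNewConfig = Set.of("$.credentials.access_token");

    final Map<String, Long> next = new LinkedHashMap<>();
    oldVersions.forEach((path, version) -> {
      if (pathsInNewConfig.contains(path)) {
        next.put(path, version + 1); // coordinate base is reused, version bumps
      }                              // otherwise the coordinate drops out
    });
    System.out.println(next); // {$.credentials.access_token=2}
  }
}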
- "const": "access_token", - "enum": ["access_token"], - "default": "access_token", - "order": 0 - }, - "access_token": { - "type": "string", - "title": "Access Token", - "airbyte_secret": true - } - } - } - ] - } - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/oneof_secret/update_config.json b/airbyte-config/config-persistence/src/test/resources/oneof_secret/update_config.json deleted file mode 100644 index c6df01a70110d..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof_secret/update_config.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "credentials": { - "auth_method": "access_token", - "access_token": "access_token_2" - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/oneof_secret/updated_partial_config.json b/airbyte-config/config-persistence/src/test/resources/oneof_secret/updated_partial_config.json deleted file mode 100644 index 1064cac031e52..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/oneof_secret/updated_partial_config.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "credentials": { - "auth_method": "access_token", - "access_token": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v2" - } - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/optional_password/expected.json b/airbyte-config/config-persistence/src/test/resources/optional_password/expected.json deleted file mode 100644 index 1ad13d50b0841..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/optional_password/expected.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "username": "charles" -} diff --git a/airbyte-config/config-persistence/src/test/resources/optional_password/expectedPaths b/airbyte-config/config-persistence/src/test/resources/optional_password/expectedPaths deleted file mode 100644 index deb2239ded28e..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/optional_password/expectedPaths +++ /dev/null @@ -1 +0,0 @@ -$.password diff --git a/airbyte-config/config-persistence/src/test/resources/optional_password/full_config.json b/airbyte-config/config-persistence/src/test/resources/optional_password/full_config.json deleted file mode 100644 index 1ad13d50b0841..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/optional_password/full_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "username": "charles" -} diff --git a/airbyte-config/config-persistence/src/test/resources/optional_password/partial_config.json b/airbyte-config/config-persistence/src/test/resources/optional_password/partial_config.json deleted file mode 100644 index 1ad13d50b0841..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/optional_password/partial_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "username": "charles" -} diff --git a/airbyte-config/config-persistence/src/test/resources/optional_password/spec.json b/airbyte-config/config-persistence/src/test/resources/optional_password/spec.json deleted file mode 100644 index 58342452c483e..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/optional_password/spec.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "type": "object", - "properties": { - "username": { - "type": "string" - }, - "password": { - "type": "string", - "airbyte_secret": true - } - }, - "required": ["username"] -} diff --git a/airbyte-config/config-persistence/src/test/resources/optional_password/update_config.json 
b/airbyte-config/config-persistence/src/test/resources/optional_password/update_config.json deleted file mode 100644 index 1ad13d50b0841..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/optional_password/update_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "username": "charles" -} diff --git a/airbyte-config/config-persistence/src/test/resources/optional_password/updated_partial_config.json b/airbyte-config/config-persistence/src/test/resources/optional_password/updated_partial_config.json deleted file mode 100644 index 1ad13d50b0841..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/optional_password/updated_partial_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "username": "charles" -} diff --git a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/expected.json b/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/expected.json deleted file mode 100644 index bca0f527b3101..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/expected.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "ssl": false, - "host": "host", - "port": 5432, - "database": "db", - "password": "**********", - "username": "user", - "tunnel_method": { - "ssh_key": "**********", - "tunnel_host": "host", - "tunnel_port": 22, - "tunnel_user": "user", - "tunnel_method": "SSH_KEY_AUTH" - }, - "replication_method": { - "method": "Standard" - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/expectedPaths b/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/expectedPaths deleted file mode 100644 index d60e732b51104..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/expectedPaths +++ /dev/null @@ -1 +0,0 @@ -$.password;$.tunnel_method.ssh_key;$.tunnel_method.tunnel_user_password diff --git a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/full_config.json b/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/full_config.json deleted file mode 100644 index 17785559e36e7..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/full_config.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "ssl": false, - "host": "host", - "port": 5432, - "database": "db", - "password": "hunter1", - "username": "user", - "tunnel_method": { - "ssh_key": "hunter2", - "tunnel_host": "host", - "tunnel_port": 22, - "tunnel_user": "user", - "tunnel_method": "SSH_KEY_AUTH" - }, - "replication_method": { - "method": "Standard" - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/partial_config.json b/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/partial_config.json deleted file mode 100644 index 4dce4ae3a2653..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/partial_config.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "ssl": false, - "host": "host", - "port": 5432, - "database": "db", - "password": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v1" - }, - "username": "user", - "tunnel_method": { - "ssh_key": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v1" - }, - "tunnel_host": "host", - "tunnel_port": 22, - "tunnel_user": "user", - "tunnel_method": "SSH_KEY_AUTH" - }, - "replication_method": { - "method": "Standard" - } -} diff --git 
a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/spec.json b/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/spec.json deleted file mode 100644 index a945736fd3941..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/spec.json +++ /dev/null @@ -1,220 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Postgres Source Spec", - "type": "object", - "required": ["host", "port", "database", "username"], - "additionalProperties": false, - "properties": { - "host": { - "title": "Host", - "description": "Hostname of the database.", - "type": "string", - "order": 0 - }, - "port": { - "title": "Port", - "description": "Port of the database.", - "type": "integer", - "minimum": 0, - "maximum": 65536, - "default": 5432, - "examples": ["5432"], - "order": 1 - }, - "database": { - "title": "DB Name", - "description": "Name of the database.", - "type": "string", - "order": 2 - }, - "username": { - "title": "User", - "description": "Username to use to access the database.", - "type": "string", - "order": 3 - }, - "password": { - "title": "Password", - "description": "Password associated with the username.", - "type": "string", - "airbyte_secret": true, - "order": 4 - }, - "ssl": { - "title": "Connect using SSL", - "description": "Encrypt client/server communications for increased security.", - "type": "boolean", - "default": false, - "order": 5 - }, - "replication_method": { - "type": "object", - "title": "Replication Method", - "description": "Replication method to use for extracting data from the database.", - "order": 6, - "oneOf": [ - { - "title": "Standard", - "additionalProperties": false, - "description": "Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.", - "required": ["method"], - "properties": { - "method": { - "type": "string", - "const": "Standard", - "enum": ["Standard"], - "default": "Standard", - "order": 0 - } - } - }, - { - "title": "Logical Replication (CDC)", - "additionalProperties": false, - "description": "Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the Postgres Source docs for more information.", - "required": ["method", "replication_slot", "publication"], - "properties": { - "method": { - "type": "string", - "const": "CDC", - "enum": ["CDC"], - "default": "CDC", - "order": 0 - }, - "plugin": { - "type": "string", - "description": "A logical decoding plug-in installed on the PostgreSQL server. `pgoutput` plug-in is used by default.\nIf replication table contains a lot of big jsonb values it is recommended to use `wal2json` plug-in. 
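A note on the expected.json fixtures in this diff: every collected secret value is rendered as the ten-character mask "**********" while non-secret siblings pass through untouched. A minimal sketch of that masking step, assuming Jackson (illustrative, not the deleted test harness):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

final class MaskSketch {

  private static final String MASK = "**********"; // matches the fixtures

  public static void main(String[] args) throws Exception {
    final ObjectMapper mapper = new ObjectMapper();
    final ObjectNode config = (ObjectNode) mapper.readTree(
        "{\"password\":\"hunter1\",\"tunnel_method\":{\"ssh_key\":\"hunter2\",\"tunnel_port\":22}}");
    config.put("password", MASK);
    ((ObjectNode) config.get("tunnel_method")).put("ssh_key", MASK);
    System.out.println(config);
  }
}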
For more information about `wal2json` plug-in read Postgres Source docs.", - "enum": ["pgoutput", "wal2json"], - "default": "pgoutput", - "order": 1 - }, - "replication_slot": { - "type": "string", - "description": "A plug-in logical replication slot.", - "order": 2 - }, - "publication": { - "type": "string", - "description": "A Postgres publication used for consuming changes.", - "order": 3 - } - } - } - ] - }, - "tunnel_method": { - "type": "object", - "title": "SSH Tunnel Method", - "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", - "oneOf": [ - { - "title": "No Tunnel", - "required": ["tunnel_method"], - "properties": { - "tunnel_method": { - "description": "No ssh tunnel needed to connect to database", - "type": "string", - "const": "NO_TUNNEL", - "order": 0 - } - } - }, - { - "title": "SSH Key Authentication", - "required": [ - "tunnel_method", - "tunnel_host", - "tunnel_port", - "tunnel_user", - "ssh_key" - ], - "properties": { - "tunnel_method": { - "description": "Connect through a jump server tunnel host using username and ssh key", - "type": "string", - "const": "SSH_KEY_AUTH", - "order": 0 - }, - "tunnel_host": { - "title": "SSH Tunnel Jump Server Host", - "description": "Hostname of the jump server host that allows inbound ssh tunnel.", - "type": "string", - "order": 1 - }, - "tunnel_port": { - "title": "SSH Connection Port", - "description": "Port on the proxy/jump server that accepts inbound ssh connections.", - "type": "integer", - "minimum": 0, - "maximum": 65536, - "default": 22, - "examples": ["22"], - "order": 2 - }, - "tunnel_user": { - "title": "SSH Login Username", - "description": "OS-level username for logging into the jump server host.", - "type": "string", - "order": 3 - }, - "ssh_key": { - "title": "SSH Private Key", - "description": "OS-level user account ssh key credentials for logging into the jump server host.", - "type": "string", - "airbyte_secret": true, - "multiline": true, - "order": 4 - } - } - }, - { - "title": "Password Authentication", - "required": [ - "tunnel_method", - "tunnel_host", - "tunnel_port", - "tunnel_user", - "tunnel_user_password" - ], - "properties": { - "tunnel_method": { - "description": "Connect through a jump server tunnel host using username and password authentication", - "type": "string", - "const": "SSH_PASSWORD_AUTH", - "order": 0 - }, - "tunnel_host": { - "title": "SSH Tunnel Jump Server Host", - "description": "Hostname of the jump server host that allows inbound ssh tunnel.", - "type": "string", - "order": 1 - }, - "tunnel_port": { - "title": "SSH Connection Port", - "description": "Port on the proxy/jump server that accepts inbound ssh connections.", - "type": "integer", - "minimum": 0, - "maximum": 65536, - "default": 22, - "examples": ["22"], - "order": 2 - }, - "tunnel_user": { - "title": "SSH Login Username", - "description": "OS-level username for logging into the jump server host", - "type": "string", - "order": 3 - }, - "tunnel_user_password": { - "title": "Password", - "description": "OS-level password for logging into the jump server host", - "type": "string", - "airbyte_secret": true, - "order": 4 - } - } - } - ] - } - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/update_config.json b/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/update_config.json deleted file mode 100644 index b3c5807f2991e..0000000000000 --- 
a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/update_config.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "ssl": false, - "host": "host", - "port": 5432, - "database": "db", - "password": "hunter3", - "username": "user", - "tunnel_method": { - "ssh_key": "hunter4", - "tunnel_host": "host", - "tunnel_port": 22, - "tunnel_user": "user", - "tunnel_method": "SSH_KEY_AUTH" - }, - "replication_method": { - "method": "Standard" - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/updated_partial_config.json b/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/updated_partial_config.json deleted file mode 100644 index 0ab6329c1b79e..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/postgres_ssh_key/updated_partial_config.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "ssl": false, - "host": "host", - "port": 5432, - "database": "db", - "password": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v2" - }, - "username": "user", - "tunnel_method": { - "ssh_key": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_2c2ef2b3-259a-4e73-96d1-f56dacee2e5e_v2" - }, - "tunnel_host": "host", - "tunnel_port": 22, - "tunnel_user": "user", - "tunnel_method": "SSH_KEY_AUTH" - }, - "replication_method": { - "method": "Standard" - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/simple/expected.json b/airbyte-config/config-persistence/src/test/resources/simple/expected.json deleted file mode 100644 index 9e17c22ba06aa..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/simple/expected.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "username": "charles", - "password": "**********" -} diff --git a/airbyte-config/config-persistence/src/test/resources/simple/expectedPaths b/airbyte-config/config-persistence/src/test/resources/simple/expectedPaths deleted file mode 100644 index deb2239ded28e..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/simple/expectedPaths +++ /dev/null @@ -1 +0,0 @@ -$.password diff --git a/airbyte-config/config-persistence/src/test/resources/simple/full_config.json b/airbyte-config/config-persistence/src/test/resources/simple/full_config.json deleted file mode 100644 index a3f38b18f36ca..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/simple/full_config.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "username": "charles", - "password": "hunter1" -} diff --git a/airbyte-config/config-persistence/src/test/resources/simple/partial_config.json b/airbyte-config/config-persistence/src/test/resources/simple/partial_config.json deleted file mode 100644 index fa5e1b76ecc3f..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/simple/partial_config.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "username": "charles", - "password": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v1" - } -} diff --git a/airbyte-config/config-persistence/src/test/resources/simple/spec.json b/airbyte-config/config-persistence/src/test/resources/simple/spec.json deleted file mode 100644 index ce9713a8ba0dc..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/simple/spec.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "type": "object", - "properties": { - "username": { - "type": "string" - }, - "password": { - "type": "string", - "airbyte_secret": true - } - }, - "required": 
["username", "password"] -} diff --git a/airbyte-config/config-persistence/src/test/resources/simple/update_config.json b/airbyte-config/config-persistence/src/test/resources/simple/update_config.json deleted file mode 100644 index 3059101e69f56..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/simple/update_config.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "username": "charles", - "password": "hunter2" -} diff --git a/airbyte-config/config-persistence/src/test/resources/simple/updated_partial_config.json b/airbyte-config/config-persistence/src/test/resources/simple/updated_partial_config.json deleted file mode 100644 index 13917f12379ee..0000000000000 --- a/airbyte-config/config-persistence/src/test/resources/simple/updated_partial_config.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "username": "charles", - "password": { - "_secret": "airbyte_workspace_e0eb0554-ffe0-4e9c-9dc0-ed7f52023eb2_secret_9eba44d8-51e7-48f1-bde2-619af0e42c22_v2" - } -} diff --git a/airbyte-config/init/Dockerfile b/airbyte-config/init/Dockerfile deleted file mode 100644 index b18390e8d5923..0000000000000 --- a/airbyte-config/init/Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -ARG ALPINE_IMAGE=alpine:3.13 -FROM ${ALPINE_IMAGE} AS seed - -WORKDIR /app - -# the sole purpose of this image is to seed the data volume with the default data -# that the app should have when it is first installed. -COPY bin/scripts scripts diff --git a/airbyte-config/init/build.gradle b/airbyte-config/init/build.gradle index d46a5e2c68889..239325682b1c6 100644 --- a/airbyte-config/init/build.gradle +++ b/airbyte-config/init/build.gradle @@ -8,7 +8,6 @@ dependencies { implementation 'commons-cli:commons-cli:1.4' implementation project(':airbyte-config:config-models') - implementation project(':airbyte-config:config-persistence') implementation libs.airbyte.protocol implementation project(':airbyte-json-validation') implementation libs.lombok @@ -18,17 +17,6 @@ dependencies { testImplementation 'com.squareup.okhttp3:mockwebserver:4.9.1' } -task copyScripts(type: Copy) { - dependsOn copyDocker - - from('scripts') - into 'build/docker/bin/scripts' -} - -tasks.named("buildDockerImage") { - dependsOn copyScripts -} - Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project) task validateIcons(type: JavaExec, dependsOn: [compileJava]) { diff --git a/airbyte-config/init/gradle.properties b/airbyte-config/init/gradle.properties deleted file mode 100644 index 9a0f2280fccc3..0000000000000 --- a/airbyte-config/init/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -dockerImageName=init diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigNotFoundException.java b/airbyte-config/init/src/main/java/io/airbyte/config/init/ConfigNotFoundException.java similarity index 61% rename from airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigNotFoundException.java rename to airbyte-config/init/src/main/java/io/airbyte/config/init/ConfigNotFoundException.java index 3175156491f54..7b4d55d70a8cd 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigNotFoundException.java +++ b/airbyte-config/init/src/main/java/io/airbyte/config/init/ConfigNotFoundException.java @@ -2,10 +2,7 @@ * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
*/ -package io.airbyte.config.persistence; - -import io.airbyte.config.AirbyteConfig; -import java.util.UUID; +package io.airbyte.config.init; public class ConfigNotFoundException extends Exception { @@ -19,14 +16,6 @@ public ConfigNotFoundException(final String type, final String configId) { this.configId = configId; } - public ConfigNotFoundException(final AirbyteConfig type, final String configId) { - this(type.toString(), configId); - } - - public ConfigNotFoundException(final AirbyteConfig type, final UUID uuid) { - this(type.toString(), uuid.toString()); - } - public String getType() { return type; } diff --git a/airbyte-config/init/src/main/java/io/airbyte/config/init/DefinitionsProvider.java b/airbyte-config/init/src/main/java/io/airbyte/config/init/DefinitionsProvider.java index 1109ac623d804..e1439feaebc85 100644 --- a/airbyte-config/init/src/main/java/io/airbyte/config/init/DefinitionsProvider.java +++ b/airbyte-config/init/src/main/java/io/airbyte/config/init/DefinitionsProvider.java @@ -6,7 +6,6 @@ import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; import java.util.List; import java.util.UUID; diff --git a/airbyte-config/init/src/main/java/io/airbyte/config/init/LocalDefinitionsProvider.java b/airbyte-config/init/src/main/java/io/airbyte/config/init/LocalDefinitionsProvider.java index 5b3e489b3998d..1ed7ca424fdd2 100644 --- a/airbyte-config/init/src/main/java/io/airbyte/config/init/LocalDefinitionsProvider.java +++ b/airbyte-config/init/src/main/java/io/airbyte/config/init/LocalDefinitionsProvider.java @@ -11,7 +11,6 @@ import io.airbyte.config.CombinedConnectorCatalog; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.ArrayList; diff --git a/airbyte-config/init/src/main/java/io/airbyte/config/init/RemoteDefinitionsProvider.java b/airbyte-config/init/src/main/java/io/airbyte/config/init/RemoteDefinitionsProvider.java index 4467767c76d95..f9cd62e0a978d 100644 --- a/airbyte-config/init/src/main/java/io/airbyte/config/init/RemoteDefinitionsProvider.java +++ b/airbyte-config/init/src/main/java/io/airbyte/config/init/RemoteDefinitionsProvider.java @@ -9,7 +9,6 @@ import io.airbyte.config.CombinedConnectorCatalog; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; import io.micronaut.cache.annotation.CacheConfig; import io.micronaut.cache.annotation.Cacheable; import io.micronaut.context.annotation.Primary; @@ -49,8 +48,8 @@ public RemoteDefinitionsProvider(@Value("${airbyte.platform.remote-connector-cat @Value("${airbyte.platform.remote-connector-catalog.timeout-ms}") final long remoteCatalogTimeoutMs) throws URISyntaxException { log.info("Creating remote definitions provider for URL '{}'...", remoteCatalogUrl); - this.remoteDefinitionCatalogUrl = new URI(remoteCatalogUrl); - this.timeout = Duration.ofMillis(remoteCatalogTimeoutMs); + remoteDefinitionCatalogUrl = new URI(remoteCatalogUrl); + timeout = Duration.ofMillis(remoteCatalogTimeoutMs); } private Map getSourceDefinitionsMap() { diff --git a/airbyte-config/init/src/test/java/io/airbyte/config/init/LocalDefinitionsProviderTest.java 
b/airbyte-config/init/src/test/java/io/airbyte/config/init/LocalDefinitionsProviderTest.java index 33db1b46ce717..dd17bde840312 100644 --- a/airbyte-config/init/src/test/java/io/airbyte/config/init/LocalDefinitionsProviderTest.java +++ b/airbyte-config/init/src/test/java/io/airbyte/config/init/LocalDefinitionsProviderTest.java @@ -14,7 +14,6 @@ import io.airbyte.commons.yaml.Yamls; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; import java.io.IOException; import java.net.URI; import java.net.URL; diff --git a/airbyte-config/init/src/test/java/io/airbyte/config/init/RemoteDefinitionsProviderTest.java b/airbyte-config/init/src/test/java/io/airbyte/config/init/RemoteDefinitionsProviderTest.java index fe837f1209f74..e93a7d9ac9042 100644 --- a/airbyte-config/init/src/test/java/io/airbyte/config/init/RemoteDefinitionsProviderTest.java +++ b/airbyte-config/init/src/test/java/io/airbyte/config/init/RemoteDefinitionsProviderTest.java @@ -14,7 +14,6 @@ import io.airbyte.commons.util.MoreIterators; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; import java.io.IOException; import java.net.URI; import java.net.URL; diff --git a/airbyte-config/init/src/test/java/io/airbyte/config/init/SpecFormatTest.java b/airbyte-config/init/src/test/java/io/airbyte/config/init/SpecFormatTest.java index 59f9459b7489d..85d743f3be969 100644 --- a/airbyte-config/init/src/test/java/io/airbyte/config/init/SpecFormatTest.java +++ b/airbyte-config/init/src/test/java/io/airbyte/config/init/SpecFormatTest.java @@ -6,7 +6,6 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.json.JsonSchemas; -import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.util.ArrayList; @@ -41,7 +40,7 @@ void testOnAllExistingConfig() throws IOException, JsonValidationException { Assertions.assertThat(allSpecs) .flatMap(spec -> { try { - if (!JsonSecretsProcessor.isValidJsonSchema(spec)) { + if (!isValidJsonSchema(spec)) { throw new RuntimeException("Fail JsonSecretsProcessor validation"); } JsonSchemas.traverseJsonSchema(spec, (node, path) -> {}); @@ -54,4 +53,9 @@ void testOnAllExistingConfig() throws IOException, JsonValidationException { .isEmpty(); } + private static boolean isValidJsonSchema(final JsonNode schema) { + return schema.isObject() && ((schema.has("properties") && schema.get("properties").isObject()) + || (schema.has("oneOf") && schema.get("oneOf").isArray())); + } + } diff --git a/airbyte-commons-worker/build.gradle b/airbyte-connector-test-harnesses/acceptance-test-harness/build.gradle similarity index 96% rename from airbyte-commons-worker/build.gradle rename to airbyte-connector-test-harnesses/acceptance-test-harness/build.gradle index 9fedf175d574b..554efcc9dc112 100644 --- a/airbyte-commons-worker/build.gradle +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/build.gradle @@ -22,7 +22,6 @@ dependencies { implementation project(':airbyte-api') implementation project(':airbyte-commons-protocol') implementation project(':airbyte-config:config-models') - implementation project(':airbyte-config:config-persistence') implementation project(':airbyte-json-validation') implementation libs.airbyte.protocol diff --git 
a/airbyte-commons-worker/src/main/java/io/airbyte/workers/Worker.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/TestHarness.java similarity index 70% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/Worker.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/TestHarness.java index fa5263db75456..24f6d3fdbbd25 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/Worker.java +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/TestHarness.java @@ -4,16 +4,16 @@ package io.airbyte.workers; -import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.exception.TestHarnessException; import java.nio.file.Path; -public interface Worker<InputType, OutputType> { +public interface TestHarness<InputType, OutputType> { /** * Blocking call to run the worker's workflow. Once this is complete, getStatus should return either * COMPLETE, FAILED, or CANCELLED. */ - OutputType run(InputType inputType, Path jobRoot) throws WorkerException; + OutputType run(InputType inputType, Path jobRoot) throws TestHarnessException; /** * Cancels in-progress workers. Although all workers support cancel, in reality only the
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerUtils.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/TestHarnessUtils.java similarity index 94% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerUtils.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/TestHarnessUtils.java index 8bbd2b85f5bf9..f3bac71ac7252 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerUtils.java +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/TestHarnessUtils.java @@ -19,7 +19,7 @@ import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteTraceMessage; import io.airbyte.protocol.models.Config; -import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.exception.TestHarnessException; import io.airbyte.workers.helper.FailureHelper; import io.airbyte.workers.helper.FailureHelper.ConnectorCommand; import io.airbyte.workers.internal.AirbyteStreamFactory; @@ -40,9 +40,9 @@ import org.slf4j.LoggerFactory; // TODO:(Issue-4824): Figure out how to log Docker process information.
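(Aside for readers of this PR: the interface above is the pivot of the whole rename. A minimal sketch of what implementing the renamed interface looks like, with its generic parameters restored; EchoTestHarness is a hypothetical name used purely for illustration and is not part of this diff:)

import java.nio.file.Path;

import io.airbyte.workers.TestHarness;
import io.airbyte.workers.exception.TestHarnessException;

// Hypothetical, minimal implementation: echoes its input back as its output.
// A real harness would launch a connector process under jobRoot instead.
final class EchoTestHarness implements TestHarness<String, String> {

  @Override
  public String run(final String input, final Path jobRoot) throws TestHarnessException {
    return input;
  }

  @Override
  public void cancel() {
    // No background process to cancel in this sketch.
  }
}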
-public class WorkerUtils { +public class TestHarnessUtils { - private static final Logger LOGGER = LoggerFactory.getLogger(WorkerUtils.class); + private static final Logger LOGGER = LoggerFactory.getLogger(TestHarnessUtils.class); public static void gentleClose(final Process process, final long timeout, final TimeUnit timeUnit) { @@ -155,7 +155,7 @@ public static Map<Type, List<AirbyteMessage>> getMessagesByType(final Process pr messagesByType = streamFactory.create(IOs.newBufferedReader(stdout)) .collect(Collectors.groupingBy(AirbyteMessage::getType)); - WorkerUtils.gentleClose(process, timeOut, TimeUnit.MINUTES); + TestHarnessUtils.gentleClose(process, timeOut, TimeUnit.MINUTES); return messagesByType; } } @@ -192,12 +192,12 @@ public static String getStdErrFromErrorStream(final InputStream errorStream) thr } public static void throwWorkerException(final String errorMessage, final Process process) - throws WorkerException, IOException { + throws TestHarnessException, IOException { final String stderr = getStdErrFromErrorStream(process.getErrorStream()); if (stderr.isEmpty()) { - throw new WorkerException(errorMessage); + throw new TestHarnessException(errorMessage); } else { - throw new WorkerException(errorMessage + ": \n" + stderr); + throw new TestHarnessException(errorMessage + ": \n" + stderr); } }
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerConstants.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/WorkerConstants.java similarity index 100% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerConstants.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/WorkerConstants.java
diff --git a/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/exception/TestHarnessException.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/exception/TestHarnessException.java new file mode 100644 index 0000000000000..d86e21880a3a1 --- /dev/null +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/exception/TestHarnessException.java @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.workers.exception; + +public class TestHarnessException extends Exception { + + public TestHarnessException(final String message) { + super(message); + } + + public TestHarnessException(final String message, final Throwable cause) { + super(message, cause); + } + +}
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/CheckConnectionWorker.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/CheckConnectionTestHarness.java similarity index 56% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/general/CheckConnectionWorker.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/CheckConnectionTestHarness.java index 97df6a4298948..70b705acdc67f 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/CheckConnectionWorker.java +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/CheckConnectionTestHarness.java @@ -6,6 +6,6 @@ import io.airbyte.config.ConnectorJobOutput; import io.airbyte.config.StandardCheckConnectionInput; -import io.airbyte.workers.Worker; +import io.airbyte.workers.TestHarness; -public interface CheckConnectionWorker extends Worker<StandardCheckConnectionInput, ConnectorJobOutput> {} +public interface CheckConnectionTestHarness extends TestHarness<StandardCheckConnectionInput, ConnectorJobOutput> {}
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DbtTransformationRunner.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DbtTransformationRunner.java similarity index 92% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DbtTransformationRunner.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DbtTransformationRunner.java index 620ee5295bfd0..1a63b5134a7df 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DbtTransformationRunner.java +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DbtTransformationRunner.java @@ -19,8 +19,8 @@ import io.airbyte.commons.resources.MoreResources; import io.airbyte.config.OperatorDbt; import io.airbyte.config.ResourceRequirements; -import io.airbyte.workers.WorkerUtils; -import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.TestHarnessUtils; +import io.airbyte.workers.exception.TestHarnessException; import io.airbyte.workers.normalization.NormalizationRunner; import io.airbyte.workers.process.ProcessFactory; import java.nio.file.Path; @@ -92,7 +92,7 @@ public boolean transform(final String jobId, final List<String> dbtArguments = new ArrayList<>(); dbtArguments.add(DBT_ENTRYPOINT_SH); if (Strings.isNullOrEmpty(dbtConfig.getDbtArguments())) { - throw new WorkerException("Dbt Arguments are required"); + throw new TestHarnessException("Dbt Arguments are required"); } Collections.addAll(dbtArguments, Commandline.translateCommandline(dbtConfig.getDbtArguments())); process = @@ -115,13 +115,13 @@ public boolean transform(final String jobId, LineGobbler.gobble(process.getInputStream(), LOGGER::info, CONTAINER_LOG_MDC_BUILDER); LineGobbler.gobble(process.getErrorStream(), LOGGER::error, CONTAINER_LOG_MDC_BUILDER); - WorkerUtils.wait(process); + TestHarnessUtils.wait(process); return process.exitValue() == 0; } catch (final Exception e) { // make sure we kill the process on failure to avoid zombies.
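(A second aside, since TestHarnessException was just introduced above: every harness in this diff funnels connector-process failures through it, appending captured stderr when available, as TestHarnessUtils.throwWorkerException shows earlier. A hedged sketch of that convention in isolation; failWithStderr is an illustrative name only, not code from this PR:)

import io.airbyte.workers.exception.TestHarnessException;

final class FailureConvention {

  // Mirrors the wrap-and-rethrow pattern used by TestHarnessUtils.throwWorkerException:
  // include captured stderr in the exception message when there is any.
  static void failWithStderr(final String errorMessage, final String stderr) throws TestHarnessException {
    if (stderr == null || stderr.isEmpty()) {
      throw new TestHarnessException(errorMessage);
    }
    throw new TestHarnessException(errorMessage + ": \n" + stderr);
  }
}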
if (process != null) { - WorkerUtils.cancelProcess(process); + TestHarnessUtils.cancelProcess(process); } throw e; } @@ -136,9 +136,9 @@ public void close() throws Exception { } LOGGER.debug("Closing dbt transformation process"); - WorkerUtils.gentleClose(process, 1, TimeUnit.MINUTES); + TestHarnessUtils.gentleClose(process, 1, TimeUnit.MINUTES); if (process.isAlive() || process.exitValue() != 0) { - throw new WorkerException("Dbt transformation process wasn't successful"); + throw new TestHarnessException("Dbt transformation process wasn't successful"); } } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultCheckConnectionWorker.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DefaultCheckConnectionTestHarness.java similarity index 73% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultCheckConnectionWorker.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DefaultCheckConnectionTestHarness.java index 3e785f916fc9e..3c0a80d199ac0 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultCheckConnectionWorker.java +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DefaultCheckConnectionTestHarness.java @@ -18,9 +18,9 @@ import io.airbyte.protocol.models.AirbyteControlConnectorConfigMessage; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.workers.TestHarnessUtils; import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.WorkerUtils; -import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.exception.TestHarnessException; import io.airbyte.workers.helper.ConnectorConfigUpdater; import io.airbyte.workers.internal.AirbyteStreamFactory; import io.airbyte.workers.internal.DefaultAirbyteStreamFactory; @@ -33,9 +33,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class DefaultCheckConnectionWorker implements CheckConnectionWorker { +public class DefaultCheckConnectionTestHarness implements CheckConnectionTestHarness { - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultCheckConnectionWorker.class); + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultCheckConnectionTestHarness.class); private final IntegrationLauncher integrationLauncher; private final ConnectorConfigUpdater connectorConfigUpdater; @@ -43,20 +43,20 @@ public class DefaultCheckConnectionWorker implements CheckConnectionWorker { private Process process; - public DefaultCheckConnectionWorker(final IntegrationLauncher integrationLauncher, - final ConnectorConfigUpdater connectorConfigUpdater, - final AirbyteStreamFactory streamFactory) { + public DefaultCheckConnectionTestHarness(final IntegrationLauncher integrationLauncher, + final ConnectorConfigUpdater connectorConfigUpdater, + final AirbyteStreamFactory streamFactory) { this.integrationLauncher = integrationLauncher; this.connectorConfigUpdater = connectorConfigUpdater; this.streamFactory = streamFactory; } - public DefaultCheckConnectionWorker(final IntegrationLauncher integrationLauncher, final ConnectorConfigUpdater connectorConfigUpdater) { + public DefaultCheckConnectionTestHarness(final IntegrationLauncher integrationLauncher, final ConnectorConfigUpdater connectorConfigUpdater) { this(integrationLauncher, connectorConfigUpdater, new 
DefaultAirbyteStreamFactory()); } @Override - public ConnectorJobOutput run(final StandardCheckConnectionInput input, final Path jobRoot) throws WorkerException { + public ConnectorJobOutput run(final StandardCheckConnectionInput input, final Path jobRoot) throws TestHarnessException { LineGobbler.startSection("CHECK"); try { @@ -71,15 +71,15 @@ public ConnectorJobOutput run(final StandardCheckConnectionInput input, final Pa LineGobbler.gobble(process.getErrorStream(), LOGGER::error); - final Map<Type, List<AirbyteMessage>> messagesByType = WorkerUtils.getMessagesByType(process, streamFactory, 30); + final Map<Type, List<AirbyteMessage>> messagesByType = TestHarnessUtils.getMessagesByType(process, streamFactory, 30); final Optional<AirbyteConnectionStatus> connectionStatus = messagesByType .getOrDefault(Type.CONNECTION_STATUS, new ArrayList<>()).stream() .map(AirbyteMessage::getConnectionStatus) .findFirst(); if (input.getActorId() != null && input.getActorType() != null) { - final Optional<AirbyteControlConnectorConfigMessage> optionalConfigMsg = WorkerUtils.getMostRecentConfigControlMessage(messagesByType); - if (optionalConfigMsg.isPresent() && WorkerUtils.getDidControlMessageChangeConfig(inputConfig, optionalConfigMsg.get())) { + final Optional<AirbyteControlConnectorConfigMessage> optionalConfigMsg = TestHarnessUtils.getMostRecentConfigControlMessage(messagesByType); + if (optionalConfigMsg.isPresent() && TestHarnessUtils.getDidControlMessageChangeConfig(inputConfig, optionalConfigMsg.get())) { switch (input.getActorType()) { case SOURCE -> connectorConfigUpdater.updateSource( input.getActorId(), @@ -92,7 +92,7 @@ public ConnectorJobOutput run(final StandardCheckConnectionInput input, final Pa } } - final Optional<FailureReason> failureReason = WorkerUtils.getJobFailureReasonFromMessages(OutputType.CHECK_CONNECTION, messagesByType); + final Optional<FailureReason> failureReason = TestHarnessUtils.getJobFailureReasonFromMessages(OutputType.CHECK_CONNECTION, messagesByType); failureReason.ifPresent(jobOutput::setFailureReason); final int exitCode = process.exitValue(); @@ -107,7 +107,7 @@ public ConnectorJobOutput run(final StandardCheckConnectionInput input, final Pa LOGGER.info("Check connection job received output: {}", output); jobOutput.setCheckConnection(output); } else if (failureReason.isEmpty()) { - WorkerUtils.throwWorkerException("Error checking connection status: no status nor failure reason were outputted", process); + TestHarnessUtils.throwWorkerException("Error checking connection status: no status nor failure reason were outputted", process); } LineGobbler.endSection("CHECK"); return jobOutput; @@ -115,13 +115,13 @@ public ConnectorJobOutput run(final StandardCheckConnectionInput input, final Pa } catch (final Exception e) { LOGGER.error("Unexpected error while checking connection: ", e); LineGobbler.endSection("CHECK"); - throw new WorkerException("Unexpected error while getting checking connection.", e); + throw new TestHarnessException("Unexpected error while getting checking connection.", e); } } @Override public void cancel() { - WorkerUtils.cancelProcess(process); + TestHarnessUtils.cancelProcess(process); } }
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogWorker.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogTestHarness.java similarity index 73% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogWorker.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogTestHarness.java index 8e87369e9b331..8a9727c1dc84f 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogWorker.java +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogTestHarness.java @@ -18,9 +18,9 @@ import io.airbyte.protocol.models.AirbyteControlConnectorConfigMessage; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.workers.TestHarnessUtils; import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.WorkerUtils; -import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.exception.TestHarnessException; import io.airbyte.workers.helper.CatalogClientConverters; import io.airbyte.workers.helper.ConnectorConfigUpdater; import io.airbyte.workers.internal.AirbyteStreamFactory; @@ -35,9 +35,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class DefaultDiscoverCatalogWorker implements DiscoverCatalogWorker { +public class DefaultDiscoverCatalogTestHarness implements DiscoverCatalogTestHarness { - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDiscoverCatalogWorker.class); + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDiscoverCatalogTestHarness.class); private static final String WRITE_DISCOVER_CATALOG_LOGS_TAG = "call to write discover schema result"; private final IntegrationLauncher integrationLauncher; @@ -46,24 +46,24 @@ public class DefaultDiscoverCatalogWorker implements DiscoverCatalogWorker { private final AirbyteApiClient airbyteApiClient; private volatile Process process; - public DefaultDiscoverCatalogWorker(final AirbyteApiClient airbyteApiClient, - final IntegrationLauncher integrationLauncher, - final ConnectorConfigUpdater connectorConfigUpdater, - final AirbyteStreamFactory streamFactory) { + public DefaultDiscoverCatalogTestHarness(final AirbyteApiClient airbyteApiClient, + final IntegrationLauncher integrationLauncher, + final ConnectorConfigUpdater connectorConfigUpdater, + final AirbyteStreamFactory streamFactory) { this.airbyteApiClient = airbyteApiClient; this.integrationLauncher = integrationLauncher; this.streamFactory = streamFactory; this.connectorConfigUpdater = connectorConfigUpdater; } - public DefaultDiscoverCatalogWorker(final AirbyteApiClient airbyteApiClient, - final IntegrationLauncher integrationLauncher, - final ConnectorConfigUpdater connectorConfigUpdater) { + public DefaultDiscoverCatalogTestHarness(final AirbyteApiClient airbyteApiClient, + final IntegrationLauncher integrationLauncher, + final ConnectorConfigUpdater connectorConfigUpdater) { this(airbyteApiClient, integrationLauncher, connectorConfigUpdater, new DefaultAirbyteStreamFactory()); } @Override - public ConnectorJobOutput run(final StandardDiscoverCatalogInput discoverSchemaInput, final Path jobRoot) throws WorkerException { + public ConnectorJobOutput run(final StandardDiscoverCatalogInput discoverSchemaInput, final Path jobRoot) throws TestHarnessException { try { final JsonNode inputConfig = discoverSchemaInput.getConnectionConfiguration(); process = integrationLauncher.discover( @@ -76,22 +76,22 @@ public ConnectorJobOutput run(final StandardDiscoverCatalogInput discoverSchemaI LineGobbler.gobble(process.getErrorStream(), LOGGER::error); - final Map<Type, List<AirbyteMessage>> messagesByType = WorkerUtils.getMessagesByType(process, streamFactory, 30); + final Map<Type, List<AirbyteMessage>> messagesByType = TestHarnessUtils.getMessagesByType(process, streamFactory, 30); final Optional<AirbyteCatalog> catalog = messagesByType .getOrDefault(Type.CATALOG, new ArrayList<>()).stream() .map(AirbyteMessage::getCatalog) .findFirst(); - final Optional<AirbyteControlConnectorConfigMessage> optionalConfigMsg = WorkerUtils.getMostRecentConfigControlMessage(messagesByType); - if (optionalConfigMsg.isPresent() && WorkerUtils.getDidControlMessageChangeConfig(inputConfig, optionalConfigMsg.get())) { + final Optional<AirbyteControlConnectorConfigMessage> optionalConfigMsg = TestHarnessUtils.getMostRecentConfigControlMessage(messagesByType); + if (optionalConfigMsg.isPresent() && TestHarnessUtils.getDidControlMessageChangeConfig(inputConfig, optionalConfigMsg.get())) { connectorConfigUpdater.updateSource( UUID.fromString(discoverSchemaInput.getSourceId()), optionalConfigMsg.get().getConfig()); jobOutput.setConnectorConfigurationUpdated(true); } - final Optional<FailureReason> failureReason = WorkerUtils.getJobFailureReasonFromMessages(OutputType.DISCOVER_CATALOG_ID, messagesByType); + final Optional<FailureReason> failureReason = TestHarnessUtils.getJobFailureReasonFromMessages(OutputType.DISCOVER_CATALOG_ID, messagesByType); failureReason.ifPresent(jobOutput::setFailureReason); final int exitCode = process.exitValue(); @@ -106,13 +106,13 @@ public ConnectorJobOutput run(final StandardDiscoverCatalogInput discoverSchemaI WRITE_DISCOVER_CATALOG_LOGS_TAG); jobOutput.setDiscoverCatalogId(result.getCatalogId()); } else if (failureReason.isEmpty()) { - WorkerUtils.throwWorkerException("Integration failed to output a catalog struct and did not output a failure reason", process); + TestHarnessUtils.throwWorkerException("Integration failed to output a catalog struct and did not output a failure reason", process); } return jobOutput; - } catch (final WorkerException e) { + } catch (final TestHarnessException e) { throw e; } catch (final Exception e) { - throw new WorkerException("Error while discovering schema", e); + throw new TestHarnessException("Error while discovering schema", e); } } @@ -131,7 +131,7 @@ private SourceDiscoverSchemaWriteRequestBody buildSourceDiscoverSchemaWriteReque @Override public void cancel() { - WorkerUtils.cancelProcess(process); + TestHarnessUtils.cancelProcess(process); } }
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultGetSpecWorker.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DefaultGetSpecTestHarness.java similarity index 67% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultGetSpecWorker.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DefaultGetSpecTestHarness.java index cc19bdfc04fa7..81245535f1317 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultGetSpecWorker.java +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DefaultGetSpecTestHarness.java @@ -12,8 +12,8 @@ import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.workers.WorkerUtils; -import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.TestHarnessUtils; +import io.airbyte.workers.exception.TestHarnessException; import io.airbyte.workers.internal.AirbyteStreamFactory; import io.airbyte.workers.internal.DefaultAirbyteStreamFactory; import io.airbyte.workers.process.IntegrationLauncher; @@ -25,40 +25,40 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class DefaultGetSpecWorker implements GetSpecWorker { +public class DefaultGetSpecTestHarness implements GetSpecTestHarness { - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultGetSpecWorker.class); + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultGetSpecTestHarness.class); private final IntegrationLauncher integrationLauncher; private final AirbyteStreamFactory streamFactory; private Process process; - public DefaultGetSpecWorker(final IntegrationLauncher integrationLauncher, - final AirbyteStreamFactory streamFactory) { + public DefaultGetSpecTestHarness(final IntegrationLauncher integrationLauncher, + final AirbyteStreamFactory streamFactory) { this.integrationLauncher = integrationLauncher; this.streamFactory = streamFactory; } - public DefaultGetSpecWorker(final IntegrationLauncher integrationLauncher) { + public DefaultGetSpecTestHarness(final IntegrationLauncher integrationLauncher) { this(integrationLauncher, new DefaultAirbyteStreamFactory()); } @Override - public ConnectorJobOutput run(final JobGetSpecConfig config, final Path jobRoot) throws WorkerException { + public ConnectorJobOutput run(final JobGetSpecConfig config, final Path jobRoot) throws TestHarnessException { try { process = integrationLauncher.spec(jobRoot); final ConnectorJobOutput jobOutput = new ConnectorJobOutput().withOutputType(OutputType.SPEC); LineGobbler.gobble(process.getErrorStream(), LOGGER::error); - final Map<Type, List<AirbyteMessage>> messagesByType = WorkerUtils.getMessagesByType(process, streamFactory, 30); + final Map<Type, List<AirbyteMessage>> messagesByType = TestHarnessUtils.getMessagesByType(process, streamFactory, 30); final Optional<ConnectorSpecification> spec = messagesByType .getOrDefault(Type.SPEC, new ArrayList<>()).stream() .map(AirbyteMessage::getSpec) .findFirst(); - final Optional<FailureReason> failureReason = WorkerUtils.getJobFailureReasonFromMessages(OutputType.SPEC, messagesByType); + final Optional<FailureReason> failureReason = TestHarnessUtils.getJobFailureReasonFromMessages(OutputType.SPEC, messagesByType); failureReason.ifPresent(jobOutput::setFailureReason); final int exitCode = process.exitValue(); @@ -69,19 +69,19 @@ public ConnectorJobOutput run(final JobGetSpecConfig config, final Path jobRoot) if (spec.isPresent()) { jobOutput.setSpec(spec.get()); } else if (failureReason.isEmpty()) { - WorkerUtils.throwWorkerException("Integration failed to output a spec struct and did not output a failure reason", process); + TestHarnessUtils.throwWorkerException("Integration failed to output a spec struct and did not output a failure reason", process); } return jobOutput; } catch (final Exception e) { - throw new WorkerException(String.format("Error while getting spec from image %s", config.getDockerImage()), e); + throw new TestHarnessException(String.format("Error while getting spec from image %s", config.getDockerImage()), e); } } @Override public void cancel() { - WorkerUtils.cancelProcess(process); + TestHarnessUtils.cancelProcess(process); } }
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DiscoverCatalogWorker.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DiscoverCatalogTestHarness.java similarity index 56% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DiscoverCatalogWorker.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DiscoverCatalogTestHarness.java index cf428ff747578..06ee749747cca 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DiscoverCatalogWorker.java +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/DiscoverCatalogTestHarness.java @@ -6,6 +6,6 @@ import io.airbyte.config.ConnectorJobOutput; import io.airbyte.config.StandardDiscoverCatalogInput; -import io.airbyte.workers.Worker; +import io.airbyte.workers.TestHarness; -public interface DiscoverCatalogWorker extends Worker<StandardDiscoverCatalogInput, ConnectorJobOutput> {} +public interface DiscoverCatalogTestHarness extends TestHarness<StandardDiscoverCatalogInput, ConnectorJobOutput> {}
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/GetSpecWorker.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/GetSpecTestHarness.java similarity index 58% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/general/GetSpecWorker.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/GetSpecTestHarness.java index 7bdd790ed6eb7..ad8d8653cad31 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/GetSpecWorker.java +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/general/GetSpecTestHarness.java @@ -6,6 +6,6 @@ import io.airbyte.config.ConnectorJobOutput; import io.airbyte.config.JobGetSpecConfig; -import io.airbyte.workers.Worker; +import io.airbyte.workers.TestHarness; -public interface GetSpecWorker extends Worker<JobGetSpecConfig, ConnectorJobOutput> {} +public interface GetSpecTestHarness extends TestHarness<JobGetSpecConfig, ConnectorJobOutput> {}
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/CatalogClientConverters.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/helper/CatalogClientConverters.java similarity index 100% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/CatalogClientConverters.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/helper/CatalogClientConverters.java
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/ConnectorConfigUpdater.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/helper/ConnectorConfigUpdater.java similarity index 100% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/ConnectorConfigUpdater.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/helper/ConnectorConfigUpdater.java
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/EntrypointEnvChecker.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/helper/EntrypointEnvChecker.java similarity index 94% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/EntrypointEnvChecker.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/helper/EntrypointEnvChecker.java index 1bb39a76dbdbf..8baed9aa4945d 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/EntrypointEnvChecker.java +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/helper/EntrypointEnvChecker.java @@ -4,7 +4,7 @@ package io.airbyte.workers.helper; -import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.exception.TestHarnessException; import io.airbyte.workers.process.ProcessFactory; import java.io.BufferedReader; import
java.io.IOException; @@ -33,7 +33,7 @@ public static String getEntrypointEnvVariable(final ProcessFactory processFactor final int jobAttempt, final Path jobRoot, final String imageName) - throws IOException, InterruptedException, WorkerException { + throws IOException, InterruptedException, TestHarnessException { final Process process = processFactory.create( "entrypoint-checker", jobId, diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/FailureHelper.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/helper/FailureHelper.java similarity index 59% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/FailureHelper.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/helper/FailureHelper.java index cdd1946e8ad79..9bc8bd6dc096a 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/FailureHelper.java +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/helper/FailureHelper.java @@ -5,7 +5,6 @@ package io.airbyte.workers.helper; import com.fasterxml.jackson.annotation.JsonValue; -import io.airbyte.config.AttemptFailureSummary; import io.airbyte.config.FailureReason; import io.airbyte.config.FailureReason.FailureOrigin; import io.airbyte.config.FailureReason.FailureType; @@ -45,12 +44,6 @@ public String toString() { } - private static final String WORKFLOW_TYPE_SYNC = "SyncWorkflow"; - private static final String ACTIVITY_TYPE_REPLICATE = "Replicate"; - private static final String ACTIVITY_TYPE_PERSIST = "Persist"; - private static final String ACTIVITY_TYPE_NORMALIZE = "Normalize"; - private static final String ACTIVITY_TYPE_DBT_RUN = "Run"; - public static FailureReason genericFailure(final Throwable t, final Long jobId, final Integer attemptNumber) { return new FailureReason() .withInternalMessage(t.getMessage()) @@ -141,99 +134,12 @@ public static FailureReason checkFailure(final Throwable t, .format("Checking %s connection failed - please review this connection's configuration to prevent future syncs from failing", origin)); } - public static FailureReason replicationFailure(final Throwable t, final Long jobId, final Integer attemptNumber) { - return genericFailure(t, jobId, attemptNumber) - .withFailureOrigin(FailureOrigin.REPLICATION) - .withExternalMessage("Something went wrong during replication"); - } - - public static FailureReason persistenceFailure(final Throwable t, final Long jobId, final Integer attemptNumber) { - return genericFailure(t, jobId, attemptNumber) - .withFailureOrigin(FailureOrigin.PERSISTENCE) - .withExternalMessage("Something went wrong during state persistence"); - } - - public static FailureReason normalizationFailure(final Throwable t, final Long jobId, final Integer attemptNumber) { - return genericFailure(t, jobId, attemptNumber) - .withFailureOrigin(FailureOrigin.NORMALIZATION) - .withExternalMessage("Something went wrong during normalization"); - } - - public static FailureReason normalizationFailure(final AirbyteTraceMessage m, final Long jobId, final Integer attemptNumber) { - return genericFailure(m, jobId, attemptNumber) - .withFailureOrigin(FailureOrigin.NORMALIZATION) - .withExternalMessage(m.getError().getMessage()); - } - - public static FailureReason dbtFailure(final Throwable t, final Long jobId, final Integer attemptNumber) { - return genericFailure(t, jobId, attemptNumber) - .withFailureOrigin(FailureOrigin.DBT) - 
.withExternalMessage("Something went wrong during dbt"); - } - public static FailureReason unknownOriginFailure(final Throwable t, final Long jobId, final Integer attemptNumber) { return genericFailure(t, jobId, attemptNumber) .withFailureOrigin(FailureOrigin.UNKNOWN) .withExternalMessage("An unknown failure occurred"); } - public static AttemptFailureSummary failureSummary(final Set failures, final Boolean partialSuccess) { - return new AttemptFailureSummary() - .withFailures(orderedFailures(failures)) - .withPartialSuccess(partialSuccess); - } - - public static AttemptFailureSummary failureSummaryForCancellation(final Long jobId, - final Integer attemptNumber, - final Set failures, - final Boolean partialSuccess) { - failures.add(new FailureReason() - .withFailureType(FailureType.MANUAL_CANCELLATION) - .withInternalMessage("Setting attempt to FAILED because the job was cancelled") - .withExternalMessage("This attempt was cancelled") - .withTimestamp(System.currentTimeMillis()) - .withMetadata(jobAndAttemptMetadata(jobId, attemptNumber))); - - return failureSummary(failures, partialSuccess); - } - - public static AttemptFailureSummary failureSummaryForTemporalCleaningJobState(final Long jobId, final Integer attemptNumber) { - final FailureReason failureReason = new FailureReason() - .withFailureOrigin(FailureOrigin.AIRBYTE_PLATFORM) - .withFailureType(FailureType.SYSTEM_ERROR) - .withInternalMessage( - "Setting attempt to FAILED because the temporal workflow for this connection was restarted, and existing job state was cleaned.") - .withExternalMessage("An internal Airbyte error has occurred. This sync will need to be retried.") - .withTimestamp(System.currentTimeMillis()) - .withMetadata(jobAndAttemptMetadata(jobId, attemptNumber)); - return new AttemptFailureSummary().withFailures(List.of(failureReason)); - } - - public static FailureReason failureReasonFromWorkflowAndActivity( - final String workflowType, - final String activityType, - final Throwable t, - final Long jobId, - final Integer attemptNumber) { - if (WORKFLOW_TYPE_SYNC.equals(workflowType) && ACTIVITY_TYPE_REPLICATE.equals(activityType)) { - return replicationFailure(t, jobId, attemptNumber); - } else if (WORKFLOW_TYPE_SYNC.equals(workflowType) && ACTIVITY_TYPE_PERSIST.equals(activityType)) { - return persistenceFailure(t, jobId, attemptNumber); - } else if (WORKFLOW_TYPE_SYNC.equals(workflowType) && ACTIVITY_TYPE_NORMALIZE.equals(activityType)) { - return normalizationFailure(t, jobId, attemptNumber); - } else if (WORKFLOW_TYPE_SYNC.equals(workflowType) && ACTIVITY_TYPE_DBT_RUN.equals(activityType)) { - return dbtFailure(t, jobId, attemptNumber); - } else { - return unknownOriginFailure(t, jobId, attemptNumber); - } - } - - public static FailureReason platformFailure(final Throwable t, final Long jobId, final Integer attemptNumber) { - return genericFailure(t, jobId, attemptNumber) - .withFailureOrigin(FailureOrigin.AIRBYTE_PLATFORM) - .withExternalMessage("Something went wrong within the airbyte platform"); - } - private static Metadata jobAndAttemptMetadata(final Long jobId, final Integer attemptNumber) { return new Metadata() .withAdditionalProperty(JOB_ID_METADATA_KEY, jobId) diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteDestination.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/AirbyteDestination.java similarity index 100% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteDestination.java 
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/AirbyteDestination.java diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriter.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriter.java similarity index 100% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriter.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriter.java diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriterFactory.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriterFactory.java similarity index 100% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriterFactory.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriterFactory.java diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteProtocolPredicate.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/AirbyteProtocolPredicate.java similarity index 100% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteProtocolPredicate.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/AirbyteProtocolPredicate.java diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteSource.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/AirbyteSource.java similarity index 100% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteSource.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/AirbyteSource.java diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteStreamFactory.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/AirbyteStreamFactory.java similarity index 100% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteStreamFactory.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/AirbyteStreamFactory.java diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java similarity index 94% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java index 4f1ee56bbd7ce..1310d2de8ddfa 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java +++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java @@ -17,9 +17,9 @@ import io.airbyte.config.WorkerDestinationConfig; import 
io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.workers.TestHarnessUtils; import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.WorkerUtils; -import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.exception.TestHarnessException; import io.airbyte.workers.process.IntegrationLauncher; import java.io.BufferedWriter; import java.io.IOException; @@ -74,7 +74,7 @@ public DefaultAirbyteDestination(final IntegrationLauncher integrationLauncher, } @Override - public void start(final WorkerDestinationConfig destinationConfig, final Path jobRoot) throws IOException, WorkerException { + public void start(final WorkerDestinationConfig destinationConfig, final Path jobRoot) throws IOException, TestHarnessException { Preconditions.checkState(destinationProcess == null); LOGGER.info("Running destination..."); @@ -123,11 +123,11 @@ public void close() throws Exception { } LOGGER.debug("Closing destination process"); - WorkerUtils.gentleClose(destinationProcess, 1, TimeUnit.MINUTES); + TestHarnessUtils.gentleClose(destinationProcess, 1, TimeUnit.MINUTES); if (destinationProcess.isAlive() || !IGNORED_EXIT_CODES.contains(getExitValue())) { final String message = destinationProcess.isAlive() ? "Destination has not terminated " : "Destination process exit with code " + getExitValue(); - throw new WorkerException(message + ". This warning is normal if the job was cancelled."); + throw new TestHarnessException(message + ". This warning is normal if the job was cancelled."); } } @@ -139,7 +139,7 @@ public void cancel() throws Exception { LOGGER.info("Destination process no longer exists, cancellation is a no-op."); } else { LOGGER.info("Destination process exists, cancelling..."); - WorkerUtils.cancelProcess(destinationProcess); + TestHarnessUtils.cancelProcess(destinationProcess); LOGGER.info("Cancelled destination process!"); } } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriter.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriter.java similarity index 100% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriter.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriter.java diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriterFactory.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriterFactory.java similarity index 100% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriterFactory.java rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriterFactory.java diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java similarity index 95% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java rename to 
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java
similarity index 95%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java
index bf04a2229b837..230ecf25c1129 100644
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java
+++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java
@@ -18,9 +18,9 @@
 import io.airbyte.config.WorkerSourceConfig;
 import io.airbyte.protocol.models.AirbyteMessage;
 import io.airbyte.protocol.models.AirbyteMessage.Type;
+import io.airbyte.workers.TestHarnessUtils;
 import io.airbyte.workers.WorkerConstants;
-import io.airbyte.workers.WorkerUtils;
-import io.airbyte.workers.exception.WorkerException;
+import io.airbyte.workers.exception.TestHarnessException;
 import io.airbyte.workers.process.IntegrationLauncher;
 import java.nio.file.Path;
 import java.time.Duration;
@@ -79,7 +79,7 @@ public DefaultAirbyteSource(final IntegrationLauncher integrationLauncher,
     this.streamFactory = streamFactory;
     this.protocolSerializer = protocolSerializer;
     this.heartbeatMonitor = heartbeatMonitor;
-    this.featureFlagLogConnectorMsgs = featureFlags.logConnectorMessages();
+    featureFlagLogConnectorMsgs = featureFlags.logConnectorMessages();
   }

   @Override
@@ -144,14 +144,14 @@ public void close() throws Exception {
     }

     LOGGER.debug("Closing source process");
-    WorkerUtils.gentleClose(
+    TestHarnessUtils.gentleClose(
         sourceProcess,
         GRACEFUL_SHUTDOWN_DURATION.toMillis(), TimeUnit.MILLISECONDS);

     if (sourceProcess.isAlive() || !IGNORED_EXIT_CODES.contains(getExitValue())) {
       final String message = sourceProcess.isAlive() ? "Source has not terminated " : "Source process exit with code " + getExitValue();
-      throw new WorkerException(message + ". This warning is normal if the job was cancelled.");
+      throw new TestHarnessException(message + ". This warning is normal if the job was cancelled.");
     }
   }
@@ -163,7 +163,7 @@ public void cancel() throws Exception {
       LOGGER.info("Source process no longer exists, cancellation is a no-op.");
     } else {
       LOGGER.info("Source process exists, cancelling...");
-      WorkerUtils.cancelProcess(sourceProcess);
+      TestHarnessUtils.cancelProcess(sourceProcess);
       LOGGER.info("Cancelled source process!");
     }
   }
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.java
similarity index 100%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.java
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/HeartbeatMonitor.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/HeartbeatMonitor.java
similarity index 100%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/HeartbeatMonitor.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/HeartbeatMonitor.java
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteMessageBufferedWriter.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/VersionedAirbyteMessageBufferedWriter.java
similarity index 100%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteMessageBufferedWriter.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/VersionedAirbyteMessageBufferedWriter.java
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteMessageBufferedWriterFactory.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/VersionedAirbyteMessageBufferedWriterFactory.java
similarity index 100%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteMessageBufferedWriterFactory.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/internal/VersionedAirbyteMessageBufferedWriterFactory.java
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java
similarity index 93%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java
index 79bb870c493d9..657927f74d361 100644
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java
+++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java
@@ -26,9 +26,9 @@
 import io.airbyte.protocol.models.AirbyteMessage.Type;
 import io.airbyte.protocol.models.AirbyteTraceMessage;
 import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
+import io.airbyte.workers.TestHarnessUtils;
 import io.airbyte.workers.WorkerConstants;
-import io.airbyte.workers.WorkerUtils;
-import io.airbyte.workers.exception.WorkerException;
+import io.airbyte.workers.exception.TestHarnessException;
 import io.airbyte.workers.process.ProcessFactory;
 import java.io.InputStream;
 import java.nio.file.Path;
@@ -61,7 +61,7 @@ public DefaultNormalizationRunner(final ProcessFactory processFactory,
                                    final String normalizationImage,
                                    final String normalizationIntegrationType) {
     this.processFactory = processFactory;
-    this.normalizationImageName = normalizationImage;
+    normalizationImageName = normalizationImage;
     this.normalizationIntegrationType = normalizationIntegrationType;
   }
@@ -77,7 +77,7 @@ public boolean configureDbt(final String jobId,
         WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, Jsons.serialize(config));
     final String gitRepoUrl = dbtConfig.getGitRepoUrl();
     if (Strings.isNullOrEmpty(gitRepoUrl)) {
-      throw new WorkerException("Git Repo Url is required");
+      throw new TestHarnessException("Git Repo Url is required");
     }
     final String gitRepoBranch = dbtConfig.getGitRepoBranch();
     if (Strings.isNullOrEmpty(gitRepoBranch)) {
@@ -167,13 +167,13 @@ private boolean runProcess(final String jobId,
       }
       LineGobbler.gobble(process.getErrorStream(), LOGGER::error, CONTAINER_LOG_MDC_BUILDER);

-      WorkerUtils.wait(process);
+      TestHarnessUtils.wait(process);

       return process.exitValue() == 0;
     } catch (final Exception e) {
       // make sure we kill the process on failure to avoid zombies.
       if (process != null) {
-        WorkerUtils.cancelProcess(process);
+        TestHarnessUtils.cancelProcess(process);
       }
       throw e;
     }
@@ -186,7 +186,7 @@ public void close() throws Exception {
     }

     LOGGER.info("Terminating normalization process...");
-    WorkerUtils.gentleClose(process, 1, TimeUnit.MINUTES);
+    TestHarnessUtils.gentleClose(process, 1, TimeUnit.MINUTES);

     /*
      * After attempting to close the process check the following:
@@ -194,9 +194,9 @@
      * Did the process actually terminate? If "yes", did it do so nominally?
      */
     if (process.isAlive()) {
-      throw new WorkerException("Normalization process did not terminate after 1 minute.");
+      throw new TestHarnessException("Normalization process did not terminate after 1 minute.");
     } else if (process.exitValue() != 0) {
-      throw new WorkerException("Normalization process did not terminate normally (exit code: " + process.exitValue() + ")");
+      throw new TestHarnessException("Normalization process did not terminate normally (exit code: " + process.exitValue() + ")");
     } else {
       LOGGER.info("Normalization process successfully terminated.");
     }
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationAirbyteStreamFactory.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/normalization/NormalizationAirbyteStreamFactory.java
similarity index 100%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationAirbyteStreamFactory.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/normalization/NormalizationAirbyteStreamFactory.java
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationRunner.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/normalization/NormalizationRunner.java
similarity index 100%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationRunner.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/normalization/NormalizationRunner.java
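The close() hunk above distinguishes three outcomes for the normalization process: still alive after one minute, exited non-zero, and a nominal exit. Assuming NormalizationRunner remains AutoCloseable (as the close() override suggests), a harness-side caller sees the first two cases as an exception; the construction call below is illustrative only:

    try (final NormalizationRunner runner = newRunner()) { // newRunner() is hypothetical
      // ... configure and run normalization ...
    } catch (final Exception e) {
      // close() surfaces a hung (>1 minute) or non-zero-exit process here
      LOGGER.error("Normalization did not terminate nominally", e);
    }
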
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java
similarity index 87%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java
index e2ae3a1604019..67d9392296b4c 100644
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java
+++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java
@@ -20,11 +20,8 @@
 import io.airbyte.commons.features.EnvVariableFeatureFlags;
 import io.airbyte.commons.features.FeatureFlags;
 import io.airbyte.config.AllowedHosts;
-import io.airbyte.config.Configs;
-import io.airbyte.config.EnvConfigs;
 import io.airbyte.config.ResourceRequirements;
-import io.airbyte.config.WorkerEnvConstants;
-import io.airbyte.workers.exception.WorkerException;
+import io.airbyte.workers.exception.TestHarnessException;
 import java.nio.file.Path;
 import java.util.Collections;
 import java.util.HashMap;
@@ -69,7 +66,7 @@ public AirbyteIntegrationLauncher(final String jobId,
   }

   @Override
-  public Process spec(final Path jobRoot) throws WorkerException {
+  public Process spec(final Path jobRoot) throws TestHarnessException {
     return processFactory.create(
         SPEC_JOB,
         jobId,
@@ -89,7 +86,7 @@ public Process spec(final Path jobRoot) throws WorkerException {
   }

   @Override
-  public Process check(final Path jobRoot, final String configFilename, final String configContents) throws WorkerException {
+  public Process check(final Path jobRoot, final String configFilename, final String configContents) throws TestHarnessException {
     return processFactory.create(
         CHECK_JOB,
         jobId,
@@ -110,7 +107,7 @@ public Process check(final Path jobRoot, final String configFilename, final Stri
   }

   @Override
-  public Process discover(final Path jobRoot, final String configFilename, final String configContents) throws WorkerException {
+  public Process discover(final Path jobRoot, final String configFilename, final String configContents) throws TestHarnessException {
     return processFactory.create(
         DISCOVER_JOB,
         jobId,
@@ -138,7 +135,7 @@ public Process read(final Path jobRoot,
                       final String catalogContents,
                       final String stateFilename,
                       final String stateContents)
-      throws WorkerException {
+      throws TestHarnessException {
     final List<String> arguments = Lists.newArrayList(
         "read",
         CONFIG, configFilename,
@@ -180,7 +177,7 @@ public Process write(final Path jobRoot,
                        final String configContents,
                        final String catalogFilename,
                        final String catalogContents)
-      throws WorkerException {
+      throws TestHarnessException {
     final Map<String, String> files = ImmutableMap.of(
         configFilename, configContents,
         catalogFilename, catalogContents);
@@ -206,24 +203,19 @@ public Process write(final Path jobRoot,
   }

   private Map<String, String> getWorkerMetadata() {
-    final Configs configs = new EnvConfigs();
     // We've managed to exceed the maximum number of parameters for Map.of(), so use a builder + convert
     // back to hashmap
     return Maps.newHashMap(
         ImmutableMap.<String, String>builder()
-            .put(WorkerEnvConstants.WORKER_CONNECTOR_IMAGE, imageName)
-            .put(WorkerEnvConstants.WORKER_JOB_ID, jobId)
-            .put(WorkerEnvConstants.WORKER_JOB_ATTEMPT, String.valueOf(attempt))
+            .put("WORKER_CONNECTOR_IMAGE", imageName)
+            .put("WORKER_JOB_ID", jobId)
+            .put("WORKER_JOB_ATTEMPT", String.valueOf(attempt))
             .put(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, String.valueOf(featureFlags.useStreamCapableState()))
             .put(EnvVariableFeatureFlags.AUTO_DETECT_SCHEMA, String.valueOf(featureFlags.autoDetectSchema()))
             .put(EnvVariableFeatureFlags.APPLY_FIELD_SELECTION, String.valueOf(featureFlags.applyFieldSelection()))
             .put(EnvVariableFeatureFlags.FIELD_SELECTION_WORKSPACES, featureFlags.fieldSelectionWorkspaces())
             .put(EnvVariableFeatureFlags.STRICT_COMPARISON_NORMALIZATION_WORKSPACES, featureFlags.strictComparisonNormalizationWorkspaces())
             .put(EnvVariableFeatureFlags.STRICT_COMPARISON_NORMALIZATION_TAG, featureFlags.strictComparisonNormalizationTag())
-            .put(EnvConfigs.SOCAT_KUBE_CPU_LIMIT, configs.getSocatSidecarKubeCpuLimit())
-            .put(EnvConfigs.SOCAT_KUBE_CPU_REQUEST, configs.getSocatSidecarKubeCpuRequest())
-            .put(EnvConfigs.LAUNCHDARKLY_KEY, configs.getLaunchDarklyKey())
-            .put(EnvConfigs.FEATURE_FLAG_CLIENT, configs.getFeatureFlagClient())
             .build());
   }
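In getWorkerMetadata() above, the harness stops referencing WorkerEnvConstants and EnvConfigs and writes the literal keys WORKER_CONNECTOR_IMAGE, WORKER_JOB_ID and WORKER_JOB_ATTEMPT instead, dropping the compile-time dependency on airbyte-config. The values still reach the connector as environment variables, so consumer code of this shape keeps working unchanged:

    // Consumer-side view of the metadata map built above (key name per the hunk).
    final String image = Optional.ofNullable(System.getenv("WORKER_CONNECTOR_IMAGE")).orElse("");
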
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java
similarity index 90%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java
index 0810bbc98d951..d8d7567e46853 100644
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java
+++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java
@@ -14,9 +14,8 @@
 import io.airbyte.commons.resources.MoreResources;
 import io.airbyte.config.AllowedHosts;
 import io.airbyte.config.ResourceRequirements;
-import io.airbyte.workers.WorkerConfigs;
-import io.airbyte.workers.WorkerUtils;
-import io.airbyte.workers.exception.WorkerException;
+import io.airbyte.workers.TestHarnessUtils;
+import io.airbyte.workers.exception.TestHarnessException;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -40,10 +39,10 @@ public class DockerProcessFactory implements ProcessFactory {
   private static final String IMAGE_EXISTS_SCRIPT = "image_exists.sh";

   private final String workspaceMountSource;
-  private final WorkerConfigs workerConfigs;
   private final Path workspaceRoot;
   private final String localMountSource;
   private final String networkName;
+  private final Map<String, String> envMap;
   private final Path imageExistsScriptPath;

   /**
@@ -53,18 +52,19 @@ public class DockerProcessFactory implements ProcessFactory {
    * @param workspaceMountSource workspace volume
    * @param localMountSource local volume
    * @param networkName docker network
+   * @param envMap
    */
-  public DockerProcessFactory(final WorkerConfigs workerConfigs,
-                              final Path workspaceRoot,
+  public DockerProcessFactory(final Path workspaceRoot,
                               final String workspaceMountSource,
                               final String localMountSource,
-                              final String networkName) {
-    this.workerConfigs = workerConfigs;
+                              final String networkName,
+                              final Map<String, String> envMap) {
     this.workspaceRoot = workspaceRoot;
     this.workspaceMountSource = workspaceMountSource;
     this.localMountSource = localMountSource;
     this.networkName = networkName;
-    this.imageExistsScriptPath = prepareImageExistsScript();
+    this.envMap = envMap;
+    imageExistsScriptPath = prepareImageExistsScript();
   }

   private static Path prepareImageExistsScript() {
@@ -97,10 +97,10 @@ public Process create(final String jobType,
                         final Map<String, String> jobMetadata,
                         final Map<Integer, Integer> internalToExternalPorts,
                         final String... args)
-      throws WorkerException {
+      throws TestHarnessException {
     try {
       if (!checkImageExists(imageName)) {
-        throw new WorkerException("Could not find image: " + imageName);
+        throw new TestHarnessException("Could not find image: " + imageName);
       }

       if (!jobRoot.toFile().exists()) {
@@ -142,7 +142,7 @@ public Process create(final String jobType,
         cmd.add(String.format("%s:%s", localMountSource, LOCAL_MOUNT_DESTINATION));
       }

-      final Map<String, String> allEnvMap = MoreMaps.merge(jobMetadata, workerConfigs.getEnvMap());
+      final Map<String, String> allEnvMap = MoreMaps.merge(jobMetadata, envMap);
       for (final Map.Entry<String, String> envEntry : allEnvMap.entrySet()) {
         cmd.add("-e");
         cmd.add(envEntry.getKey() + "=" + envEntry.getValue());
@@ -171,7 +171,7 @@ public Process create(final String jobType,

       return new ProcessBuilder(cmd).start();
     } catch (final IOException e) {
-      throw new WorkerException(e.getMessage(), e);
+      throw new TestHarnessException(e.getMessage(), e);
     }
   }
@@ -206,16 +206,16 @@ private Path rebasePath(final Path jobRoot) {
   }

   @VisibleForTesting
-  boolean checkImageExists(final String imageName) throws WorkerException {
+  boolean checkImageExists(final String imageName) throws TestHarnessException {
     try {
       final Process process = new ProcessBuilder(imageExistsScriptPath.toString(), imageName).start();
       LineGobbler.gobble(process.getErrorStream(), LOGGER::error);
       LineGobbler.gobble(process.getInputStream(), LOGGER::info);

-      WorkerUtils.gentleClose(process, 10, TimeUnit.MINUTES);
+      TestHarnessUtils.gentleClose(process, 10, TimeUnit.MINUTES);

       if (process.isAlive()) {
-        throw new WorkerException("Process to check if image exists is stuck. Exiting.");
+        throw new TestHarnessException("Process to check if image exists is stuck. Exiting.");
       } else {
         return process.exitValue() == 0;
       }
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/IntegrationLauncher.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/IntegrationLauncher.java
similarity index 82%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/process/IntegrationLauncher.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/IntegrationLauncher.java
index 569c798904166..1bf9518a27db4 100644
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/IntegrationLauncher.java
+++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/IntegrationLauncher.java
@@ -4,7 +4,7 @@

 package io.airbyte.workers.process;

-import io.airbyte.workers.exception.WorkerException;
+import io.airbyte.workers.exception.TestHarnessException;
 import java.nio.file.Path;

 /**
@@ -17,11 +17,11 @@
  */
 public interface IntegrationLauncher {

-  Process spec(final Path jobRoot) throws WorkerException;
+  Process spec(final Path jobRoot) throws TestHarnessException;

-  Process check(final Path jobRoot, final String configFilename, final String configContents) throws WorkerException;
+  Process check(final Path jobRoot, final String configFilename, final String configContents) throws TestHarnessException;

-  Process discover(final Path jobRoot, final String configFilename, final String configContents) throws WorkerException;
+  Process discover(final Path jobRoot, final String configFilename, final String configContents) throws TestHarnessException;

   Process read(final Path jobRoot,
                final String configFilename,
@@ -30,14 +30,14 @@ Process read(final Path jobRoot,
                final String catalogContents,
                final String stateFilename,
                final String stateContents)
-      throws WorkerException;
+      throws TestHarnessException;

   default Process read(final Path jobRoot,
                        final String configFilename,
                        final String configContents,
                        final String catalogFilename,
                        final String catalogContents)
-      throws WorkerException {
+      throws TestHarnessException {
     return read(jobRoot, configFilename, configContents, catalogFilename, catalogContents, null, null);
   }

@@ -46,6 +46,6 @@ Process write(final Path jobRoot,
                 final String configContents,
                 final String catalogFilename,
                 final String catalogContents)
-      throws WorkerException;
+      throws TestHarnessException;

 }
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/Metadata.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/Metadata.java
similarity index 100%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/process/Metadata.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/Metadata.java
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/ProcessFactory.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/ProcessFactory.java
similarity index 97%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/process/ProcessFactory.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/ProcessFactory.java
index 98ff3f7b9a6b9..11b2111f23ce4 100644
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/ProcessFactory.java
+++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/process/ProcessFactory.java
@@ -6,7 +6,7 @@

 import io.airbyte.config.AllowedHosts;
 import io.airbyte.config.ResourceRequirements;
-import io.airbyte.workers.exception.WorkerException;
+import io.airbyte.workers.exception.TestHarnessException;
 import java.nio.file.Path;
 import java.util.Map;
 import java.util.regex.Matcher;
@@ -38,7 +38,7 @@ public interface ProcessFactory {
    *        variables.
    * @param args Arguments to pass to the docker image being run in the new process.
    * @return ProcessBuilder object to run the process.
-   * @throws WorkerException
+   * @throws TestHarnessException
    */
   Process create(String jobType,
                  String jobId,
@@ -55,7 +55,7 @@ Process create(String jobType,
                  final Map<String, String> jobMetadata,
                  final Map<Integer, Integer> portMapping,
                  final String... args)
-      throws WorkerException;
+      throws TestHarnessException;

   /**
    * Docker image names are by convention separated by slashes. The last portion is the image's name.
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java
similarity index 100%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java
similarity index 85%
rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java
index af94e1a643a7c..edb31383da3d5 100644
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java
+++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java
@@ -12,8 +12,6 @@
 import io.airbyte.config.OperatorNormalization;
 import io.airbyte.config.OperatorNormalization.Option;
 import io.airbyte.config.SourceConnection;
-import io.airbyte.config.StandardSync;
-import io.airbyte.config.StandardSync.Status;
 import io.airbyte.config.StandardSyncInput;
 import io.airbyte.config.StandardSyncOperation;
 import io.airbyte.config.StandardSyncOperation.OperatorType;
@@ -36,17 +34,16 @@ public class TestConfigHelpers {
   private static final String FIELD_NAME = "favorite_color";
   private static final long LAST_SYNC_TIME = 1598565106;

-  public static ImmutablePair createSyncConfig() {
+  public static ImmutablePair createSyncConfig() {
     return createSyncConfig(false);
   }

-  public static ImmutablePair createSyncConfig(final Boolean multipleNamespaces) {
+  public static ImmutablePair createSyncConfig(final Boolean multipleNamespaces) {
     final UUID workspaceId = UUID.randomUUID();
     final UUID sourceDefinitionId = UUID.randomUUID();
     final UUID sourceId = UUID.randomUUID();
     final UUID destinationDefinitionId = UUID.randomUUID();
     final UUID destinationId = UUID.randomUUID();
-    final UUID connectionId = UUID.randomUUID();
     final UUID normalizationOperationId = UUID.randomUUID();
     final UUID dbtOperationId = UUID.randomUUID();
@@ -110,34 +107,23 @@ public static ImmutablePair createSyncConfig(fi
       catalog.withStreams(Collections.singletonList(stream));
     }

-    final StandardSync standardSync = new StandardSync()
-        .withConnectionId(connectionId)
-        .withDestinationId(destinationId)
-        .withSourceId(sourceId)
-        .withStatus(Status.ACTIVE)
-        .withName(CONNECTION_NAME)
-        .withNamespaceDefinition(NamespaceDefinitionType.SOURCE)
-        .withPrefix(CONNECTION_NAME)
-        .withCatalog(catalog)
-        .withOperationIds(List.of(normalizationOperationId, dbtOperationId));
-
     final String stateValue = Jsons.serialize(Map.of("lastSync", String.valueOf(LAST_SYNC_TIME)));

     final State state = new State().withState(Jsons.jsonNode(stateValue));

     final StandardSyncInput syncInput = new StandardSyncInput()
-        .withNamespaceDefinition(standardSync.getNamespaceDefinition())
-        .withPrefix(standardSync.getPrefix())
+        .withNamespaceDefinition(NamespaceDefinitionType.SOURCE)
+        .withPrefix(CONNECTION_NAME)
         .withSourceId(sourceId)
         .withDestinationId(destinationId)
         .withDestinationConfiguration(destinationConnectionConfig.getConfiguration())
-        .withCatalog(standardSync.getCatalog())
+        .withCatalog(catalog)
         .withSourceConfiguration(sourceConnectionConfig.getConfiguration())
         .withState(state)
         .withOperationSequence(List.of(normalizationOperation, customDbtOperation))
         .withWorkspaceId(workspaceId);

-    return new ImmutablePair<>(standardSync, syncInput);
+    return new ImmutablePair<>(null, syncInput);
   }

 }
diff --git a/airbyte-commons-worker/src/main/resources/dbt_transformation_entrypoint.sh b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/resources/dbt_transformation_entrypoint.sh
similarity index 100%
rename from airbyte-commons-worker/src/main/resources/dbt_transformation_entrypoint.sh
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/resources/dbt_transformation_entrypoint.sh
diff --git a/airbyte-commons-worker/src/main/resources/entrypoints/sync/check.sh b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/resources/entrypoints/sync/check.sh
similarity index 100%
rename from airbyte-commons-worker/src/main/resources/entrypoints/sync/check.sh
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/resources/entrypoints/sync/check.sh
diff --git a/airbyte-commons-worker/src/main/resources/entrypoints/sync/init.sh b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/resources/entrypoints/sync/init.sh
similarity index 100%
rename from airbyte-commons-worker/src/main/resources/entrypoints/sync/init.sh
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/resources/entrypoints/sync/init.sh
diff --git a/airbyte-commons-worker/src/main/resources/entrypoints/sync/main.sh b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/resources/entrypoints/sync/main.sh
similarity index 100%
rename from airbyte-commons-worker/src/main/resources/entrypoints/sync/main.sh
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/resources/entrypoints/sync/main.sh
diff --git a/airbyte-commons-worker/src/main/resources/image_exists.sh b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/resources/image_exists.sh
similarity index 100%
rename from airbyte-commons-worker/src/main/resources/image_exists.sh
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/resources/image_exists.sh
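One behavioral wrinkle in the TestConfigHelpers hunk above: the removed return statement paired a StandardSync with the StandardSyncInput, while the replacement always returns null as the pair's left element (the ImmutablePair type parameters themselves did not survive extraction here and are not reconstructed). Callers must therefore rely only on the right-hand StandardSyncInput:

    // The left element of the returned pair is now always null; the tests
    // further down accordingly use only getValue().
    final StandardSyncInput syncInput = TestConfigHelpers.createSyncConfig().getValue();
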
diff --git a/airbyte-commons-worker/src/main/resources/sshtunneling.sh b/airbyte-connector-test-harnesses/acceptance-test-harness/src/main/resources/sshtunneling.sh
similarity index 100%
rename from airbyte-commons-worker/src/main/resources/sshtunneling.sh
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/main/resources/sshtunneling.sh
diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/WorkerUtilsTest.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/test/java/io/airbyte/workers/TestHarnessUtilsTest.java
similarity index 83%
rename from airbyte-commons-worker/src/test/java/io/airbyte/workers/WorkerUtilsTest.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/test/java/io/airbyte/workers/TestHarnessUtilsTest.java
index 6e440d06d4039..eedeb00433163 100644
--- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/WorkerUtilsTest.java
+++ b/airbyte-connector-test-harnesses/acceptance-test-harness/src/test/java/io/airbyte/workers/TestHarnessUtilsTest.java
@@ -12,9 +12,6 @@
 import static org.mockito.Mockito.when;

 import com.fasterxml.jackson.databind.JsonNode;
-import io.airbyte.config.Configs;
-import io.airbyte.config.EnvConfigs;
-import io.airbyte.config.StandardSync;
 import io.airbyte.config.StandardSyncInput;
 import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair;
 import io.airbyte.workers.internal.HeartbeatMonitor;
@@ -34,7 +31,7 @@
 import org.slf4j.LoggerFactory;

 @SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert")
-class WorkerUtilsTest {
+class TestHarnessUtilsTest {

   private static final Logger LOGGER = LoggerFactory.getLogger(GentleCloseWithHeartbeat.class);

@@ -59,7 +56,6 @@ void setup() {

   private void runShutdown() {
     gentleCloseWithHeartbeat(
-        new WorkerConfigs(new EnvConfigs()),
         process,
         heartbeatMonitor,
         SHUTDOWN_TIME_DURATION,
@@ -127,17 +123,17 @@ void testProcessDies() {

   @Test
   void testMapStreamNamesToSchemasWithNullNamespace() {
-    final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig();
+    final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig();
     final StandardSyncInput syncInput = syncPair.getValue();
-    final Map<AirbyteStreamNameNamespacePair, JsonNode> mapOutput = WorkerUtils.mapStreamNamesToSchemas(syncInput);
+    final Map<AirbyteStreamNameNamespacePair, JsonNode> mapOutput = TestHarnessUtils.mapStreamNamesToSchemas(syncInput);
     assertNotNull(mapOutput.get(new AirbyteStreamNameNamespacePair("user_preferences", null)));
   }

   @Test
   void testMapStreamNamesToSchemasWithMultipleNamespaces() {
-    final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(true);
+    final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(true);
     final StandardSyncInput syncInput = syncPair.getValue();
-    final Map<AirbyteStreamNameNamespacePair, JsonNode> mapOutput = WorkerUtils.mapStreamNamesToSchemas(syncInput);
+    final Map<AirbyteStreamNameNamespacePair, JsonNode> mapOutput = TestHarnessUtils.mapStreamNamesToSchemas(syncInput);
     assertNotNull(mapOutput.get(new AirbyteStreamNameNamespacePair("user_preferences", "namespace")));
     assertNotNull(mapOutput.get(new AirbyteStreamNameNamespacePair("user_preferences", "namespace2")));
   }
@@ -158,8 +154,7 @@ void testMapStreamNamesToSchemasWithMultipleNamespaces() {
    * @param forcedShutdownDuration - amount of time to wait if a process needs to be destroyed
    *        forcibly.
    */
-  static void gentleCloseWithHeartbeat(final WorkerConfigs workerConfigs,
-                                       final Process process,
+  static void gentleCloseWithHeartbeat(final Process process,
                                        final HeartbeatMonitor heartbeatMonitor,
                                        final Duration gracefulShutdownDuration,
                                        final Duration checkHeartbeatDuration,
@@ -167,10 +162,6 @@ static void gentleCloseWithHeartbeat(final WorkerConfigs workerConfigs,
                                        final BiConsumer<Process, Duration> forceShutdown) {
     while (process.isAlive() && heartbeatMonitor.isBeating()) {
       try {
-        if (workerConfigs.getWorkerEnvironment().equals(Configs.WorkerEnvironment.KUBERNETES)) {
-          LOGGER.debug("Gently closing process {} with heartbeat..", process.info().commandLine().get());
-        }
-
         process.waitFor(checkHeartbeatDuration.toMillis(), TimeUnit.MILLISECONDS);
       } catch (final InterruptedException e) {
         LOGGER.error("Exception while waiting for process to finish", e);
@@ -179,10 +170,6 @@ static void gentleCloseWithHeartbeat(final WorkerConfigs workerConfigs,

     if (process.isAlive()) {
       try {
-        if (workerConfigs.getWorkerEnvironment().equals(Configs.WorkerEnvironment.KUBERNETES)) {
-          LOGGER.debug("Gently closing process {} without heartbeat..", process.info().commandLine().get());
-        }
-
         process.waitFor(gracefulShutdownDuration.toMillis(), TimeUnit.MILLISECONDS);
       } catch (final InterruptedException e) {
         LOGGER.error("Exception during grace period for process to finish. This can happen when cancelling jobs.");
@@ -191,10 +178,6 @@ static void gentleCloseWithHeartbeat(final WorkerConfigs workerConfigs,

     // if we were unable to exist gracefully, force shutdown...
     if (process.isAlive()) {
-      if (workerConfigs.getWorkerEnvironment().equals(Configs.WorkerEnvironment.KUBERNETES)) {
-        LOGGER.debug("Force shutdown process {}..", process.info().commandLine().get());
-      }
-
       forceShutdown.accept(process, forcedShutdownDuration);
     }
   }
diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/CatalogClientConvertersTest.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/test/java/io/airbyte/workers/helper/CatalogClientConvertersTest.java
similarity index 100%
rename from airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/CatalogClientConvertersTest.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/test/java/io/airbyte/workers/helper/CatalogClientConvertersTest.java
diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/ConnectorConfigUpdaterTest.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/test/java/io/airbyte/workers/helper/ConnectorConfigUpdaterTest.java
similarity index 100%
rename from airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/ConnectorConfigUpdaterTest.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/test/java/io/airbyte/workers/helper/ConnectorConfigUpdaterTest.java
diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/FailureHelperTest.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/test/java/io/airbyte/workers/helper/FailureHelperTest.java
similarity index 100%
rename from airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/FailureHelperTest.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/test/java/io/airbyte/workers/helper/FailureHelperTest.java
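Stripped of the Kubernetes-only debug logging, the test helper above reduces to three phases. A condensed restatement of the surviving control flow (InterruptedException handling elided here; the original keeps its try/catch blocks):

    while (process.isAlive() && heartbeatMonitor.isBeating()) {
      process.waitFor(checkHeartbeatDuration.toMillis(), TimeUnit.MILLISECONDS); // heartbeat window
    }
    if (process.isAlive()) {
      process.waitFor(gracefulShutdownDuration.toMillis(), TimeUnit.MILLISECONDS); // grace period
    }
    if (process.isAlive()) {
      forceShutdown.accept(process, forcedShutdownDuration); // last resort
    }
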
diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactoryTest.java b/airbyte-connector-test-harnesses/acceptance-test-harness/src/test/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactoryTest.java
similarity index 100%
rename from airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactoryTest.java
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/test/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactoryTest.java
diff --git a/airbyte-commons-worker/src/test/resources/version-detection/logs-with-version.jsonl b/airbyte-connector-test-harnesses/acceptance-test-harness/src/test/resources/version-detection/logs-with-version.jsonl
similarity index 100%
rename from airbyte-commons-worker/src/test/resources/version-detection/logs-with-version.jsonl
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/test/resources/version-detection/logs-with-version.jsonl
diff --git a/airbyte-commons-worker/src/test/resources/version-detection/logs-without-spec-message.jsonl b/airbyte-connector-test-harnesses/acceptance-test-harness/src/test/resources/version-detection/logs-without-spec-message.jsonl
similarity index 100%
rename from airbyte-commons-worker/src/test/resources/version-detection/logs-without-spec-message.jsonl
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/test/resources/version-detection/logs-without-spec-message.jsonl
diff --git a/airbyte-commons-worker/src/test/resources/version-detection/logs-without-version.jsonl b/airbyte-connector-test-harnesses/acceptance-test-harness/src/test/resources/version-detection/logs-without-version.jsonl
similarity index 100%
rename from airbyte-commons-worker/src/test/resources/version-detection/logs-without-version.jsonl
rename to airbyte-connector-test-harnesses/acceptance-test-harness/src/test/resources/version-detection/logs-without-version.jsonl
diff --git a/airbyte-db/db-lib/Dockerfile b/airbyte-db/db-lib/Dockerfile
deleted file mode 100644
index a9f42ce521ba1..0000000000000
--- a/airbyte-db/db-lib/Dockerfile
+++ /dev/null
@@ -1,3 +0,0 @@
-FROM postgres:13-alpine
-
-COPY bin/init.sql /docker-entrypoint-initdb.d/000_init.sql
diff --git a/airbyte-db/db-lib/build.gradle b/airbyte-db/db-lib/build.gradle
index 1386fe9050064..d6da5ce71bc74 100644
--- a/airbyte-db/db-lib/build.gradle
+++ b/airbyte-db/db-lib/build.gradle
@@ -49,7 +49,7 @@ dependencies {

   // MongoDB
   implementation 'org.mongodb:mongodb-driver-sync:4.3.0'
- 
+
   // Teradata
   implementation 'com.teradata.jdbc:terajdbc4:17.20.00.12'

@@ -58,53 +58,4 @@ dependencies {

 }

-task(newConfigsMigration, dependsOn: 'classes', type: JavaExec) {
-  mainClass = 'io.airbyte.db.instance.development.MigrationDevCenter'
-  classpath = files(configurations.migrations.files)
-  args 'configs', 'create'
-}
-
-task(runConfigsMigration, dependsOn: 'classes', type: JavaExec) {
-  mainClass = 'io.airbyte.db.instance.development.MigrationDevCenter'
-  classpath = files(configurations.migrations.files)
-  args 'configs', 'migrate'
-}
-
-task(dumpConfigsSchema, dependsOn: 'classes', type: JavaExec) {
-  mainClass = 'io.airbyte.db.instance.development.MigrationDevCenter'
-  classpath = files(configurations.migrations.files)
-  args 'configs', 'dump_schema'
-}
-
-task(newJobsMigration, dependsOn: 'classes', type: JavaExec) {
-  mainClass = 'io.airbyte.db.instance.development.MigrationDevCenter'
-  classpath = files(configurations.migrations.files)
-  args 'jobs', 'create'
-}
-
-task(runJobsMigration, dependsOn: 'classes', type: JavaExec) {
-  mainClass = 'io.airbyte.db.instance.development.MigrationDevCenter'
-  classpath = files(configurations.migrations.files)
-  args 'jobs', 'migrate'
-}
-
-task(dumpJobsSchema, dependsOn: 'classes', type: JavaExec) {
-  mainClass = 'io.airbyte.db.instance.development.MigrationDevCenter'
-  classpath = files(configurations.migrations.files)
-  args 'jobs', 'dump_schema'
-}
-
-task copyInitSql(type: Copy) {
-  dependsOn copyDocker
-
-  from('src/main/resources') {
-    include 'init.sql'
-  }
-  into 'build/docker/bin'
-}
-
-tasks.named("buildDockerImage") {
-  dependsOn copyInitSql
-}
-
 Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project)
diff --git a/airbyte-db/db-lib/gradle.properties b/airbyte-db/db-lib/gradle.properties
deleted file mode 100644
index 1e1c824e5a00c..0000000000000
--- a/airbyte-db/db-lib/gradle.properties
+++ /dev/null
@@ -1 +0,0 @@
-dockerImageName=db
diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/bigquery/BigQueryDatabase.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/bigquery/BigQueryDatabase.java
index f9b22d89001a0..da98a9036ad60 100644
--- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/bigquery/BigQueryDatabase.java
+++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/bigquery/BigQueryDatabase.java
@@ -23,7 +23,6 @@
 import com.google.common.base.Charsets;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Streams;
-import io.airbyte.config.WorkerEnvConstants;
 import io.airbyte.db.SqlDatabase;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
@@ -84,7 +83,7 @@ private String getUserAgentHeader(final String connectorVersion) {
   }

   private String getConnectorVersion() {
-    return Optional.ofNullable(System.getenv(WorkerEnvConstants.WORKER_CONNECTOR_IMAGE))
+    return Optional.ofNullable(System.getenv("WORKER_CONNECTOR_IMAGE"))
         .orElse(EMPTY)
         .replace("airbyte/", EMPTY).replace(":", "/");
   }
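The BigQueryDatabase change is the same decoupling trick: the env var name is inlined rather than read from WorkerEnvConstants. For reference, the replace chain in getConnectorVersion() turns an image coordinate into a user-agent fragment, e.g. "airbyte/destination-bigquery:1.2.5" becomes "destination-bigquery/1.2.5":

    // Worked example of the transformation above (EMPTY in the source is just "").
    final String version = Optional.ofNullable(System.getenv("WORKER_CONNECTOR_IMAGE"))
        .orElse("")
        .replace("airbyte/", "")
        .replace(":", "/");
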
diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/DatabaseAvailabilityCheck.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/DatabaseAvailabilityCheck.java
deleted file mode 100644
index 812feaf1a2156..0000000000000
--- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/DatabaseAvailabilityCheck.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.db.check;
-
-import static org.jooq.impl.DSL.select;
-
-import io.airbyte.db.Database;
-import java.util.Optional;
-import java.util.function.Function;
-import org.jooq.DSLContext;
-import org.slf4j.Logger;
-
-/**
- * Performs a check to verify that the configured database is available.
- */
-public interface DatabaseAvailabilityCheck extends DatabaseCheck {
-
-  /**
-   * The number of times to check if the database is available. TODO replace with a default value in a
-   * value injection annotation
-   */
-  int NUM_POLL_TIMES = 10;
-
-  /**
-   * Checks whether the configured database is available.
-   *
-   * @throws DatabaseCheckException if unable to perform the check.
-   */
-  @Override
-  default void check() throws DatabaseCheckException {
-    var initialized = false;
-    var totalTime = 0;
-    final var sleepTime = getTimeoutMs() / NUM_POLL_TIMES;
-
-    while (!initialized) {
-      getLogger().warn("Waiting for database to become available...");
-      if (totalTime >= getTimeoutMs()) {
-        throw new DatabaseCheckException("Unable to connect to the database.");
-      }
-
-      final Optional<DSLContext> dslContext = getDslContext();
-
-      if (dslContext.isPresent()) {
-        final Database database = new Database(dslContext.get());
-        initialized = isDatabaseConnected(getDatabaseName()).apply(database);
-        if (!initialized) {
-          getLogger().info("Database is not ready yet. Please wait a moment, it might still be initializing...");
-          try {
-            Thread.sleep(sleepTime);
-          } catch (final InterruptedException e) {
-            throw new DatabaseCheckException("Unable to wait for database to be ready.", e);
-          }
-          totalTime += sleepTime;
-        } else {
-          getLogger().info("Database available.");
-        }
-      } else {
-        throw new DatabaseCheckException("Database configuration not present.");
-      }
-    }
-  }
-
-  /**
-   * Generates a {@link Function} that is used to test if a connection can be made to the database by
-   * verifying that the {@code information_schema.tables} tables has been populated.
-   *
-   * @param databaseName The name of the database to test.
-   * @return A {@link Function} that can be invoked to test if the database is available.
-   */
-  default Function<Database, Boolean> isDatabaseConnected(final String databaseName) {
-    return database -> {
-      try {
-        getLogger().info("Testing {} database connection...", databaseName);
-        return database.query(ctx -> ctx.fetchExists(select().from("information_schema.tables")));
-      } catch (final Exception e) {
-        getLogger().error("Failed to verify database connection.", e);
-        return false;
-      }
-    };
-  }
-
-  /**
-   * Retrieves the configured database name to be tested.
-   *
-   * @return The name of the database to test.
-   */
-  String getDatabaseName();
-
-  /**
-   * Retrieves the configured {@link DSLContext} to be used to test the database availability.
-   *
-   * @return The configured {@link DSLContext} object.
-   */
-  Optional<DSLContext> getDslContext();
-
-  /**
-   * Retrieves the configured {@link Logger} object to be used to record progress of the migration
-   * check.
-   *
-   * @return The configured {@link Logger} object.
-   */
-  Logger getLogger();
-
-  /**
-   * Retrieves the timeout in milliseconds for the check. Once this timeout is exceeded, the check
-   * will fail with an {@link InterruptedException}.
-   *
-   * @return The timeout in milliseconds for the check.
-   */
-  long getTimeoutMs();
-
-}
diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/DatabaseCheck.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/DatabaseCheck.java
deleted file mode 100644
index b8e19b4827c5b..0000000000000
--- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/DatabaseCheck.java
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.db.check;
-
-/**
- * Defines the interface for performing checks against a database.
- */
-public interface DatabaseCheck {
-
-  /**
-   * Checks whether the configured database is available.
-   *
-   * @throws DatabaseCheckException if unable to perform the check.
-   */
-  void check() throws DatabaseCheckException;
-
-}
diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/DatabaseCheckException.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/DatabaseCheckException.java
deleted file mode 100644
index c9daf0cf7c504..0000000000000
--- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/DatabaseCheckException.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.db.check;
-
-/**
- * Custom exception that represents a failure that occurs during an attempt to check the
- * availability or migration status of a database.
- */
-public class DatabaseCheckException extends Exception {
-
-  public DatabaseCheckException(final String message) {
-    super(message);
-  }
-
-  public DatabaseCheckException(final String message, final Throwable cause) {
-    super(message, cause);
-  }
-
-}
diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/DatabaseMigrationCheck.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/DatabaseMigrationCheck.java
deleted file mode 100644
index bae8babcbe9d7..0000000000000
--- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/DatabaseMigrationCheck.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.db.check;
-
-import java.util.Optional;
-import org.flywaydb.core.Flyway;
-import org.slf4j.Logger;
-
-/**
- * Performs a check to verify that the configured database has been migrated to the appropriate
- * version.
- */
-public interface DatabaseMigrationCheck {
-
-  /**
-   * Represents an unavailable schema migration version that ensures a re-test.
-   */
-  String UNAVAILABLE_VERSION = "0";
-
-  /**
-   * The number of times to check if the database has been migrated to the required schema version.
-   * TODO replace with a default value in a value injection annotation
-   */
-  int NUM_POLL_TIMES = 10;
-
-  /**
-   * Checks whether the configured database has been migrated to the required minimum schema version.
-   *
-   * @throws DatabaseCheckException if unable to perform the check.
-   */
-  default void check() throws DatabaseCheckException {
-    final var startTime = System.currentTimeMillis();
-    final var sleepTime = getTimeoutMs() / NUM_POLL_TIMES;
-    final Optional<Flyway> flywayOptional = getFlyway();
-
-    // Verify that the database is up and reachable first
-    final Optional<DatabaseAvailabilityCheck> availabilityCheck = getDatabaseAvailabilityCheck();
-    if (availabilityCheck.isPresent()) {
-      availabilityCheck.get().check();
-      if (flywayOptional.isPresent()) {
-        final var flyway = flywayOptional.get();
-
-        var currDatabaseMigrationVersion = getCurrentVersion(flyway);
-        getLogger().info("Current database migration version {}.", currDatabaseMigrationVersion);
-        getLogger().info("Minimum Flyway version required {}.", getMinimumFlywayVersion());
-
-        while (currDatabaseMigrationVersion.compareTo(getMinimumFlywayVersion()) < 0) {
-          if (System.currentTimeMillis() - startTime >= getTimeoutMs()) {
-            throw new DatabaseCheckException("Timeout while waiting for database to fulfill minimum flyway migration version..");
-          }
-
-          try {
-            Thread.sleep(sleepTime);
-          } catch (final InterruptedException e) {
-            throw new DatabaseCheckException("Unable to wait for database to be migrated.", e);
-          }
-
-          currDatabaseMigrationVersion = getCurrentVersion(flyway);
-        }
-        getLogger().info("Verified that database has been migrated to the required minimum version {}.", getTimeoutMs());
-      } else {
-        throw new DatabaseCheckException("Flyway configuration not present.");
-      }
-    } else {
-      throw new DatabaseCheckException("Availability check not configured.");
-    }
-  }
-
-  /**
-   * Retrieves the current version of the migration schema.
-   *
-   * @param flyway A {@link Flyway} that can be used to retrieve the current version.
-   * @return The current version of the migrated schema or {@link #UNAVAILABLE_VERSION} if the version
-   *         cannot be discovered.
-   */
-  default String getCurrentVersion(final Flyway flyway) {
-    /**
-     * The database may be available, but not yet migrated. If this is the case, the Flyway object will
-     * not be able to retrieve the current version of the schema. If that happens, return a fake version
-     * so that the check will fail and try again.
-     */
-    if (flyway.info().current() != null) {
-      return flyway.info().current().getVersion().getVersion();
-    } else {
-      return UNAVAILABLE_VERSION;
-    }
-  }
-
-  /**
-   * Retrieves the {@link DatabaseAvailabilityCheck} used to verify that the database is running and
-   * available.
-   *
-   * @return The {@link DatabaseAvailabilityCheck}.
-   */
-  Optional<DatabaseAvailabilityCheck> getDatabaseAvailabilityCheck();
-
-  /**
-   * Retrieves the configured {@link Flyway} object to be used to check the migration status of the
-   * database.
-   *
-   * @return The configured {@link Flyway} object.
-   */
-  Optional<Flyway> getFlyway();
-
-  /**
-   * Retrieves the configured {@link Logger} object to be used to record progress of the migration
-   * check.
-   *
-   * @return The configured {@link Logger} object.
-   */
-  Logger getLogger();
-
-  /**
-   * Retrieves the required minimum migration version of the schema.
-   *
-   * @return The required minimum migration version of the schema.
-   */
-  String getMinimumFlywayVersion();
-
-  /**
-   * Retrieves the timeout in milliseconds for the check. Once this timeout is exceeded, the check
-   * will fail with an {@link InterruptedException}.
-   *
-   * @return The timeout in milliseconds for the check.
-   */
-  long getTimeoutMs();
-
-}
diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/impl/ConfigsDatabaseAvailabilityCheck.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/impl/ConfigsDatabaseAvailabilityCheck.java
deleted file mode 100644
index 455697aee6173..0000000000000
--- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/impl/ConfigsDatabaseAvailabilityCheck.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.db.check.impl;
-
-import io.airbyte.db.check.DatabaseAvailabilityCheck;
-import io.airbyte.db.instance.DatabaseConstants;
-import java.util.Optional;
-import org.jooq.DSLContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Implementation of the {@link DatabaseAvailabilityCheck} for the Configurations database.
- */
-public class ConfigsDatabaseAvailabilityCheck implements DatabaseAvailabilityCheck {
-
-  private static final Logger LOGGER = LoggerFactory.getLogger(ConfigsDatabaseAvailabilityCheck.class);
-
-  // TODO inject via dependency injection framework
-  private final DSLContext dslContext;
-
-  // TODO inject via dependency injection framework
-  private final long timeoutMs;
-
-  public ConfigsDatabaseAvailabilityCheck(final DSLContext dslContext, final long timeoutMs) {
-    this.dslContext = dslContext;
-    this.timeoutMs = timeoutMs;
-  }
-
-  @Override
-  public String getDatabaseName() {
-    return DatabaseConstants.CONFIGS_DATABASE_LOGGING_NAME;
-  }
-
-  @Override
-  public Optional<DSLContext> getDslContext() {
-    return Optional.ofNullable(dslContext);
-  }
-
-  @Override
-  public Logger getLogger() {
-    return LOGGER;
-  }
-
-  @Override
-  public long getTimeoutMs() {
-    return timeoutMs;
-  }
-
-}
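All of the deleted check classes share one polling idiom: split the timeout into NUM_POLL_TIMES slices, probe once per slice, and give up once the budget is spent. If the pattern is needed elsewhere after this deletion, its kernel is just the loop below (isConnected stands in, hypothetically, for the jOOQ information_schema probe the removed interface used):

    long waited = 0;
    final long slice = timeoutMs / 10; // NUM_POLL_TIMES was 10
    while (!isConnected.getAsBoolean()) { // hypothetical BooleanSupplier probe
      if (waited >= timeoutMs) {
        throw new IllegalStateException("Unable to connect to the database.");
      }
      Thread.sleep(slice);
      waited += slice;
    }
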
- */ -public class ConfigsDatabaseMigrationCheck implements DatabaseMigrationCheck { - - private static final Logger LOGGER = LoggerFactory.getLogger(ConfigsDatabaseMigrationCheck.class); - - // TODO inject via dependency injection framework - private final ConfigsDatabaseAvailabilityCheck databaseAvailablityCheck; - - // TODO inject via dependency injection framework - private final Flyway flyway; - - // TODO inject via dependency injection framework - private final String minimumFlywayVersion; - - // TODO inject via dependency injection framework - private final long timeoutMs; - - public ConfigsDatabaseMigrationCheck(final ConfigsDatabaseAvailabilityCheck databaseAvailablityCheck, - final Flyway flyway, - final String minimumFlywayVersion, - final long timeoutMs) { - this.databaseAvailablityCheck = databaseAvailablityCheck; - this.flyway = flyway; - this.minimumFlywayVersion = minimumFlywayVersion; - this.timeoutMs = timeoutMs; - } - - @Override - public Optional getDatabaseAvailabilityCheck() { - return Optional.ofNullable(databaseAvailablityCheck); - } - - @Override - public Optional getFlyway() { - return Optional.ofNullable(flyway); - } - - @Override - public Logger getLogger() { - return LOGGER; - } - - @Override - public String getMinimumFlywayVersion() { - return minimumFlywayVersion; - } - - @Override - public long getTimeoutMs() { - return timeoutMs; - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/impl/JobsDatabaseAvailabilityCheck.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/impl/JobsDatabaseAvailabilityCheck.java deleted file mode 100644 index c101fb36ef500..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/impl/JobsDatabaseAvailabilityCheck.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.check.impl; - -import io.airbyte.db.check.DatabaseAvailabilityCheck; -import io.airbyte.db.instance.DatabaseConstants; -import java.util.Optional; -import org.jooq.DSLContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Implementation of the {@link DatabaseAvailabilityCheck} for the Jobs database. - */ -public class JobsDatabaseAvailabilityCheck implements DatabaseAvailabilityCheck { - - private static final Logger LOGGER = LoggerFactory.getLogger(JobsDatabaseAvailabilityCheck.class); - - // TODO inject via dependency injection framework - private final DSLContext dslContext; - - // TODO inject via dependency injection framework - private final long timeoutMs; - - public JobsDatabaseAvailabilityCheck(final DSLContext dslContext, final long timeoutMs) { - this.dslContext = dslContext; - this.timeoutMs = timeoutMs; - } - - @Override - public String getDatabaseName() { - return DatabaseConstants.JOBS_DATABASE_LOGGING_NAME; - } - - @Override - public Optional getDslContext() { - return Optional.ofNullable(dslContext); - } - - @Override - public Logger getLogger() { - return LOGGER; - } - - @Override - public long getTimeoutMs() { - return timeoutMs; - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/impl/JobsDatabaseMigrationCheck.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/impl/JobsDatabaseMigrationCheck.java deleted file mode 100644 index c395dfbe986da..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/check/impl/JobsDatabaseMigrationCheck.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.check.impl; - -import io.airbyte.db.check.DatabaseAvailabilityCheck; -import io.airbyte.db.check.DatabaseMigrationCheck; -import java.util.Optional; -import org.flywaydb.core.Flyway; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Implementation of the {@link DatabaseMigrationCheck} for the Jobs database. - */ -public class JobsDatabaseMigrationCheck implements DatabaseMigrationCheck { - - private static final Logger LOGGER = LoggerFactory.getLogger(JobsDatabaseMigrationCheck.class); - - // TODO inject via dependency injection framework - private final JobsDatabaseAvailabilityCheck databaseAvailablityCheck; - - // TODO inject via dependency injection framework - private final Flyway flyway; - - // TODO inject via dependency injection framework - private final String minimumFlywayVersion; - - // TODO inject via dependency injection framework - private final long timeoutMs; - - public JobsDatabaseMigrationCheck(final JobsDatabaseAvailabilityCheck databaseAvailablityCheck, - final Flyway flyway, - final String minimumFlywayVersion, - final long timeoutMs) { - this.databaseAvailablityCheck = databaseAvailablityCheck; - this.flyway = flyway; - this.minimumFlywayVersion = minimumFlywayVersion; - this.timeoutMs = timeoutMs; - } - - @Override - public Optional getDatabaseAvailabilityCheck() { - return Optional.ofNullable(databaseAvailablityCheck); - } - - @Override - public Optional getFlyway() { - return Optional.ofNullable(flyway); - } - - @Override - public Logger getLogger() { - return LOGGER; - } - - @Override - public String getMinimumFlywayVersion() { - return minimumFlywayVersion; - } - - @Override - public long getTimeoutMs() { - return timeoutMs; - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/DatabaseCheckFactory.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/DatabaseCheckFactory.java deleted file mode 100644 index 02a6441966fa8..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/DatabaseCheckFactory.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.factory; - -import io.airbyte.db.check.DatabaseAvailabilityCheck; -import io.airbyte.db.check.DatabaseMigrationCheck; -import io.airbyte.db.check.impl.ConfigsDatabaseAvailabilityCheck; -import io.airbyte.db.check.impl.ConfigsDatabaseMigrationCheck; -import io.airbyte.db.check.impl.JobsDatabaseAvailabilityCheck; -import io.airbyte.db.check.impl.JobsDatabaseMigrationCheck; -import io.airbyte.db.init.DatabaseInitializer; -import io.airbyte.db.init.impl.ConfigsDatabaseInitializer; -import io.airbyte.db.init.impl.JobsDatabaseInitializer; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; - -/** - * Temporary factory class that provides convenience methods for creating a - * {@link io.airbyte.db.check.DatabaseCheck} and {@link DatabaseInitializer} instances. This class - * will be removed once the project has been converted to leverage an application framework to - * manage the creation and injection of various check objects. - */ -public class DatabaseCheckFactory { - - /** - * Constructs a new {@link DatabaseAvailabilityCheck} that verifies the availability of the - * {@code Configurations} database. - * - * @param dslContext The {@link DSLContext} instance used to communicate with the - * {@code Configurations} database. - * @param timeoutMs The amount of time to wait for the database to become available, in - * milliseconds. 
 - * @return A configured {@link DatabaseAvailabilityCheck} for the {@code Configurations} database. - */ - public static ConfigsDatabaseAvailabilityCheck createConfigsDatabaseAvailabilityCheck(final DSLContext dslContext, final long timeoutMs) { - return new ConfigsDatabaseAvailabilityCheck(dslContext, timeoutMs); - } -
 - /** - * Constructs a new {@link DatabaseAvailabilityCheck} that verifies the availability of the - * {@code Jobs} database. - * - * @param dslContext The {@link DSLContext} instance used to communicate with the {@code Jobs} - * database. - * @param timeoutMs The amount of time to wait for the database to become available, in - * milliseconds. - * @return A configured {@link DatabaseAvailabilityCheck} for the {@code Jobs} database. - */ - public static JobsDatabaseAvailabilityCheck createJobsDatabaseAvailabilityCheck(final DSLContext dslContext, final long timeoutMs) { - return new JobsDatabaseAvailabilityCheck(dslContext, timeoutMs); - } -
 - /** - * Constructs a new {@link DatabaseMigrationCheck} that verifies that the {@code Configurations} - * database has been migrated to the requested minimum schema version. - * - * @param dslContext The {@link DSLContext} instance used to communicate with the - * {@code Configurations} database. - * @param flyway The {@link Flyway} instance used to determine the current migration status. - * @param minimumMigrationVersion The required minimum schema version. - * @param timeoutMs The amount of time to wait for the migration to complete/match the requested - * minimum schema version, in milliseconds. - * @return The configured {@link DatabaseMigrationCheck} for the {@code Configurations} database. - */ - public static DatabaseMigrationCheck createConfigsDatabaseMigrationCheck(final DSLContext dslContext, - final Flyway flyway, - final String minimumMigrationVersion, - final long timeoutMs) { - return new ConfigsDatabaseMigrationCheck(createConfigsDatabaseAvailabilityCheck(dslContext, timeoutMs), - flyway, minimumMigrationVersion, timeoutMs); - } -
 - /** - * Constructs a new {@link DatabaseMigrationCheck} that verifies that the {@code Jobs} database has - * been migrated to the requested minimum schema version. - * - * @param dslContext The {@link DSLContext} instance used to communicate with the - * {@code Jobs} database. - * @param flyway The {@link Flyway} instance used to determine the current migration status. - * @param minimumMigrationVersion The required minimum schema version. - * @param timeoutMs The amount of time to wait for the migration to complete/match the requested - * minimum schema version, in milliseconds. - * @return The configured {@link DatabaseMigrationCheck} for the {@code Jobs} database. - */ - public static DatabaseMigrationCheck createJobsDatabaseMigrationCheck(final DSLContext dslContext, - final Flyway flyway, - final String minimumMigrationVersion, - final long timeoutMs) { - return new JobsDatabaseMigrationCheck(createJobsDatabaseAvailabilityCheck(dslContext, timeoutMs), flyway, minimumMigrationVersion, timeoutMs); - } -
 - /** - * Constructs a new {@link DatabaseInitializer} that ensures that the {@code Configurations} - * database schema has been initialized. - * - * @param dslContext The {@link DSLContext} instance used to communicate with the - * {@code Configurations} database. - * @param timeoutMs The amount of time to wait for the database to become available, in - * milliseconds.
- * @param initialSchema The initial schema creation script to be executed if the database is not - * already populated. - * @return The configured {@link DatabaseInitializer} for the {@code Configurations} database. - */ - public static DatabaseInitializer createConfigsDatabaseInitializer(final DSLContext dslContext, final long timeoutMs, final String initialSchema) { - return new ConfigsDatabaseInitializer(createConfigsDatabaseAvailabilityCheck(dslContext, timeoutMs), dslContext, initialSchema); - } - - /** - * Constructs a new {@link DatabaseInitializer} that ensures that the {@code Jobs} database schema - * has been initialized. - * - * @param dslContext The {@link DSLContext} instance used to communicate with the {@code Jobs} - * database. - * @param timeoutMs The amount of time to wait for the database to become available, in - * milliseconds. - * @param initialSchema The initial schema creation script to be executed if the database is not - * already populated. - * @return The configured {@link DatabaseInitializer} for the {@code Jobs} database. - */ - public static DatabaseInitializer createJobsDatabaseInitializer(final DSLContext dslContext, final long timeoutMs, final String initialSchema) { - return new JobsDatabaseInitializer(createJobsDatabaseAvailabilityCheck(dslContext, timeoutMs), dslContext, initialSchema); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java deleted file mode 100644 index 6c30202ecc700..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.factory; - -import javax.sql.DataSource; -import org.flywaydb.core.Flyway; - -/** - * Temporary factory class that provides convenience methods for creating a {@link Flyway} - * instances. This class will be removed once the project has been converted to leverage an - * application framework to manage the creation and injection of {@link Flyway} objects. - */ -@SuppressWarnings("PMD.AvoidUsingHardCodedIP") -public class FlywayFactory { - - static final String MIGRATION_TABLE_FORMAT = "airbyte_%s_migrations"; - - // Constants for Flyway baseline. See here for details: - // https://flywaydb.org/documentation/command/baseline - static final String BASELINE_VERSION = "0.29.0.001"; - static final String BASELINE_DESCRIPTION = "Baseline from file-based migration v1"; - static final boolean BASELINE_ON_MIGRATION = true; - - /** - * Constructs a configured {@link Flyway} instance using the provided configuration. - * - * @param dataSource The {@link DataSource} used to connect to the database. - * @param installedBy The name of the module performing the migration. - * @param dbIdentifier The name of the database to be migrated. This is used to name the table to - * hold the migration history for the database. - * @param migrationFileLocations The array of migration files to be used. - * @return The configured {@link Flyway} instance. - */ - public static Flyway create(final DataSource dataSource, - final String installedBy, - final String dbIdentifier, - final String... migrationFileLocations) { - return create(dataSource, - installedBy, - dbIdentifier, - BASELINE_VERSION, - BASELINE_DESCRIPTION, - BASELINE_ON_MIGRATION, - migrationFileLocations); - } - - /** - * Constructs a configured {@link Flyway} instance using the provided configuration. 
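 - * <p>
 - * A hedged usage sketch, not from this diff: the data-source and DSL-context variables and the
 - * version string are illustrative, and it assumes {@link DatabaseMigrationCheck} exposes the same
 - * {@code check()} entry point as its availability-check counterpart:
 - *
 - * <pre>
 - * final Flyway flyway = FlywayFactory.create(configsDataSource, "airbyte-bootloader",
 - *     ConfigsDatabaseMigrator.DB_IDENTIFIER, ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION);
 - * // Block until the configs database is reachable and migrated to at least this version.
 - * DatabaseCheckFactory
 - *     .createConfigsDatabaseMigrationCheck(dslContext, flyway, "0.35.15.001",
 - *         DatabaseConstants.DEFAULT_ASSERT_DATABASE_TIMEOUT_MS)
 - *     .check();
 - * </pre>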
- * - * @param dataSource The {@link DataSource} used to connect to the database. - * @param installedBy The name of the module performing the migration. - * @param dbIdentifier The name of the database to be migrated. This is used to name the table to - * hold the migration history for the database. - * @param baselineVersion The version to tag an existing schema with when executing baseline. - * @param baselineDescription The description to tag an existing schema with when executing - * baseline. - * @param baselineOnMigrate Whether to automatically call baseline when migrate is executed against - * a non-empty schema with no schema history table. - * @param migrationFileLocations The array of migration files to be used. - * @return The configured {@link Flyway} instance. - */ - public static Flyway create(final DataSource dataSource, - final String installedBy, - final String dbIdentifier, - final String baselineVersion, - final String baselineDescription, - final boolean baselineOnMigrate, - final String... migrationFileLocations) { - return Flyway.configure() - .dataSource(dataSource) - .baselineVersion(baselineVersion) - .baselineDescription(baselineDescription) - .baselineOnMigrate(baselineOnMigrate) - .installedBy(installedBy) - .table(String.format(MIGRATION_TABLE_FORMAT, dbIdentifier)) - .locations(migrationFileLocations) - .load(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/init/DatabaseInitializationException.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/init/DatabaseInitializationException.java deleted file mode 100644 index 81cfa6d63b6f9..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/init/DatabaseInitializationException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.init; - -/** - * Custom exception that represents a failure that occurs during an attempt to initialize a - * database. - */ -public class DatabaseInitializationException extends Exception { - - public DatabaseInitializationException(final String message) { - super(message); - } - - public DatabaseInitializationException(final String message, final Throwable cause) { - super(message, cause); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/init/DatabaseInitializer.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/init/DatabaseInitializer.java deleted file mode 100644 index 028551eca95b1..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/init/DatabaseInitializer.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.init; - -import static org.jooq.impl.DSL.select; - -import io.airbyte.db.Database; -import io.airbyte.db.ExceptionWrappingDatabase; -import io.airbyte.db.check.DatabaseAvailabilityCheck; -import io.airbyte.db.check.DatabaseCheckException; -import java.io.IOException; -import java.util.Collection; -import java.util.Optional; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.slf4j.Logger; - -/** - * Performs the initialization of the configured database if the database is available and has not - * yet been initialized. - * - * In the future, this logic could be completely removed if the schema initialization script is - * converted to a migration script. - */ -public interface DatabaseInitializer { - - /** - * Initializes the configured database by using the following steps: - * - *
 - * <ol>
 - * <li>Verify that the database is available and accepting connections</li>
 - * <li>Verify that the database is populated with the initial schema. If not, create the initial
 - * schema.</li>
 - * </ol>
    - * - * @throws DatabaseInitializationException if unable to verify the database availability. - */ - default void initialize() throws DatabaseInitializationException { - // Verify that the database is up and reachable first - final Optional availabilityCheck = getDatabaseAvailabilityCheck(); - if (availabilityCheck.isPresent()) { - try { - availabilityCheck.get().check(); - final Optional dslContext = getDslContext(); - if (dslContext.isPresent()) { - final Database database = new Database(dslContext.get()); - new ExceptionWrappingDatabase(database).transaction(this::initializeSchema); - } else { - throw new DatabaseInitializationException("Database configuration not present."); - } - } catch (final DatabaseCheckException | IOException e) { - throw new DatabaseInitializationException("Database availability check failed.", e); - } - } else { - throw new DatabaseInitializationException("Availability check not configured."); - } - } - - /** - * Tests whether the provided table exists in the database. - * - * @param ctx A {@link DSLContext} used to query the database. - * @param tableName The name of the table. - * @return {@code True} if the table exists or {@code false} otherwise. - */ - default boolean hasTable(final DSLContext ctx, final String tableName) { - return ctx.fetchExists(select() - .from("information_schema.tables") - .where(DSL.field("table_name").eq(tableName) - .and(DSL.field("table_schema").eq("public")))); - } - - /** - * Initializes the schema in the database represented by the provided {@link DSLContext} instance. - * - * If the initial tables already exist in the database, initialization is skipped. Otherwise, the - * script provided by the {@link #getInitialSchema()} method is executed against the database. - * - * @param ctx The {@link DSLContext} used to execute the schema initialization. - * @return {@code true} indicating that the operation ran - */ - default boolean initializeSchema(final DSLContext ctx) { - final Optional> tableNames = getTableNames(); - - if (tableNames.isPresent()) { - // Verify that all the required tables are present - if (tableNames.get().stream().allMatch(tableName -> hasTable(ctx, tableName))) { - getLogger().info("The {} database is initialized", getDatabaseName()); - } else { - getLogger().info("The {} database has not been initialized; initializing it with schema: \n{}", getDatabaseName(), - getInitialSchema()); - ctx.execute(getInitialSchema()); - getLogger().info("The {} database successfully initialized with schema: \n{}.", getDatabaseName(), getInitialSchema()); - } - return true; - } else { - getLogger().warn("Initial collection of table names is empty. Cannot perform schema check."); - return false; - } - } - - /** - * Retrieves the {@link DatabaseAvailabilityCheck} used to verify that the database is running and - * available. - * - * @return The {@link DatabaseAvailabilityCheck}. - */ - Optional getDatabaseAvailabilityCheck(); - - /** - * Retrieves the configured database name to be tested. - * - * @return The name of the database to test. - */ - String getDatabaseName(); - - /** - * Retrieves the configured {@link DSLContext} to be used to test the database availability. - * - * @return The configured {@link DSLContext} object. - */ - Optional getDslContext(); - - /** - * Retrieve the initial schema to be applied to the database if the database is not already - * populated with the expected table(s). - * - * @return The initial schema. 
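 - * <p>
 - * The script returned here is what {@code initializeSchema} ultimately hands to
 - * {@code ctx.execute(...)} above. A hedged assembly sketch ({@code dslContext} is assumed to be
 - * built elsewhere):
 - *
 - * <pre>
 - * final String schema = MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH);
 - * new ConfigsDatabaseInitializer(
 - *     new ConfigsDatabaseAvailabilityCheck(dslContext, DatabaseConstants.DEFAULT_CONNECTION_TIMEOUT_MS),
 - *     dslContext,
 - *     schema).initialize(); // throws DatabaseInitializationException on failure
 - * </pre>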
- */ - String getInitialSchema(); - - /** - * Retrieves the configured {@link Logger} object to be used to record progress of the migration - * check. - * - * @return The configured {@link Logger} object. - */ - Logger getLogger(); - - /** - * The collection of table names that will be used to confirm database availability. - * - * @return The collection of database table names. - */ - Optional> getTableNames(); - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/init/impl/ConfigsDatabaseInitializer.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/init/impl/ConfigsDatabaseInitializer.java deleted file mode 100644 index 2c819315739eb..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/init/impl/ConfigsDatabaseInitializer.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.init.impl; - -import io.airbyte.db.check.DatabaseAvailabilityCheck; -import io.airbyte.db.check.impl.ConfigsDatabaseAvailabilityCheck; -import io.airbyte.db.init.DatabaseInitializer; -import io.airbyte.db.instance.DatabaseConstants; -import java.util.Collection; -import java.util.Optional; -import org.jooq.DSLContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Implementation of the {@link DatabaseInitializer} for the Configurations database that creates - * the schema if it does not currently exist. - */ -public class ConfigsDatabaseInitializer implements DatabaseInitializer { - - private static final Logger LOGGER = LoggerFactory.getLogger(ConfigsDatabaseInitializer.class); - - // TODO inject via dependency injection framework - private final ConfigsDatabaseAvailabilityCheck databaseAvailablityCheck; - - // TODO inject via dependency injection framework - private final DSLContext dslContext; - - // TODO inject via dependency injection framework - private final String initialSchema; - - public ConfigsDatabaseInitializer(final ConfigsDatabaseAvailabilityCheck databaseAvailablityCheck, - final DSLContext dslContext, - final String initialSchema) { - this.databaseAvailablityCheck = databaseAvailablityCheck; - this.dslContext = dslContext; - this.initialSchema = initialSchema; - } - - @Override - public Optional getDatabaseAvailabilityCheck() { - return Optional.ofNullable(databaseAvailablityCheck); - } - - @Override - public String getDatabaseName() { - return DatabaseConstants.CONFIGS_DATABASE_LOGGING_NAME; - } - - @Override - public Optional getDslContext() { - return Optional.ofNullable(dslContext); - } - - @Override - public String getInitialSchema() { - return initialSchema; - } - - @Override - public Logger getLogger() { - return LOGGER; - } - - @Override - public Optional> getTableNames() { - return Optional.of(DatabaseConstants.CONFIGS_INITIAL_EXPECTED_TABLES); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/init/impl/JobsDatabaseInitializer.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/init/impl/JobsDatabaseInitializer.java deleted file mode 100644 index 06530da0ad4d6..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/init/impl/JobsDatabaseInitializer.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.init.impl; - -import io.airbyte.db.check.DatabaseAvailabilityCheck; -import io.airbyte.db.check.impl.JobsDatabaseAvailabilityCheck; -import io.airbyte.db.init.DatabaseInitializer; -import io.airbyte.db.instance.DatabaseConstants; -import java.util.Collection; -import java.util.Optional; -import org.jooq.DSLContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Implementation of the {@link DatabaseInitializer} for the Jobs database that creates the schema - * if it does not currently exist. - */ -public class JobsDatabaseInitializer implements DatabaseInitializer { - - private static final Logger LOGGER = LoggerFactory.getLogger(JobsDatabaseInitializer.class); - - // TODO inject via dependency injection framework - private final JobsDatabaseAvailabilityCheck databaseAvailablityCheck; - - // TODO inject via dependency injection framework - private final DSLContext dslContext; - - // TODO inject via dependency injection framework - private final String initialSchema; - - public JobsDatabaseInitializer(final JobsDatabaseAvailabilityCheck databaseAvailablityCheck, - final DSLContext dslContext, - final String initialSchema) { - this.databaseAvailablityCheck = databaseAvailablityCheck; - this.dslContext = dslContext; - this.initialSchema = initialSchema; - } - - @Override - public Optional getDatabaseAvailabilityCheck() { - return Optional.ofNullable(databaseAvailablityCheck); - } - - @Override - public String getDatabaseName() { - return DatabaseConstants.JOBS_DATABASE_LOGGING_NAME; - } - - @Override - public Optional getDslContext() { - return Optional.ofNullable(dslContext); - } - - @Override - public String getInitialSchema() { - return initialSchema; - } - - @Override - public Logger getLogger() { - return LOGGER; - } - - @Override - public Optional> getTableNames() { - return Optional.of(DatabaseConstants.JOBS_INITIAL_EXPECTED_TABLES); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/DatabaseConstants.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/DatabaseConstants.java deleted file mode 100644 index bc25da817dcb6..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/DatabaseConstants.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance; - -import io.airbyte.db.instance.jobs.JobsDatabaseSchema; -import java.util.Collections; -import java.util.Set; - -/** - * Collection of database related constants. - */ -public final class DatabaseConstants { - - /** - * Logical name of the Configurations database. - */ - public static final String CONFIGS_DATABASE_LOGGING_NAME = "airbyte configs"; - - /** - * Collection of tables expected to be present in the Configurations database after creation. - */ - public static final Set CONFIGS_INITIAL_EXPECTED_TABLES = Collections.singleton("airbyte_configs"); - - /** - * Path to the script that contains the initial schema definition for the Configurations database. - */ - public static final String CONFIGS_INITIAL_SCHEMA_PATH = "configs_database/schema.sql"; - - public static final String CONFIGS_SCHEMA_DUMP_PATH = "src/main/resources/configs_database/schema_dump.txt"; - - /** - * Logical name of the Jobs database. - */ - public static final String JOBS_DATABASE_LOGGING_NAME = "airbyte jobs"; - - /** - * Collection of tables expected to be present in the Jobs database after creation. 
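 - * <p>
 - * Sketch of how the initializers above consume this set (mirrors the {@code allMatch} probe in
 - * {@code DatabaseInitializer#initializeSchema}):
 - *
 - * <pre>
 - * final boolean initialized = DatabaseConstants.JOBS_INITIAL_EXPECTED_TABLES.stream()
 - *     .allMatch(tableName -> hasTable(ctx, tableName));
 - * </pre>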
- */ - public static final Set JOBS_INITIAL_EXPECTED_TABLES = JobsDatabaseSchema.getTableNames(); - - /** - * Path to the script that contains the initial schema definition for the Jobs database. - */ - public static final String JOBS_INITIAL_SCHEMA_PATH = "jobs_database/schema.sql"; - - public static final String JOBS_SCHEMA_DUMP_PATH = "src/main/resources/jobs_database/schema_dump.txt"; - - /** - * Default database connection timeout in milliseconds. - */ - public static final long DEFAULT_CONNECTION_TIMEOUT_MS = 30 * 1000; - - /** - * Default amount of time to wait to assert that a database has been migrated, in milliseconds. - */ - public static final long DEFAULT_ASSERT_DATABASE_TIMEOUT_MS = 2 * DEFAULT_CONNECTION_TIMEOUT_MS; - - /** - * Private constructor to prevent instantiation. - */ - private DatabaseConstants() {} - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/DatabaseMigrator.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/DatabaseMigrator.java deleted file mode 100644 index b91534fbe69d8..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/DatabaseMigrator.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance; - -import java.io.IOException; -import java.util.List; -import org.flywaydb.core.api.MigrationInfo; -import org.flywaydb.core.api.output.BaselineResult; -import org.flywaydb.core.api.output.MigrateResult; - -public interface DatabaseMigrator { - - /** - * Run migration. - */ - MigrateResult migrate(); - - /** - * List migration information. - */ - List list(); - - /** - * Get the latest migration information. - */ - MigrationInfo getLatestMigration(); - - /** - * Setup Flyway migration in a database and create baseline. - */ - BaselineResult createBaseline(); - - /** - * Dump the current database schema. - */ - String dumpSchema() throws IOException; - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/FlywayDatabaseMigrator.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/FlywayDatabaseMigrator.java deleted file mode 100644 index 614c45b9717b4..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/FlywayDatabaseMigrator.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.db.Database; -import io.airbyte.db.ExceptionWrappingDatabase; -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; -import org.flywaydb.core.Flyway; -import org.flywaydb.core.api.MigrationInfo; -import org.flywaydb.core.api.MigrationInfoService; -import org.flywaydb.core.api.output.BaselineResult; -import org.flywaydb.core.api.output.MigrateResult; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class FlywayDatabaseMigrator implements DatabaseMigrator { - - private static final Logger LOGGER = LoggerFactory.getLogger(FlywayDatabaseMigrator.class); - - private final Database database; - private final Flyway flyway; - - public FlywayDatabaseMigrator(final Database database, final Flyway flyway) { - this.database = database; - this.flyway = flyway; - } - - @Override - public MigrateResult migrate() { - final MigrateResult result = flyway.migrate(); - result.warnings.forEach(LOGGER::warn); - return result; - } - - @Override - public List list() { - final MigrationInfoService result = flyway.info(); - result.getInfoResult().warnings.forEach(LOGGER::warn); - return Arrays.asList(result.all()); - } - - @Override - public MigrationInfo getLatestMigration() { - return flyway.info().current(); - } - - @Override - public BaselineResult createBaseline() { - final BaselineResult result = flyway.baseline(); - result.warnings.forEach(LOGGER::warn); - return result; - } - - @Override - public String dumpSchema() throws IOException { - return getDisclaimer() + new ExceptionWrappingDatabase(database).query(ctx -> ctx.meta().ddl().queryStream() - .map(query -> query.toString() + ";") - .filter(statement -> !statement.startsWith("create schema")) - .collect(Collectors.joining("\n"))); - } - - protected String getDisclaimer() { - return """ - // The content of the file is just to have a basic idea of the current state of the database and is not fully accurate.\040 - // It is also not used by any piece of code to generate anything.\040 - // It doesn't contain the enums created in the database and the default values might also be buggy.\s - """ + '\n'; - } - - @VisibleForTesting - public Database getDatabase() { - return database; - } - - @VisibleForTesting - public Flyway getFlyway() { - return flyway; - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/FlywayMigrationDatabase.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/FlywayMigrationDatabase.java deleted file mode 100644 index 85a92456e3e99..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/FlywayMigrationDatabase.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance; - -import io.airbyte.db.Database; -import io.airbyte.db.factory.DSLContextFactory; -import io.airbyte.db.factory.DataSourceFactory; -import io.airbyte.db.factory.FlywayFactory; -import io.airbyte.db.init.DatabaseInitializationException; -import java.io.IOException; -import java.sql.Connection; -import javax.sql.DataSource; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.jooq.impl.DSL; -import org.jooq.meta.postgres.PostgresDatabase; -import org.jooq.tools.StringUtils; -import org.jooq.tools.jdbc.JDBCUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testcontainers.containers.PostgreSQLContainer; - -/** - * Custom database for jOOQ code generation. It performs the following operations: - *
 - * <ul>
 - * <li>Run Flyway migration.</li>
 - * <li>Dump the database schema.</li>
 - * <li>Create a connection for jOOQ code generation.</li>
 - * </ul>
    - * Reference: https://github.com/sabomichal/jooq-meta-postgres-flyway - */ -public abstract class FlywayMigrationDatabase extends PostgresDatabase { - - private static final Logger LOGGER = LoggerFactory.getLogger(FlywayMigrationDatabase.class); - - private static final String DEFAULT_DOCKER_IMAGE = "postgres:13-alpine"; - - private Connection connection; - - private DataSource dataSource; - - private DSLContext dslContext; - - protected abstract Database getDatabase(DSLContext dslContext) throws IOException; - - protected abstract DatabaseMigrator getDatabaseMigrator(Database database, Flyway flyway); - - protected abstract String getInstalledBy(); - - protected abstract String getDbIdentifier(); - - protected abstract String[] getMigrationFileLocations(); - - protected abstract void initializeDatabase(final DSLContext dslContext) throws DatabaseInitializationException, IOException; - - @Override - protected DSLContext create0() { - return DSL.using(getInternalConnection(), SQLDialect.POSTGRES); - } - - protected Connection getInternalConnection() { - if (connection == null) { - try { - createInternalConnection(); - } catch (final Exception e) { - throw new RuntimeException("Failed to launch postgres container and run migration", e); - } - } - return connection; - } - - private void createInternalConnection() throws Exception { - String dockerImage = getProperties().getProperty("dockerImage"); - if (StringUtils.isBlank(dockerImage)) { - dockerImage = DEFAULT_DOCKER_IMAGE; - } - - final PostgreSQLContainer container = new PostgreSQLContainer<>(dockerImage) - .withDatabaseName("jooq_airbyte_configs") - .withUsername("jooq_generator") - .withPassword("jooq_generator"); - container.start(); - - dataSource = - DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); - dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); - - initializeDatabase(dslContext); - - final Flyway flyway = FlywayFactory.create(dataSource, getInstalledBy(), getDbIdentifier(), getMigrationFileLocations()); - final Database database = getDatabase(dslContext); - final DatabaseMigrator migrator = getDatabaseMigrator(database, flyway); - migrator.migrate(); - - connection = dataSource.getConnection(); - setConnection(connection); - } - - @Override - public void close() { - JDBCUtils.safeClose(connection); - connection = null; - dslContext.close(); - try { - DataSourceFactory.close(dataSource); - } catch (final Exception e) { - LOGGER.warn("Unable to close data source.", e); - } - super.close(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/TableSchema.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/TableSchema.java deleted file mode 100644 index 330ebae4869b3..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/TableSchema.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance; - -import com.fasterxml.jackson.databind.JsonNode; - -public interface TableSchema { - - /** - * @return table name in lower case - */ - String getTableName(); - - /** - * @return the table definition in JsonSchema - */ - JsonNode getTableDefinition(); - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrationDevCenter.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrationDevCenter.java deleted file mode 100644 index e28407fbc5074..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrationDevCenter.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs; - -import io.airbyte.db.Database; -import io.airbyte.db.factory.FlywayFactory; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.db.instance.FlywayDatabaseMigrator; -import io.airbyte.db.instance.development.MigrationDevCenter; -import javax.sql.DataSource; -import org.flywaydb.core.Flyway; - -/** - * Helper class for migration development. See README for details. - */ -public class ConfigsDatabaseMigrationDevCenter extends MigrationDevCenter { - - public ConfigsDatabaseMigrationDevCenter() { - super("configs", DatabaseConstants.CONFIGS_SCHEMA_DUMP_PATH, DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH); - } - - @Override - protected FlywayDatabaseMigrator getMigrator(final Database database, final Flyway flyway) { - return new ConfigsDatabaseMigrator(database, flyway); - } - - @Override - protected Flyway getFlyway(final DataSource dataSource) { - return FlywayFactory.create(dataSource, getClass().getSimpleName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, - ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrator.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrator.java deleted file mode 100644 index daf1e7130455f..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrator.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs; - -import io.airbyte.db.Database; -import io.airbyte.db.instance.FlywayDatabaseMigrator; -import org.flywaydb.core.Flyway; - -public class ConfigsDatabaseMigrator extends FlywayDatabaseMigrator { - - public static final String DB_IDENTIFIER = "configs"; - public static final String MIGRATION_FILE_LOCATION = "classpath:io/airbyte/db/instance/configs/migrations"; - - public ConfigsDatabaseMigrator(final Database database, final Flyway flyway) { - super(database, flyway); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseTables.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseTables.java deleted file mode 100644 index 3d7f97ba3de10..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseTables.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.configs; - -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -public enum ConfigsDatabaseTables { - - ACTOR, - ACTOR_DEFINITION, - ACTOR_OAUTH_PARAMETER, - CONNECTION, - CONNECTION_OPERATION, - OPERATION, - STATE, - WORKSPACE; - - public String getTableName() { - return name().toLowerCase(); - } - - /** - * @return table names in lower case - */ - public static Set getTableNames() { - return Stream.of(ConfigsDatabaseTables.values()).map(ConfigsDatabaseTables::getTableName).collect(Collectors.toSet()); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseTestProvider.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseTestProvider.java deleted file mode 100644 index 209dc29f6ddd8..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseTestProvider.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs; - -import static org.jooq.impl.DSL.field; -import static org.jooq.impl.DSL.table; - -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.config.ConfigSchema; -import io.airbyte.db.Database; -import io.airbyte.db.ExceptionWrappingDatabase; -import io.airbyte.db.factory.DatabaseCheckFactory; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.db.instance.DatabaseMigrator; -import io.airbyte.db.instance.test.TestDatabaseProvider; -import java.io.IOException; -import java.time.OffsetDateTime; -import java.util.UUID; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; -import org.jooq.JSONB; - -public class ConfigsDatabaseTestProvider implements TestDatabaseProvider { - - private final DSLContext dslContext; - private final Flyway flyway; - - public ConfigsDatabaseTestProvider(final DSLContext dslContext, final Flyway flyway) { - this.dslContext = dslContext; - this.flyway = flyway; - } - - @Override - public Database create(final boolean runMigration) throws IOException, DatabaseInitializationException { - final String initalSchema = MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH); - DatabaseCheckFactory.createConfigsDatabaseInitializer(dslContext, DatabaseConstants.DEFAULT_CONNECTION_TIMEOUT_MS, initalSchema).initialize(); - - final Database database = new Database(dslContext); - - if (runMigration) { - final DatabaseMigrator migrator = new ConfigsDatabaseMigrator(database, flyway); - migrator.createBaseline(); - migrator.migrate(); - } else { - // The configs database is considered ready only if there are some seed records. - // So we need to create at least one record here. 
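 - // A synthetic STATE config with an empty JSONB blob is the cheapest record that satisfies
 - // that readiness probe, hence the insert below.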
- final OffsetDateTime timestamp = OffsetDateTime.now(); - new ExceptionWrappingDatabase(database).transaction(ctx -> ctx.insertInto(table("airbyte_configs")) - .set(field("config_id"), UUID.randomUUID().toString()) - .set(field("config_type"), ConfigSchema.STATE.name()) - .set(field("config_blob"), JSONB.valueOf("{}")) - .set(field("created_at"), timestamp) - .set(field("updated_at"), timestamp) - .execute()); - } - - return database; - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsFlywayMigrationDatabase.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsFlywayMigrationDatabase.java deleted file mode 100644 index de5788ade7e91..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/ConfigsFlywayMigrationDatabase.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs; - -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.db.Database; -import io.airbyte.db.factory.DatabaseCheckFactory; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.db.instance.DatabaseMigrator; -import io.airbyte.db.instance.FlywayMigrationDatabase; -import java.io.IOException; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; - -/** - * Configs database for jOOQ code generation. - */ -public class ConfigsFlywayMigrationDatabase extends FlywayMigrationDatabase { - - @Override - protected Database getDatabase(final DSLContext dslContext) throws IOException { - return new Database(dslContext); - } - - @Override - protected DatabaseMigrator getDatabaseMigrator(final Database database, final Flyway flyway) { - return new ConfigsDatabaseMigrator(database, flyway); - } - - @Override - protected String getInstalledBy() { - return ConfigsFlywayMigrationDatabase.class.getSimpleName(); - } - - @Override - protected String getDbIdentifier() { - return ConfigsDatabaseMigrator.DB_IDENTIFIER; - } - - @Override - protected String[] getMigrationFileLocations() { - return new String[] {ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION}; - } - - @Override - protected void initializeDatabase(final DSLContext dslContext) throws DatabaseInitializationException, IOException { - final String initialSchema = MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH); - DatabaseCheckFactory.createConfigsDatabaseInitializer(dslContext, DatabaseConstants.DEFAULT_CONNECTION_TIMEOUT_MS, initialSchema).initialize(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state.java deleted file mode 100644 index a54c317dea5a6..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.configs.migrations; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.StandardSyncState; -import io.airbyte.config.State; -import io.airbyte.db.Database; -import io.airbyte.db.factory.DSLContextFactory; -import io.airbyte.db.factory.DatabaseDriver; -import java.sql.SQLException; -import java.time.OffsetDateTime; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.JSONB; -import org.jooq.SQLDialect; -import org.jooq.Table; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Copy the latest job state for each standard sync to the config database. - */ -public class V0_30_22_001__Store_last_sync_state extends BaseJavaMigration { - - private static final String MIGRATION_NAME = "Configs db migration 0.30.22.001"; - private static final Logger LOGGER = LoggerFactory.getLogger(V0_30_22_001__Store_last_sync_state.class); - - // airbyte configs table - // (we cannot use the jooq generated code here to avoid circular dependency) - static final Table TABLE_AIRBYTE_CONFIGS = DSL.table("airbyte_configs"); - static final Field COLUMN_CONFIG_TYPE = DSL.field("config_type", SQLDataType.VARCHAR(60).nullable(false)); - static final Field COLUMN_CONFIG_ID = DSL.field("config_id", SQLDataType.VARCHAR(36).nullable(false)); - static final Field COLUMN_CONFIG_BLOB = DSL.field("config_blob", SQLDataType.JSONB.nullable(false)); - static final Field COLUMN_CREATED_AT = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE); - static final Field COLUMN_UPDATED_AT = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - final DSLContext ctx = DSL.using(context.getConnection()); - - final Optional jobsDatabase = getJobsDatabase(context.getConfiguration().getUser(), - context.getConfiguration().getPassword(), context.getConfiguration().getUrl()); - if (jobsDatabase.isPresent()) { - copyData(ctx, getStandardSyncStates(jobsDatabase.get()), OffsetDateTime.now()); - } - } - - @VisibleForTesting - static void copyData(final DSLContext ctx, final Set standardSyncStates, final OffsetDateTime timestamp) { - LOGGER.info("[{}] Number of connection states to copy: {}", MIGRATION_NAME, standardSyncStates.size()); - - for (final StandardSyncState standardSyncState : standardSyncStates) { - ctx.insertInto(TABLE_AIRBYTE_CONFIGS) - .set(COLUMN_CONFIG_TYPE, ConfigSchema.STANDARD_SYNC_STATE.name()) - .set(COLUMN_CONFIG_ID, standardSyncState.getConnectionId().toString()) - .set(COLUMN_CONFIG_BLOB, JSONB.valueOf(Jsons.serialize(standardSyncState))) - .set(COLUMN_CREATED_AT, timestamp) - .set(COLUMN_UPDATED_AT, timestamp) - // This migration is idempotent. If the record for a sync_id already exists, - // it means that the migration has already been run before. Abort insertion. - .onDuplicateKeyIgnore() - .execute(); - } - } - - /** - * This migration requires a connection to the job database, which may be a separate database from - * the config database. 
However, the job database only exists in production, not in development or - * test. We use the job database environment variables to determine how to connect to the job - * database. This approach is not 100% reliable. However, it is better than doing half of the - * migration here (creating the table), and the rest of the work during server start up (copying the - * data from the job database). - */ - @VisibleForTesting - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - static Optional getJobsDatabase(final String databaseUser, final String databasePassword, final String databaseUrl) { - try { - if (databaseUrl == null || "".equals(databaseUrl.trim())) { - throw new IllegalArgumentException("The databaseUrl cannot be empty."); - } - // If the environment variables exist, it means the migration is run in production. - // Connect to the official job database. - final DSLContext dslContext = - DSLContextFactory.create(databaseUser, databasePassword, DatabaseDriver.POSTGRESQL.getDriverClassName(), databaseUrl, SQLDialect.POSTGRES); - final Database jobsDatabase = new Database(dslContext); - LOGGER.info("[{}] Connected to jobs database: {}", MIGRATION_NAME, databaseUrl); - return Optional.of(jobsDatabase); - } catch (final IllegalArgumentException e) { - // If the environment variables do not exist, it means the migration is run in development. - LOGGER.info("[{}] This is the dev environment; there is no jobs database", MIGRATION_NAME); - return Optional.empty(); - } - } - - /** - * @return a set of StandardSyncStates from the latest attempt for each connection. - */ - @VisibleForTesting - static Set getStandardSyncStates(final Database jobsDatabase) throws SQLException { - final Table jobsTable = DSL.table("jobs"); - final Field jobId = DSL.field("jobs.id", SQLDataType.BIGINT); - final Field connectionId = DSL.field("jobs.scope", SQLDataType.VARCHAR); - - final Table attemptsTable = DSL.table("attempts"); - final Field attemptJobId = DSL.field("attempts.job_id", SQLDataType.BIGINT); - final Field attemptCreatedAt = DSL.field("attempts.created_at", SQLDataType.TIMESTAMPWITHTIMEZONE); - - // output schema: JobOutput.yaml - // sync schema: StandardSyncOutput.yaml - // state schema: State.yaml, e.g. 
{ "state": { "cursor": 1000 } } - final Field attemptState = DSL.field("attempts.output -> 'sync' -> 'state'", SQLDataType.JSONB); - - return jobsDatabase.query(ctx -> ctx - .select(connectionId, attemptState) - .distinctOn(connectionId) - .from(attemptsTable) - .innerJoin(jobsTable) - .on(jobId.eq(attemptJobId)) - .where(attemptState.isNotNull()) - // this query assumes that an attempt with larger created_at field is always a newer attempt - .orderBy(connectionId, attemptCreatedAt.desc()) - .fetch() - .stream() - .map(r -> getStandardSyncState(UUID.fromString(r.value1()), Jsons.deserialize(r.value2().data(), State.class)))) - .collect(Collectors.toSet()); - } - - @VisibleForTesting - static StandardSyncState getStandardSyncState(final UUID connectionId, final State state) { - return new StandardSyncState().withConnectionId(connectionId).withState(state); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization.java deleted file mode 100644 index 32323327903a6..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization.java +++ /dev/null @@ -1,923 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import static org.jooq.impl.DSL.asterisk; -import static org.jooq.impl.DSL.currentOffsetDateTime; -import static org.jooq.impl.DSL.foreignKey; -import static org.jooq.impl.DSL.primaryKey; -import static org.jooq.impl.DSL.select; -import static org.jooq.impl.DSL.table; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.AirbyteConfig; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.ConfigWithMetadata; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncState; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.db.jdbc.JdbcUtils; -import java.time.OffsetDateTime; -import java.time.ZoneOffset; -import java.util.List; -import java.util.UUID; -import java.util.stream.Collectors; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.Catalog; -import org.jooq.DSLContext; -import org.jooq.EnumType; -import org.jooq.Field; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.Result; -import org.jooq.Schema; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.jooq.impl.SchemaImpl; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -public class V0_32_8_001__AirbyteConfigDatabaseDenormalization extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_32_8_001__AirbyteConfigDatabaseDenormalization.class); - - @Override - public void migrate(final Context context) throws Exception { - - // Warning: please do not use any 
jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - final DSLContext ctx = DSL.using(context.getConnection()); - migrate(ctx); - } - - @VisibleForTesting - public static void migrate(final DSLContext ctx) { - createEnums(ctx); - createAndPopulateWorkspace(ctx); - createAndPopulateActorDefinition(ctx); - createAndPopulateActor(ctx); - crateAndPopulateActorOauthParameter(ctx); - createAndPopulateOperation(ctx); - createAndPopulateConnection(ctx); - createAndPopulateState(ctx); - } - - private static void createEnums(final DSLContext ctx) { - ctx.createType("source_type").asEnum("api", "file", JdbcUtils.DATABASE_KEY, "custom").execute(); - LOGGER.info("source_type enum created"); - ctx.createType("actor_type").asEnum("source", "destination").execute(); - LOGGER.info("actor_type enum created"); - ctx.createType("operator_type").asEnum("normalization", "dbt").execute(); - LOGGER.info("operator_type enum created"); - ctx.createType("namespace_definition_type").asEnum("source", "destination", "customformat").execute(); - LOGGER.info("namespace_definition_type enum created"); - ctx.createType("status_type").asEnum("active", "inactive", "deprecated").execute(); - LOGGER.info("status_type enum created"); - } - - private static void createAndPopulateWorkspace(final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field name = DSL.field("name", SQLDataType.VARCHAR(256).nullable(false)); - final Field slug = DSL.field("slug", SQLDataType.VARCHAR(256).nullable(false)); - final Field initialSetupComplete = DSL.field("initial_setup_complete", SQLDataType.BOOLEAN.nullable(false)); - final Field customerId = DSL.field("customer_id", SQLDataType.UUID.nullable(true)); - final Field email = DSL.field("email", SQLDataType.VARCHAR(256).nullable(true)); - final Field anonymousDataCollection = DSL.field("anonymous_data_collection", SQLDataType.BOOLEAN.nullable(true)); - final Field sendNewsletter = DSL.field("send_newsletter", SQLDataType.BOOLEAN.nullable(true)); - final Field sendSecurityUpdates = DSL.field("send_security_updates", SQLDataType.BOOLEAN.nullable(true)); - final Field displaySetupWizard = DSL.field("display_setup_wizard", SQLDataType.BOOLEAN.nullable(true)); - final Field tombstone = DSL.field("tombstone", SQLDataType.BOOLEAN.nullable(false).defaultValue(false)); - final Field notifications = DSL.field("notifications", SQLDataType.JSONB.nullable(true)); - final Field firstSyncComplete = DSL.field("first_sync_complete", SQLDataType.BOOLEAN.nullable(true)); - final Field feedbackComplete = DSL.field("feedback_complete", SQLDataType.BOOLEAN.nullable(true)); - final Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - ctx.createTableIfNotExists("workspace") - .columns(id, - customerId, - name, - slug, - email, - initialSetupComplete, - anonymousDataCollection, - sendNewsletter, - sendSecurityUpdates, - displaySetupWizard, - tombstone, - notifications, - firstSyncComplete, - feedbackComplete, - createdAt, - updatedAt) - .constraints(primaryKey(id)) - .execute(); - LOGGER.info("workspace table created"); - final List> configsWithMetadata = listConfigsWithMetadata(ConfigSchema.STANDARD_WORKSPACE, - 
StandardWorkspace.class, - ctx); - - for (final ConfigWithMetadata configWithMetadata : configsWithMetadata) { - final StandardWorkspace standardWorkspace = configWithMetadata.getConfig(); - ctx.insertInto(DSL.table("workspace")) - .set(id, standardWorkspace.getWorkspaceId()) - .set(customerId, standardWorkspace.getCustomerId()) - .set(name, standardWorkspace.getName()) - .set(slug, standardWorkspace.getSlug()) - .set(email, standardWorkspace.getEmail()) - .set(initialSetupComplete, standardWorkspace.getInitialSetupComplete()) - .set(anonymousDataCollection, standardWorkspace.getAnonymousDataCollection()) - .set(sendNewsletter, standardWorkspace.getNews()) - .set(sendSecurityUpdates, standardWorkspace.getSecurityUpdates()) - .set(displaySetupWizard, standardWorkspace.getDisplaySetupWizard()) - .set(tombstone, standardWorkspace.getTombstone() != null && standardWorkspace.getTombstone()) - .set(notifications, JSONB.valueOf(Jsons.serialize(standardWorkspace.getNotifications()))) - .set(firstSyncComplete, standardWorkspace.getFirstCompletedSync()) - .set(feedbackComplete, standardWorkspace.getFeedbackDone()) - .set(createdAt, OffsetDateTime.ofInstant(configWithMetadata.getCreatedAt(), ZoneOffset.UTC)) - .set(updatedAt, OffsetDateTime.ofInstant(configWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) - .execute(); - } - LOGGER.info("workspace table populated with " + configsWithMetadata.size() + " records"); - } - - private static void createAndPopulateActorDefinition(final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field name = DSL.field("name", SQLDataType.VARCHAR(256).nullable(false)); - final Field dockerRepository = DSL.field("docker_repository", SQLDataType.VARCHAR(256).nullable(false)); - final Field dockerImageTag = DSL.field("docker_image_tag", SQLDataType.VARCHAR(256).nullable(false)); - final Field documentationUrl = DSL.field("documentation_url", SQLDataType.VARCHAR(256).nullable(true)); - final Field spec = DSL.field("spec", SQLDataType.JSONB.nullable(false)); - final Field icon = DSL.field("icon", SQLDataType.VARCHAR(256).nullable(true)); - final Field actorType = DSL.field("actor_type", SQLDataType.VARCHAR.asEnumDataType(ActorType.class).nullable(false)); - final Field sourceType = DSL.field("source_type", SQLDataType.VARCHAR.asEnumDataType(SourceType.class).nullable(true)); - final Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - ctx.createTableIfNotExists("actor_definition") - .columns(id, - name, - dockerRepository, - dockerImageTag, - documentationUrl, - icon, - actorType, - sourceType, - spec, - createdAt, - updatedAt) - .constraints(primaryKey(id)) - .execute(); - - LOGGER.info("actor_definition table created"); - - final List> sourceDefinitionsWithMetadata = listConfigsWithMetadata( - ConfigSchema.STANDARD_SOURCE_DEFINITION, - StandardSourceDefinition.class, - ctx); - - for (final ConfigWithMetadata configWithMetadata : sourceDefinitionsWithMetadata) { - final StandardSourceDefinition standardSourceDefinition = configWithMetadata.getConfig(); - ctx.insertInto(DSL.table("actor_definition")) - .set(id, standardSourceDefinition.getSourceDefinitionId()) - .set(name, standardSourceDefinition.getName()) - .set(dockerRepository, standardSourceDefinition.getDockerRepository()) - .set(dockerImageTag, 
standardSourceDefinition.getDockerImageTag()) - .set(documentationUrl, standardSourceDefinition.getDocumentationUrl()) - .set(icon, standardSourceDefinition.getIcon()) - .set(actorType, ActorType.source) - .set(sourceType, standardSourceDefinition.getSourceType() == null ? null - : Enums.toEnum(standardSourceDefinition.getSourceType().value(), SourceType.class).orElseThrow()) - .set(spec, JSONB.valueOf(Jsons.serialize(standardSourceDefinition.getSpec()))) - .set(createdAt, OffsetDateTime.ofInstant(configWithMetadata.getCreatedAt(), ZoneOffset.UTC)) - .set(updatedAt, OffsetDateTime.ofInstant(configWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) - .execute(); - } - LOGGER.info("actor_definition table populated with " + sourceDefinitionsWithMetadata.size() + " source definition records"); - - final List> destinationDefinitionsWithMetadata = listConfigsWithMetadata( - ConfigSchema.STANDARD_DESTINATION_DEFINITION, - StandardDestinationDefinition.class, - ctx); - - for (final ConfigWithMetadata configWithMetadata : destinationDefinitionsWithMetadata) { - final StandardDestinationDefinition standardDestinationDefinition = configWithMetadata.getConfig(); - ctx.insertInto(DSL.table("actor_definition")) - .set(id, standardDestinationDefinition.getDestinationDefinitionId()) - .set(name, standardDestinationDefinition.getName()) - .set(dockerRepository, standardDestinationDefinition.getDockerRepository()) - .set(dockerImageTag, standardDestinationDefinition.getDockerImageTag()) - .set(documentationUrl, standardDestinationDefinition.getDocumentationUrl()) - .set(icon, standardDestinationDefinition.getIcon()) - .set(actorType, ActorType.destination) - .set(spec, JSONB.valueOf(Jsons.serialize(standardDestinationDefinition.getSpec()))) - .set(createdAt, OffsetDateTime.ofInstant(configWithMetadata.getCreatedAt(), ZoneOffset.UTC)) - .set(updatedAt, OffsetDateTime.ofInstant(configWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) - .execute(); - } - LOGGER.info("actor_definition table populated with " + destinationDefinitionsWithMetadata.size() + " destination definition records"); - } - - private static void createAndPopulateActor(final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field name = DSL.field("name", SQLDataType.VARCHAR(256).nullable(false)); - final Field actorDefinitionId = DSL.field("actor_definition_id", SQLDataType.UUID.nullable(false)); - final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(false)); - final Field configuration = DSL.field("configuration", SQLDataType.JSONB.nullable(false)); - final Field actorType = DSL.field("actor_type", SQLDataType.VARCHAR.asEnumDataType(ActorType.class).nullable(false)); - final Field tombstone = DSL.field("tombstone", SQLDataType.BOOLEAN.nullable(false).defaultValue(false)); - final Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - ctx.createTableIfNotExists("actor") - .columns(id, - workspaceId, - actorDefinitionId, - name, - configuration, - actorType, - tombstone, - createdAt, - updatedAt) - .constraints(primaryKey(id), - foreignKey(workspaceId).references("workspace", "id").onDeleteCascade(), - foreignKey(actorDefinitionId).references("actor_definition", "id").onDeleteCascade()) - .execute(); - ctx.createIndex("actor_actor_definition_id_idx").on("actor", 
"actor_definition_id").execute(); - - LOGGER.info("actor table created"); - - final List> sourcesWithMetadata = listConfigsWithMetadata( - ConfigSchema.SOURCE_CONNECTION, - SourceConnection.class, - ctx); - long sourceRecords = 0L; - for (final ConfigWithMetadata configWithMetadata : sourcesWithMetadata) { - final SourceConnection sourceConnection = configWithMetadata.getConfig(); - if (workspaceDoesNotExist(sourceConnection.getWorkspaceId(), ctx)) { - LOGGER.warn( - "Skipping source connection " + sourceConnection.getSourceId() + " because the specified workspace " + sourceConnection.getWorkspaceId() - + " doesn't exist and violates foreign key constraint."); - continue; - } else if (actorDefinitionDoesNotExist(sourceConnection.getSourceDefinitionId(), ctx)) { - LOGGER.warn( - "Skipping source connection " + sourceConnection.getSourceId() + " because the specified source definition " - + sourceConnection.getSourceDefinitionId() - + " doesn't exist and violates foreign key constraint."); - continue; - } - - ctx.insertInto(DSL.table("actor")) - .set(id, sourceConnection.getSourceId()) - .set(workspaceId, sourceConnection.getWorkspaceId()) - .set(actorDefinitionId, sourceConnection.getSourceDefinitionId()) - .set(name, sourceConnection.getName()) - .set(configuration, JSONB.valueOf(Jsons.serialize(sourceConnection.getConfiguration()))) - .set(actorType, ActorType.source) - .set(tombstone, sourceConnection.getTombstone() != null && sourceConnection.getTombstone()) - .set(createdAt, OffsetDateTime.ofInstant(configWithMetadata.getCreatedAt(), ZoneOffset.UTC)) - .set(updatedAt, OffsetDateTime.ofInstant(configWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) - .execute(); - sourceRecords++; - } - LOGGER.info("actor table populated with " + sourceRecords + " source records"); - - final List> destinationsWithMetadata = listConfigsWithMetadata( - ConfigSchema.DESTINATION_CONNECTION, - DestinationConnection.class, - ctx); - long destinationRecords = 0L; - for (final ConfigWithMetadata configWithMetadata : destinationsWithMetadata) { - final DestinationConnection destinationConnection = configWithMetadata.getConfig(); - if (workspaceDoesNotExist(destinationConnection.getWorkspaceId(), ctx)) { - LOGGER.warn( - "Skipping destination connection " + destinationConnection.getDestinationId() + " because the specified workspace " - + destinationConnection.getWorkspaceId() - + " doesn't exist and violates foreign key constraint."); - continue; - } else if (actorDefinitionDoesNotExist(destinationConnection.getDestinationDefinitionId(), ctx)) { - LOGGER.warn( - "Skipping destination connection " + destinationConnection.getDestinationId() + " because the specified source definition " - + destinationConnection.getDestinationDefinitionId() - + " doesn't exist and violates foreign key constraint."); - continue; - } - - ctx.insertInto(DSL.table("actor")) - .set(id, destinationConnection.getDestinationId()) - .set(workspaceId, destinationConnection.getWorkspaceId()) - .set(actorDefinitionId, destinationConnection.getDestinationDefinitionId()) - .set(name, destinationConnection.getName()) - .set(configuration, JSONB.valueOf(Jsons.serialize(destinationConnection.getConfiguration()))) - .set(actorType, ActorType.destination) - .set(tombstone, destinationConnection.getTombstone() != null && destinationConnection.getTombstone()) - .set(createdAt, OffsetDateTime.ofInstant(configWithMetadata.getCreatedAt(), ZoneOffset.UTC)) - .set(updatedAt, OffsetDateTime.ofInstant(configWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) - 
.execute(); - destinationRecords++; - } - LOGGER.info("actor table populated with " + destinationRecords + " destination records"); - } - - @VisibleForTesting - static boolean workspaceDoesNotExist(final UUID workspaceId, final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - return !ctx.fetchExists(select() - .from(table("workspace")) - .where(id.eq(workspaceId))); - } - - @VisibleForTesting - static boolean actorDefinitionDoesNotExist(final UUID definitionId, final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - return !ctx.fetchExists(select() - .from(table("actor_definition")) - .where(id.eq(definitionId))); - } - - @VisibleForTesting - static boolean actorDoesNotExist(final UUID actorId, final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - return !ctx.fetchExists(select() - .from(table("actor")) - .where(id.eq(actorId))); - } - - @VisibleForTesting - static boolean connectionDoesNotExist(final UUID connectionId, final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - return !ctx.fetchExists(select() - .from(table("connection")) - .where(id.eq(connectionId))); - } - - @VisibleForTesting - static boolean operationDoesNotExist(final UUID operationId, final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - return !ctx.fetchExists(select() - .from(table("operation")) - .where(id.eq(operationId))); - } - - private static void crateAndPopulateActorOauthParameter(final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field actorDefinitionId = DSL.field("actor_definition_id", SQLDataType.UUID.nullable(false)); - final Field configuration = DSL.field("configuration", SQLDataType.JSONB.nullable(false)); - final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(true)); - final Field actorType = DSL.field("actor_type", SQLDataType.VARCHAR.asEnumDataType(ActorType.class).nullable(false)); - final Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - ctx.createTableIfNotExists("actor_oauth_parameter") - .columns(id, - workspaceId, - actorDefinitionId, - configuration, - actorType, - createdAt, - updatedAt) - .constraints(primaryKey(id), - foreignKey(workspaceId).references("workspace", "id").onDeleteCascade(), - foreignKey(actorDefinitionId).references("actor_definition", "id").onDeleteCascade()) - .execute(); - - LOGGER.info("actor_oauth_parameter table created"); - - final List> sourceOauthParamsWithMetadata = listConfigsWithMetadata( - ConfigSchema.SOURCE_OAUTH_PARAM, - SourceOAuthParameter.class, - ctx); - long sourceOauthParamRecords = 0L; - for (final ConfigWithMetadata configWithMetadata : sourceOauthParamsWithMetadata) { - final SourceOAuthParameter sourceOAuthParameter = configWithMetadata.getConfig(); - if (workspaceDoesNotExist(sourceOAuthParameter.getWorkspaceId(), ctx)) { - LOGGER.warn( - "Skipping source oauth parameter " + sourceOAuthParameter.getOauthParameterId() + " because the specified workspace " - + sourceOAuthParameter.getWorkspaceId() - + " doesn't exist and violates foreign key constraint."); - continue; - } else if 
(actorDefinitionDoesNotExist(sourceOAuthParameter.getSourceDefinitionId(), ctx)) { - LOGGER.warn( - "Skipping source oauth parameter " + sourceOAuthParameter.getOauthParameterId() + " because the specified source definition " - + sourceOAuthParameter.getSourceDefinitionId() - + " doesn't exist and violates foreign key constraint."); - continue; - } - ctx.insertInto(DSL.table("actor_oauth_parameter")) - .set(id, sourceOAuthParameter.getOauthParameterId()) - .set(workspaceId, sourceOAuthParameter.getWorkspaceId()) - .set(actorDefinitionId, sourceOAuthParameter.getSourceDefinitionId()) - .set(configuration, JSONB.valueOf(Jsons.serialize(sourceOAuthParameter.getConfiguration()))) - .set(actorType, ActorType.source) - .set(createdAt, OffsetDateTime.ofInstant(configWithMetadata.getCreatedAt(), ZoneOffset.UTC)) - .set(updatedAt, OffsetDateTime.ofInstant(configWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) - .execute(); - sourceOauthParamRecords++; - } - - LOGGER.info("actor_oauth_parameter table populated with " + sourceOauthParamRecords + " source oauth params records"); - - final List> destinationOauthParamsWithMetadata = listConfigsWithMetadata( - ConfigSchema.DESTINATION_OAUTH_PARAM, - DestinationOAuthParameter.class, - ctx); - long destinationOauthParamRecords = 0L; - for (final ConfigWithMetadata configWithMetadata : destinationOauthParamsWithMetadata) { - final DestinationOAuthParameter destinationOAuthParameter = configWithMetadata.getConfig(); - if (workspaceDoesNotExist(destinationOAuthParameter.getWorkspaceId(), ctx)) { - LOGGER.warn( - "Skipping destination oauth parameter " + destinationOAuthParameter.getOauthParameterId() + " because the specified workspace " - + destinationOAuthParameter.getWorkspaceId() - + " doesn't exist and violates foreign key constraint."); - continue; - } else if (actorDefinitionDoesNotExist(destinationOAuthParameter.getDestinationDefinitionId(), ctx)) { - LOGGER.warn( - "Skipping destination oauth parameter " + destinationOAuthParameter.getOauthParameterId() - + " because the specified destination definition " - + destinationOAuthParameter.getDestinationDefinitionId() - + " doesn't exist and violates foreign key constraint."); - continue; - } - ctx.insertInto(DSL.table("actor_oauth_parameter")) - .set(id, destinationOAuthParameter.getOauthParameterId()) - .set(workspaceId, destinationOAuthParameter.getWorkspaceId()) - .set(actorDefinitionId, destinationOAuthParameter.getDestinationDefinitionId()) - .set(configuration, JSONB.valueOf(Jsons.serialize(destinationOAuthParameter.getConfiguration()))) - .set(actorType, ActorType.destination) - .set(createdAt, OffsetDateTime.ofInstant(configWithMetadata.getCreatedAt(), ZoneOffset.UTC)) - .set(updatedAt, OffsetDateTime.ofInstant(configWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) - .execute(); - destinationOauthParamRecords++; - } - - LOGGER.info("actor_oauth_parameter table populated with " + destinationOauthParamRecords + " destination oauth params records"); - } - - private static void createAndPopulateOperation(final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(false)); - final Field name = DSL.field("name", SQLDataType.VARCHAR(256).nullable(false)); - final Field operatorType = DSL.field("operator_type", SQLDataType.VARCHAR.asEnumDataType(OperatorType.class).nullable(false)); - final Field operatorNormalization = DSL.field("operator_normalization", SQLDataType.JSONB.nullable(true)); -
final Field operatorDbt = DSL.field("operator_dbt", SQLDataType.JSONB.nullable(true)); - final Field tombstone = DSL.field("tombstone", SQLDataType.BOOLEAN.nullable(false).defaultValue(false)); - final Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - ctx.createTableIfNotExists("operation") - .columns(id, - workspaceId, - name, - operatorType, - operatorNormalization, - operatorDbt, - tombstone, - createdAt, - updatedAt) - .constraints(primaryKey(id), - foreignKey(workspaceId).references("workspace", "id").onDeleteCascade()) - .execute(); - - LOGGER.info("operation table created"); - - final List> configsWithMetadata = listConfigsWithMetadata( - ConfigSchema.STANDARD_SYNC_OPERATION, - StandardSyncOperation.class, - ctx); - long standardSyncOperationRecords = 0L; - for (final ConfigWithMetadata configWithMetadata : configsWithMetadata) { - final StandardSyncOperation standardSyncOperation = configWithMetadata.getConfig(); - if (workspaceDoesNotExist(standardSyncOperation.getWorkspaceId(), ctx)) { - LOGGER.warn( - "Skipping standard sync operation " + standardSyncOperation.getOperationId() + " because the specified workspace " - + standardSyncOperation.getWorkspaceId() - + " doesn't exist and violates foreign key constraint."); - continue; - } - ctx.insertInto(DSL.table("operation")) - .set(id, standardSyncOperation.getOperationId()) - .set(workspaceId, standardSyncOperation.getWorkspaceId()) - .set(name, standardSyncOperation.getName()) - .set(operatorType, standardSyncOperation.getOperatorType() == null ? null - : Enums.toEnum(standardSyncOperation.getOperatorType().value(), OperatorType.class).orElseThrow()) - .set(operatorNormalization, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorNormalization()))) - .set(operatorDbt, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorDbt()))) - .set(tombstone, standardSyncOperation.getTombstone() != null && standardSyncOperation.getTombstone()) - .set(createdAt, OffsetDateTime.ofInstant(configWithMetadata.getCreatedAt(), ZoneOffset.UTC)) - .set(updatedAt, OffsetDateTime.ofInstant(configWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) - .execute(); - standardSyncOperationRecords++; - } - - LOGGER.info("operation table populated with " + standardSyncOperationRecords + " records"); - } - - private static void createConnectionOperation(final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field connectionId = DSL.field("connection_id", SQLDataType.UUID.nullable(false)); - final Field operationId = DSL.field("operation_id", SQLDataType.UUID.nullable(false)); - final Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - ctx.createTableIfNotExists("connection_operation") - .columns(id, - connectionId, - operationId, - createdAt, - updatedAt) - .constraints(primaryKey(id, connectionId, operationId), - foreignKey(connectionId).references("connection", "id").onDeleteCascade(), - foreignKey(operationId).references("operation", "id").onDeleteCascade()) - .execute(); - LOGGER.info("connection_operation table created"); - } - - private static 
void createAndPopulateConnection(final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field namespaceDefinition = DSL - .field("namespace_definition", SQLDataType.VARCHAR.asEnumDataType(NamespaceDefinitionType.class).nullable(false)); - final Field namespaceFormat = DSL.field("namespace_format", SQLDataType.VARCHAR(256).nullable(true)); - final Field prefix = DSL.field("prefix", SQLDataType.VARCHAR(256).nullable(true)); - final Field sourceId = DSL.field("source_id", SQLDataType.UUID.nullable(false)); - final Field destinationId = DSL.field("destination_id", SQLDataType.UUID.nullable(false)); - final Field name = DSL.field("name", SQLDataType.VARCHAR(256).nullable(false)); - final Field catalog = DSL.field("catalog", SQLDataType.JSONB.nullable(false)); - final Field status = DSL.field("status", SQLDataType.VARCHAR.asEnumDataType(StatusType.class).nullable(true)); - final Field schedule = DSL.field("schedule", SQLDataType.JSONB.nullable(true)); - final Field manual = DSL.field("manual", SQLDataType.BOOLEAN.nullable(false)); - final Field resourceRequirements = DSL.field("resource_requirements", SQLDataType.JSONB.nullable(true)); - final Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - ctx.createTableIfNotExists("connection") - .columns(id, - namespaceDefinition, - namespaceFormat, - prefix, - sourceId, - destinationId, - name, - catalog, - status, - schedule, - manual, - resourceRequirements, - createdAt, - updatedAt) - .constraints(primaryKey(id), - foreignKey(sourceId).references("actor", "id").onDeleteCascade(), - foreignKey(destinationId).references("actor", "id").onDeleteCascade()) - .execute(); - ctx.createIndex("connection_source_id_idx").on("connection", "source_id").execute(); - ctx.createIndex("connection_destination_id_idx").on("connection", "destination_id").execute(); - - LOGGER.info("connection table created"); - createConnectionOperation(ctx); - - final List> configsWithMetadata = listConfigsWithMetadata( - ConfigSchema.STANDARD_SYNC, - StandardSync.class, - ctx); - long standardSyncRecords = 0L; - for (final ConfigWithMetadata configWithMetadata : configsWithMetadata) { - final StandardSync standardSync = configWithMetadata.getConfig(); - if (actorDoesNotExist(standardSync.getSourceId(), ctx)) { - LOGGER.warn( - "Skipping standard sync " + standardSync.getConnectionId() + " because the specified source " + standardSync.getSourceId() - + " doesn't exist and violates foreign key constraint."); - continue; - } else if (actorDoesNotExist(standardSync.getDestinationId(), ctx)) { - LOGGER.warn( - "Skipping standard sync " + standardSync.getConnectionId() + " because the specified destination " + standardSync.getDestinationId() - + " doesn't exist and violates foreign key constraint."); - continue; - } - ctx.insertInto(DSL.table("connection")) - .set(id, standardSync.getConnectionId()) - .set(namespaceDefinition, standardSync.getNamespaceDefinition() == null ? 
null - : Enums.toEnum(standardSync.getNamespaceDefinition().value(), NamespaceDefinitionType.class).orElseThrow()) - .set(namespaceFormat, standardSync.getNamespaceFormat()) - .set(prefix, standardSync.getPrefix()) - .set(sourceId, standardSync.getSourceId()) - .set(destinationId, standardSync.getDestinationId()) - .set(name, standardSync.getName()) - .set(catalog, JSONB.valueOf(Jsons.serialize(standardSync.getCatalog()))) - .set(status, standardSync.getStatus() == null ? null : Enums.toEnum(standardSync.getStatus().value(), StatusType.class).orElseThrow()) - .set(schedule, JSONB.valueOf(Jsons.serialize(standardSync.getSchedule()))) - .set(manual, standardSync.getManual()) - .set(resourceRequirements, JSONB.valueOf(Jsons.serialize(standardSync.getResourceRequirements()))) - .set(createdAt, OffsetDateTime.ofInstant(configWithMetadata.getCreatedAt(), ZoneOffset.UTC)) - .set(updatedAt, OffsetDateTime.ofInstant(configWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) - .execute(); - standardSyncRecords++; - populateConnectionOperation(ctx, configWithMetadata); - } - - LOGGER.info("connection table populated with " + standardSyncRecords + " records"); - } - - private static void createAndPopulateState(final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field connectionId = DSL.field("connection_id", SQLDataType.UUID.nullable(false)); - final Field state = DSL.field("state", SQLDataType.JSONB.nullable(true)); - final Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - ctx.createTableIfNotExists("state") - .columns(id, - connectionId, - state, - createdAt, - updatedAt) - .constraints(primaryKey(id, connectionId), - foreignKey(connectionId).references("connection", "id").onDeleteCascade()) - .execute(); - - LOGGER.info("state table created"); - - final List> configsWithMetadata = listConfigsWithMetadata( - ConfigSchema.STANDARD_SYNC_STATE, - StandardSyncState.class, - ctx); - long standardSyncStateRecords = 0L; - for (final ConfigWithMetadata configWithMetadata : configsWithMetadata) { - final StandardSyncState standardSyncState = configWithMetadata.getConfig(); - if (connectionDoesNotExist(standardSyncState.getConnectionId(), ctx)) { - LOGGER.warn( - "Skipping standard sync state because the specified standard sync " + standardSyncState.getConnectionId() - + " doesn't exist and violates foreign key constraint."); - continue; - } - ctx.insertInto(DSL.table("state")) - .set(id, UUID.randomUUID()) - .set(connectionId, standardSyncState.getConnectionId()) - .set(state, JSONB.valueOf(Jsons.serialize(standardSyncState.getState()))) - .set(createdAt, OffsetDateTime.ofInstant(configWithMetadata.getCreatedAt(), ZoneOffset.UTC)) - .set(updatedAt, OffsetDateTime.ofInstant(configWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) - .execute(); - standardSyncStateRecords++; - } - - LOGGER.info("state table populated with " + standardSyncStateRecords + " records"); - } - - private static void populateConnectionOperation(final DSLContext ctx, - final ConfigWithMetadata standardSyncWithMetadata) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field connectionId = DSL.field("connection_id", SQLDataType.UUID.nullable(false)); - final Field operationId = DSL.field("operation_id", SQLDataType.UUID.nullable(false)); - final 
Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - final StandardSync standardSync = standardSyncWithMetadata.getConfig(); - - if (connectionDoesNotExist(standardSync.getConnectionId(), ctx)) { - LOGGER.warn( - "Skipping connection_operations because the specified standard sync " + standardSync.getConnectionId() - + " doesn't exist and violates foreign key constraint."); - return; - } - long connectionOperationRecords = 0L; - for (final UUID operationIdFromStandardSync : standardSync.getOperationIds()) { - if (operationDoesNotExist(operationIdFromStandardSync, ctx)) { - LOGGER.warn( - "Skipping connection_operations because the specified standard sync operation " + operationIdFromStandardSync - + " doesn't exist and violates foreign key constraint."); - continue; - } - ctx.insertInto(DSL.table("connection_operation")) - .set(id, UUID.randomUUID()) - .set(connectionId, standardSync.getConnectionId()) - .set(operationId, operationIdFromStandardSync) - .set(createdAt, OffsetDateTime.ofInstant(standardSyncWithMetadata.getCreatedAt(), ZoneOffset.UTC)) - .set(updatedAt, OffsetDateTime.ofInstant(standardSyncWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) - .execute(); - connectionOperationRecords++; - } - LOGGER.info("connection_operation table populated with " + connectionOperationRecords + " records"); - } - - static List> listConfigsWithMetadata(final AirbyteConfig airbyteConfigType, - final Class clazz, - final DSLContext ctx) { - final Field configId = DSL.field("config_id", SQLDataType.VARCHAR(36).nullable(false)); - final Field configType = DSL.field("config_type", SQLDataType.VARCHAR(60).nullable(false)); - final Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field configBlob = DSL.field("config_blob", SQLDataType.JSONB.nullable(false)); - final Result results = ctx.select(asterisk()).from(DSL.table("airbyte_configs")).where(configType.eq(airbyteConfigType.name())).fetch(); - - return results.stream().map(record -> new ConfigWithMetadata<>( - record.get(configId), - record.get(configType), - record.get(createdAt).toInstant(), - record.get(updatedAt).toInstant(), - Jsons.deserialize(record.get(configBlob).data(), clazz))) - .collect(Collectors.toList()); - } - - public enum SourceType implements EnumType { - - api("api"), - file("file"), - database(JdbcUtils.DATABASE_KEY), - custom("custom"); - - private final String literal; - - SourceType(final String literal) { - this.literal = literal; - } - - @Override - public Catalog getCatalog() { - return getSchema() == null ? 
null : getSchema().getCatalog(); - } - - @Override - public Schema getSchema() { - return new SchemaImpl(DSL.name("public"), null); - - } - - @Override - public String getName() { - return "source_type"; - } - - @Override - public String getLiteral() { - return literal; - } - - } - - public enum NamespaceDefinitionType implements EnumType { - - source("source"), - destination("destination"), - customformat("customformat"); - - private final String literal; - - NamespaceDefinitionType(final String literal) { - this.literal = literal; - } - - @Override - public Catalog getCatalog() { - return getSchema() == null ? null : getSchema().getCatalog(); - } - - @Override - public Schema getSchema() { - return new SchemaImpl(DSL.name("public"), null); - } - - @Override - public String getName() { - return "namespace_definition_type"; - } - - @Override - public String getLiteral() { - return literal; - } - - } - - public enum StatusType implements EnumType { - - active("active"), - inactive("inactive"), - deprecated("deprecated"); - - private final String literal; - - StatusType(final String literal) { - this.literal = literal; - } - - @Override - public Catalog getCatalog() { - return getSchema() == null ? null : getSchema().getCatalog(); - } - - @Override - public Schema getSchema() { - return new SchemaImpl(DSL.name("public"), null); - } - - @Override - public String getName() { - return "status_type"; - } - - @Override - public String getLiteral() { - return literal; - } - - } - - public enum OperatorType implements EnumType { - - normalization("normalization"), - dbt("dbt"); - - private final String literal; - - OperatorType(final String literal) { - this.literal = literal; - } - - @Override - public Catalog getCatalog() { - return getSchema() == null ? null : getSchema().getCatalog(); - } - - @Override - public Schema getSchema() { - return new SchemaImpl(DSL.name("public"), null); - } - - @Override - public String getName() { - return "operator_type"; - } - - @Override - public String getLiteral() { - return literal; - } - - } - - public enum ActorType implements EnumType { - - source("source"), - destination("destination"); - - private final String literal; - - ActorType(final String literal) { - this.literal = literal; - } - - @Override - public Catalog getCatalog() { - return getSchema() == null ? null : getSchema().getCatalog(); - } - - @Override - public Schema getSchema() { - return new SchemaImpl(DSL.name("public"), null); - } - - @Override - public String getName() { - return "actor_type"; - } - - @Override - public String getLiteral() { - return literal; - } - - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_14_001__AddTombstoneToActorDefinition.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_14_001__AddTombstoneToActorDefinition.java deleted file mode 100644 index bc24b520d9f1d..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_14_001__AddTombstoneToActorDefinition.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_14_001__AddTombstoneToActorDefinition extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_14_001__AddTombstoneToActorDefinition.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - final DSLContext ctx = DSL.using(context.getConnection()); - addTombstoneColumn(ctx); - } - - public static void addTombstoneColumn(final DSLContext ctx) { - ctx.alterTable("actor_definition") - .addColumnIfNotExists(DSL.field("tombstone", SQLDataType.BOOLEAN.nullable(false).defaultValue(false))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition.java deleted file mode 100644 index 91a9ec05ff4f0..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.Catalog; -import org.jooq.DSLContext; -import org.jooq.EnumType; -import org.jooq.Schema; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.jooq.impl.SchemaImpl; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - final DSLContext ctx = DSL.using(context.getConnection()); - createReleaseStageEnum(ctx); - addReleaseStageColumn(ctx); - addReleaseDateColumn(ctx); - } - - public static void createReleaseStageEnum(final DSLContext ctx) { - ctx.createType("release_stage").asEnum("alpha", "beta", "generally_available", "custom").execute(); - } - - public static void addReleaseStageColumn(final DSLContext ctx) { - ctx.alterTable("actor_definition") - .addColumnIfNotExists(DSL.field("release_stage", SQLDataType.VARCHAR.asEnumDataType(ReleaseStage.class).nullable(true))) - .execute(); - } - - public static void addReleaseDateColumn(final DSLContext ctx) { - ctx.alterTable("actor_definition") - .addColumnIfNotExists(DSL.field("release_date", SQLDataType.DATE.nullable(true))) - .execute(); - } - - public enum ReleaseStage implements EnumType { - - alpha("alpha"), - beta("beta"), - generally_available("generally_available"), - custom("custom"); - - private final String literal; - - ReleaseStage(String literal) { - this.literal = literal; - } - - @Override - public Catalog getCatalog() { - return getSchema() == null ? 
null : getSchema().getCatalog(); - } - - @Override - public Schema getSchema() { - return new SchemaImpl(DSL.name("public"), null); - } - - @Override - public String getName() { - return "release_stage"; - } - - @Override - public String getLiteral() { - return literal; - } - - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_1_001__RemoveForeignKeyFromActorOauth.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_1_001__RemoveForeignKeyFromActorOauth.java deleted file mode 100644 index 8fe5d4ca4d843..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_1_001__RemoveForeignKeyFromActorOauth.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import static io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.actorDefinitionDoesNotExist; -import static io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.listConfigsWithMetadata; -import static org.jooq.impl.DSL.currentOffsetDateTime; -import static org.jooq.impl.DSL.select; -import static org.jooq.impl.DSL.table; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.ConfigWithMetadata; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; -import java.time.OffsetDateTime; -import java.time.ZoneOffset; -import java.util.List; -import java.util.UUID; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.JSONB; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -public class V0_35_1_001__RemoveForeignKeyFromActorOauth extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_1_001__RemoveForeignKeyFromActorOauth.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. 
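To make that recurring warning concrete: generated jOOQ classes mirror whatever the schema looks like at build time, so an old migration written against them can stop compiling once the schema moves on. Below is a minimal hedged sketch of the two styles, where the generated Tables class is hypothetical and only the plain-DSL form is what these migrations actually use.

import org.jooq.DSLContext;
import org.jooq.impl.DSL;
import org.jooq.impl.SQLDataType;

class MigrationStyleSketch {

  static void plainDsl(final DSLContext ctx) {
    // Stable across schema changes: table and column are resolved by name at runtime.
    ctx.select(DSL.field("id", SQLDataType.UUID))
        .from(DSL.table("actor_oauth_parameter"))
        .fetch();

    // What the warning forbids (Tables is a hypothetical generated class, shown for contrast):
    // ctx.selectFrom(Tables.ACTOR_OAUTH_PARAMETER).fetch();
  }

}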
- final DSLContext ctx = DSL.using(context.getConnection()); - migrate(ctx); - } - - @VisibleForTesting - public static void migrate(final DSLContext ctx) { - dropForeignKeyConstraintFromActorOauthTable(ctx); - populateActorOauthParameter(ctx); - } - - private static void dropForeignKeyConstraintFromActorOauthTable(final DSLContext ctx) { - ctx.alterTable("actor_oauth_parameter").dropForeignKey("actor_oauth_parameter_workspace_id_fkey").execute(); - LOGGER.info("actor_oauth_parameter_workspace_id_fkey constraint dropped"); - } - - private static void populateActorOauthParameter(final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field actorDefinitionId = DSL.field("actor_definition_id", SQLDataType.UUID.nullable(false)); - final Field configuration = DSL.field("configuration", SQLDataType.JSONB.nullable(false)); - final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(true)); - final Field actorType = DSL.field("actor_type", SQLDataType.VARCHAR.asEnumDataType(ActorType.class).nullable(false)); - final Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - final List> sourceOauthParamsWithMetadata = listConfigsWithMetadata( - ConfigSchema.SOURCE_OAUTH_PARAM, - SourceOAuthParameter.class, - ctx); - long sourceOauthParamRecords = 0L; - for (final ConfigWithMetadata configWithMetadata : sourceOauthParamsWithMetadata) { - final SourceOAuthParameter sourceOAuthParameter = configWithMetadata.getConfig(); - if (actorDefinitionDoesNotExist(sourceOAuthParameter.getSourceDefinitionId(), ctx)) { - LOGGER.warn( - "Skipping source oauth parameter " + sourceOAuthParameter.getOauthParameterId() + " because the specified source definition " - + sourceOAuthParameter.getSourceDefinitionId() - + " doesn't exist and violates foreign key constraint."); - continue; - } else if (actorOAuthParamExists(sourceOAuthParameter.getOauthParameterId(), ctx)) { - LOGGER.warn( - "Skipping source oauth parameter " + sourceOAuthParameter.getOauthParameterId() - + " because the specified parameter already exists in the table."); - continue; - } - ctx.insertInto(DSL.table("actor_oauth_parameter")) - .set(id, sourceOAuthParameter.getOauthParameterId()) - .set(workspaceId, sourceOAuthParameter.getWorkspaceId()) - .set(actorDefinitionId, sourceOAuthParameter.getSourceDefinitionId()) - .set(configuration, JSONB.valueOf(Jsons.serialize(sourceOAuthParameter.getConfiguration()))) - .set(actorType, ActorType.source) - .set(createdAt, OffsetDateTime.ofInstant(configWithMetadata.getCreatedAt(), ZoneOffset.UTC)) - .set(updatedAt, OffsetDateTime.ofInstant(configWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) - .execute(); - sourceOauthParamRecords++; - } - - LOGGER.info("actor_oauth_parameter table populated with " + sourceOauthParamRecords + " source oauth params records"); - - final List> destinationOauthParamsWithMetadata = listConfigsWithMetadata( - ConfigSchema.DESTINATION_OAUTH_PARAM, - DestinationOAuthParameter.class, - ctx); - long destinationOauthParamRecords = 0L; - for (final ConfigWithMetadata configWithMetadata : destinationOauthParamsWithMetadata) { - final DestinationOAuthParameter destinationOAuthParameter = configWithMetadata.getConfig(); - if (actorDefinitionDoesNotExist(destinationOAuthParameter.getDestinationDefinitionId(),
ctx)) { - LOGGER.warn( - "Skipping destination oauth parameter " + destinationOAuthParameter.getOauthParameterId() - + " because the specified destination definition " - + destinationOAuthParameter.getDestinationDefinitionId() - + " doesn't exist and violates foreign key constraint."); - continue; - } else if (actorOAuthParamExists(destinationOAuthParameter.getOauthParameterId(), ctx)) { - LOGGER.warn( - "Skipping destination oauth parameter " + destinationOAuthParameter.getOauthParameterId() - + " because the specified parameter already exists in the table."); - continue; - } - ctx.insertInto(DSL.table("actor_oauth_parameter")) - .set(id, destinationOAuthParameter.getOauthParameterId()) - .set(workspaceId, destinationOAuthParameter.getWorkspaceId()) - .set(actorDefinitionId, destinationOAuthParameter.getDestinationDefinitionId()) - .set(configuration, JSONB.valueOf(Jsons.serialize(destinationOAuthParameter.getConfiguration()))) - .set(actorType, ActorType.destination) - .set(createdAt, OffsetDateTime.ofInstant(configWithMetadata.getCreatedAt(), ZoneOffset.UTC)) - .set(updatedAt, OffsetDateTime.ofInstant(configWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) - .execute(); - destinationOauthParamRecords++; - } - - LOGGER.info("actor_oauth_parameter table populated with " + destinationOauthParamRecords + " destination oauth params records"); - } - - static boolean actorOAuthParamExists(final UUID oauthParamId, final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - return ctx.fetchExists(select() - .from(table("actor_oauth_parameter")) - .where(id.eq(oauthParamId))); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_26_001__PersistDiscoveredCatalog.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_26_001__PersistDiscoveredCatalog.java deleted file mode 100644 index 1fbc3a5895ca4..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_26_001__PersistDiscoveredCatalog.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import static org.jooq.impl.DSL.constraint; -import static org.jooq.impl.DSL.foreignKey; -import static org.jooq.impl.DSL.primaryKey; - -import com.google.common.annotations.VisibleForTesting; -import java.time.OffsetDateTime; -import java.util.UUID; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.JSONB; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_26_001__PersistDiscoveredCatalog extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_26_001__PersistDiscoveredCatalog.class); - private static final String ACTOR_CATALOG = "actor_catalog"; - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. 
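A related convention, visible in the DDL below and throughout these deleted migrations, is the use of jOOQ's idempotent variants (createTableIfNotExists, createIndexIfNotExists, addColumnIfNotExists), which let a partially applied migration be retried without erroring. A minimal sketch of the pattern, against a hypothetical example table:

import static org.jooq.impl.DSL.primaryKey;

import java.util.UUID;
import org.jooq.DSLContext;
import org.jooq.Field;
import org.jooq.impl.DSL;
import org.jooq.impl.SQLDataType;

class IdempotentDdlSketch {

  static void apply(final DSLContext ctx) {
    // "example" is an illustrative table name, not one from this changeset.
    final Field<UUID> id = DSL.field("id", SQLDataType.UUID.nullable(false));
    // Both statements no-op when the object already exists, so reruns are safe.
    ctx.createTableIfNotExists("example")
        .columns(id)
        .constraints(primaryKey(id))
        .execute();
    ctx.createIndexIfNotExists("example_id_idx").on("example", "id").execute();
  }

}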
- final DSLContext ctx = DSL.using(context.getConnection()); - migrate(ctx); - } - - @VisibleForTesting - public static void migrate(final DSLContext ctx) { - createActorCatalog(ctx); - createCatalogFetchEvent(ctx); - addConnectionTableForeignKey(ctx); - } - - private static void createActorCatalog(final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field catalog = DSL.field("catalog", SQLDataType.JSONB.nullable(false)); - final Field catalogHash = DSL.field("catalog_hash", SQLDataType.VARCHAR(32).nullable(false)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - ctx.createTableIfNotExists(ACTOR_CATALOG) - .columns(id, - catalog, - catalogHash, - createdAt) - .constraints(primaryKey(id)) - .execute(); - LOGGER.info("actor_catalog table created"); - ctx.createIndexIfNotExists("actor_catalog_catalog_hash_id_idx").on(ACTOR_CATALOG, "catalog_hash").execute(); - } - - private static void createCatalogFetchEvent(final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field actorCatalogId = DSL.field("actor_catalog_id", SQLDataType.UUID.nullable(false)); - final Field actorId = DSL.field("actor_id", SQLDataType.UUID.nullable(false)); - final Field configHash = DSL.field("config_hash", SQLDataType.VARCHAR(32).nullable(false)); - final Field actorVersion = DSL.field("actor_version", SQLDataType.VARCHAR(256).nullable(false)); - - ctx.createTableIfNotExists("actor_catalog_fetch_event") - .columns(id, - actorCatalogId, - actorId, - configHash, - actorVersion) - .constraints(primaryKey(id), - foreignKey(actorCatalogId).references(ACTOR_CATALOG, "id").onDeleteCascade(), - foreignKey(actorId).references("actor", "id").onDeleteCascade()) - .execute(); - LOGGER.info("actor_catalog_fetch_event table created"); - ctx.createIndexIfNotExists("actor_catalog_fetch_event_actor_id_idx").on("actor_catalog_fetch_event", "actor_id").execute(); - ctx.createIndexIfNotExists("actor_catalog_fetch_event_actor_catalog_id_idx").on("actor_catalog_fetch_event", "actor_catalog_id").execute(); - } - - private static void addConnectionTableForeignKey(final DSLContext ctx) { - final Field sourceCatalogId = DSL.field("source_catalog_id", SQLDataType.UUID.nullable(true)); - ctx.alterTable("connection") - .addIfNotExists(sourceCatalogId).execute(); - ctx.alterTable("connection") - .dropConstraintIfExists("connection_actor_catalog_id_fk").execute(); - ctx.alterTable("connection") - .add(constraint("connection_actor_catalog_id_fk").foreignKey(sourceCatalogId) - .references(ACTOR_CATALOG, "id").onDeleteCascade()) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_28_001__AddActorCatalogMetadataColumns.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_28_001__AddActorCatalogMetadataColumns.java deleted file mode 100644 index 5a87f6041e04f..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_28_001__AddActorCatalogMetadataColumns.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.db.instance.configs.migrations; - -import static org.jooq.impl.DSL.currentOffsetDateTime; - -import com.google.common.annotations.VisibleForTesting; -import java.time.OffsetDateTime; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_28_001__AddActorCatalogMetadataColumns extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger( - V0_35_28_001__AddActorCatalogMetadataColumns.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - final DSLContext ctx = DSL.using(context.getConnection()); - migrate(ctx); - } - - @VisibleForTesting - public static void migrate(final DSLContext ctx) { - final Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field modifiedAt = - DSL.field("modified_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - ctx.alterTable("actor_catalog") - .addIfNotExists(modifiedAt).execute(); - ctx.alterTable("actor_catalog_fetch_event") - .addIfNotExists(createdAt).execute(); - ctx.alterTable("actor_catalog_fetch_event") - .addIfNotExists(modifiedAt).execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_32_001__AddConnectorDefinitionResourceLimits.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_32_001__AddConnectorDefinitionResourceLimits.java deleted file mode 100644 index 4f1defedb7038..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_32_001__AddConnectorDefinitionResourceLimits.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_32_001__AddConnectorDefinitionResourceLimits extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_32_001__AddConnectorDefinitionResourceLimits.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - final DSLContext ctx = DSL.using(context.getConnection()); - addResourceReqsToActorDefs(ctx); - } - - public static void addResourceReqsToActorDefs(final DSLContext ctx) { - ctx.alterTable("actor_definition") - .addColumnIfNotExists(DSL.field("resource_requirements", SQLDataType.JSONB.nullable(true))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_3_001__DropAirbyteConfigsTable.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_3_001__DropAirbyteConfigsTable.java deleted file mode 100644 index df09d3fe9d6e1..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_3_001__DropAirbyteConfigsTable.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_3_001__DropAirbyteConfigsTable extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_3_001__DropAirbyteConfigsTable.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - final DSLContext ctx = DSL.using(context.getConnection()); - dropTable(ctx); - } - - public static void dropTable(final DSLContext ctx) { - ctx.dropTableIfExists(DSL.table("airbyte_configs")).execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_46_001__AddMissingIndices.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_46_001__AddMissingIndices.java deleted file mode 100644 index ae33e8439e9a4..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_46_001__AddMissingIndices.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_46_001__AddMissingIndices extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_46_001__AddMissingIndices.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - final DSLContext ctx = DSL.using(context.getConnection()); - - ctx.createIndexIfNotExists("actor_workspace_id_idx").on("actor", "workspace_id").execute(); - ctx.createIndexIfNotExists("connection_operation_connection_id_idx").on("connection_operation", "connection_id").execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_54_001__ChangeDefaultConnectionName.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_54_001__ChangeDefaultConnectionName.java deleted file mode 100644 index 053ebbc07c5f6..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_54_001__ChangeDefaultConnectionName.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import static org.jooq.impl.DSL.asterisk; -import static org.jooq.impl.DSL.table; - -import java.util.List; -import java.util.UUID; -import java.util.stream.Collectors; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.Record; -import org.jooq.Result; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_54_001__ChangeDefaultConnectionName extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_54_001__ChangeDefaultConnectionName.class); - private static final String NAME = "name"; - - public static void defaultConnectionName(final DSLContext ctx) { - LOGGER.info("Updating connection name column"); - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field name = DSL.field(NAME, SQLDataType.VARCHAR(256).nullable(false)); - final List connections = getConnections(ctx); - - for (final Connection connection : connections) { - final Actor sourceActor = getActor(connection.getSourceId(), ctx); - final Actor destinationActor = getActor(connection.getDestinationId(), ctx); - final String connectionName = sourceActor.getName() + " <> " + destinationActor.getName(); - - ctx.update(DSL.table("connection")) - .set(name, connectionName) - .where(id.eq(connection.getConnectionId())) - .execute(); - } - } - - static List getConnections(final DSLContext ctx) { - LOGGER.info("Get connections having name default"); - final Field name = DSL.field(NAME, SQLDataType.VARCHAR(36).nullable(false)); - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field sourceId = DSL.field("source_id", SQLDataType.UUID.nullable(false)); - final Field destinationId = DSL.field("destination_id", SQLDataType.UUID.nullable(false)); - - final Field connectionName = DSL.field(NAME, SQLDataType.VARCHAR(256).nullable(false)); - final Result 
results = ctx.select(asterisk()).from(table("connection")).where(connectionName.eq("default")).fetch(); - - return results.stream().map(record -> new Connection( - record.get(name), - record.get(id), - record.get(sourceId), - record.get(destinationId))) - .collect(Collectors.toList()); - } - - static Actor getActor(final UUID actorId, final DSLContext ctx) { - final Field name = DSL.field(NAME, SQLDataType.VARCHAR(36).nullable(false)); - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - - final Result results = ctx.select(asterisk()).from(table("actor")).where(id.eq(actorId)).fetch(); - - return results.stream() - .map(record -> new Actor(record.get(name))).toList().get(0); - } - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - final DSLContext ctx = DSL.using(context.getConnection()); - defaultConnectionName(ctx); - } - - public static class Actor { - - private final String name; - - public Actor(final String name) { - this.name = name; - } - - public String getName() { - return this.name; - } - - } - - public static class Connection { - - private final String name; - private final UUID connectionId; - private final UUID sourceId; - private final UUID destinationId; - - public Connection(final String name, final UUID id, final UUID sourceId, final UUID destinationId) { - this.name = name; - this.connectionId = id; - this.sourceId = sourceId; - this.destinationId = destinationId; - } - - public String getName() { - return this.name; - } - - public UUID getSourceId() { - return this.sourceId; - } - - public UUID getDestinationId() { - return this.destinationId; - } - - public UUID getConnectionId() { - return this.connectionId; - } - - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_56_001__AddWorkspaceSlugTombstoneIndex.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_56_001__AddWorkspaceSlugTombstoneIndex.java deleted file mode 100644 index 9e56910ff33db..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_56_001__AddWorkspaceSlugTombstoneIndex.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_56_001__AddWorkspaceSlugTombstoneIndex extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger( - V0_35_56_001__AddWorkspaceSlugTombstoneIndex.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - final DSLContext ctx = DSL.using(context.getConnection()); - ctx.createIndexIfNotExists("workspace_slug_idx").on("workspace", "slug").execute(); - ctx.createIndexIfNotExists("workspace_tombstone_idx").on("workspace", "tombstone").execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_59_001__AddPublicToActorDefinition.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_59_001__AddPublicToActorDefinition.java deleted file mode 100644 index e44f79cabcf4f..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_59_001__AddPublicToActorDefinition.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_59_001__AddPublicToActorDefinition extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_59_001__AddPublicToActorDefinition.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - final DSLContext ctx = DSL.using(context.getConnection()); - addPublicColumn(ctx); - } - - public static void addPublicColumn(final DSLContext ctx) { - ctx.alterTable("actor_definition") - .addColumnIfNotExists(DSL.field("public", SQLDataType.BOOLEAN.nullable(false).defaultValue(false))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_59_002__AddActorDefinitionWorkspaceGrantTable.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_59_002__AddActorDefinitionWorkspaceGrantTable.java deleted file mode 100644 index 73659efce08aa..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_59_002__AddActorDefinitionWorkspaceGrantTable.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.db.instance.configs.migrations; - -import static org.jooq.impl.DSL.foreignKey; -import static org.jooq.impl.DSL.unique; - -import java.util.UUID; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_59_002__AddActorDefinitionWorkspaceGrantTable extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_59_002__AddActorDefinitionWorkspaceGrantTable.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - final DSLContext ctx = DSL.using(context.getConnection()); - createActorDefinitionWorkspaceGrant(ctx); - } - - public static void createActorDefinitionWorkspaceGrant(final DSLContext ctx) { - final Field actorDefinitionId = DSL.field("actor_definition_id", SQLDataType.UUID.nullable(false)); - final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(false)); - ctx.createTableIfNotExists("actor_definition_workspace_grant") - .columns( - actorDefinitionId, - workspaceId) - .constraints( - unique(workspaceId, actorDefinitionId), - foreignKey(actorDefinitionId).references("actor_definition", "id").onDeleteCascade(), - foreignKey(workspaceId).references("workspace", "id").onDeleteCascade()) - .execute(); - LOGGER.info("actor_definition_workspace_grant table created"); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_59_003__AddCustomToActorDefinition.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_59_003__AddCustomToActorDefinition.java deleted file mode 100644 index ce96a53a3ccb1..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_59_003__AddCustomToActorDefinition.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_59_003__AddCustomToActorDefinition extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_59_003__AddCustomToActorDefinition.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. 
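The warning comment above is why every migration in this diff builds its DDL from string identifiers and `SQLDataType` literals instead of generated jOOQ table classes: names are resolved at runtime, so the migration keeps compiling no matter how far the live schema has drifted. A minimal sketch of that idiom, with a hypothetical table and column name:

```java
import org.jooq.DSLContext;
import org.jooq.impl.DSL;
import org.jooq.impl.SQLDataType;

final class RawDslSketch {

  // Everything is referenced by name at runtime; nothing here depends on
  // code generated from a particular schema snapshot.
  static void addFlagColumn(final DSLContext ctx) {
    ctx.alterTable("example_table") // hypothetical table
        .addColumnIfNotExists(DSL.field("example_flag",
            SQLDataType.BOOLEAN.nullable(false).defaultValue(false)))
        .execute();
  }

}
```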
- final DSLContext ctx = DSL.using(context.getConnection()); - addCustomColumn(ctx); - } - - public static void addCustomColumn(final DSLContext ctx) { - ctx.alterTable("actor_definition") - .addColumnIfNotExists(DSL.field("custom", SQLDataType.BOOLEAN.nullable(false).defaultValue(false))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_59_004__AddOauthParamIndex.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_59_004__AddOauthParamIndex.java deleted file mode 100644 index 3b3407c16d566..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_59_004__AddOauthParamIndex.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_59_004__AddOauthParamIndex extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_59_004__AddOauthParamIndex.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - final DSLContext ctx = DSL.using(context.getConnection()); - ctx.createIndexIfNotExists("actor_oauth_parameter_workspace_definition_idx").on("actor_oauth_parameter", "workspace_id", "actor_definition_id") - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_65_001__CreateWorkspaceServiceAccountTable.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_65_001__CreateWorkspaceServiceAccountTable.java deleted file mode 100644 index aa227aba110ac..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_65_001__CreateWorkspaceServiceAccountTable.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import static org.jooq.impl.DSL.currentOffsetDateTime; -import static org.jooq.impl.DSL.foreignKey; -import static org.jooq.impl.DSL.primaryKey; - -import java.time.OffsetDateTime; -import java.util.UUID; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.JSONB; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_65_001__CreateWorkspaceServiceAccountTable extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_65_001__CreateWorkspaceServiceAccountTable.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. 
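Each of these classes also relies on Flyway's Java-migration naming contract: the version and description are parsed straight out of the class name, and the version is recorded in Flyway's schema history table once `migrate()` completes. A minimal skeleton showing that contract; the class below is a hypothetical example, not one of the real migrations:

```java
import org.flywaydb.core.api.migration.BaseJavaMigration;
import org.flywaydb.core.api.migration.Context;
import org.jooq.DSLContext;
import org.jooq.impl.DSL;

// Flyway reads version "0.99.0.001" and description "AddExampleColumn"
// from the class name: V<version, underscores for dots>__<description>.
public class V0_99_0_001__AddExampleColumn extends BaseJavaMigration {

  @Override
  public void migrate(final Context context) throws Exception {
    final DSLContext ctx = DSL.using(context.getConnection());
    ctx.execute("select 1"); // real DDL would go here
  }

}
```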
- final DSLContext ctx = DSL.using(context.getConnection()); - createAndPopulateWorkspace(ctx); - } - - private static void createAndPopulateWorkspace(final DSLContext ctx) { - final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(false)); - final Field serviceAccountId = DSL.field("service_account_id", SQLDataType.VARCHAR(31).nullable(false)); - final Field serviceAccountEmail = DSL.field("service_account_email", SQLDataType.VARCHAR(256).nullable(false)); - final Field jsonCredential = DSL.field("json_credential", SQLDataType.JSONB.nullable(false)); - final Field hmacKey = DSL.field("hmac_key", SQLDataType.JSONB.nullable(false)); - final Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - ctx.createTableIfNotExists("workspace_service_account") - .columns(workspaceId, - serviceAccountId, - serviceAccountEmail, - jsonCredential, - hmacKey, - createdAt, - updatedAt) - .constraints(primaryKey(workspaceId, serviceAccountId), - foreignKey(workspaceId).references("workspace", "id").onDeleteCascade()) - .execute(); - LOGGER.info("workspace_service_account table created"); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_36_3_001__AddScheduleTypeToConfigsTable.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_36_3_001__AddScheduleTypeToConfigsTable.java deleted file mode 100644 index 064cfc78df3f8..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_36_3_001__AddScheduleTypeToConfigsTable.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.Catalog; -import org.jooq.DSLContext; -import org.jooq.EnumType; -import org.jooq.Schema; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.jooq.impl.SchemaImpl; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_36_3_001__AddScheduleTypeToConfigsTable extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_36_3_001__AddScheduleTypeToConfigsTable.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - final DSLContext ctx = DSL.using(context.getConnection()); - createScheduleTypeEnum(ctx); - addPublicColumn(ctx); - } - - private static void createScheduleTypeEnum(final DSLContext ctx) { - ctx.createType("schedule_type").asEnum("manual", "basic_schedule", "cron").execute(); - } - - private static void addPublicColumn(final DSLContext ctx) { - ctx.alterTable("connection") - .addColumnIfNotExists(DSL.field( - "schedule_type", - SQLDataType.VARCHAR.asEnumDataType(ScheduleType.class).nullable(true))) - .execute(); - } - - public enum ScheduleType implements EnumType { - - manual("manual"), - basicSchedule("basic_schedule"), - cron("cron"),; - - private final String literal; - - ScheduleType(final String literal) { - this.literal = literal; - } - - @Override - public Catalog getCatalog() { - return getSchema() == null ? 
null : getSchema().getCatalog(); - } - - @Override - public Schema getSchema() { - return new SchemaImpl(DSL.name("public"), null); - } - - @Override - public String getName() { - return "schedule_type"; - } - - @Override - public String getLiteral() { - return literal; - } - - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_38_4_001__AddScheduleDataToConfigsTable.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_38_4_001__AddScheduleDataToConfigsTable.java deleted file mode 100644 index 6fb5ce945610b..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_38_4_001__AddScheduleDataToConfigsTable.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_38_4_001__AddScheduleDataToConfigsTable extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_38_4_001__AddScheduleDataToConfigsTable.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - final DSLContext ctx = DSL.using(context.getConnection()); - addPublicColumn(ctx); - } - - private static void addPublicColumn(final DSLContext ctx) { - ctx.alterTable("connection") - .addColumnIfNotExists(DSL.field( - "schedule_data", - SQLDataType.JSONB.nullable(true))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_39_17_001__AddStreamDescriptorsToStateTable.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_39_17_001__AddStreamDescriptorsToStateTable.java deleted file mode 100644 index 41669f385db4d..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_39_17_001__AddStreamDescriptorsToStateTable.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import com.google.common.annotations.VisibleForTesting; -import java.util.Arrays; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.Catalog; -import org.jooq.DSLContext; -import org.jooq.EnumType; -import org.jooq.Schema; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.jooq.impl.SchemaImpl; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_39_17_001__AddStreamDescriptorsToStateTable extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_39_17_001__AddStreamDescriptorsToStateTable.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. 
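A column declared through `asEnumDataType` round-trips cleanly: jOOQ maps the Postgres enum literal back to the matching Java constant on read. A sketch of querying the `type` column this migration adds, assuming the `state` table and the `StateType` enum defined below:

```java
import io.airbyte.db.instance.configs.migrations.V0_39_17_001__AddStreamDescriptorsToStateTable.StateType;
import java.util.List;
import org.jooq.DSLContext;
import org.jooq.Field;
import org.jooq.impl.DSL;
import org.jooq.impl.SQLDataType;

final class StateTypeReadSketch {

  // The same asEnumDataType() mapping used by the migration converts the
  // Postgres enum literal into the Java StateType constant on fetch.
  static List<StateType> stateTypes(final DSLContext ctx) {
    final Field<StateType> type =
        DSL.field("type", SQLDataType.VARCHAR.asEnumDataType(StateType.class));
    return ctx.select(type).from(DSL.table("state")).fetch(type);
  }

}
```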
- final DSLContext ctx = DSL.using(context.getConnection()); - - migrate(ctx); - } - - @VisibleForTesting - public static void migrate(final DSLContext ctx) { - createStateTypeEnum(ctx); - addStreamDescriptorFieldsToStateTable(ctx); - } - - private static void createStateTypeEnum(final DSLContext ctx) { - ctx.createType(StateType.NAME) - .asEnum(Arrays.stream(StateType.values()).map(StateType::getLiteral).toList()) - .execute(); - } - - private static void addStreamDescriptorFieldsToStateTable(final DSLContext ctx) { - final String STATE_TABLE = "state"; - - ctx.alterTable(STATE_TABLE) - .add(Arrays.asList( - DSL.field("stream_name", SQLDataType.CLOB.nullable(true)), - DSL.field("namespace", SQLDataType.CLOB.nullable(true)), - // type defaults to LEGACY to first set the expected type of all existing states - DSL.field("type", SQLDataType.VARCHAR.asEnumDataType(StateType.class).nullable(false).defaultValue(StateType.LEGACY)), - DSL.constraint("state__connection_id__stream_name__namespace__uq") - .unique(DSL.field("connection_id"), DSL.field("stream_name"), DSL.field("namespace")))) - .execute(); - } - - public enum StateType implements EnumType { - - GLOBAL("GLOBAL"), - STREAM("STREAM"), - LEGACY("LEGACY"); - - public static final String NAME = "state_type"; - - StateType(String literal) { - this.literal = literal; - } - - @Override - public String getLiteral() { - return literal; - } - - @Override - public Catalog getCatalog() { - return getSchema().getCatalog(); - } - - @Override - public Schema getSchema() { - return new SchemaImpl(DSL.name("public")); - } - - @Override - public String getName() { - return NAME; - } - - private final String literal; - - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_39_1_001__CreateStreamReset.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_39_1_001__CreateStreamReset.java deleted file mode 100644 index 4cd464a77af14..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_39_1_001__CreateStreamReset.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import static org.jooq.impl.DSL.currentOffsetDateTime; -import static org.jooq.impl.DSL.unique; - -import java.time.OffsetDateTime; -import java.util.UUID; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -// TODO: update migration description in the class name -public class V0_39_1_001__CreateStreamReset extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_39_1_001__CreateStreamReset.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. 
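Because `created_at` and `updated_at` are declared below with `currentOffsetDateTime()` defaults, writers can omit both columns and let the database stamp them. A hypothetical usage sketch against the `stream_reset` table this migration creates:

```java
import java.util.UUID;
import org.jooq.DSLContext;
import org.jooq.impl.DSL;

final class StreamResetInsertSketch {

  // Inserts a reset record; created_at and updated_at are filled by their
  // currentOffsetDateTime() column defaults.
  static void recordReset(final DSLContext ctx, final UUID connectionId,
                          final String namespace, final String name) {
    ctx.insertInto(DSL.table("stream_reset"))
        .columns(DSL.field("id"), DSL.field("connection_id"),
            DSL.field("stream_namespace"), DSL.field("stream_name"))
        .values(UUID.randomUUID(), connectionId, namespace, name)
        .execute();
  }

}
```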
- final DSLContext ctx = DSL.using(context.getConnection()); - createStreamResetTable(ctx); - } - - private static void createStreamResetTable(final DSLContext ctx) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field connectionId = DSL.field("connection_id", SQLDataType.UUID.nullable(false)); - final Field streamNamespace = DSL.field("stream_namespace", SQLDataType.CLOB.nullable(true)); - final Field streamName = DSL.field("stream_name", SQLDataType.CLOB.nullable(false)); - final Field createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - ctx.createTableIfNotExists("stream_reset") - .columns(id, connectionId, streamNamespace, streamName, createdAt, updatedAt) - .constraints( - unique(connectionId, streamName, streamNamespace)) - .execute(); - - ctx.createIndex("connection_id_stream_name_namespace_idx").on("stream_reset", "connection_id", "stream_name", "stream_namespace").execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_11_001__AddGeographyColumnToConnections.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_11_001__AddGeographyColumnToConnections.java deleted file mode 100644 index 2aeb5950e6400..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_11_001__AddGeographyColumnToConnections.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import java.util.Arrays; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.Catalog; -import org.jooq.DSLContext; -import org.jooq.EnumType; -import org.jooq.Schema; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.jooq.impl.SchemaImpl; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_11_001__AddGeographyColumnToConnections extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_11_001__AddGeographyColumnToConnections.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. 
- final DSLContext ctx = DSL.using(context.getConnection()); - - addGeographyEnumDataTypes(ctx); - addGeographyColumnToConnection(ctx); - addGeographyColumnToWorkspace(ctx); - } - - private static void addGeographyEnumDataTypes(final DSLContext ctx) { - ctx.createType(GeographyType.NAME) - .asEnum(Arrays.stream(GeographyType.values()).map(GeographyType::getLiteral).toList()) - .execute(); - } - - private static void addGeographyColumnToConnection(final DSLContext ctx) { - ctx.alterTable("connection") - .addColumnIfNotExists(DSL.field( - "geography", - SQLDataType.VARCHAR.asEnumDataType(GeographyType.class).nullable(false).defaultValue(GeographyType.AUTO))) - .execute(); - } - - private static void addGeographyColumnToWorkspace(final DSLContext ctx) { - ctx.alterTable("workspace") - .addColumnIfNotExists(DSL.field( - "geography", - SQLDataType.VARCHAR.asEnumDataType(GeographyType.class).nullable(false).defaultValue(GeographyType.AUTO))) - .execute(); - } - - public enum GeographyType implements EnumType { - - AUTO("AUTO"), - US("US"), - EU("EU"); - - private final String literal; - public static final String NAME = "geography_type"; - - GeographyType(final String literal) { - this.literal = literal; - } - - @Override - public Catalog getCatalog() { - return getSchema().getCatalog(); - } - - @Override - public Schema getSchema() { - return new SchemaImpl(DSL.name("public")); - } - - @Override - public String getName() { - return NAME; - } - - @Override - public String getLiteral() { - return literal; - } - - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_11_002__AddSchemaChangeColumnsToConnections.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_11_002__AddSchemaChangeColumnsToConnections.java deleted file mode 100644 index be27a500ef5b1..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_11_002__AddSchemaChangeColumnsToConnections.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.configs.migrations; - -import java.util.Arrays; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.EnumType; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_11_002__AddSchemaChangeColumnsToConnections extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_11_002__AddSchemaChangeColumnsToConnections.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - final DSLContext ctx = DSL.using(context.getConnection()); - - addNonBreakingChangePreferenceEnumTypes(ctx); - - addNotifySchemaChanges(ctx); - addNonBreakingChangePreference(ctx); - addBreakingChange(ctx); - } - - private static void addNonBreakingChangePreferenceEnumTypes(final DSLContext ctx) { - ctx.createType(NonBreakingChangePreferenceType.NAME) - .asEnum(Arrays.stream(NonBreakingChangePreferenceType.values()).map(NonBreakingChangePreferenceType::getLiteral).toList()) - .execute(); - } - - private static void addNotifySchemaChanges(final DSLContext ctx) { - ctx.alterTable("connection") - .addColumnIfNotExists(DSL.field( - "notify_schema_changes", - SQLDataType.BOOLEAN.nullable(false).defaultValue(true))) - .execute(); - } - - private static void addNonBreakingChangePreference(final DSLContext ctx) { - ctx.alterTable("connection") - .addColumnIfNotExists(DSL.field( - "non_breaking_change_preference", - SQLDataType.VARCHAR.asEnumDataType(NonBreakingChangePreferenceType.class).nullable(false) - .defaultValue(NonBreakingChangePreferenceType.IGNORE))) - .execute(); - - } - - private static void addBreakingChange(final DSLContext ctx) { - ctx.alterTable("connection") - .addColumnIfNotExists(DSL.field( - "breaking_change", - SQLDataType.BOOLEAN.nullable(false).defaultValue(false))) - .execute(); - } - - public enum NonBreakingChangePreferenceType implements EnumType { - - IGNORE("ignore"), - DISABLE("disable"); - - private final String literal; - public static final String NAME = "non_breaking_change_preference_type"; - - NonBreakingChangePreferenceType(final String literal) { - this.literal = literal; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public String getLiteral() { - return literal; - } - - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_12_001__AddWebhookOperationColumns.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_12_001__AddWebhookOperationColumns.java deleted file mode 100644 index 65610ac490e11..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_12_001__AddWebhookOperationColumns.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -// TODO: update migration description in the class name -public class V0_40_12_001__AddWebhookOperationColumns extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_12_001__AddWebhookOperationColumns.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - final DSLContext ctx = DSL.using(context.getConnection()); - addWebhookOperationConfigColumn(ctx); - addWebhookOperationType(ctx); - addWebhookConfigColumnsToWorkspaceTable(ctx); - } - - private void addWebhookConfigColumnsToWorkspaceTable(final DSLContext ctx) { - ctx.alterTable("workspace") - .addColumnIfNotExists(DSL.field( - "webhook_operation_configs", - SQLDataType.JSONB.nullable(true))) - .execute(); - } - - private void addWebhookOperationType(final DSLContext ctx) { - ctx.alterType("operator_type").addValue("webhook").execute(); - } - - private void addWebhookOperationConfigColumn(final DSLContext ctx) { - ctx.alterTable("operation").addColumnIfNotExists(DSL.field("operator_webhook", - SQLDataType.JSONB.nullable(true))).execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_001__AddInvalidProtocolFlagToConnections.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_001__AddInvalidProtocolFlagToConnections.java deleted file mode 100644 index 8cb9efe7ea178..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_001__AddInvalidProtocolFlagToConnections.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_18_001__AddInvalidProtocolFlagToConnections extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_18_001__AddInvalidProtocolFlagToConnections.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. 
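Note that the method below calls `addColumn` where the sibling migrations use `addColumnIfNotExists`, so re-running it against a database that already has the column fails. Flyway's history table normally prevents a second run, so this only matters when replaying migrations by hand; the idempotent form of the same change would be:

```java
import org.jooq.DSLContext;
import org.jooq.impl.DSL;
import org.jooq.impl.SQLDataType;

final class IdempotentColumnAddSketch {

  // Same column as below, but safe to run twice: IF NOT EXISTS turns the
  // statement into a no-op when the column is already present.
  static void addFlag(final DSLContext ctx) {
    ctx.alterTable("connection")
        .addColumnIfNotExists(DSL.field("unsupported_protocol_version",
            SQLDataType.BOOLEAN.nullable(false).defaultValue(false)))
        .execute();
  }

}
```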
- final DSLContext ctx = DSL.using(context.getConnection()); - addInvalidProtocolFlagToConnections(ctx); - } - - private void addInvalidProtocolFlagToConnections(final DSLContext ctx) { - ctx.alterTable("connection") - .addColumn(DSL.field("unsupported_protocol_version", SQLDataType.BOOLEAN.nullable(false).defaultValue(false))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_002__AddActorDefinitionNormalizationAndDbtColumns.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_002__AddActorDefinitionNormalizationAndDbtColumns.java deleted file mode 100644 index c61d99857658e..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_002__AddActorDefinitionNormalizationAndDbtColumns.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_18_002__AddActorDefinitionNormalizationAndDbtColumns extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_18_002__AddActorDefinitionNormalizationAndDbtColumns.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - final DSLContext ctx = DSL.using(context.getConnection()); - addNormalizationRepositoryColumn(ctx); - addNormalizationTagColumn(ctx); - addSupportsDbtColumn(ctx); - } - - static void addNormalizationRepositoryColumn(final DSLContext ctx) { - ctx.alterTable("actor_definition") - .addColumnIfNotExists(DSL.field( - "normalization_repository", - SQLDataType.VARCHAR(255).nullable(true))) - .execute(); - } - - static void addNormalizationTagColumn(final DSLContext ctx) { - ctx.alterTable("actor_definition") - .addColumnIfNotExists(DSL.field( - "normalization_tag", - SQLDataType.VARCHAR(255).nullable(true))) - .execute(); - } - - static void addSupportsDbtColumn(final DSLContext ctx) { - ctx.alterTable("actor_definition") - .addColumnIfNotExists(DSL.field("supports_dbt", - SQLDataType.BOOLEAN.nullable(true))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_003__AddIndexToConnectionStatus.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_003__AddIndexToConnectionStatus.java deleted file mode 100644 index 2bc4cfdcb14f2..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_003__AddIndexToConnectionStatus.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_18_003__AddIndexToConnectionStatus extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_18_003__AddIndexToConnectionStatus.class); - private static final String CONNECTION_TABLE = "connection"; - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - try (final DSLContext ctx = DSL.using(context.getConnection())) { - ctx.createIndexIfNotExists("connection_status_idx").on(CONNECTION_TABLE, "status").execute(); - } - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_004__BackfillActorDefinitionWorkspaceGrant.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_004__BackfillActorDefinitionWorkspaceGrant.java deleted file mode 100644 index 8d5dfd45cfad8..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_004__BackfillActorDefinitionWorkspaceGrant.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import java.util.UUID; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -// TODO: update migration description in the class name -public class V0_40_18_004__BackfillActorDefinitionWorkspaceGrant extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger( - V0_40_18_004__BackfillActorDefinitionWorkspaceGrant.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. 
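The `migrate` method below backfills grants with a nested loop, one INSERT per workspace/definition pair. The same backfill can be expressed set-based in a single statement; a sketch of that alternative, not what the migration actually did:

```java
import org.jooq.DSLContext;

final class SetBasedBackfillSketch {

  // One INSERT ... SELECT replaces the per-row loop: every non-tombstoned
  // workspace gets a grant for every private, non-tombstoned definition.
  static void backfillGrants(final DSLContext ctx) {
    ctx.execute("INSERT INTO actor_definition_workspace_grant(workspace_id, actor_definition_id) "
        + "SELECT w.id, ad.id "
        + "FROM workspace w CROSS JOIN actor_definition ad "
        + "WHERE w.tombstone IS FALSE AND ad.public IS FALSE AND ad.tombstone IS FALSE");
  }

}
```

The set-based form does the whole backfill in one round trip; the loop the migration uses is easier to read and keeps each statement tiny, which matters little at the row counts involved here.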
- final DSLContext ctx = DSL.using(context.getConnection()); - - var customActorDefinitionIds = ctx.fetch("SELECT id FROM actor_definition WHERE public is false and tombstone is false;"); - var existingWorkspaces = ctx.fetch("SELECT id FROM WORKSPACE where tombstone is false;"); - - // Update for all custom connectors - set custom field to true; - ctx.execute("UPDATE actor_definition" - + " SET custom = true " - + " WHERE public is false and tombstone is false;"); - - for (final var customActorDefinitionIdRecord : customActorDefinitionIds) { - for (final var existingWorkspaceRecord : existingWorkspaces) { - // Populate a record for new table; - var customActorDefinitionIdValue = customActorDefinitionIdRecord.getValue("id", UUID.class); - var existingWorkspaceIdValue = existingWorkspaceRecord.getValue("id", UUID.class); - - ctx.execute("INSERT INTO actor_definition_workspace_grant(workspace_id, actor_definition_id) VALUES ({0}, {1})", - existingWorkspaceIdValue, customActorDefinitionIdValue); - } - } - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_23_001__AddFieldSelectionDataToConnections.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_23_001__AddFieldSelectionDataToConnections.java deleted file mode 100644 index baabe075bcc29..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_23_001__AddFieldSelectionDataToConnections.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_23_001__AddFieldSelectionDataToConnections extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_23_001__AddFieldSelectionDataToConnections.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - final DSLContext ctx = DSL.using(context.getConnection()); - addFieldSelectionData(ctx); - } - - private static void addFieldSelectionData(final DSLContext ctx) { - ctx.alterTable("connection") - .addColumnIfNotExists(DSL.field("field_selection_data", SQLDataType.JSONB.nullable(true))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_23_002__AddNormalizationIntegrationTypeToActorDefinition.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_23_002__AddNormalizationIntegrationTypeToActorDefinition.java deleted file mode 100644 index 0e44a875f37f0..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_23_002__AddNormalizationIntegrationTypeToActorDefinition.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_23_002__AddNormalizationIntegrationTypeToActorDefinition extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger( - V0_40_23_002__AddNormalizationIntegrationTypeToActorDefinition.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - final DSLContext ctx = DSL.using(context.getConnection()); - addIntegrationTypeColumn(ctx); - } - - static void addIntegrationTypeColumn(final DSLContext ctx) { - ctx.alterTable("actor_definition") - .addColumnIfNotExists(DSL.field( - "normalization_integration_type", - SQLDataType.VARCHAR(255).nullable(true))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_27_001__AddAllowedHosts.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_27_001__AddAllowedHosts.java deleted file mode 100644 index e5fcbc63555fc..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_27_001__AddAllowedHosts.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_27_001__AddAllowedHosts extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_27_001__AddAllowedHosts.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - try (final DSLContext ctx = DSL.using(context.getConnection())) { - addAllowedHostsToActorDefinition(ctx); - } - } - - private static void addAllowedHostsToActorDefinition(final DSLContext ctx) { - ctx.alterTable("actor_definition") - .addColumnIfNotExists(DSL.field( - "allowed_hosts", - SQLDataType.JSONB.nullable(true))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_28_001__AddSuggestedStreams.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_28_001__AddSuggestedStreams.java deleted file mode 100644 index 5dbc0ef32a1fe..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_28_001__AddSuggestedStreams.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_28_001__AddSuggestedStreams extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_28_001__AddSuggestedStreams.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - try (final DSLContext ctx = DSL.using(context.getConnection())) { - addSuggestedStreams(ctx); - } - } - - private static void addSuggestedStreams(final DSLContext ctx) { - ctx.alterTable("actor_definition") - .addColumnIfNotExists(DSL.field( - "suggested_streams", - SQLDataType.JSONB.nullable(true))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_3_001__AddProtocolVersionToActorDefinition.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_3_001__AddProtocolVersionToActorDefinition.java deleted file mode 100644 index 95cfdd8975f37..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_3_001__AddProtocolVersionToActorDefinition.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_3_001__AddProtocolVersionToActorDefinition extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_3_001__AddProtocolVersionToActorDefinition.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. 
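These migrations are forward-only: Flyway's community edition has no undo support, so reversing one means hand-writing the inverse DDL. A hypothetical inverse for the column this migration adds, not part of the codebase:

```java
import org.jooq.DSLContext;

final class RollbackSketch {

  // Hypothetical inverse of the column added below; nothing in this diff
  // runs it, since there is no automatic undo path.
  static void dropProtocolVersion(final DSLContext ctx) {
    ctx.alterTable("actor_definition")
        .dropColumnIfExists("protocol_version")
        .execute();
  }

}
```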
- final DSLContext ctx = DSL.using(context.getConnection()); - addProtocolVersionColumn(ctx); - } - - private static void addProtocolVersionColumn(final DSLContext ctx) { - ctx.alterTable("actor_definition") - .addColumnIfNotExists(DSL.field( - "protocol_version", - SQLDataType.VARCHAR(255).nullable(true))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_3_002__RemoveActorForeignKeyFromOauthParamsTable.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_3_002__RemoveActorForeignKeyFromOauthParamsTable.java deleted file mode 100644 index 55eee906c7518..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_3_002__RemoveActorForeignKeyFromOauthParamsTable.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import com.google.common.annotations.VisibleForTesting; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_3_002__RemoveActorForeignKeyFromOauthParamsTable extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_3_002__RemoveActorForeignKeyFromOauthParamsTable.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - final DSLContext ctx = DSL.using(context.getConnection()); - removeActorDefinitionForeignKey(ctx); - } - - @VisibleForTesting - static void removeActorDefinitionForeignKey(final DSLContext ctx) { - ctx.alterTable("actor_oauth_parameter").dropForeignKey("actor_oauth_parameter_actor_definition_id_fkey").execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/DevDatabaseMigrator.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/DevDatabaseMigrator.java deleted file mode 100644 index 3c6a3be6f1ef6..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/DevDatabaseMigrator.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.development; - -import io.airbyte.db.instance.DatabaseMigrator; -import io.airbyte.db.instance.FlywayDatabaseMigrator; -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import org.flywaydb.core.Flyway; -import org.flywaydb.core.api.MigrationInfo; -import org.flywaydb.core.api.MigrationVersion; -import org.flywaydb.core.api.configuration.Configuration; -import org.flywaydb.core.api.configuration.FluentConfiguration; -import org.flywaydb.core.api.output.BaselineResult; -import org.flywaydb.core.api.output.MigrateResult; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This migrator can prepare the database with previous migrations, and only run the latest - * migration for testing. It is used in {@link MigrationDevHelper#runLastMigration}. - */ -public class DevDatabaseMigrator implements DatabaseMigrator { - - private static final Logger LOGGER = LoggerFactory.getLogger(DevDatabaseMigrator.class); - - // A migrator that can run all migrations. 
- private final DatabaseMigrator fullMigrator; - // A migrator that will not run the last migration. It prepares the database to the state right - // before the last migration. - private final DatabaseMigrator baselineMigrator; - - public DevDatabaseMigrator(final FlywayDatabaseMigrator fullMigrator) { - this.fullMigrator = fullMigrator; - this.baselineMigrator = getBaselineMigrator(fullMigrator); - } - - public DevDatabaseMigrator(final FlywayDatabaseMigrator fullMigrator, final MigrationVersion baselineVersion) { - this.fullMigrator = fullMigrator; - this.baselineMigrator = getBaselineMigratorForVersion(fullMigrator, baselineVersion); - } - - private static class NoOpDatabaseMigrator implements DatabaseMigrator { - - @Override - public MigrateResult migrate() { - return null; - } - - @Override - public List list() { - return Collections.emptyList(); - } - - @Override - public MigrationInfo getLatestMigration() { - return null; - } - - @Override - public BaselineResult createBaseline() { - return null; - } - - @Override - public String dumpSchema() { - return ""; - } - - } - - private static FluentConfiguration getBaselineConfig(final FlywayDatabaseMigrator fullMigrator) { - final Configuration fullConfig = fullMigrator.getFlyway().getConfiguration(); - - return Flyway.configure() - .dataSource(fullConfig.getDataSource()) - .baselineVersion(fullConfig.getBaselineVersion()) - .baselineDescription(fullConfig.getBaselineDescription()) - .baselineOnMigrate(fullConfig.isBaselineOnMigrate()) - .installedBy(fullConfig.getInstalledBy()) - .table(fullConfig.getTable()) - .locations(fullConfig.getLocations()); - } - - /** - * Create a baseline migration from a full migrator. The baseline migrator does not run the last - * migration, which will be usually the migration to be tested. - */ - private static DatabaseMigrator getBaselineMigrator(final FlywayDatabaseMigrator fullMigrator) { - final FluentConfiguration baselineConfig = getBaselineConfig(fullMigrator); - - final Optional secondToLastMigrationVersion = MigrationDevHelper.getSecondToLastMigrationVersion(fullMigrator); - if (secondToLastMigrationVersion.isEmpty()) { - LOGGER.info("There is zero or one migration. No extra baseline setup is needed."); - return new NoOpDatabaseMigrator(); - } - - // Set the baseline flyway config to not run the last migration by setting the target migration - // version. - LOGGER.info("Baseline migrator target version: {}", secondToLastMigrationVersion.get()); - baselineConfig.target(secondToLastMigrationVersion.get()); - - return new FlywayDatabaseMigrator(fullMigrator.getDatabase(), baselineConfig.load()); - } - - /** - * Create a baseline migration from a specified target migration version. - */ - private static DatabaseMigrator getBaselineMigratorForVersion(final FlywayDatabaseMigrator fullMigrator, final MigrationVersion migrationVersion) { - final FluentConfiguration baselineConfig = getBaselineConfig(fullMigrator); - - // Set the baseline flyway config to run up to a target migration version. 
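Flyway's `target` option is the mechanism both baseline builders lean on: `migrate()` applies pending versions only up to and including the target and leaves everything newer pending. A standalone sketch of that mechanism, with placeholder connection details:

```java
import org.flywaydb.core.Flyway;
import org.flywaydb.core.api.MigrationVersion;

final class TargetVersionSketch {

  // migrate() stops after the target version; later migrations stay pending.
  // The JDBC URL and credentials here are placeholders.
  static void migrateUpTo(final String version) {
    final Flyway flyway = Flyway.configure()
        .dataSource("jdbc:postgresql://localhost:5432/airbyte", "docker", "docker")
        .target(MigrationVersion.fromVersion(version))
        .load();
    flyway.migrate();
  }

}
```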
-    LOGGER.info("Baseline migrator target version: {}", migrationVersion);
-    baselineConfig.target(migrationVersion);
-
-    return new FlywayDatabaseMigrator(fullMigrator.getDatabase(), baselineConfig.load());
-  }
-
-  @Override
-  public MigrateResult migrate() {
-    return fullMigrator.migrate();
-  }
-
-  @Override
-  public List<MigrationInfo> list() {
-    return fullMigrator.list();
-  }
-
-  @Override
-  public MigrationInfo getLatestMigration() {
-    return fullMigrator.getLatestMigration();
-  }
-
-  @Override
-  public BaselineResult createBaseline() {
-    fullMigrator.createBaseline();
-    // Run all previous migration except for the last one to establish the baseline database state.
-    baselineMigrator.migrate();
-    return fullMigrator.createBaseline();
-  }
-
-  @Override
-  public String dumpSchema() throws IOException {
-    return fullMigrator.dumpSchema();
-  }
-
-}
diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/FlywayFormatter.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/FlywayFormatter.java
deleted file mode 100644
index 82c0c1670c640..0000000000000
--- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/FlywayFormatter.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.db.instance.development;
-
-import static org.jooq.impl.DSL.field;
-
-import io.airbyte.db.instance.DatabaseMigrator;
-import java.sql.Date;
-import java.util.List;
-import org.flywaydb.core.api.MigrationInfo;
-import org.flywaydb.core.api.output.MigrateOutput;
-import org.flywaydb.core.api.output.MigrateResult;
-import org.jooq.DSLContext;
-import org.jooq.Field;
-import org.jooq.Record4;
-import org.jooq.Record5;
-import org.jooq.Result;
-import org.jooq.SQLDialect;
-import org.jooq.impl.DefaultDSLContext;
-import org.jooq.impl.SQLDataType;
-
-/**
- * This class formats the Flyway outputs so that it is easier to inspect them and debug the
- * migration.
- */
-public class FlywayFormatter {
-
-  private static final DSLContext CTX = new DefaultDSLContext(SQLDialect.DEFAULT);
-
-  /**
-   * Format the {@link DatabaseMigrator#list} output.
-   */
-  static String formatMigrationInfoList(final List<MigrationInfo> migrationInfoList) {
-    final Field<String> type = field("Type", SQLDataType.VARCHAR);
-    final Field<String> version = field("Version", SQLDataType.VARCHAR);
-    final Field<String> description = field("Description", SQLDataType.VARCHAR);
-    final Field<String> state = field("State", SQLDataType.VARCHAR);
-    final Field<Date> migratedAt = field("MigratedAt", SQLDataType.DATE);
-    final Result<Record5<String, String, String, String, Date>> result = CTX.newResult(type, version, description, state, migratedAt);
-    migrationInfoList.forEach(info -> result.add(CTX.newRecord(type, version, description, state, migratedAt).values(
-        info.getType().name(),
-        info.getVersion().toString(),
-        info.getDescription(),
-        info.getState().getDisplayName(),
-        info.getInstalledOn() == null ? null : new Date(info.getInstalledOn().getTime()))));
-    return result.format();
-  }
-
-  static String formatMigrationOutputList(final List<MigrateOutput> migrationOutputList) {
-    final Field<String> type = field("Type", SQLDataType.VARCHAR);
-    final Field<String> version = field("Version", SQLDataType.VARCHAR);
-    final Field<String> description = field("Description", SQLDataType.VARCHAR);
-    final Field<String> script = field("Script", SQLDataType.VARCHAR);
-    final Result<Record4<String, String, String, String>> result = CTX.newResult(type, version, description, script);
-    migrationOutputList.forEach(output -> result.add(CTX.newRecord(type, version, description, script).values(
-        String.format("%s %s", output.type, output.category),
-        output.version,
-        output.description,
-        output.filepath)));
-    return result.format();
-  }
-
-  /**
-   * Format the {@link DatabaseMigrator#migrate} output.
-   */
-  static String formatMigrationResult(final MigrateResult result) {
-    return String.format("Version: %s -> %s\n", result.initialSchemaVersion, result.targetSchemaVersion)
-        + String.format("Migrations executed: %s\n", result.migrationsExecuted)
-        + formatMigrationOutputList(result.migrations);
-  }
-
-}
diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/MigrationDevCenter.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/MigrationDevCenter.java
deleted file mode 100644
index 0e3c22cc63dd3..0000000000000
--- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/MigrationDevCenter.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.db.instance.development;
-
-import com.google.common.annotations.VisibleForTesting;
-import io.airbyte.db.Database;
-import io.airbyte.db.factory.DSLContextFactory;
-import io.airbyte.db.factory.DataSourceFactory;
-import io.airbyte.db.instance.FlywayDatabaseMigrator;
-import io.airbyte.db.instance.configs.ConfigsDatabaseMigrationDevCenter;
-import io.airbyte.db.instance.jobs.JobsDatabaseMigrationDevCenter;
-import java.io.IOException;
-import javax.sql.DataSource;
-import org.flywaydb.core.Flyway;
-import org.jooq.DSLContext;
-import org.jooq.SQLDialect;
-import org.testcontainers.containers.PostgreSQLContainer;
-import org.testcontainers.ext.ScriptUtils;
-import org.testcontainers.jdbc.JdbcDatabaseDelegate;
-
-/**
- * Helper class for migration development. See README for details.
- */ -public abstract class MigrationDevCenter { - - private enum Db { - CONFIGS, - JOBS - } - - private enum Command { - CREATE, - MIGRATE, - DUMP_SCHEMA - } - - private final String dbIdentifier; - private final String schemaDumpFile; - private final String initialScript; - - protected MigrationDevCenter(final String dbIdentifier, final String schemaDumpFile, final String initialScript) { - this.dbIdentifier = dbIdentifier; - this.schemaDumpFile = schemaDumpFile; - this.initialScript = initialScript; - } - - private PostgreSQLContainer createContainer() { - final PostgreSQLContainer container = new PostgreSQLContainer<>("postgres:13-alpine") - .withDatabaseName("airbyte") - .withUsername("docker") - .withPassword("docker"); - container.start(); - final var containerDelegate = new JdbcDatabaseDelegate(container, ""); - ScriptUtils.runInitScript(containerDelegate, initialScript); - return container; - } - - protected abstract FlywayDatabaseMigrator getMigrator(Database database, Flyway flyway); - - protected abstract Flyway getFlyway(DataSource dataSource); - - private Database getDatabase(final DSLContext dslContext) throws IOException { - return new Database(dslContext); - } - - private void createMigration() { - try (final PostgreSQLContainer container = createContainer()) { - final DataSource dataSource = - DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); - try (final DSLContext dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES)) { - final Flyway flyway = getFlyway(dataSource); - final Database database = getDatabase(dslContext); - final FlywayDatabaseMigrator migrator = getMigrator(database, flyway); - MigrationDevHelper.createNextMigrationFile(dbIdentifier, migrator); - } - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - private void runLastMigration() { - try (final PostgreSQLContainer container = createContainer()) { - final DataSource dataSource = - DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); - try (final DSLContext dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES)) { - final Flyway flyway = getFlyway(dataSource); - final Database database = getDatabase(dslContext); - final FlywayDatabaseMigrator fullMigrator = getMigrator(database, flyway); - final DevDatabaseMigrator devDatabaseMigrator = new DevDatabaseMigrator(fullMigrator); - MigrationDevHelper.runLastMigration(devDatabaseMigrator); - final String schema = fullMigrator.dumpSchema(); - MigrationDevHelper.dumpSchema(schema, schemaDumpFile, false); - } - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - @VisibleForTesting - public String dumpSchema(final boolean persistToFile) { - try (final PostgreSQLContainer container = createContainer()) { - final DataSource dataSource = - DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); - try (final DSLContext dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES)) { - final Flyway flyway = getFlyway(dataSource); - final Database database = getDatabase(dslContext); - final FlywayDatabaseMigrator migrator = getMigrator(database, flyway); - migrator.migrate(); - final String schema = migrator.dumpSchema(); - if (persistToFile) { - MigrationDevHelper.dumpSchema(schema, schemaDumpFile, true); - } - return schema; - } - } catch (final 
Exception e) { - throw new RuntimeException(e); - } - } - - public static void main(final String[] args) { - final MigrationDevCenter devCenter; - - final Db db = Db.valueOf(args[0].toUpperCase()); - switch (db) { - case CONFIGS -> devCenter = new ConfigsDatabaseMigrationDevCenter(); - case JOBS -> devCenter = new JobsDatabaseMigrationDevCenter(); - default -> throw new IllegalArgumentException("Unexpected database: " + args[0]); - } - - final Command command = Command.valueOf(args[1].toUpperCase()); - switch (command) { - case CREATE -> devCenter.createMigration(); - case MIGRATE -> devCenter.runLastMigration(); - case DUMP_SCHEMA -> devCenter.dumpSchema(true); - default -> throw new IllegalArgumentException("Unexpected command: " + args[1]); - } - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/MigrationDevHelper.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/MigrationDevHelper.java deleted file mode 100644 index dedbf4856e922..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/MigrationDevHelper.java +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.development; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.db.instance.FlywayDatabaseMigrator; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.IOException; -import java.io.PrintWriter; -import java.nio.charset.StandardCharsets; -import java.nio.file.Path; -import java.util.Arrays; -import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; -import org.apache.commons.io.FileUtils; -import org.flywaydb.core.api.ClassProvider; -import org.flywaydb.core.api.MigrationInfo; -import org.flywaydb.core.api.MigrationVersion; -import org.flywaydb.core.api.configuration.Configuration; -import org.flywaydb.core.api.migration.JavaMigration; -import org.flywaydb.core.api.output.MigrateResult; -import org.flywaydb.core.api.resolver.ResolvedMigration; -import org.flywaydb.core.internal.resolver.java.ScanningJavaMigrationResolver; -import org.flywaydb.core.internal.scanner.LocationScannerCache; -import org.flywaydb.core.internal.scanner.ResourceNameCache; -import org.flywaydb.core.internal.scanner.Scanner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class MigrationDevHelper { - - private static final Logger LOGGER = LoggerFactory.getLogger(MigrationDevHelper.class); - - /** - * This method is used for migration development. Run it to see how your migration changes the - * database schema. 
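- * <p> - * For orientation, an illustrative sketch of the log structure this method produces (the section headers - * are the literal strings below; the contents depend on the local migrations): - * <pre> - * ==== Pre Migration Info ====    table of migrations known before the run - * ==== Pre Migration Schema ====  schema dump before the run - * ==== Migration Result ====      version change and count of migrations executed - * ==== Post Migration Info ====   table of migrations after the run - * ==== Post Migration Schema ==== schema dump after the run - * </pre>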
- */ -public static void runLastMigration(final DevDatabaseMigrator migrator) throws IOException { - migrator.createBaseline(); - - final List<MigrationInfo> preMigrationInfoList = migrator.list(); - LOGGER.info("\n==== Pre Migration Info ====\n" + FlywayFormatter.formatMigrationInfoList(preMigrationInfoList)); - LOGGER.info("\n==== Pre Migration Schema ====\n" + migrator.dumpSchema() + "\n"); - - final MigrateResult migrateResult = migrator.migrate(); - LOGGER.info("\n==== Migration Result ====\n" + FlywayFormatter.formatMigrationResult(migrateResult)); - - final List<MigrationInfo> postMigrationInfoList = migrator.list(); - LOGGER.info("\n==== Post Migration Info ====\n" + FlywayFormatter.formatMigrationInfoList(postMigrationInfoList)); - LOGGER.info("\n==== Post Migration Schema ====\n" + migrator.dumpSchema() + "\n"); - } - - public static void createNextMigrationFile(final String dbIdentifier, final FlywayDatabaseMigrator migrator) throws IOException { - final String description = "New_migration"; - - final MigrationVersion nextMigrationVersion = getNextMigrationVersion(migrator); - final String versionId = nextMigrationVersion.toString().replaceAll("\\.", "_"); - - final String template = MoreResources.readResource("migration_template.txt"); - final String newMigration = template.replace("<db-name>", dbIdentifier) - .replaceAll("<version-id>", versionId) - .replaceAll("<description>", description) - .strip(); - - final String fileName = String.format("V%s__%s.java", versionId, description); - final String filePath = String.format("src/main/java/io/airbyte/db/instance/%s/migrations/%s", dbIdentifier, fileName); - - LOGGER.info("\n==== New Migration File ====\n" + filePath); - - final File file = new File(Path.of(filePath).toUri()); - FileUtils.forceMkdirParent(file); - - try (final PrintWriter writer = new PrintWriter(file, StandardCharsets.UTF_8)) { - writer.println(newMigration); - } catch (final FileNotFoundException e) { - throw new IOException(e); - } - } - - public static Optional<MigrationVersion> getSecondToLastMigrationVersion(final FlywayDatabaseMigrator migrator) { - final List<ResolvedMigration> migrations = getAllMigrations(migrator); - if (migrations.isEmpty() || migrations.size() == 1) { - return Optional.empty(); - } - return Optional.of(migrations.get(migrations.size() - 2).getVersion()); - } - - public static void dumpSchema(final String schema, final String schemaDumpFile, final boolean printSchema) throws IOException { - try (final PrintWriter writer = new PrintWriter(new File(Path.of(schemaDumpFile).toUri()), StandardCharsets.UTF_8)) { - writer.println(schema); - if (printSchema) { - LOGGER.info("\n==== Schema ====\n" + schema); - LOGGER.info("\n==== Dump File ====\nThe schema has been written to: " + schemaDumpFile); - } - } catch (final FileNotFoundException e) { - throw new IOException(e); - } - } - - /** - * This method is for migration development and testing purposes. So it is not exposed on the - * interface. Reference: - * https://github.com/flyway/flyway/blob/master/flyway-core/src/main/java/org/flywaydb/core/Flyway.java#L621.
- */ -private static List<ResolvedMigration> getAllMigrations(final FlywayDatabaseMigrator migrator) { - final Configuration configuration = migrator.getFlyway().getConfiguration(); - final ClassProvider<JavaMigration> scanner = new Scanner<>( - JavaMigration.class, - Arrays.asList(configuration.getLocations()), - configuration.getClassLoader(), - configuration.getEncoding(), - configuration.getDetectEncoding(), - false, - new ResourceNameCache(), - new LocationScannerCache(), - configuration.getFailOnMissingLocations()); - final ScanningJavaMigrationResolver resolver = new ScanningJavaMigrationResolver(scanner, configuration); - return resolver.resolveMigrations(() -> configuration).stream() - // There may be duplicated migration from the resolver. - .distinct() - .collect(Collectors.toList()); - } - - private static Optional<MigrationVersion> getLastMigrationVersion(final FlywayDatabaseMigrator migrator) { - final List<ResolvedMigration> migrations = getAllMigrations(migrator); - if (migrations.isEmpty()) { - return Optional.empty(); - } - return Optional.of(migrations.get(migrations.size() - 1).getVersion()); - } - - @VisibleForTesting - static AirbyteVersion getCurrentAirbyteVersion() { - try (final BufferedReader reader = new BufferedReader(new FileReader("../../gradle.properties", StandardCharsets.UTF_8))) { - String line = reader.readLine(); - while (line != null) { - if (line.startsWith("VERSION")) { - return new AirbyteVersion(line.split("=")[1]); - } - line = reader.readLine(); - } - } catch (final FileNotFoundException e) { - throw new IllegalStateException("Cannot find the gradle.properties file", e); - } catch (final IOException e) { - throw new RuntimeException(e); - } - throw new IllegalStateException("Cannot find the gradle.properties file"); - } - - /** - * Turn a migration version to airbyte version and drop the migration id. E.g. "0.29.10.004" -> - * "0.29.10". - */ - @VisibleForTesting - static AirbyteVersion getAirbyteVersion(final MigrationVersion version) { - final String[] splits = version.getVersion().split("\\."); - return new AirbyteVersion(splits[0], splits[1], splits[2]); - } - - /** - * Extract the major, minor, and patch version and join them with underscore. E.g. "0.29.10-alpha" - * -> "0_29_10". - */ - @VisibleForTesting - static String formatAirbyteVersion(final AirbyteVersion version) { - return String.format("%s_%s_%s", version.getMajorVersion(), version.getMinorVersion(), version.getPatchVersion()); - } - - /** - * Extract the migration id. E.g. "0.29.10.001" -> "001". - */ - @VisibleForTesting - static String getMigrationId(final MigrationVersion version) { - return version.getVersion().split("\\.")[3]; - } - - private static MigrationVersion getNextMigrationVersion(final FlywayDatabaseMigrator migrator) { - final Optional<MigrationVersion> lastMigrationVersion = getLastMigrationVersion(migrator); - final AirbyteVersion currentAirbyteVersion = getCurrentAirbyteVersion(); - return getNextMigrationVersion(currentAirbyteVersion, lastMigrationVersion); - } - - @VisibleForTesting - @SuppressWarnings("OptionalUsedAsFieldOrParameterType") - static MigrationVersion getNextMigrationVersion(final AirbyteVersion currentAirbyteVersion, final Optional<MigrationVersion> lastMigrationVersion) { - // When there is no migration, use the current airbyte version. - if (lastMigrationVersion.isEmpty()) { - LOGGER.info("No migration exists.
Use the current airbyte version {}", currentAirbyteVersion); - return MigrationVersion.fromVersion(String.format("%s_001", formatAirbyteVersion(currentAirbyteVersion))); - } - - // When the current airbyte version is greater, use the airbyte version. - final MigrationVersion migrationVersion = lastMigrationVersion.get(); - final AirbyteVersion migrationAirbyteVersion = getAirbyteVersion(migrationVersion); - if (currentAirbyteVersion.patchVersionCompareTo(migrationAirbyteVersion) > 0) { - LOGGER.info( - "Use the current airbyte version ({}), since it is greater than the last migration version ({})", - currentAirbyteVersion, - migrationAirbyteVersion); - return MigrationVersion.fromVersion(String.format("%s_001", formatAirbyteVersion(currentAirbyteVersion))); - } - - // When the last migration version is greater, which usually does not happen, use the migration - // version. - LOGGER.info( - "Use the last migration version ({}), since it is greater than or equal to the current airbyte version ({})", - migrationAirbyteVersion, - currentAirbyteVersion); - final String lastMigrationId = getMigrationId(migrationVersion); - LOGGER.info("lastMigrationId: " + lastMigrationId); - final String nextMigrationId = String.format("%03d", Integer.parseInt(lastMigrationId) + 1); - LOGGER.info("nextMigrationId: " + nextMigrationId); - return MigrationVersion.fromVersion(String.format("%s_%s", migrationAirbyteVersion.serialize(), nextMigrationId)); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrationDevCenter.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrationDevCenter.java deleted file mode 100644 index 86432909ec8e0..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrationDevCenter.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.jobs; - -import io.airbyte.db.Database; -import io.airbyte.db.factory.FlywayFactory; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.db.instance.FlywayDatabaseMigrator; -import io.airbyte.db.instance.development.MigrationDevCenter; -import javax.sql.DataSource; -import org.flywaydb.core.Flyway; - -/** - * Helper class for migration development. See README for details. - */ -public class JobsDatabaseMigrationDevCenter extends MigrationDevCenter { - - public JobsDatabaseMigrationDevCenter() { - super("jobs", DatabaseConstants.JOBS_SCHEMA_DUMP_PATH, DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH); - } - - @Override - protected FlywayDatabaseMigrator getMigrator(final Database database, final Flyway flyway) { - return new JobsDatabaseMigrator(database, flyway); - } - - @Override - protected Flyway getFlyway(final DataSource dataSource) { - return FlywayFactory.create(dataSource, getClass().getSimpleName(), JobsDatabaseMigrator.DB_IDENTIFIER, - JobsDatabaseMigrator.MIGRATION_FILE_LOCATION); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrator.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrator.java deleted file mode 100644 index f4eea0c33a619..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrator.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.jobs; - -import io.airbyte.db.Database; -import io.airbyte.db.instance.FlywayDatabaseMigrator; -import org.flywaydb.core.Flyway; - -public class JobsDatabaseMigrator extends FlywayDatabaseMigrator { - - public static final String DB_IDENTIFIER = "jobs"; - public static final String MIGRATION_FILE_LOCATION = "classpath:io/airbyte/db/instance/jobs/migrations"; - - public JobsDatabaseMigrator(final Database database, final Flyway flyway) { - super(database, flyway); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseSchema.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseSchema.java deleted file mode 100644 index 238b13bb08c71..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseSchema.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.jobs; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.JsonSchemas; -import io.airbyte.db.instance.TableSchema; -import io.airbyte.validation.json.JsonSchemaValidator; -import java.io.File; -import java.nio.file.Path; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -/** - * Whenever a new table is created in the Job Airbyte Database, we should also add a corresponding - * yaml file to validate the content of the table when it is exported/imported in files. - * - * This enum maps the table names to the yaml file where the Json Schema is stored. - */ -public enum JobsDatabaseSchema implements TableSchema { - - ATTEMPTS("Attempts.yaml"), - JOBS("Jobs.yaml"), - AIRBYTE_METADATA("AirbyteMetadata.yaml"); - - static final Path SCHEMAS_ROOT = JsonSchemas.prepareSchemas("jobs_database", JobsDatabaseSchema.class); - - private final String schemaFilename; - - JobsDatabaseSchema(final String schemaFilename) { - this.schemaFilename = schemaFilename; - } - - @Override - public String getTableName() { - return name().toLowerCase(); - } - - @Override - public JsonNode getTableDefinition() { - final File schemaFile = SCHEMAS_ROOT.resolve(schemaFilename).toFile(); - return JsonSchemaValidator.getSchema(schemaFile); - } - - /** - * @return table names in lower case - */ - public static Set<String> getTableNames() { - return Stream.of(JobsDatabaseSchema.values()).map(JobsDatabaseSchema::getTableName).collect(Collectors.toSet()); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseTestProvider.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseTestProvider.java deleted file mode 100644 index 52fef99676441..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseTestProvider.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.db.instance.jobs; - -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.db.Database; -import io.airbyte.db.factory.DatabaseCheckFactory; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.db.instance.DatabaseMigrator; -import io.airbyte.db.instance.test.TestDatabaseProvider; -import java.io.IOException; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; - -public class JobsDatabaseTestProvider implements TestDatabaseProvider { - - private final DSLContext dslContext; - private final Flyway flyway; - - public JobsDatabaseTestProvider(final DSLContext dslContext, final Flyway flyway) { - this.dslContext = dslContext; - this.flyway = flyway; - } - - @Override - public Database create(final boolean runMigration) throws IOException, DatabaseInitializationException { - final String initialSchema = MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH); - DatabaseCheckFactory.createJobsDatabaseInitializer(dslContext, DatabaseConstants.DEFAULT_CONNECTION_TIMEOUT_MS, initialSchema).initialize(); - - final Database jobsDatabase = new Database(dslContext); - - if (runMigration) { - final DatabaseMigrator migrator = new JobsDatabaseMigrator( - jobsDatabase, flyway); - migrator.createBaseline(); - migrator.migrate(); - } - - return jobsDatabase; - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsFlywayMigrationDatabase.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsFlywayMigrationDatabase.java deleted file mode 100644 index 56c62cb80a43f..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/JobsFlywayMigrationDatabase.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.jobs; - -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.db.Database; -import io.airbyte.db.factory.DatabaseCheckFactory; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.db.instance.DatabaseMigrator; -import io.airbyte.db.instance.FlywayMigrationDatabase; -import java.io.IOException; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; - -/** - * Jobs database for jOOQ code generation. 
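- * <p> - * A hedged note on intent: the {@code FlywayMigrationDatabase} base class is expected to stand up a - * disposable database, apply the Flyway migrations through the overrides below, and expose the migrated - * schema to the jOOQ code generator; this subclass only supplies the jobs-specific wiring.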
- */ -public class JobsFlywayMigrationDatabase extends FlywayMigrationDatabase { - - @Override - protected Database getDatabase(final DSLContext dslContext) { - return new Database(dslContext); - } - - @Override - protected DatabaseMigrator getDatabaseMigrator(final Database database, final Flyway flyway) { - return new JobsDatabaseMigrator(database, flyway); - } - - @Override - protected String getInstalledBy() { - return JobsFlywayMigrationDatabase.class.getSimpleName(); - } - - @Override - protected String getDbIdentifier() { - return JobsDatabaseMigrator.DB_IDENTIFIER; - } - - @Override - protected String[] getMigrationFileLocations() { - return new String[] {JobsDatabaseMigrator.MIGRATION_FILE_LOCATION}; - } - - @Override - protected void initializeDatabase(final DSLContext dslContext) throws DatabaseInitializationException, IOException { - final String initialSchema = MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH); - DatabaseCheckFactory.createJobsDatabaseInitializer(dslContext, DatabaseConstants.DEFAULT_CONNECTION_TIMEOUT_MS, initialSchema).initialize(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_29_15_001__Add_temporalWorkflowId_col_to_Attempts.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_29_15_001__Add_temporalWorkflowId_col_to_Attempts.java deleted file mode 100644 index 0e0969c980a36..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_29_15_001__Add_temporalWorkflowId_col_to_Attempts.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.jobs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_29_15_001__Add_temporalWorkflowId_col_to_Attempts extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_29_15_001__Add_temporalWorkflowId_col_to_Attempts.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - final DSLContext ctx = DSL.using(context.getConnection()); - ctx.alterTable("attempts") - .addColumnIfNotExists(DSL.field("temporal_workflow_id", SQLDataType.VARCHAR(256).nullable(true))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001__MigrateFailureReasonEnumValues.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001__MigrateFailureReasonEnumValues.java deleted file mode 100644 index 82dcb7ef0fea5..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001__MigrateFailureReasonEnumValues.java +++ /dev/null @@ -1,413 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.jobs.migrations; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.Metadata; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.Result; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings({"PMD.AvoidLiteralsInIfCondition", "PMD.CompareObjectsWithEquals"}) -public class V0_35_40_001__MigrateFailureReasonEnumValues extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_40_001__MigrateFailureReasonEnumValues.class); - private static final String NULL = "<null>"; - - @VisibleForTesting - static String OLD_MANUAL_CANCELLATION = "manualCancellation"; - static String NEW_MANUAL_CANCELLATION = "manual_cancellation"; - static String OLD_SYSTEM_ERROR = "systemError"; - static String NEW_SYSTEM_ERROR = "system_error"; - static String OLD_CONFIG_ERROR = "configError"; - static String NEW_CONFIG_ERROR = "config_error"; - static String OLD_REPLICATION_ORIGIN = "replicationWorker"; - static String NEW_REPLICATION_ORIGIN = "replication"; - static String OLD_UNKNOWN = "unknown"; - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - final DSLContext ctx = DSL.using(context.getConnection()); - updateRecordsWithNewEnumValues(ctx); - } - - /** - * Finds all attempt record that have a failure summary containing a deprecated enum value. For each - * record, calls method to fix and update. - */ - static void updateRecordsWithNewEnumValues(final DSLContext ctx) { - final Result<Record> results = - ctx.fetch(String.format(""" - SELECT A.* FROM attempts A, jsonb_array_elements(A.failure_summary->'failures') as f - WHERE f->>'failureOrigin' = '%s' - OR f->>'failureOrigin' = '%s' - OR f->>'failureType' = '%s' - OR f->>'failureType' = '%s' - OR f->>'failureType' = '%s' - OR f->>'failureType' = '%s' - """, OLD_UNKNOWN, OLD_REPLICATION_ORIGIN, OLD_UNKNOWN, OLD_CONFIG_ERROR, OLD_SYSTEM_ERROR, OLD_MANUAL_CANCELLATION)); - results.forEach(record -> updateAttemptFailureReasons(ctx, record)); - } - - /** - * Takes in a single record from the above query and performs an UPDATE to set the failure summary - * to the fixed version.
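- * <p> - * An illustrative before/after of the rewrite, using the constants above (hand-written JSON, not a - * captured row): - * <pre> - * before: {"failures": [{"failureType": "manualCancellation", "failureOrigin": "replicationWorker"}]} - * after:  {"failures": [{"failureType": "manual_cancellation", "failureOrigin": "replication"}]} - * </pre>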
- */ - private static void updateAttemptFailureReasons(final DSLContext ctx, final Record record) { - final Field<Long> attemptIdField = DSL.field("id", SQLDataType.BIGINT); - final Field<JSONB> failureSummaryField = DSL.field("failure_summary", SQLDataType.JSONB.nullable(true)); - - final Long attemptId = record.get(attemptIdField); - final AttemptFailureSummaryForMigration oldFailureSummary = Jsons.deserialize( - record.get(failureSummaryField).data(), - AttemptFailureSummaryForMigration.class); - - final AttemptFailureSummaryForMigration fixedFailureSummary = getFixedAttemptFailureSummary(oldFailureSummary); - - ctx.update(DSL.table("attempts")) - .set(failureSummaryField, JSONB.valueOf(Jsons.serialize(fixedFailureSummary))) - .where(attemptIdField.eq(attemptId)) - .execute(); - } - - /** - * Takes in a FailureSummary and replaces deprecated enum values with their updated versions. - */ - private static AttemptFailureSummaryForMigration getFixedAttemptFailureSummary(final AttemptFailureSummaryForMigration failureSummary) { - final Map<String, String> oldFailureTypeToFixedFailureType = ImmutableMap.of( - OLD_MANUAL_CANCELLATION, NEW_MANUAL_CANCELLATION, - OLD_SYSTEM_ERROR, NEW_SYSTEM_ERROR, - OLD_CONFIG_ERROR, NEW_CONFIG_ERROR); - - final Map<String, String> oldFailureOriginToFixedFailureOrigin = ImmutableMap.of( - OLD_REPLICATION_ORIGIN, NEW_REPLICATION_ORIGIN); - - final List<FailureReasonForMigration> fixedFailureReasons = new ArrayList<>(); - - failureSummary.getFailures().stream().forEach(failureReason -> { - final String failureType = failureReason.getFailureType(); - final String failureOrigin = failureReason.getFailureOrigin(); - - // null failureType is valid and doesn't need correction - if (failureType != null) { - if (oldFailureTypeToFixedFailureType.containsKey(failureType)) { - failureReason.setFailureType(oldFailureTypeToFixedFailureType.get(failureType)); - } else if (failureType.equals(OLD_UNKNOWN)) { - failureReason.setFailureType(null); - } - } - - // null failureOrigin is valid and doesn't need correction - if (failureOrigin != null) { - if (oldFailureOriginToFixedFailureOrigin.containsKey(failureOrigin)) { - failureReason.setFailureOrigin(oldFailureOriginToFixedFailureOrigin.get(failureOrigin)); - } else if (failureOrigin.equals(OLD_UNKNOWN)) { - failureReason.setFailureOrigin(null); - } - } - - fixedFailureReasons.add(failureReason); - }); - - failureSummary.setFailures(fixedFailureReasons); - return failureSummary; - } - - /** - * The following classes are essentially a copy of the FailureReason and AttemptFailureSummary - * classes at the time of this migration. They support both deprecated and new enum values and are - * used for record deserialization in this migration because in the future, the real FailureReason - * class will have those deprecated enum values removed, which would break deserialization within - * this migration.
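- * <p> - * Note that the deprecated {@code unknown} value has no renamed counterpart: the fix-up above maps it to - * {@code null} for both failureType and failureOrigin, since a null value is valid and simply means "not set".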
- */ - - static class FailureReasonForMigration implements Serializable { - - private String failureOrigin; - private String failureType; - private String internalMessage; - private String externalMessage; - private Metadata metadata; - private String stacktrace; - private Boolean retryable; - private Long timestamp; - private final static long serialVersionUID = -1485119682657564218L; - - public String getFailureOrigin() { - return failureOrigin; - } - - public void setFailureOrigin(final String failureOrigin) { - this.failureOrigin = failureOrigin; - } - - public FailureReasonForMigration withFailureOrigin(final String failureOrigin) { - this.failureOrigin = failureOrigin; - return this; - } - - public String getFailureType() { - return failureType; - } - - public void setFailureType(final String failureType) { - this.failureType = failureType; - } - - public FailureReasonForMigration withFailureType(final String failureType) { - this.failureType = failureType; - return this; - } - - public String getInternalMessage() { - return internalMessage; - } - - public void setInternalMessage(final String internalMessage) { - this.internalMessage = internalMessage; - } - - public FailureReasonForMigration withInternalMessage(final String internalMessage) { - this.internalMessage = internalMessage; - return this; - } - - public String getExternalMessage() { - return externalMessage; - } - - public void setExternalMessage(final String externalMessage) { - this.externalMessage = externalMessage; - } - - public FailureReasonForMigration withExternalMessage(final String externalMessage) { - this.externalMessage = externalMessage; - return this; - } - - public Metadata getMetadata() { - return metadata; - } - - public void setMetadata(final Metadata metadata) { - this.metadata = metadata; - } - - public FailureReasonForMigration withMetadata(final Metadata metadata) { - this.metadata = metadata; - return this; - } - - public String getStacktrace() { - return stacktrace; - } - - public void setStacktrace(final String stacktrace) { - this.stacktrace = stacktrace; - } - - public FailureReasonForMigration withStacktrace(final String stacktrace) { - this.stacktrace = stacktrace; - return this; - } - - public Boolean getRetryable() { - return retryable; - } - - public void setRetryable(final Boolean retryable) { - this.retryable = retryable; - } - - public FailureReasonForMigration withRetryable(final Boolean retryable) { - this.retryable = retryable; - return this; - } - - public Long getTimestamp() { - return timestamp; - } - - public void setTimestamp(final Long timestamp) { - this.timestamp = timestamp; - } - - public FailureReasonForMigration withTimestamp(final Long timestamp) { - this.timestamp = timestamp; - return this; - } - - @Override - public String toString() { - final StringBuilder sb = new StringBuilder(); - sb.append(FailureReasonForMigration.class.getName()).append('@').append(Integer.toHexString(System.identityHashCode(this))).append('['); - sb.append("failureOrigin"); - sb.append('='); - sb.append(((this.failureOrigin == null) ? NULL : this.failureOrigin)); - sb.append(','); - sb.append("failureType"); - sb.append('='); - sb.append(((this.failureType == null) ? NULL : this.failureType)); - sb.append(','); - sb.append("internalMessage"); - sb.append('='); - sb.append(((this.internalMessage == null) ? NULL : this.internalMessage)); - sb.append(','); - sb.append("externalMessage"); - sb.append('='); - sb.append(((this.externalMessage == null) ? 
NULL : this.externalMessage)); - sb.append(','); - sb.append("metadata"); - sb.append('='); - sb.append(((this.metadata == null) ? NULL : this.metadata)); - sb.append(','); - sb.append("stacktrace"); - sb.append('='); - sb.append(((this.stacktrace == null) ? NULL : this.stacktrace)); - sb.append(','); - sb.append("retryable"); - sb.append('='); - sb.append(((this.retryable == null) ? NULL : this.retryable)); - sb.append(','); - sb.append("timestamp"); - sb.append('='); - sb.append(((this.timestamp == null) ? NULL : this.timestamp)); - sb.append(','); - if (sb.charAt((sb.length() - 1)) == ',') { - sb.setCharAt((sb.length() - 1), ']'); - } else { - sb.append(']'); - } - return sb.toString(); - } - - @Override - public int hashCode() { - int result = 1; - result = ((result * 31) + ((this.retryable == null) ? 0 : this.retryable.hashCode())); - result = ((result * 31) + ((this.metadata == null) ? 0 : this.metadata.hashCode())); - result = ((result * 31) + ((this.stacktrace == null) ? 0 : this.stacktrace.hashCode())); - result = ((result * 31) + ((this.failureOrigin == null) ? 0 : this.failureOrigin.hashCode())); - result = ((result * 31) + ((this.failureType == null) ? 0 : this.failureType.hashCode())); - result = ((result * 31) + ((this.internalMessage == null) ? 0 : this.internalMessage.hashCode())); - result = ((result * 31) + ((this.externalMessage == null) ? 0 : this.externalMessage.hashCode())); - result = ((result * 31) + ((this.timestamp == null) ? 0 : this.timestamp.hashCode())); - return result; - } - - @Override - public boolean equals(final Object other) { - if (other == this) { - return true; - } - if ((other instanceof FailureReasonForMigration) == false) { - return false; - } - final FailureReasonForMigration rhs = ((FailureReasonForMigration) other); - return ((((((((Objects.equals(this.retryable, rhs.retryable)) - && (Objects.equals(this.metadata, rhs.metadata))) - && (Objects.equals(this.stacktrace, rhs.stacktrace))) - && (Objects.equals(this.failureOrigin, rhs.failureOrigin))) - && (Objects.equals(this.failureType, rhs.failureType))) - && (Objects.equals(this.internalMessage, rhs.internalMessage))) - && (Objects.equals(this.externalMessage, rhs.externalMessage))) - && (Objects.equals(this.timestamp, rhs.timestamp))); - } - - } - - static class AttemptFailureSummaryForMigration implements Serializable { - - private List<FailureReasonForMigration> failures = new ArrayList<>(); - private Boolean partialSuccess; - private final static long serialVersionUID = -9065693637249217586L; - - public List<FailureReasonForMigration> getFailures() { - return failures; - } - - public void setFailures(final List<FailureReasonForMigration> failures) { - this.failures = failures; - } - - public AttemptFailureSummaryForMigration withFailures(final List<FailureReasonForMigration> failures) { - this.failures = failures; - return this; - } - - public Boolean getPartialSuccess() { - return partialSuccess; - } - - public void setPartialSuccess(final Boolean partialSuccess) { - this.partialSuccess = partialSuccess; - } - - public AttemptFailureSummaryForMigration withPartialSuccess(final Boolean partialSuccess) { - this.partialSuccess = partialSuccess; - return this; - } - - @Override - public String toString() { - final StringBuilder sb = new StringBuilder(); - sb.append(AttemptFailureSummaryForMigration.class.getName()).append('@').append(Integer.toHexString(System.identityHashCode(this))).append('['); - sb.append("failures"); - sb.append('='); - sb.append(((this.failures == null) ?
NULL : this.failures)); - sb.append(','); - sb.append("partialSuccess"); - sb.append('='); - sb.append(((this.partialSuccess == null) ? NULL : this.partialSuccess)); - sb.append(','); - if (sb.charAt((sb.length() - 1)) == ',') { - sb.setCharAt((sb.length() - 1), ']'); - } else { - sb.append(']'); - } - return sb.toString(); - } - - @Override - public int hashCode() { - int result = 1; - result = ((result * 31) + ((this.partialSuccess == null) ? 0 : this.partialSuccess.hashCode())); - result = ((result * 31) + ((this.failures == null) ? 0 : this.failures.hashCode())); - return result; - } - - @Override - public boolean equals(final Object other) { - if (other == this) { - return true; - } - if ((other instanceof AttemptFailureSummaryForMigration) == false) { - return false; - } - final AttemptFailureSummaryForMigration rhs = ((AttemptFailureSummaryForMigration) other); - return (((this.partialSuccess == rhs.partialSuccess) || ((this.partialSuccess != null) && this.partialSuccess.equals(rhs.partialSuccess))) - && ((this.failures == rhs.failures) || ((this.failures != null) && this.failures.equals(rhs.failures)))); - } - - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_Attempts.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_Attempts.java deleted file mode 100644 index 7a9d005553100..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_Attempts.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.jobs.migrations; - -import com.google.common.annotations.VisibleForTesting; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_5_001__Add_failureSummary_col_to_Attempts extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_5_001__Add_failureSummary_col_to_Attempts.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - final DSLContext ctx = DSL.using(context.getConnection()); - migrate(ctx); - } - - @VisibleForTesting - public static void migrate(final DSLContext ctx) { - addFailureSummaryColumn(ctx); - } - - public static void addFailureSummaryColumn(final DSLContext ctx) { - ctx.alterTable("attempts") - .addColumnIfNotExists(DSL.field("failure_summary", SQLDataType.JSONB.nullable(true))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_62_001__AddJobIndices.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_62_001__AddJobIndices.java deleted file mode 100644 index 467c74d2d0a06..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_62_001__AddJobIndices.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.jobs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_35_62_001__AddJobIndices extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_62_001__AddJobIndices.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - try (final DSLContext ctx = DSL.using(context.getConnection())) { - ctx.createIndexIfNotExists("jobs_config_type_idx").on("jobs", "config_type").execute(); - ctx.createIndexIfNotExists("jobs_scope_idx").on("jobs", "scope").execute(); - } - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_14_001__AddProcessingTaskQueueInAttempts.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_14_001__AddProcessingTaskQueueInAttempts.java deleted file mode 100644 index 31da1e965a6fd..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_14_001__AddProcessingTaskQueueInAttempts.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.jobs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -// TODO: update migration description in the class name -public class V0_40_14_001__AddProcessingTaskQueueInAttempts extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger( - V0_40_14_001__AddProcessingTaskQueueInAttempts.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - final DSLContext ctx = DSL.using(context.getConnection()); - addProtocolVersionColumn(ctx); - } - - private static void addProtocolVersionColumn(final DSLContext ctx) { - ctx.alterTable("attempts") - .addColumnIfNotExists(DSL.field( - "processing_task_queue", - SQLDataType.VARCHAR(255).nullable(true))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_18_001__AddIndexToAttemptsAndJobsStatus.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_18_001__AddIndexToAttemptsAndJobsStatus.java deleted file mode 100644 index 0e585dba669e2..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_18_001__AddIndexToAttemptsAndJobsStatus.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.jobs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_18_001__AddIndexToAttemptsAndJobsStatus extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_18_001__AddIndexToAttemptsAndJobsStatus.class); - private static final String ATTEMPTS_TABLE = "attempts"; - private static final String JOBS_TABLE = "jobs"; - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - try (final DSLContext ctx = DSL.using(context.getConnection())) { - ctx.createIndexIfNotExists("attempts_status_idx").on(ATTEMPTS_TABLE, "status").execute(); - ctx.createIndexIfNotExists("jobs_status_idx").on(JOBS_TABLE, "status").execute(); - } - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_18_002__AddProgressBarStats.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_18_002__AddProgressBarStats.java deleted file mode 100644 index 4d008317733ba..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_18_002__AddProgressBarStats.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.jobs.migrations; - -import static org.jooq.impl.DSL.currentOffsetDateTime; -import static org.jooq.impl.DSL.field; -import static org.jooq.impl.DSL.foreignKey; -import static org.jooq.impl.DSL.primaryKey; -import static org.jooq.impl.DSL.unique; - -import java.time.OffsetDateTime; -import java.util.UUID; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * The estimated columns contains the overall estimated records and bytes for an attempt. - *

    - * The new stream_stats table contains the estimated and emitted records/bytes for an attempt at the - * per-stream level. This lets us track per-stream stats as an attempt is in progress. - */ -public class V0_40_18_002__AddProgressBarStats extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_18_002__AddProgressBarStats.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - try (final DSLContext ctx = DSL.using(context.getConnection())) { - addEstimatedColumnsToSyncStats(ctx); - addStreamStatsTable(ctx); - } - } - - private static void addEstimatedColumnsToSyncStats(final DSLContext ctx) { - ctx.alterTable("sync_stats") - .add( - field("estimated_records", SQLDataType.BIGINT.nullable(true)), - field("estimated_bytes", SQLDataType.BIGINT.nullable(true))) - .execute(); - } - - private static void addStreamStatsTable(final DSLContext ctx) { - // Metadata Columns - final Field<UUID> id = field("id", SQLDataType.UUID.nullable(false)); - final Field<Integer> attemptId = field("attempt_id", SQLDataType.INTEGER.nullable(false)); - final Field<String> streamNamespace = field("stream_namespace", SQLDataType.VARCHAR.nullable(false)); - final Field<String> streamName = field("stream_name", SQLDataType.VARCHAR.nullable(false)); - - // Stats Columns - final Field<Long> recordsEmitted = field("records_emitted", SQLDataType.BIGINT.nullable(true)); - final Field<Long> bytesEmitted = field("bytes_emitted", SQLDataType.BIGINT.nullable(true)); - final Field<Long> estimatedRecords = field("estimated_records", SQLDataType.BIGINT.nullable(true)); - final Field<Long> estimatedBytes = field("estimated_bytes", SQLDataType.BIGINT.nullable(true)); - - // Time Columns - final Field<OffsetDateTime> createdAt = - field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field<OffsetDateTime> updatedAt = - field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - ctx.createTableIfNotExists("stream_stats") - .columns( - id, attemptId, streamNamespace, streamName, recordsEmitted, bytesEmitted, estimatedRecords, estimatedBytes, createdAt, updatedAt) - .constraints( - primaryKey(id), - foreignKey(attemptId).references("attempts", "id").onDeleteCascade(), - // Prevent duplicate stat records of the same stream and attempt. - unique("attempt_id", "stream_name")) - .execute(); - - // Create an index on attempt_id, since all read queries on this table as of this migration will be - // WHERE clauses on the attempt id. - ctx.createIndex("index").on("stream_stats", "attempt_id").execute(); - - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_26_001__CorrectStreamStatsTable.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_26_001__CorrectStreamStatsTable.java deleted file mode 100644 index c9e0898d34426..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_26_001__CorrectStreamStatsTable.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.db.instance.jobs.migrations; - -import static org.jooq.impl.DSL.constraint; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_26_001__CorrectStreamStatsTable extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_26_001__CorrectStreamStatsTable.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - try (final DSLContext ctx = DSL.using(context.getConnection())) { - // This actually needs to be bigint to match the id column on the attempts table. - String streamStats = "stream_stats"; - ctx.alterTable(streamStats).alter("attempt_id").set(SQLDataType.BIGINT.nullable(false)).execute(); - // Not all streams provide a namespace. - ctx.alterTable(streamStats).alter("stream_namespace").set(SQLDataType.VARCHAR.nullable(true)).execute(); - - // The constraint should also take into account the stream namespace. Drop the constraint and - // recreate it. - ctx.alterTable(streamStats).dropUnique("stream_stats_attempt_id_stream_name_key").execute(); - ctx.alterTable(streamStats).add(constraint("uniq_stream_attempt").unique("attempt_id", "stream_name", "stream_namespace")).execute(); - } - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_28_001__AddAttemptSyncConfig.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_28_001__AddAttemptSyncConfig.java deleted file mode 100644 index e07db90c5a237..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_28_001__AddAttemptSyncConfig.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.jobs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_28_001__AddAttemptSyncConfig extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_28_001__AddAttemptSyncConfig.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. 
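- // For reference, a sketch of the DDL this is roughly equivalent to on Postgres (assumed, not - // captured from a run): ALTER TABLE attempts ADD COLUMN IF NOT EXISTS attempt_sync_config JSONB;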
- try (final DSLContext ctx = DSL.using(context.getConnection())) { - addAttemptSyncConfigToAttempts(ctx); - } - } - - private static void addAttemptSyncConfigToAttempts(final DSLContext ctx) { - ctx.alterTable("attempts") - .addColumnIfNotExists(DSL.field( - "attempt_sync_config", - SQLDataType.JSONB.nullable(true))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_3_001__CreateSyncStats.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_3_001__CreateSyncStats.java deleted file mode 100644 index ac232d934de93..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_3_001__CreateSyncStats.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.jobs.migrations; - -import static org.jooq.impl.DSL.currentOffsetDateTime; -import static org.jooq.impl.DSL.foreignKey; -import static org.jooq.impl.DSL.primaryKey; - -import java.time.OffsetDateTime; -import java.util.UUID; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_3_001__CreateSyncStats extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_3_001__CreateSyncStats.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - final DSLContext ctx = DSL.using(context.getConnection()); - createSyncStatsTable(ctx); - } - - private static void createSyncStatsTable(final DSLContext ctx) { - final Field<UUID> id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field<Integer> attemptId = DSL.field("attempt_id", SQLDataType.INTEGER.nullable(false)); - final Field<Long> recordsEmitted = DSL.field("records_emitted", SQLDataType.BIGINT.nullable(true)); - final Field<Long> bytesEmitted = DSL.field("bytes_emitted", SQLDataType.BIGINT.nullable(true)); - final Field<Long> sourceStateMessagesEmitted = DSL.field("source_state_messages_emitted", SQLDataType.BIGINT.nullable(true)); - final Field<Long> destinationStateMessagesEmitted = DSL.field("destination_state_messages_emitted", SQLDataType.BIGINT.nullable(true)); - final Field<Long> recordsCommitted = DSL.field("records_committed", SQLDataType.BIGINT.nullable(true)); - final Field<Long> meanSecondsBeforeSourceStateMessageEmitted = - DSL.field("mean_seconds_before_source_state_message_emitted", SQLDataType.BIGINT.nullable(true)); - final Field<Long> maxSecondsBeforeSourceStateMessageEmitted = - DSL.field("max_seconds_before_source_state_message_emitted", SQLDataType.BIGINT.nullable(true)); - final Field<Long> meanSecondsBetweenStateMessageEmittedandCommitted = - DSL.field("mean_seconds_between_state_message_emitted_and_committed", SQLDataType.BIGINT.nullable(true)); - final Field<Long> maxSecondsBetweenStateMessageEmittedandCommitted = - DSL.field("max_seconds_between_state_message_emitted_and_committed", SQLDataType.BIGINT.nullable(true)); - final Field<OffsetDateTime> createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field<OffsetDateTime> updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - 
-
ctx.createTableIfNotExists("sync_stats") - .columns(id, attemptId, recordsEmitted, bytesEmitted, sourceStateMessagesEmitted, destinationStateMessagesEmitted, recordsCommitted, - meanSecondsBeforeSourceStateMessageEmitted, maxSecondsBeforeSourceStateMessageEmitted, meanSecondsBetweenStateMessageEmittedandCommitted, - maxSecondsBetweenStateMessageEmittedandCommitted, createdAt, updatedAt) - .constraints(primaryKey(id), foreignKey(attemptId).references("attempts", "id").onDeleteCascade()) - .execute(); - - ctx.createIndex("attempt_id_idx").on("sync_stats", "attempt_id").execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_4_001__ChangeSyncStatsForeignKey.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_4_001__ChangeSyncStatsForeignKey.java deleted file mode 100644 index df6b3b88477dd..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_4_001__ChangeSyncStatsForeignKey.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.jobs.migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class V0_40_4_001__ChangeSyncStatsForeignKey extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_4_001__ChangeSyncStatsForeignKey.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - final DSLContext ctx = DSL.using(context.getConnection()); - changeForeignKeyType(ctx); - } - - private void changeForeignKeyType(final DSLContext ctx) throws Exception { - ctx.alterTable("sync_stats").alter("attempt_id").set(SQLDataType.BIGINT.nullable(false)).execute(); - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_4_002__CreateNormalizationSummaries.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_4_002__CreateNormalizationSummaries.java deleted file mode 100644 index 1a34cdc7d031f..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_4_002__CreateNormalizationSummaries.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.jobs.migrations; - -import static org.jooq.impl.DSL.currentOffsetDateTime; -import static org.jooq.impl.DSL.foreignKey; -import static org.jooq.impl.DSL.primaryKey; - -import java.time.OffsetDateTime; -import java.util.UUID; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.JSONB; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -// TODO: update migration description in the class name -public class V0_40_4_002__CreateNormalizationSummaries extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_4_002__CreateNormalizationSummaries.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - final DSLContext ctx = DSL.using(context.getConnection()); - createNormalizationSummariesTable(ctx); - } - - private void createNormalizationSummariesTable(final DSLContext ctx) { - final Field<UUID> id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field<Long> attemptId = DSL.field("attempt_id", SQLDataType.BIGINT.nullable(false)); - final Field<OffsetDateTime> startTime = DSL.field("start_time", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(true)); - final Field<OffsetDateTime> endTime = DSL.field("end_time", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(true)); - final Field<JSONB> failures = DSL.field("failures", SQLDataType.JSONB.nullable(true)); - final Field<OffsetDateTime> createdAt = - DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - final Field<OffsetDateTime> updatedAt = - DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); - - ctx.createTableIfNotExists("normalization_summaries") - .columns(id, attemptId, startTime, endTime, failures, createdAt, updatedAt) - .constraints(primaryKey(id), foreignKey(attemptId).references("attempts", "id").onDeleteCascade()) - .execute(); - - ctx.createIndex("normalization_summary_attempt_id_idx").on("normalization_summaries", "attempt_id").execute(); - - } - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/test/TestDatabaseProvider.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/test/TestDatabaseProvider.java deleted file mode 100644 index 12a19e836c559..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/test/TestDatabaseProvider.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.test; - -import io.airbyte.db.Database; -import io.airbyte.db.init.DatabaseInitializationException; -import java.io.IOException; - -/** - * Create mock database in unit tests. The implementation will be responsible for: 1) constructing - * and preparing the database, and 2) running the Flyway migration. - */ -public interface TestDatabaseProvider { - - /** - * @param runMigration Whether the mock database should run Flyway migration before it is used in - * unit test. Usually this parameter should be false only when the migration itself is being - * tested.
- */ - Database create(final boolean runMigration) throws IOException, DatabaseInitializationException; - -} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/test/TestDatabaseProviders.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/test/TestDatabaseProviders.java deleted file mode 100644 index c33dde033f766..0000000000000 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/test/TestDatabaseProviders.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.test; - -import io.airbyte.db.Database; -import io.airbyte.db.factory.FlywayFactory; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; -import io.airbyte.db.instance.configs.ConfigsDatabaseTestProvider; -import io.airbyte.db.instance.jobs.JobsDatabaseMigrator; -import io.airbyte.db.instance.jobs.JobsDatabaseTestProvider; -import java.io.IOException; -import javax.sql.DataSource; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; - -/** - * Use this class to create mock databases in unit tests. This class takes care of database - * initialization and migration. - */ -@SuppressWarnings("OptionalUsedAsFieldOrParameterType") -public class TestDatabaseProviders { - - private final DataSource dataSource; - private final DSLContext dslContext; - private boolean runMigration = true; - - public TestDatabaseProviders(final DataSource dataSource, final DSLContext dslContext) { - this.dataSource = dataSource; - this.dslContext = dslContext; - } - - /** - * When creating mock databases in unit tests, migration should be run by default. Call this method - * to turn migration off, which is needed when unit testing migration code. 
- */ - public TestDatabaseProviders turnOffMigration() { - this.runMigration = false; - return this; - } - - public Database createNewConfigsDatabase() throws IOException, DatabaseInitializationException { - final Flyway flyway = FlywayFactory.create(dataSource, ConfigsDatabaseTestProvider.class.getSimpleName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, - ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); - return new ConfigsDatabaseTestProvider(dslContext, flyway) - .create(runMigration); - } - - public Database createNewJobsDatabase() throws IOException, DatabaseInitializationException { - final Flyway flyway = FlywayFactory.create(dataSource, JobsDatabaseTestProvider.class.getSimpleName(), JobsDatabaseMigrator.DB_IDENTIFIER, - JobsDatabaseMigrator.MIGRATION_FILE_LOCATION); - return new JobsDatabaseTestProvider(dslContext, flyway) - .create(runMigration); - } - -} diff --git a/airbyte-db/db-lib/src/main/resources/configs_database/AirbyteConfigs.yaml b/airbyte-db/db-lib/src/main/resources/configs_database/AirbyteConfigs.yaml deleted file mode 100644 index d4ec38858899b..0000000000000 --- a/airbyte-db/db-lib/src/main/resources/configs_database/AirbyteConfigs.yaml +++ /dev/null @@ -1,29 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -title: AirbyteConfigs -description: representation of an airbyte_configs record as created in schema.sql -type: object -required: - - id - - config_id - - config_type - - config_blob - - created_at - - updated_at -additionalProperties: false -properties: - id: - type: number - config_id: - type: string - format: uuid - config_type: - type: string - config_blob: - type: object - created_at: - type: string - format: date-time - updated_at: - type: string - format: date-time diff --git a/airbyte-db/db-lib/src/main/resources/configs_database/normalized_tables_schema.txt b/airbyte-db/db-lib/src/main/resources/configs_database/normalized_tables_schema.txt deleted file mode 100644 index 82533ef0e3971..0000000000000 --- a/airbyte-db/db-lib/src/main/resources/configs_database/normalized_tables_schema.txt +++ /dev/null @@ -1,269 +0,0 @@ -// The file represents the schema of each of the config tables after the normalization. -// It is created in https://github.com/airbytehq/airbyte/pull/8563. -// This schema is maintained manually by copying / pasting the output from Postgres' \d+ command. -// It is not consumed programmatically, and can be outdated.
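The \d+ listing that follows begins with the database enums and then one block per table. If the file drifts out of date, one low-effort way to refresh it, besides re-running \d+ in psql, would be to let jOOQ render DDL from a live connection. A minimal, untested sketch, not code from this PR: the connection URL and credentials are placeholders, the Postgres JDBC driver is assumed to be on the classpath, and Meta#ddl() is an assumption about the jOOQ API in use.

  import java.sql.Connection;
  import java.sql.DriverManager;

  import org.jooq.impl.DSL;

  public final class PrintConfigsSchema {

    public static void main(final String[] args) throws Exception {
      // Placeholder coordinates; point this at a running configs database.
      try (Connection conn = DriverManager.getConnection(
          "jdbc:postgresql://localhost:5432/airbyte", "docker", "docker")) {
        // Renders CREATE statements for every object jOOQ can introspect.
        // Like the \d+ output it would refresh, enum types and defaults may be lossy.
        System.out.println(DSL.using(conn).meta().ddl());
      }
    }
  }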
- - enum_schema | enum_name | enum_value --------------+---------------------------+------------------------------ - public | job_status | pending - public | job_status | running - public | job_status | incomplete - public | job_status | failed - public | job_status | succeeded - public | job_status | cancelled - public | attempt_status | running - public | attempt_status | failed - public | attempt_status | succeeded - public | job_config_type | check_connection_source - public | job_config_type | check_connection_destination - public | job_config_type | discover_schema - public | job_config_type | get_spec - public | job_config_type | sync - public | job_config_type | reset_connection - public | source_type | api - public | source_type | file - public | source_type | database - public | source_type | custom - public | actor_type | source - public | actor_type | destination - public | operator_type | normalization - public | operator_type | dbt - public | namespace_definition_type | source - public | namespace_definition_type | destination - public | namespace_definition_type | customformat - public | status_type | active - public | status_type | inactive - public | status_type | deprecated - public | release_stage | alpha - public | release_stage | beta - public | release_stage | generally_available - public | release_stage | custom - - - - - Table "public.workspace" - Column | Type | Collation | Nullable | Default ----------------------------+--------------------------+-----------+----------+------------------- - id | uuid | | not null | - customer_id | uuid | | | - name | character varying(256) | | not null | - slug | character varying(256) | | not null | - email | character varying(256) | | | - initial_setup_complete | boolean | | not null | - anonymous_data_collection | boolean | | | - send_newsletter | boolean | | | - send_security_updates | boolean | | | - display_setup_wizard | boolean | | | - tombstone | boolean | | not null | false - notifications | jsonb | | | - first_sync_complete | boolean | | | - feedback_complete | boolean | | | - created_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP - updated_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP -Indexes: - "workspace_pkey" PRIMARY KEY, btree (id) -Referenced by: - TABLE "actor" CONSTRAINT "actor_workspace_id_fkey" FOREIGN KEY (workspace_id) REFERENCES workspace(id) ON DELETE CASCADE - TABLE "operation" CONSTRAINT "operation_workspace_id_fkey" FOREIGN KEY (workspace_id) REFERENCES workspace(id) ON DELETE CASCADE - - - - - Table "public.actor_definition" - Column | Type | Collation | Nullable | Default ---------------------------------+--------------------------+-----------+----------+------------------- - id | uuid | | not null | - name | character varying(256) | | not null | - docker_repository | character varying(256) | | not null | - docker_image_tag | character varying(256) | | not null | - documentation_url | character varying(256) | | | - icon | character varying(256) | | | - actor_type | actor_type | | not null | - source_type | source_type | | | - spec | jsonb | | not null | - created_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP - updated_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP - tombstone | boolean | | not null | false - release_stage | release_stage | | | - release_date | date | | | - resource_requirements | jsonb | | | - normalization_repository | character varying(255) | | | - normalization_tag | character varying(255) | | | - supports_dbt | 
boolean | | | - normalization_integration_type | character varying(255) | | | -Indexes: - "actor_definition_pkey" PRIMARY KEY, btree (id) -Referenced by: - TABLE "actor" CONSTRAINT "actor_actor_definition_id_fkey" FOREIGN KEY (actor_definition_id) REFERENCES actor_definition(id) ON DELETE CASCADE - TABLE "actor_oauth_parameter" CONSTRAINT "actor_oauth_parameter_actor_definition_id_fkey" FOREIGN KEY (actor_definition_id) REFERENCES actor_definition(id) ON DELETE CASCADE - - - - - Table "public.actor" - Column | Type | Collation | Nullable | Default ----------------------+--------------------------+-----------+----------+------------------- - id | uuid | | not null | - workspace_id | uuid | | not null | - actor_definition_id | uuid | | not null | - name | character varying(256) | | not null | - configuration | jsonb | | not null | - actor_type | actor_type | | not null | - tombstone | boolean | | not null | false - created_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP - updated_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP -Indexes: - "actor_pkey" PRIMARY KEY, btree (id) - "actor_actor_definition_id_idx" btree (actor_definition_id) -Foreign-key constraints: - "actor_actor_definition_id_fkey" FOREIGN KEY (actor_definition_id) REFERENCES actor_definition(id) ON DELETE CASCADE - "actor_workspace_id_fkey" FOREIGN KEY (workspace_id) REFERENCES workspace(id) ON DELETE CASCADE -Referenced by: - TABLE "actor_catalog_fetch_event" CONSTRAINT "actor_catalog_fetch_event_actor_id_fkey" FOREIGN KEY (actor_id) REFERENCES actor(id) ON DELETE CASCADE - TABLE "connection" CONSTRAINT "connection_destination_id_fkey" FOREIGN KEY (destination_id) REFERENCES actor(id) ON DELETE CASCADE - TABLE "connection" CONSTRAINT "connection_source_id_fkey" FOREIGN KEY (source_id) REFERENCES actor(id) ON DELETE CASCADE - - - - - Table "public.actor_oauth_parameter" - Column | Type | Collation | Nullable | Default ----------------------+--------------------------+-----------+----------+------------------- - id | uuid | | not null | - workspace_id | uuid | | | - actor_definition_id | uuid | | not null | - configuration | jsonb | | not null | - actor_type | actor_type | | not null | - created_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP - updated_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP -Indexes: - "actor_oauth_parameter_pkey" PRIMARY KEY, btree (id) -Foreign-key constraints: - "actor_oauth_parameter_actor_definition_id_fkey" FOREIGN KEY (actor_definition_id) REFERENCES actor_definition(id) ON DELETE CASCADE - - - - Table "public.operation" - Column | Type | Collation | Nullable | Default -------------------------+--------------------------+-----------+----------+------------------- - id | uuid | | not null | - workspace_id | uuid | | not null | - name | character varying(256) | | not null | - operator_type | operator_type | | not null | - operator_normalization | jsonb | | | - operator_dbt | jsonb | | | - tombstone | boolean | | not null | false - created_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP - updated_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP -Indexes: - "operation_pkey" PRIMARY KEY, btree (id) -Foreign-key constraints: - "operation_workspace_id_fkey" FOREIGN KEY (workspace_id) REFERENCES workspace(id) ON DELETE CASCADE -Referenced by: - TABLE "connection_operation" CONSTRAINT "connection_operation_operation_id_fkey" FOREIGN KEY (operation_id) REFERENCES operation(id) ON DELETE CASCADE - - - - - Table 
"public.connection" - Column | Type | Collation | Nullable | Default ------------------------+---------------------------+-----------+----------+------------------- - id | uuid | | not null | - namespace_definition | namespace_definition_type | | not null | - namespace_format | character varying(256) | | | - prefix | character varying(256) | | | - source_id | uuid | | not null | - destination_id | uuid | | not null | - name | character varying(256) | | not null | - catalog | jsonb | | not null | - status | status_type | | | - schedule | jsonb | | | - manual | boolean | | not null | - resource_requirements | jsonb | | | - created_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP - updated_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP - source_catalog_id | uuid | | | -Indexes: - "connection_pkey" PRIMARY KEY, btree (id) - "connection_destination_id_idx" btree (destination_id) - "connection_source_id_idx" btree (source_id) -Foreign-key constraints: - "connection_actor_catalog_id_fk" FOREIGN KEY (source_catalog_id) REFERENCES actor_catalog(id) ON DELETE CASCADE - "connection_destination_id_fkey" FOREIGN KEY (destination_id) REFERENCES actor(id) ON DELETE CASCADE - "connection_source_id_fkey" FOREIGN KEY (source_id) REFERENCES actor(id) ON DELETE CASCADE -Referenced by: - TABLE "connection_operation" CONSTRAINT "connection_operation_connection_id_fkey" FOREIGN KEY (connection_id) REFERENCES connection(id) ON DELETE CASCADE - TABLE "state" CONSTRAINT "state_connection_id_fkey" FOREIGN KEY (connection_id) REFERENCES connection(id) ON DELETE CASCADE - - - - - Table "public.connection_operation" - Column | Type | Collation | Nullable | Default ----------------+--------------------------+-----------+----------+------------------- - id | uuid | | not null | - connection_id | uuid | | not null | - operation_id | uuid | | not null | - created_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP - updated_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP -Indexes: - "connection_operation_pkey" PRIMARY KEY, btree (id, connection_id, operation_id) -Foreign-key constraints: - "connection_operation_connection_id_fkey" FOREIGN KEY (connection_id) REFERENCES connection(id) ON DELETE CASCADE - "connection_operation_operation_id_fkey" FOREIGN KEY (operation_id) REFERENCES operation(id) ON DELETE CASCADE - - - - - Table "public.state" - Column | Type | Collation | Nullable | Default ----------------+--------------------------+-----------+----------+------------------- - id | uuid | | not null | - connection_id | uuid | | not null | - state | jsonb | | | - created_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP - updated_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP -Indexes: - "state_pkey" PRIMARY KEY, btree (id, connection_id) -Foreign-key constraints: - "state_connection_id_fkey" FOREIGN KEY (connection_id) REFERENCES connection(id) ON DELETE CASCADE - - - - - Table "public.actor_catalog" - Column | Type | Collation | Nullable | Default ---------------+--------------------------+-----------+----------+------------------- - id | uuid | | not null | - catalog | jsonb | | not null | - catalog_hash | character varying(32) | | not null | - created_at | timestamp with time zone | | not null | - modified_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP -Indexes: - "actor_catalog_pkey" PRIMARY KEY, btree (id) - "actor_catalog_catalog_hash_id_idx" btree (catalog_hash) -Referenced by: - TABLE "actor_catalog_fetch_event" 
CONSTRAINT "actor_catalog_fetch_event_actor_catalog_id_fkey" FOREIGN KEY (actor_catalog_id) REFERENCES actor_catalog(id) ON DELETE CASCADE - TABLE "connection" CONSTRAINT "connection_actor_catalog_id_fk" FOREIGN KEY (source_catalog_id) REFERENCES actor_catalog(id) ON DELETE CASCADE - - - - - Table "public.actor_catalog_fetch_event" - Column | Type | Collation | Nullable | Default -------------------+--------------------------+-----------+----------+------------------- - id | uuid | | not null | - actor_catalog_id | uuid | | not null | - actor_id | uuid | | not null | - config_hash | character varying(32) | | not null | - actor_version | character varying(256) | | not null | - created_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP - modified_at | timestamp with time zone | | not null | CURRENT_TIMESTAMP -Indexes: - "actor_catalog_fetch_event_pkey" PRIMARY KEY, btree (id) - "actor_catalog_fetch_event_actor_catalog_id_idx" btree (actor_catalog_id) - "actor_catalog_fetch_event_actor_id_idx" btree (actor_id) -Foreign-key constraints: - "actor_catalog_fetch_event_actor_catalog_id_fkey" FOREIGN KEY (actor_catalog_id) REFERENCES actor_catalog(id) ON DELETE CASCADE - "actor_catalog_fetch_event_actor_id_fkey" FOREIGN KEY (actor_id) REFERENCES actor(id) ON DELETE CASCADE diff --git a/airbyte-db/db-lib/src/main/resources/configs_database/schema.sql b/airbyte-db/db-lib/src/main/resources/configs_database/schema.sql deleted file mode 100644 index 4916dc0f61aa3..0000000000000 --- a/airbyte-db/db-lib/src/main/resources/configs_database/schema.sql +++ /dev/null @@ -1,23 +0,0 @@ --- tables -CREATE - TABLE - IF NOT EXISTS AIRBYTE_CONFIGS( - id BIGINT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - config_id VARCHAR(36) NOT NULL, - config_type VARCHAR(60) NOT NULL, - config_blob JSONB NOT NULL, - created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP - ); - --- indices -CREATE - UNIQUE INDEX IF NOT EXISTS airbyte_configs_type_id_idx ON - AIRBYTE_CONFIGS( - config_type, - config_id - ); - -CREATE - INDEX IF NOT EXISTS airbyte_configs_id_idx ON - AIRBYTE_CONFIGS(config_id); diff --git a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt b/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt deleted file mode 100644 index d83b9eabad512..0000000000000 --- a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt +++ /dev/null @@ -1,318 +0,0 @@ -// The content of the file is just to have a basic idea of the current state of the database and is not fully accurate. -// It is also not used by any piece of code to generate anything. -// It doesn't contain the enums created in the database and the default values might also be buggy. 
- -create table "public"."actor"( - "id" uuid not null, - "workspace_id" uuid not null, - "actor_definition_id" uuid not null, - "name" varchar(256) not null, - "configuration" jsonb not null, - "actor_type" actor_type not null, - "tombstone" bool not null default false, - "created_at" timestamptz(35) not null default null, - "updated_at" timestamptz(35) not null default null, - constraint "actor_pkey" - primary key ("id") -); -create table "public"."actor_catalog"( - "id" uuid not null, - "catalog" jsonb not null, - "catalog_hash" varchar(32) not null, - "created_at" timestamptz(35) not null, - "modified_at" timestamptz(35) not null default null, - constraint "actor_catalog_pkey" - primary key ("id") -); -create table "public"."actor_catalog_fetch_event"( - "id" uuid not null, - "actor_catalog_id" uuid not null, - "actor_id" uuid not null, - "config_hash" varchar(32) not null, - "actor_version" varchar(256) not null, - "created_at" timestamptz(35) not null default null, - "modified_at" timestamptz(35) not null default null, - constraint "actor_catalog_fetch_event_pkey" - primary key ("id") -); -create table "public"."actor_definition"( - "id" uuid not null, - "name" varchar(256) not null, - "docker_repository" varchar(256) not null, - "docker_image_tag" varchar(256) not null, - "documentation_url" varchar(256) null, - "icon" varchar(256) null, - "actor_type" actor_type not null, - "source_type" source_type null, - "spec" jsonb not null, - "created_at" timestamptz(35) not null default null, - "updated_at" timestamptz(35) not null default null, - "tombstone" bool not null default false, - "release_stage" release_stage null, - "release_date" date null, - "resource_requirements" jsonb null, - "public" bool not null default false, - "custom" bool not null default false, - "protocol_version" varchar(255) null, - "normalization_repository" varchar(255) null, - "normalization_tag" varchar(255) null, - "supports_dbt" bool null, - "normalization_integration_type" varchar(255) null, - "allowed_hosts" jsonb null, - "suggested_streams" jsonb null, - constraint "actor_definition_pkey" - primary key ("id") -); -create table "public"."actor_definition_workspace_grant"( - "actor_definition_id" uuid not null, - "workspace_id" uuid not null -); -create table "public"."actor_oauth_parameter"( - "id" uuid not null, - "workspace_id" uuid null, - "actor_definition_id" uuid not null, - "configuration" jsonb not null, - "actor_type" actor_type not null, - "created_at" timestamptz(35) not null default null, - "updated_at" timestamptz(35) not null default null, - constraint "actor_oauth_parameter_pkey" - primary key ("id") -); -create table "public"."airbyte_configs_migrations"( - "installed_rank" int4 not null, - "version" varchar(50) null, - "description" varchar(200) not null, - "type" varchar(20) not null, - "script" varchar(1000) not null, - "checksum" int4 null, - "installed_by" varchar(100) not null, - "installed_on" timestamp(29) not null default null, - "execution_time" int4 not null, - "success" bool not null, - constraint "airbyte_configs_migrations_pk" - primary key ("installed_rank") -); -create table "public"."connection"( - "id" uuid not null, - "namespace_definition" namespace_definition_type not null, - "namespace_format" varchar(256) null, - "prefix" varchar(256) null, - "source_id" uuid not null, - "destination_id" uuid not null, - "name" varchar(256) not null, - "catalog" jsonb not null, - "status" status_type null, - "schedule" jsonb null, - "manual" bool not null, - "resource_requirements" 
jsonb null, - "created_at" timestamptz(35) not null default null, - "updated_at" timestamptz(35) not null default null, - "source_catalog_id" uuid null, - "schedule_type" schedule_type null, - "schedule_data" jsonb null, - "geography" geography_type not null default null, - "notify_schema_changes" bool not null default true, - "non_breaking_change_preference" varchar(7) not null default '''ignore''::character varying', - "breaking_change" bool not null default false, - "unsupported_protocol_version" bool not null default false, - "field_selection_data" jsonb null, - constraint "connection_pkey" - primary key ("id") -); -create table "public"."connection_operation"( - "id" uuid not null, - "connection_id" uuid not null, - "operation_id" uuid not null, - "created_at" timestamptz(35) not null default null, - "updated_at" timestamptz(35) not null default null, - constraint "connection_operation_pkey" - primary key ( - "id", - "connection_id", - "operation_id" - ) -); -create table "public"."operation"( - "id" uuid not null, - "workspace_id" uuid not null, - "name" varchar(256) not null, - "operator_type" operator_type not null, - "operator_normalization" jsonb null, - "operator_dbt" jsonb null, - "tombstone" bool not null default false, - "created_at" timestamptz(35) not null default null, - "updated_at" timestamptz(35) not null default null, - "operator_webhook" jsonb null, - constraint "operation_pkey" - primary key ("id") -); -create table "public"."state"( - "id" uuid not null, - "connection_id" uuid not null, - "state" jsonb null, - "created_at" timestamptz(35) not null default null, - "updated_at" timestamptz(35) not null default null, - "stream_name" text null, - "namespace" text null, - "type" state_type not null default null, - constraint "state_pkey" - primary key ( - "id", - "connection_id" - ) -); -create table "public"."stream_reset"( - "id" uuid not null, - "connection_id" uuid not null, - "stream_namespace" text null, - "stream_name" text not null, - "created_at" timestamptz(35) not null default null, - "updated_at" timestamptz(35) not null default null -); -create table "public"."workspace"( - "id" uuid not null, - "customer_id" uuid null, - "name" varchar(256) not null, - "slug" varchar(256) not null, - "email" varchar(256) null, - "initial_setup_complete" bool not null, - "anonymous_data_collection" bool null, - "send_newsletter" bool null, - "send_security_updates" bool null, - "display_setup_wizard" bool null, - "tombstone" bool not null default false, - "notifications" jsonb null, - "first_sync_complete" bool null, - "feedback_complete" bool null, - "created_at" timestamptz(35) not null default null, - "updated_at" timestamptz(35) not null default null, - "geography" geography_type not null default null, - "webhook_operation_configs" jsonb null, - constraint "workspace_pkey" - primary key ("id") -); -create table "public"."workspace_service_account"( - "workspace_id" uuid not null, - "service_account_id" varchar(31) not null, - "service_account_email" varchar(256) not null, - "json_credential" jsonb not null, - "hmac_key" jsonb not null, - "created_at" timestamptz(35) not null default null, - "updated_at" timestamptz(35) not null default null, - constraint "workspace_service_account_pkey" - primary key ( - "workspace_id", - "service_account_id" - ) -); -alter table "public"."actor" - add constraint "actor_actor_definition_id_fkey" - foreign key ("actor_definition_id") - references "public"."actor_definition" ("id"); -alter table "public"."actor" - add constraint 
"actor_workspace_id_fkey" - foreign key ("workspace_id") - references "public"."workspace" ("id"); -alter table "public"."actor_catalog_fetch_event" - add constraint "actor_catalog_fetch_event_actor_catalog_id_fkey" - foreign key ("actor_catalog_id") - references "public"."actor_catalog" ("id"); -alter table "public"."actor_catalog_fetch_event" - add constraint "actor_catalog_fetch_event_actor_id_fkey" - foreign key ("actor_id") - references "public"."actor" ("id"); -alter table "public"."actor_definition_workspace_grant" - add constraint "actor_definition_workspace_grant_actor_definition_id_fkey" - foreign key ("actor_definition_id") - references "public"."actor_definition" ("id"); -alter table "public"."actor_definition_workspace_grant" - add constraint "actor_definition_workspace_grant_workspace_id_fkey" - foreign key ("workspace_id") - references "public"."workspace" ("id"); -alter table "public"."connection" - add constraint "connection_actor_catalog_id_fk" - foreign key ("source_catalog_id") - references "public"."actor_catalog" ("id"); -alter table "public"."connection" - add constraint "connection_destination_id_fkey" - foreign key ("destination_id") - references "public"."actor" ("id"); -alter table "public"."connection" - add constraint "connection_source_id_fkey" - foreign key ("source_id") - references "public"."actor" ("id"); -alter table "public"."connection_operation" - add constraint "connection_operation_connection_id_fkey" - foreign key ("connection_id") - references "public"."connection" ("id"); -alter table "public"."connection_operation" - add constraint "connection_operation_operation_id_fkey" - foreign key ("operation_id") - references "public"."operation" ("id"); -alter table "public"."operation" - add constraint "operation_workspace_id_fkey" - foreign key ("workspace_id") - references "public"."workspace" ("id"); -alter table "public"."state" - add constraint "state_connection_id_fkey" - foreign key ("connection_id") - references "public"."connection" ("id"); -alter table "public"."workspace_service_account" - add constraint "workspace_service_account_workspace_id_fkey" - foreign key ("workspace_id") - references "public"."workspace" ("id"); -create index "actor_actor_definition_id_idx" on "public"."actor"("actor_definition_id" asc); -create unique index "actor_pkey" on "public"."actor"("id" asc); -create index "actor_workspace_id_idx" on "public"."actor"("workspace_id" asc); -create index "actor_catalog_catalog_hash_id_idx" on "public"."actor_catalog"("catalog_hash" asc); -create unique index "actor_catalog_pkey" on "public"."actor_catalog"("id" asc); -create index "actor_catalog_fetch_event_actor_catalog_id_idx" on "public"."actor_catalog_fetch_event"("actor_catalog_id" asc); -create index "actor_catalog_fetch_event_actor_id_idx" on "public"."actor_catalog_fetch_event"("actor_id" asc); -create unique index "actor_catalog_fetch_event_pkey" on "public"."actor_catalog_fetch_event"("id" asc); -create unique index "actor_definition_pkey" on "public"."actor_definition"("id" asc); -create unique index "actor_definition_workspace_gr_workspace_id_actor_definition_key" on "public"."actor_definition_workspace_grant"( - "workspace_id" asc, - "actor_definition_id" asc -); -create unique index "actor_oauth_parameter_pkey" on "public"."actor_oauth_parameter"("id" asc); -create index "actor_oauth_parameter_workspace_definition_idx" on "public"."actor_oauth_parameter"( - "workspace_id" asc, - "actor_definition_id" asc -); -create unique index "airbyte_configs_migrations_pk" on 
"public"."airbyte_configs_migrations"("installed_rank" asc); -create index "airbyte_configs_migrations_s_idx" on "public"."airbyte_configs_migrations"("success" asc); -create index "connection_destination_id_idx" on "public"."connection"("destination_id" asc); -create unique index "connection_pkey" on "public"."connection"("id" asc); -create index "connection_source_id_idx" on "public"."connection"("source_id" asc); -create index "connection_status_idx" on "public"."connection"("status" asc); -create index "connection_operation_connection_id_idx" on "public"."connection_operation"("connection_id" asc); -create unique index "connection_operation_pkey" on "public"."connection_operation"( - "id" asc, - "connection_id" asc, - "operation_id" asc -); -create unique index "operation_pkey" on "public"."operation"("id" asc); -create unique index "state__connection_id__stream_name__namespace__uq" on "public"."state"( - "connection_id" asc, - "stream_name" asc, - "namespace" asc -); -create unique index "state_pkey" on "public"."state"( - "id" asc, - "connection_id" asc -); -create index "connection_id_stream_name_namespace_idx" on "public"."stream_reset"( - "connection_id" asc, - "stream_name" asc, - "stream_namespace" asc -); -create unique index "stream_reset_connection_id_stream_name_stream_namespace_key" on "public"."stream_reset"( - "connection_id" asc, - "stream_name" asc, - "stream_namespace" asc -); -create unique index "workspace_pkey" on "public"."workspace"("id" asc); -create unique index "workspace_service_account_pkey" on "public"."workspace_service_account"( - "workspace_id" asc, - "service_account_id" asc -); diff --git a/airbyte-db/db-lib/src/main/resources/init.sql b/airbyte-db/db-lib/src/main/resources/init.sql deleted file mode 100644 index 088db3a8e97f3..0000000000000 --- a/airbyte-db/db-lib/src/main/resources/init.sql +++ /dev/null @@ -1,12 +0,0 @@ --- Default is 100. Give this slightly more to accommodate the multiple setup applications running at the start. 
-ALTER SYSTEM -SET -max_connections = 150; - -CREATE - DATABASE airbyte; - -\connect airbyte; - -GRANT ALL ON -DATABASE airbyte TO docker; diff --git a/airbyte-db/db-lib/src/main/resources/jobs_database/AirbyteMetadata.yaml b/airbyte-db/db-lib/src/main/resources/jobs_database/AirbyteMetadata.yaml deleted file mode 100644 index 8fddec8e41a8c..0000000000000 --- a/airbyte-db/db-lib/src/main/resources/jobs_database/AirbyteMetadata.yaml +++ /dev/null @@ -1,14 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -title: AirbyteMetadata -description: representation of an airbyte_metadata record as created in schema.sql -type: object -required: - - key - - value -additionalProperties: false -properties: - key: - type: string - value: - type: string diff --git a/airbyte-db/db-lib/src/main/resources/jobs_database/Attempts.yaml b/airbyte-db/db-lib/src/main/resources/jobs_database/Attempts.yaml deleted file mode 100644 index 5efdc4ef1097f..0000000000000 --- a/airbyte-db/db-lib/src/main/resources/jobs_database/Attempts.yaml +++ /dev/null @@ -1,39 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -title: Attempts -description: representation of an attempts record as created in schema.sql -type: object -required: - - id - - job_id - - attempt_number - - log_path - - status - - created_at - - updated_at -additionalProperties: true -properties: - id: - type: number - job_id: - type: number - attempt_number: - type: number - attempt_sync_config: - type: ["null", object] - log_path: - type: string - output: - type: ["null", object] - status: - type: string - created_at: - # todo should be datetime. - type: string - description: Timestamp when the attempt record was created. - updated_at: - # todo should be datetime. - type: string - ended_at: - # todo should be datetime. - type: ["null", string] diff --git a/airbyte-db/db-lib/src/main/resources/jobs_database/Jobs.yaml b/airbyte-db/db-lib/src/main/resources/jobs_database/Jobs.yaml deleted file mode 100644 index 1e896c20d8e49..0000000000000 --- a/airbyte-db/db-lib/src/main/resources/jobs_database/Jobs.yaml +++ /dev/null @@ -1,35 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -title: Jobs -description: representation of a jobs record as created in schema.sql -type: object -required: - - id - - scope - - config - - status - - created_at - - updated_at -additionalProperties: false -properties: - id: - type: number - scope: - type: string - config_type: - type: string - config: - type: object - status: - type: string - created_at: - # todo should be datetime. - type: string - description: Timestamp when the job record was created. - started_at: - # todo should be datetime. - type: ["null", string] - description: Timestamp when the job started running. - updated_at: - # todo should be datetime.
- type: string diff --git a/airbyte-db/db-lib/src/main/resources/jobs_database/schema.sql b/airbyte-db/db-lib/src/main/resources/jobs_database/schema.sql deleted file mode 100644 index 32781775be54e..0000000000000 --- a/airbyte-db/db-lib/src/main/resources/jobs_database/schema.sql +++ /dev/null @@ -1,69 +0,0 @@ --- types -CREATE - TYPE JOB_STATUS AS ENUM( - 'pending', - 'running', - 'incomplete', - 'failed', - 'succeeded', - 'cancelled' - ); - -CREATE - TYPE ATTEMPT_STATUS AS ENUM( - 'running', - 'failed', - 'succeeded' - ); - -CREATE - TYPE JOB_CONFIG_TYPE AS ENUM( - 'check_connection_source', - 'check_connection_destination', - 'discover_schema', - 'get_spec', - 'sync', - 'reset_connection' - ); - --- tables -CREATE - TABLE - IF NOT EXISTS AIRBYTE_METADATA( - KEY VARCHAR(255) PRIMARY KEY, - value VARCHAR(255) - ); - -CREATE - TABLE - IF NOT EXISTS JOBS( - id BIGINT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - config_type JOB_CONFIG_TYPE, - SCOPE VARCHAR(255), - config JSONB, - status JOB_STATUS, - started_at TIMESTAMPTZ, - created_at TIMESTAMPTZ, - updated_at TIMESTAMPTZ - ); - -CREATE - TABLE - IF NOT EXISTS ATTEMPTS( - id BIGINT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - job_id BIGINT, - attempt_number INTEGER, - log_path VARCHAR(255), - OUTPUT JSONB, - status ATTEMPT_STATUS, - created_at TIMESTAMPTZ, - updated_at TIMESTAMPTZ, - ended_at TIMESTAMPTZ - ); - -CREATE - UNIQUE INDEX IF NOT EXISTS job_attempt_idx ON - ATTEMPTS( - job_id, - attempt_number - ); diff --git a/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt b/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt deleted file mode 100644 index 6e959d581f7af..0000000000000 --- a/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt +++ /dev/null @@ -1,133 +0,0 @@ -// The content of the file is just to have a basic idea of the current state of the database and is not fully accurate. -// It is also not used by any piece of code to generate anything. -// It doesn't contain the enums created in the database and the default values might also be buggy. 
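The jobs schema.sql above pairs JOBS with ATTEMPTS, and the unique job_attempt_idx on (job_id, attempt_number) makes "latest attempt per job" a cheap grouped MAX. A minimal jOOQ sketch using plain-SQL identifiers, in the spirit of the no-generated-code rule from the migrations; the class and method names are illustrative, not from this repository.

  import static org.jooq.impl.DSL.field;
  import static org.jooq.impl.DSL.max;
  import static org.jooq.impl.DSL.table;

  import org.jooq.DSLContext;
  import org.jooq.Record2;
  import org.jooq.Result;

  public final class AttemptQueriesSketch {

    // Returns one row per job with the highest attempt_number recorded so far.
    static Result<Record2<Object, Object>> latestAttemptPerJob(final DSLContext ctx) {
      return ctx.select(field("job_id"), max(field("attempt_number")))
          .from(table("attempts"))
          .groupBy(field("job_id"))
          .fetch();
    }
  }

The jOOQ-rendered dump of the jobs tables continues below.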
- -create table "public"."airbyte_jobs_migrations"( - "installed_rank" int4 not null, - "version" varchar(50) null, - "description" varchar(200) not null, - "type" varchar(20) not null, - "script" varchar(1000) not null, - "checksum" int4 null, - "installed_by" varchar(100) not null, - "installed_on" timestamp(29) not null default null, - "execution_time" int4 not null, - "success" bool not null, - constraint "airbyte_jobs_migrations_pk" - primary key ("installed_rank") -); -create table "public"."airbyte_metadata"( - "key" varchar(255) not null, - "value" varchar(255) null, - constraint "airbyte_metadata_pkey" - primary key ("key") -); -create table "public"."attempts"( - "id" int8 generated by default as identity not null, - "job_id" int8 null, - "attempt_number" int4 null, - "log_path" varchar(255) null, - "output" jsonb null, - "status" any null, - "created_at" timestamptz(35) null, - "updated_at" timestamptz(35) null, - "ended_at" timestamptz(35) null, - "temporal_workflow_id" varchar(256) null, - "failure_summary" jsonb null, - "processing_task_queue" varchar(255) null, - "attempt_sync_config" jsonb null, - constraint "attempts_pkey" - primary key ("id") -); -create table "public"."jobs"( - "id" int8 generated by default as identity not null, - "config_type" any null, - "scope" varchar(255) null, - "config" jsonb null, - "status" any null, - "started_at" timestamptz(35) null, - "created_at" timestamptz(35) null, - "updated_at" timestamptz(35) null, - constraint "jobs_pkey" - primary key ("id") -); -create table "public"."normalization_summaries"( - "id" uuid not null, - "attempt_id" int8 not null, - "start_time" timestamptz(35) null, - "end_time" timestamptz(35) null, - "failures" jsonb null, - "created_at" timestamptz(35) not null default null, - "updated_at" timestamptz(35) not null default null, - constraint "normalization_summaries_pkey" - primary key ("id") -); -create table "public"."stream_stats"( - "id" uuid not null, - "attempt_id" int8 not null, - "stream_namespace" varchar(2147483647) null, - "stream_name" varchar(2147483647) not null, - "records_emitted" int8 null, - "bytes_emitted" int8 null, - "estimated_records" int8 null, - "estimated_bytes" int8 null, - "created_at" timestamptz(35) not null default null, - "updated_at" timestamptz(35) not null default null, - constraint "stream_stats_pkey" - primary key ("id") -); -create table "public"."sync_stats"( - "id" uuid not null, - "attempt_id" int8 not null, - "records_emitted" int8 null, - "bytes_emitted" int8 null, - "source_state_messages_emitted" int8 null, - "destination_state_messages_emitted" int8 null, - "records_committed" int8 null, - "mean_seconds_before_source_state_message_emitted" int8 null, - "max_seconds_before_source_state_message_emitted" int8 null, - "mean_seconds_between_state_message_emitted_and_committed" int8 null, - "max_seconds_between_state_message_emitted_and_committed" int8 null, - "created_at" timestamptz(35) not null default null, - "updated_at" timestamptz(35) not null default null, - "estimated_records" int8 null, - "estimated_bytes" int8 null, - constraint "sync_stats_pkey" - primary key ("id") -); -alter table "public"."normalization_summaries" - add constraint "normalization_summaries_attempt_id_fkey" - foreign key ("attempt_id") - references "public"."attempts" ("id"); -alter table "public"."stream_stats" - add constraint "stream_stats_attempt_id_fkey" - foreign key ("attempt_id") - references "public"."attempts" ("id"); -alter table "public"."sync_stats" - add constraint 
"sync_stats_attempt_id_fkey" - foreign key ("attempt_id") - references "public"."attempts" ("id"); -create unique index "airbyte_jobs_migrations_pk" on "public"."airbyte_jobs_migrations"("installed_rank" asc); -create index "airbyte_jobs_migrations_s_idx" on "public"."airbyte_jobs_migrations"("success" asc); -create unique index "airbyte_metadata_pkey" on "public"."airbyte_metadata"("key" asc); -create unique index "attempts_pkey" on "public"."attempts"("id" asc); -create index "attempts_status_idx" on "public"."attempts"("status" asc); -create unique index "job_attempt_idx" on "public"."attempts"( - "job_id" asc, - "attempt_number" asc -); -create index "jobs_config_type_idx" on "public"."jobs"("config_type" asc); -create unique index "jobs_pkey" on "public"."jobs"("id" asc); -create index "jobs_scope_idx" on "public"."jobs"("scope" asc); -create index "jobs_status_idx" on "public"."jobs"("status" asc); -create unique index "normalization_summaries_pkey" on "public"."normalization_summaries"("id" asc); -create index "normalization_summary_attempt_id_idx" on "public"."normalization_summaries"("attempt_id" asc); -create index "index" on "public"."stream_stats"("attempt_id" asc); -create unique index "stream_stats_pkey" on "public"."stream_stats"("id" asc); -create unique index "uniq_stream_attempt" on "public"."stream_stats"( - "attempt_id" asc, - "stream_name" asc, - "stream_namespace" asc -); -create index "attempt_id_idx" on "public"."sync_stats"("attempt_id" asc); -create unique index "sync_stats_pkey" on "public"."sync_stats"("id" asc); diff --git a/airbyte-db/db-lib/src/main/resources/migration_template.txt b/airbyte-db/db-lib/src/main/resources/migration_template.txt deleted file mode 100644 index 074c2d97b9bee..0000000000000 --- a/airbyte-db/db-lib/src/main/resources/migration_template.txt +++ /dev/null @@ -1,25 +0,0 @@ -package io.airbyte.db.instance..migrations; - -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -// TODO: update migration description in the class name -public class V__ extends BaseJavaMigration { - - private static final Logger LOGGER = LoggerFactory.getLogger(V__.class); - - @Override - public void migrate(final Context context) throws Exception { - LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); - - // Warning: please do not use any jOOQ generated code to write a migration. - // As database schema changes, the generated jOOQ code can be deprecated. So - // old migration may not compile if there is any generated code. - final DSLContext ctx = DSL.using(context.getConnection()); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/check/impl/ConfigsDatabaseAvailabilityCheckTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/check/impl/ConfigsDatabaseAvailabilityCheckTest.java deleted file mode 100644 index 62ccc54f7d5dc..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/check/impl/ConfigsDatabaseAvailabilityCheckTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.check.impl; - -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.db.check.DatabaseCheckException; -import org.jooq.DSLContext; -import org.jooq.Select; -import org.jooq.exception.DataAccessException; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link ConfigsDatabaseAvailabilityCheck} class. - */ -class ConfigsDatabaseAvailabilityCheckTest extends CommonDatabaseCheckTest { - - @Test - void checkDatabaseAvailability() { - final var check = new ConfigsDatabaseAvailabilityCheck(dslContext, TIMEOUT_MS); - Assertions.assertDoesNotThrow(() -> check.check()); - } - - @Test - void checkDatabaseAvailabilityTimeout() { - try (final DSLContext dslContext = mock(DSLContext.class)) { - when(dslContext.fetchExists(any(Select.class))).thenThrow(new DataAccessException("test")); - final var check = new ConfigsDatabaseAvailabilityCheck(dslContext, TIMEOUT_MS); - Assertions.assertThrows(DatabaseCheckException.class, () -> check.check()); - } - } - - @Test - void checkDatabaseAvailabilityNullDslContext() { - final var check = new ConfigsDatabaseAvailabilityCheck(null, TIMEOUT_MS); - Assertions.assertThrows(DatabaseCheckException.class, () -> check.check()); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/check/impl/ConfigsDatabaseMigrationCheckTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/check/impl/ConfigsDatabaseMigrationCheckTest.java deleted file mode 100644 index 8fdeb66d91db4..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/check/impl/ConfigsDatabaseMigrationCheckTest.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.check.impl; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.db.check.DatabaseCheckException; -import org.flywaydb.core.Flyway; -import org.flywaydb.core.api.MigrationInfo; -import org.flywaydb.core.api.MigrationInfoService; -import org.flywaydb.core.api.MigrationVersion; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link ConfigsDatabaseMigrationCheck} class. 
- */ -class ConfigsDatabaseMigrationCheckTest { - - private static final String CURRENT_VERSION = "1.2.3"; - private static final String VERSION_2 = "2.0.0"; - - @Test - void testMigrationCheck() { - final var minimumVersion = "1.0.0"; - final var currentVersion = CURRENT_VERSION; - final var migrationVersion = MigrationVersion.fromVersion(currentVersion); - final var migrationInfo = mock(MigrationInfo.class); - final var migrationInfoService = mock(MigrationInfoService.class); - final var flyway = mock(Flyway.class); - final var databaseAvailabilityCheck = mock(ConfigsDatabaseAvailabilityCheck.class); - - when(migrationInfo.getVersion()).thenReturn(migrationVersion); - when(migrationInfoService.current()).thenReturn(migrationInfo); - when(flyway.info()).thenReturn(migrationInfoService); - - final var check = new ConfigsDatabaseMigrationCheck(databaseAvailabilityCheck, flyway, minimumVersion, CommonDatabaseCheckTest.TIMEOUT_MS); - Assertions.assertDoesNotThrow(() -> check.check()); - } - - @Test - void testMigrationCheckEqualVersion() { - final var minimumVersion = CURRENT_VERSION; - final var currentVersion = minimumVersion; - final var migrationVersion = MigrationVersion.fromVersion(currentVersion); - final var migrationInfo = mock(MigrationInfo.class); - final var migrationInfoService = mock(MigrationInfoService.class); - final var flyway = mock(Flyway.class); - final var databaseAvailabilityCheck = mock(ConfigsDatabaseAvailabilityCheck.class); - - when(migrationInfo.getVersion()).thenReturn(migrationVersion); - when(migrationInfoService.current()).thenReturn(migrationInfo); - when(flyway.info()).thenReturn(migrationInfoService); - - final var check = new ConfigsDatabaseMigrationCheck(databaseAvailabilityCheck, flyway, minimumVersion, CommonDatabaseCheckTest.TIMEOUT_MS); - Assertions.assertDoesNotThrow(() -> check.check()); - } - - @Test - void testMigrationCheckTimeout() { - final var minimumVersion = VERSION_2; - final var currentVersion = CURRENT_VERSION; - final var migrationVersion = MigrationVersion.fromVersion(currentVersion); - final var migrationInfo = mock(MigrationInfo.class); - final var migrationInfoService = mock(MigrationInfoService.class); - final var flyway = mock(Flyway.class); - final var databaseAvailabilityCheck = mock(ConfigsDatabaseAvailabilityCheck.class); - - when(migrationInfo.getVersion()).thenReturn(migrationVersion); - when(migrationInfoService.current()).thenReturn(migrationInfo); - when(flyway.info()).thenReturn(migrationInfoService); - - final var check = new ConfigsDatabaseMigrationCheck(databaseAvailabilityCheck, flyway, minimumVersion, CommonDatabaseCheckTest.TIMEOUT_MS); - Assertions.assertThrows(DatabaseCheckException.class, () -> check.check()); - } - - @Test - void testMigrationCheckNullDatabaseAvailabilityCheck() { - final var minimumVersion = VERSION_2; - final var currentVersion = CURRENT_VERSION; - final var migrationVersion = MigrationVersion.fromVersion(currentVersion); - final var migrationInfo = mock(MigrationInfo.class); - final var migrationInfoService = mock(MigrationInfoService.class); - final var flyway = mock(Flyway.class); - - when(migrationInfo.getVersion()).thenReturn(migrationVersion); - when(migrationInfoService.current()).thenReturn(migrationInfo); - when(flyway.info()).thenReturn(migrationInfoService); - - final var check = new ConfigsDatabaseMigrationCheck(null, flyway, minimumVersion, CommonDatabaseCheckTest.TIMEOUT_MS); - Assertions.assertThrows(DatabaseCheckException.class, () -> check.check()); - } - - @Test - void
testMigrationCheckNullFlyway() { - final var minimumVersion = VERSION_2; - final var databaseAvailabilityCheck = mock(ConfigsDatabaseAvailabilityCheck.class); - final var check = new ConfigsDatabaseMigrationCheck(databaseAvailabilityCheck, null, minimumVersion, CommonDatabaseCheckTest.TIMEOUT_MS); - Assertions.assertThrows(DatabaseCheckException.class, () -> check.check()); - } - - @Test - void unavailableFlywayMigrationVersion() { - final var minimumVersion = VERSION_2; - final var migrationInfoService = mock(MigrationInfoService.class); - final var flyway = mock(Flyway.class); - final var databaseAvailabilityCheck = mock(ConfigsDatabaseAvailabilityCheck.class); - - when(migrationInfoService.current()).thenReturn(null); - when(flyway.info()).thenReturn(migrationInfoService); - - final var check = new ConfigsDatabaseMigrationCheck(databaseAvailabilityCheck, flyway, minimumVersion, CommonDatabaseCheckTest.TIMEOUT_MS); - Assertions.assertThrows(DatabaseCheckException.class, () -> check.check()); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/check/impl/JobsDatabaseAvailabilityCheckTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/check/impl/JobsDatabaseAvailabilityCheckTest.java deleted file mode 100644 index 050ae6e48eeb0..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/check/impl/JobsDatabaseAvailabilityCheckTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.check.impl; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.db.check.DatabaseCheckException; -import org.jooq.DSLContext; -import org.jooq.Select; -import org.jooq.exception.DataAccessException; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link JobsDatabaseAvailabilityCheck} class. - */ -class JobsDatabaseAvailabilityCheckTest extends CommonDatabaseCheckTest { - - @Test - void checkDatabaseAvailability() { - final var check = new JobsDatabaseAvailabilityCheck(dslContext, TIMEOUT_MS); - Assertions.assertDoesNotThrow(() -> check.check()); - } - - @Test - void checkDatabaseAvailabilityTimeout() { - try (final DSLContext dslContext = mock(DSLContext.class)) { - when(dslContext.fetchExists(any(Select.class))).thenThrow(new DataAccessException("test")); - final var check = new JobsDatabaseAvailabilityCheck(dslContext, TIMEOUT_MS); - Assertions.assertThrows(DatabaseCheckException.class, () -> check.check()); - } - } - - @Test - void checkDatabaseAvailabilityNullDslContext() { - final var check = new JobsDatabaseAvailabilityCheck(null, TIMEOUT_MS); - Assertions.assertThrows(DatabaseCheckException.class, () -> check.check()); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/check/impl/JobsDatabaseMigrationCheckTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/check/impl/JobsDatabaseMigrationCheckTest.java deleted file mode 100644 index d4abc7b21526a..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/check/impl/JobsDatabaseMigrationCheckTest.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.check.impl; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.db.check.DatabaseCheckException; -import org.flywaydb.core.Flyway; -import org.flywaydb.core.api.MigrationInfo; -import org.flywaydb.core.api.MigrationInfoService; -import org.flywaydb.core.api.MigrationVersion; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link JobsDatabaseMigrationCheck} class. - */ -class JobsDatabaseMigrationCheckTest { - - private static final String CURRENT_VERSION = "1.2.3"; - private static final String VERSION_2 = "2.0.0"; - - @Test - void testMigrationCheck() { - final var minimumVersion = "1.0.0"; - final var currentVersion = CURRENT_VERSION; - final var migrationVersion = MigrationVersion.fromVersion(currentVersion); - final var migrationInfo = mock(MigrationInfo.class); - final var migrationInfoService = mock(MigrationInfoService.class); - final var flyway = mock(Flyway.class); - final var databaseAvailabilityCheck = mock(JobsDatabaseAvailabilityCheck.class); - - when(migrationInfo.getVersion()).thenReturn(migrationVersion); - when(migrationInfoService.current()).thenReturn(migrationInfo); - when(flyway.info()).thenReturn(migrationInfoService); - - final var check = new JobsDatabaseMigrationCheck(databaseAvailabilityCheck, flyway, minimumVersion, CommonDatabaseCheckTest.TIMEOUT_MS); - Assertions.assertDoesNotThrow(() -> check.check()); - } - - @Test - void testMigrationCheckEqualVersion() { - final var minimumVersion = CURRENT_VERSION; - final var currentVersion = minimumVersion; - final var migrationVersion = MigrationVersion.fromVersion(currentVersion); - final var migrationInfo = mock(MigrationInfo.class); - final var migrationInfoService = mock(MigrationInfoService.class); - final var flyway = mock(Flyway.class); - final var databaseAvailabilityCheck = mock(JobsDatabaseAvailabilityCheck.class); - - when(migrationInfo.getVersion()).thenReturn(migrationVersion); - when(migrationInfoService.current()).thenReturn(migrationInfo); - when(flyway.info()).thenReturn(migrationInfoService); - - final var check = new JobsDatabaseMigrationCheck(databaseAvailabilityCheck, flyway, minimumVersion, CommonDatabaseCheckTest.TIMEOUT_MS); - Assertions.assertDoesNotThrow(() -> check.check()); - } - - @Test - void testMigrationCheckTimeout() { - final var minimumVersion = VERSION_2; - final var currentVersion = CURRENT_VERSION; - final var migrationVersion = MigrationVersion.fromVersion(currentVersion); - final var migrationInfo = mock(MigrationInfo.class); - final var migrationInfoService = mock(MigrationInfoService.class); - final var flyway = mock(Flyway.class); - final var databaseAvailabilityCheck = mock(JobsDatabaseAvailabilityCheck.class); - - when(migrationInfo.getVersion()).thenReturn(migrationVersion); - when(migrationInfoService.current()).thenReturn(migrationInfo); - when(flyway.info()).thenReturn(migrationInfoService); - - final var check = new JobsDatabaseMigrationCheck(databaseAvailabilityCheck, flyway, minimumVersion, CommonDatabaseCheckTest.TIMEOUT_MS); - Assertions.assertThrows(DatabaseCheckException.class, () -> check.check()); - } - - @Test - void testMigrationCheckNullDatabaseAvailabilityCheck() { - final var minimumVersion = VERSION_2; - final var currentVersion = CURRENT_VERSION; - final var migrationVersion = MigrationVersion.fromVersion(currentVersion); - final var migrationInfo = mock(MigrationInfo.class); - final var migrationInfoService = 
mock(MigrationInfoService.class); - final var flyway = mock(Flyway.class); - - when(migrationInfo.getVersion()).thenReturn(migrationVersion); - when(migrationInfoService.current()).thenReturn(migrationInfo); - when(flyway.info()).thenReturn(migrationInfoService); - - final var check = new JobsDatabaseMigrationCheck(null, flyway, minimumVersion, CommonDatabaseCheckTest.TIMEOUT_MS); - Assertions.assertThrows(DatabaseCheckException.class, () -> check.check()); - } - - @Test - void testMigrationCheckNullFlyway() { - final var minimumVersion = VERSION_2; - final var databaseAvailabilityCheck = mock(JobsDatabaseAvailabilityCheck.class); - final var check = new JobsDatabaseMigrationCheck(databaseAvailabilityCheck, null, minimumVersion, CommonDatabaseCheckTest.TIMEOUT_MS); - Assertions.assertThrows(DatabaseCheckException.class, () -> check.check()); - } - - @Test - void unavailableFlywayMigrationVersion() { - final var minimumVersion = VERSION_2; - final var migrationInfoService = mock(MigrationInfoService.class); - final var flyway = mock(Flyway.class); - final var databaseAvailabilityCheck = mock(JobsDatabaseAvailabilityCheck.class); - - when(migrationInfoService.current()).thenReturn(null); - when(flyway.info()).thenReturn(migrationInfoService); - - final var check = new JobsDatabaseMigrationCheck(databaseAvailabilityCheck, flyway, minimumVersion, CommonDatabaseCheckTest.TIMEOUT_MS); - Assertions.assertThrows(DatabaseCheckException.class, () -> check.check()); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/factory/DatabaseCheckFactoryTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/factory/DatabaseCheckFactoryTest.java deleted file mode 100644 index 7635ec34b1175..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/factory/DatabaseCheckFactoryTest.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.factory; - -import static org.mockito.Mockito.mock; - -import io.airbyte.db.check.impl.ConfigsDatabaseAvailabilityCheck; -import io.airbyte.db.check.impl.ConfigsDatabaseMigrationCheck; -import io.airbyte.db.check.impl.JobsDatabaseAvailabilityCheck; -import io.airbyte.db.check.impl.JobsDatabaseMigrationCheck; -import io.airbyte.db.init.impl.ConfigsDatabaseInitializer; -import io.airbyte.db.init.impl.JobsDatabaseInitializer; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link DatabaseCheckFactory} class. 
- */ -class DatabaseCheckFactoryTest { - - @Test - void testCreateConfigsDatabaseAvailabilityCheck() { - final var dslContext = mock(DSLContext.class); - final var timeoutMs = 500L; - final var check = DatabaseCheckFactory.createConfigsDatabaseAvailabilityCheck(dslContext, timeoutMs); - - Assertions.assertNotNull(check); - Assertions.assertEquals(ConfigsDatabaseAvailabilityCheck.class, check.getClass()); - Assertions.assertEquals(timeoutMs, check.getTimeoutMs()); - Assertions.assertTrue(check.getDslContext().isPresent()); - Assertions.assertEquals(dslContext, check.getDslContext().get()); - } - - @Test - void testCreateJobsDatabaseAvailabilityCheck() { - final var dslContext = mock(DSLContext.class); - final var timeoutMs = 500L; - final var check = DatabaseCheckFactory.createJobsDatabaseAvailabilityCheck(dslContext, timeoutMs); - - Assertions.assertNotNull(check); - Assertions.assertEquals(JobsDatabaseAvailabilityCheck.class, check.getClass()); - Assertions.assertEquals(timeoutMs, check.getTimeoutMs()); - Assertions.assertTrue(check.getDslContext().isPresent()); - Assertions.assertEquals(dslContext, check.getDslContext().get()); - } - - @Test - void testCreateConfigsDatabaseMigrationCheck() { - final var dslContext = mock(DSLContext.class); - final var flyway = mock(Flyway.class); - final var minimumMigrationVersion = "1.2.3"; - final var timeoutMs = 500L; - final var check = DatabaseCheckFactory.createConfigsDatabaseMigrationCheck(dslContext, flyway, minimumMigrationVersion, timeoutMs); - - Assertions.assertNotNull(check); - Assertions.assertEquals(ConfigsDatabaseMigrationCheck.class, check.getClass()); - Assertions.assertTrue(check.getDatabaseAvailabilityCheck().isPresent()); - Assertions.assertEquals(ConfigsDatabaseAvailabilityCheck.class, check.getDatabaseAvailabilityCheck().get().getClass()); - Assertions.assertEquals(minimumMigrationVersion, check.getMinimumFlywayVersion()); - Assertions.assertEquals(timeoutMs, check.getTimeoutMs()); - Assertions.assertTrue(check.getFlyway().isPresent()); - Assertions.assertEquals(flyway, check.getFlyway().get()); - } - - @Test - void testCreateJobsDatabaseMigrationCheck() { - final var dslContext = mock(DSLContext.class); - final var flyway = mock(Flyway.class); - final var minimumMigrationVersion = "1.2.3"; - final var timeoutMs = 500L; - final var check = DatabaseCheckFactory.createJobsDatabaseMigrationCheck(dslContext, flyway, minimumMigrationVersion, timeoutMs); - - Assertions.assertNotNull(check); - Assertions.assertEquals(JobsDatabaseMigrationCheck.class, check.getClass()); - Assertions.assertTrue(check.getDatabaseAvailabilityCheck().isPresent()); - Assertions.assertEquals(JobsDatabaseAvailabilityCheck.class, check.getDatabaseAvailabilityCheck().get().getClass()); - Assertions.assertEquals(minimumMigrationVersion, check.getMinimumFlywayVersion()); - Assertions.assertEquals(timeoutMs, check.getTimeoutMs()); - Assertions.assertTrue(check.getFlyway().isPresent()); - Assertions.assertEquals(flyway, check.getFlyway().get()); - } - - @Test - void testCreateConfigsDatabaseInitializer() { - final var dslContext = mock(DSLContext.class); - final var initialSchema = "SELECT 1;"; - final var timeoutMs = 500L; - final var initializer = DatabaseCheckFactory.createConfigsDatabaseInitializer(dslContext, timeoutMs, initialSchema); - - Assertions.assertNotNull(initializer); - Assertions.assertEquals(ConfigsDatabaseInitializer.class, initializer.getClass()); - Assertions.assertTrue(initializer.getDatabaseAvailabilityCheck().isPresent()); - 
Assertions.assertEquals(ConfigsDatabaseAvailabilityCheck.class, initializer.getDatabaseAvailabilityCheck().get().getClass()); - Assertions.assertEquals(timeoutMs, initializer.getDatabaseAvailabilityCheck().get().getTimeoutMs()); - Assertions.assertTrue(initializer.getDslContext().isPresent()); - Assertions.assertEquals(dslContext, initializer.getDslContext().get()); - Assertions.assertEquals(initialSchema, initializer.getInitialSchema()); - } - - @Test - void testCreateJobsDatabaseInitializer() { - final var dslContext = mock(DSLContext.class); - final var initialSchema = "SELECT 1;"; - final var timeoutMs = 500L; - final var initializer = DatabaseCheckFactory.createJobsDatabaseInitializer(dslContext, timeoutMs, initialSchema); - - Assertions.assertNotNull(initializer); - Assertions.assertEquals(JobsDatabaseInitializer.class, initializer.getClass()); - Assertions.assertTrue(initializer.getDatabaseAvailabilityCheck().isPresent()); - Assertions.assertEquals(JobsDatabaseAvailabilityCheck.class, initializer.getDatabaseAvailabilityCheck().get().getClass()); - Assertions.assertEquals(timeoutMs, initializer.getDatabaseAvailabilityCheck().get().getTimeoutMs()); - Assertions.assertTrue(initializer.getDslContext().isPresent()); - Assertions.assertEquals(dslContext, initializer.getDslContext().get()); - Assertions.assertEquals(initialSchema, initializer.getInitialSchema()); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/factory/FlywayFactoryTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/factory/FlywayFactoryTest.java deleted file mode 100644 index 69dbfb8eecb68..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/factory/FlywayFactoryTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.factory; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.test.utils.DatabaseConnectionHelper; -import javax.sql.DataSource; -import org.flywaydb.core.Flyway; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link FlywayFactory} class. 
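The factory tests above only assert wiring; the intended call pattern at application startup is roughly the following sketch (the StartupChecks class name, the 10-second timeout, and the minimum-version string are illustrative assumptions; a live DSLContext and Flyway are taken as given, and the factory signatures are those exercised by the tests):

import io.airbyte.db.check.DatabaseCheckException;
import io.airbyte.db.factory.DatabaseCheckFactory;
import org.flywaydb.core.Flyway;
import org.jooq.DSLContext;

final class StartupChecks {

  // Block until the configs database is reachable and migrated to at least
  // the given baseline; both checks throw DatabaseCheckException on timeout.
  static void awaitConfigsDatabase(final DSLContext dslContext, final Flyway flyway) throws DatabaseCheckException {
    final long timeoutMs = 10_000L; // illustrative timeout
    final String minimumVersion = "0.35.15.001"; // illustrative minimum migration version
    DatabaseCheckFactory.createConfigsDatabaseAvailabilityCheck(dslContext, timeoutMs).check();
    DatabaseCheckFactory.createConfigsDatabaseMigrationCheck(dslContext, flyway, minimumVersion, timeoutMs).check();
  }

}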
- */ -class FlywayFactoryTest extends CommonFactoryTest { - - private static final String INSTALLED_BY = "test"; - private static final String DB_IDENTIFIER = "test"; - - @Test - void testCreatingAFlywayInstance() { - final String baselineVersion = "1.2.3"; - final String baselineDescription = "A test baseline description"; - final boolean baselineOnMigrate = true; - final String migrationFileLocation = "classpath:io/airbyte/db/instance/toys/migrations"; - final DataSource dataSource = DatabaseConnectionHelper.createDataSource(container); - final Flyway flyway = - FlywayFactory.create(dataSource, INSTALLED_BY, DB_IDENTIFIER, baselineVersion, baselineDescription, baselineOnMigrate, migrationFileLocation); - assertNotNull(flyway); - assertTrue(flyway.getConfiguration().isBaselineOnMigrate()); - assertEquals(baselineDescription, flyway.getConfiguration().getBaselineDescription()); - assertEquals(baselineVersion, flyway.getConfiguration().getBaselineVersion().getVersion()); - assertEquals(baselineOnMigrate, flyway.getConfiguration().isBaselineOnMigrate()); - assertEquals(INSTALLED_BY, flyway.getConfiguration().getInstalledBy()); - assertEquals(String.format(FlywayFactory.MIGRATION_TABLE_FORMAT, DB_IDENTIFIER), flyway.getConfiguration().getTable()); - assertEquals(migrationFileLocation, flyway.getConfiguration().getLocations()[0].getDescriptor()); - } - - @Test - void testCreatingAFlywayInstanceWithDefaults() { - final String migrationFileLocation = "classpath:io/airbyte/db/instance/toys/migrations"; - final DataSource dataSource = DatabaseConnectionHelper.createDataSource(container); - final Flyway flyway = FlywayFactory.create(dataSource, INSTALLED_BY, DB_IDENTIFIER, migrationFileLocation); - assertNotNull(flyway); - assertTrue(flyway.getConfiguration().isBaselineOnMigrate()); - assertEquals(FlywayFactory.BASELINE_DESCRIPTION, flyway.getConfiguration().getBaselineDescription()); - assertEquals(FlywayFactory.BASELINE_VERSION, flyway.getConfiguration().getBaselineVersion().getVersion()); - assertEquals(FlywayFactory.BASELINE_ON_MIGRATION, flyway.getConfiguration().isBaselineOnMigrate()); - assertEquals(INSTALLED_BY, flyway.getConfiguration().getInstalledBy()); - assertEquals(String.format(FlywayFactory.MIGRATION_TABLE_FORMAT, DB_IDENTIFIER), flyway.getConfiguration().getTable()); - assertEquals(migrationFileLocation, flyway.getConfiguration().getLocations()[0].getDescriptor()); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/init/DatabaseInitializerTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/init/DatabaseInitializerTest.java deleted file mode 100644 index 70fc4fe53a4e4..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/init/DatabaseInitializerTest.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.init; - -import static org.mockito.Mockito.mock; - -import io.airbyte.db.check.DatabaseAvailabilityCheck; -import java.util.Collection; -import java.util.Optional; -import org.jooq.DSLContext; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -class DatabaseInitializerTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseInitializerTest.class); - - @Test - void testExceptionHandling() { - final var initializer = new DatabaseInitializer() { - - @Override - public void initialize() throws DatabaseInitializationException { - throw new DatabaseInitializationException("test"); - } - - @Override - public Optional<DatabaseAvailabilityCheck> getDatabaseAvailabilityCheck() { - return Optional.empty(); - } - - @Override - public String getDatabaseName() { - return null; - } - - @Override - public Optional<DSLContext> getDslContext() { - return Optional.empty(); - } - - @Override - public String getInitialSchema() { - return null; - } - - @Override - public Logger getLogger() { - return LOGGER; - } - - @Override - public Optional<Collection<String>> getTableNames() { - return Optional.empty(); - } - - }; - - Assertions.assertThrows(DatabaseInitializationException.class, () -> initializer.initialize()); - } - - @Test - void testEmptyTableNames() { - final var dslContext = mock(DSLContext.class); - final var initializer = new DatabaseInitializer() { - - @Override - public Optional<DatabaseAvailabilityCheck> getDatabaseAvailabilityCheck() { - return Optional.of(mock(DatabaseAvailabilityCheck.class)); - } - - @Override - public String getDatabaseName() { - return null; - } - - @Override - public Optional<DSLContext> getDslContext() { - return Optional.of(dslContext); - } - - @Override - public String getInitialSchema() { - return null; - } - - @Override - public Logger getLogger() { - return LOGGER; - } - - @Override - public Optional<Collection<String>> getTableNames() { - return Optional.empty(); - } - - }; - - Assertions.assertFalse(initializer.initializeSchema(dslContext)); - Assertions.assertNotNull(initializer.getTableNames()); - Assertions.assertFalse(initializer.getTableNames().isPresent()); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/init/impl/CommonDatabaseInitializerTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/init/impl/CommonDatabaseInitializerTest.java deleted file mode 100644 index 46d74bf3ce5a3..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/init/impl/CommonDatabaseInitializerTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.init.impl; - -import io.airbyte.db.factory.DSLContextFactory; -import io.airbyte.db.factory.DataSourceFactory; -import javax.sql.DataSource; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.testcontainers.containers.PostgreSQLContainer; - -/** - * Common test setup for database initialization tests. 
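The two anonymous classes above spell out the whole DatabaseInitializer surface, which makes the interface contract easy to read off. For comparison, a minimal named implementation might look like the following sketch (it assumes initialize() and initializeSchema() carry default implementations, as the second test implies; the class name, database name, and table set are invented for illustration):

import io.airbyte.db.check.DatabaseAvailabilityCheck;
import io.airbyte.db.init.DatabaseInitializer;
import java.util.Collection;
import java.util.Optional;
import java.util.Set;
import org.jooq.DSLContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Illustrative implementation; not part of the deleted code.
class ToysDatabaseInitializer implements DatabaseInitializer {

  private static final Logger LOGGER = LoggerFactory.getLogger(ToysDatabaseInitializer.class);

  private final DatabaseAvailabilityCheck availabilityCheck;
  private final DSLContext dslContext;
  private final String initialSchema;

  ToysDatabaseInitializer(final DatabaseAvailabilityCheck availabilityCheck, final DSLContext dslContext, final String initialSchema) {
    this.availabilityCheck = availabilityCheck;
    this.dslContext = dslContext;
    this.initialSchema = initialSchema;
  }

  @Override
  public Optional<DatabaseAvailabilityCheck> getDatabaseAvailabilityCheck() {
    return Optional.ofNullable(availabilityCheck);
  }

  @Override
  public String getDatabaseName() {
    return "toys";
  }

  @Override
  public Optional<DSLContext> getDslContext() {
    return Optional.ofNullable(dslContext);
  }

  @Override
  public String getInitialSchema() {
    return initialSchema;
  }

  @Override
  public Logger getLogger() {
    return LOGGER;
  }

  @Override
  public Optional<Collection<String>> getTableNames() {
    return Optional.of(Set.of("toy_cars"));
  }

}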
- */ -class CommonDatabaseInitializerTest { - - protected PostgreSQLContainer<?> container; - - protected DataSource dataSource; - - protected DSLContext dslContext; - - @BeforeEach - void setup() { - container = new PostgreSQLContainer<>("postgres:13-alpine"); - container.start(); - - dataSource = DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); - dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); - } - - @SuppressWarnings("PMD.SignatureDeclareThrowsException") - @AfterEach - void cleanup() throws Exception { - DataSourceFactory.close(dataSource); - dslContext.close(); - container.stop(); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/init/impl/ConfigsDatabaseInitializerTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/init/impl/ConfigsDatabaseInitializerTest.java deleted file mode 100644 index 0782fdb409f60..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/init/impl/ConfigsDatabaseInitializerTest.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.init.impl; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; - -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.db.check.DatabaseCheckException; -import io.airbyte.db.check.impl.ConfigsDatabaseAvailabilityCheck; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.DatabaseConstants; -import java.io.IOException; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link ConfigsDatabaseInitializer} class. 
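The @BeforeEach/@AfterEach pair above starts a fresh Postgres container per test, buying full isolation at the cost of startup time. The same wiring works outside JUnit, for instance as a throwaway main method (a sketch assuming the same testcontainers, jOOQ, and factory dependencies; the class name is illustrative):

import io.airbyte.db.factory.DSLContextFactory;
import io.airbyte.db.factory.DataSourceFactory;
import javax.sql.DataSource;
import org.jooq.DSLContext;
import org.jooq.SQLDialect;
import org.testcontainers.containers.PostgreSQLContainer;

final class ThrowawayPostgres {

  public static void main(final String[] args) throws Exception {
    // try-with-resources stops the container even if the body throws
    try (PostgreSQLContainer<?> container = new PostgreSQLContainer<>("postgres:13-alpine")) {
      container.start();
      final DataSource dataSource = DataSourceFactory.create(
          container.getUsername(),
          container.getPassword(),
          container.getDriverClassName(),
          container.getJdbcUrl());
      try {
        final DSLContext dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES);
        System.out.println(dslContext.fetch("SELECT 1").format());
      } finally {
        DataSourceFactory.close(dataSource);
      }
    }
  }

}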
- */ -class ConfigsDatabaseInitializerTest extends CommonDatabaseInitializerTest { - - @Test - void testInitializingSchema() throws IOException { - final var databaseAvailabilityCheck = mock(ConfigsDatabaseAvailabilityCheck.class); - final var initialSchema = MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH); - final var initializer = new ConfigsDatabaseInitializer(databaseAvailabilityCheck, dslContext, initialSchema); - - Assertions.assertDoesNotThrow(() -> initializer.initialize()); - assertTrue(initializer.hasTable(dslContext, initializer.getTableNames().get().stream().findFirst().get())); - } - - @Test - void testInitializingSchemaAlreadyExists() throws IOException { - final var databaseAvailabilityCheck = mock(ConfigsDatabaseAvailabilityCheck.class); - final var initialSchema = MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH); - dslContext.execute(initialSchema); - final var initializer = new ConfigsDatabaseInitializer(databaseAvailabilityCheck, dslContext, initialSchema); - - Assertions.assertDoesNotThrow(() -> initializer.initialize()); - assertTrue(initializer.hasTable(dslContext, initializer.getTableNames().get().stream().findFirst().get())); - } - - @Test - void testInitializationException() throws IOException, DatabaseCheckException { - final var databaseAvailabilityCheck = mock(ConfigsDatabaseAvailabilityCheck.class); - final var initialSchema = MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH); - - doThrow(new DatabaseCheckException("test")).when(databaseAvailabilityCheck).check(); - - final var initializer = new ConfigsDatabaseInitializer(databaseAvailabilityCheck, dslContext, initialSchema); - Assertions.assertThrows(DatabaseInitializationException.class, () -> initializer.initialize()); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/init/impl/JobsDatabaseInitializerTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/init/impl/JobsDatabaseInitializerTest.java deleted file mode 100644 index 544559f99b99c..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/init/impl/JobsDatabaseInitializerTest.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.init.impl; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; - -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.db.check.DatabaseCheckException; -import io.airbyte.db.check.impl.JobsDatabaseAvailabilityCheck; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.DatabaseConstants; -import java.io.IOException; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link JobsDatabaseInitializer} class. 
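The configs suite above and the jobs suite that follows both pin down the same property with testInitializingSchemaAlreadyExists: initializing an already-initialized database must succeed. Whether that comes from existence checks inside the initializer or from IF NOT EXISTS DDL in the schema file, the observable behavior is the one in this sketch (the table name is illustrative):

import org.jooq.DSLContext;

final class IdempotentSchemaDemo {

  // Applying the same DDL twice must be a no-op the second time; this is the
  // property that lets initialize() succeed on an already-initialized database.
  static void applyTwice(final DSLContext ctx) {
    final String ddl = "CREATE TABLE IF NOT EXISTS airbyte_metadata (key VARCHAR(255), value VARCHAR(255));";
    ctx.execute(ddl);
    ctx.execute(ddl); // second run: no error, no change
  }

}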
- */ -class JobsDatabaseInitializerTest extends CommonDatabaseInitializerTest { - - @Test - void testInitializingSchema() throws IOException { - final var databaseAvailabilityCheck = mock(JobsDatabaseAvailabilityCheck.class); - final var initialSchema = MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH); - final var initializer = new JobsDatabaseInitializer(databaseAvailabilityCheck, dslContext, initialSchema); - - Assertions.assertDoesNotThrow(() -> initializer.initialize()); - assertTrue(initializer.hasTable(dslContext, initializer.getTableNames().get().stream().findFirst().get())); - } - - @Test - void testInitializingSchemaAlreadyExists() throws IOException { - final var databaseAvailabilityCheck = mock(JobsDatabaseAvailabilityCheck.class); - final var initialSchema = MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH); - dslContext.execute(initialSchema); - final var initializer = new JobsDatabaseInitializer(databaseAvailabilityCheck, dslContext, initialSchema); - - Assertions.assertDoesNotThrow(() -> initializer.initialize()); - assertTrue(initializer.hasTable(dslContext, initializer.getTableNames().get().stream().findFirst().get())); - } - - @Test - void testInitializationException() throws IOException, DatabaseCheckException { - final var databaseAvailabilityCheck = mock(JobsDatabaseAvailabilityCheck.class); - final var initialSchema = MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH); - - doThrow(new DatabaseCheckException("test")).when(databaseAvailabilityCheck).check(); - - final var initializer = new JobsDatabaseInitializer(databaseAvailabilityCheck, dslContext, initialSchema); - Assertions.assertThrows(DatabaseInitializationException.class, () -> initializer.initialize()); - } - - @Test - void testInitializationNullAvailabilityCheck() throws IOException { - final var initialSchema = MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH); - final var initializer = new JobsDatabaseInitializer(null, dslContext, initialSchema); - Assertions.assertThrows(DatabaseInitializationException.class, () -> initializer.initialize()); - } - - @Test - void testInitializationNullDslContext() throws IOException { - final var databaseAvailabilityCheck = mock(JobsDatabaseAvailabilityCheck.class); - final var initialSchema = MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH); - final var initializer = new JobsDatabaseInitializer(databaseAvailabilityCheck, null, initialSchema); - Assertions.assertThrows(DatabaseInitializationException.class, () -> initializer.initialize()); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/AbstractDatabaseTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/AbstractDatabaseTest.java deleted file mode 100644 index aa66f4309ef80..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/AbstractDatabaseTest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
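The null-input tests above, together with the check suites earlier, fix a deliberate contract: constructors accept whatever they are given, and a missing collaborator only surfaces when check() or initialize() runs, as a checked exception. A sketch of that guard style (a hypothetical helper, not the deleted implementation; only the DatabaseCheckException(String) constructor is taken from the tests above):

import io.airbyte.db.check.DatabaseCheckException;

final class Guards {

  // Fail at call time rather than construction time, so wiring code can build
  // checks unconditionally and the startup loop reports what is missing.
  static <T> T requirePresent(final T value, final String name) throws DatabaseCheckException {
    if (value == null) {
      throw new DatabaseCheckException("Missing required dependency: " + name);
    }
    return value;
  }

}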
- */ - -package io.airbyte.db.instance; - -import io.airbyte.db.Database; -import io.airbyte.db.factory.DSLContextFactory; -import io.airbyte.db.factory.DataSourceFactory; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.test.utils.DatabaseConnectionHelper; -import java.io.IOException; -import javax.sql.DataSource; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.testcontainers.containers.PostgreSQLContainer; - -public abstract class AbstractDatabaseTest { - - protected static PostgreSQLContainer container; - - @BeforeAll - public static void dbSetup() { - container = new PostgreSQLContainer<>("postgres:13-alpine") - .withDatabaseName("airbyte") - .withUsername("docker") - .withPassword("docker"); - container.start(); - } - - @AfterAll - public static void dbDown() { - container.close(); - } - - protected Database database; - protected DataSource dataSource; - protected DSLContext dslContext; - - @BeforeEach - public void setup() throws IOException, DatabaseInitializationException { - dataSource = DatabaseConnectionHelper.createDataSource(container); - dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); - database = getDatabase(dataSource, dslContext); - } - - @AfterEach - void tearDown() throws Exception { - dslContext.close(); - DataSourceFactory.close(dataSource); - } - - /** - * Create a {@link Database}. The downstream implementation should call - * {@link DatabaseMigrator#migrate} if necessary. - * - * @param dataSource The {@link DataSource} used to access the database. - * @param dslContext The {@link DSLContext} used to execute queries. - * @return an initialized {@link Database} instance. - */ - public abstract Database getDatabase(DataSource dataSource, DSLContext dslContext) throws IOException, DatabaseInitializationException; - - public DataSource getDataSource() { - return dataSource; - } - - public DSLContext getDslContext() { - return dslContext; - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/AbstractConfigsDatabaseTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/AbstractConfigsDatabaseTest.java deleted file mode 100644 index a2791a4ab0215..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/AbstractConfigsDatabaseTest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.configs; - -import io.airbyte.db.Database; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.AbstractDatabaseTest; -import io.airbyte.db.instance.test.TestDatabaseProviders; -import java.io.IOException; -import javax.sql.DataSource; -import org.jooq.DSLContext; - -public abstract class AbstractConfigsDatabaseTest extends AbstractDatabaseTest { - - @Override - public Database getDatabase(final DataSource dataSource, final DSLContext dslContext) throws IOException, DatabaseInitializationException { - return new TestDatabaseProviders(dataSource, dslContext).turnOffMigration().createNewConfigsDatabase(); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrationDevCenterTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrationDevCenterTest.java deleted file mode 100644 index 97b6fcaf66d36..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrationDevCenterTest.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs; - -import static org.junit.jupiter.api.Assertions.*; - -import io.airbyte.commons.io.IOs; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.db.instance.development.MigrationDevCenter; -import java.nio.file.Path; -import org.junit.jupiter.api.Test; - -class ConfigsDatabaseMigrationDevCenterTest { - - /** - * This test ensures that the dev center is working correctly end-to-end. If it fails, it means - * either the migration is not run properly, or the database initialization is incorrect in the dev - * center implementation. - */ - @Test - void testSchemaDump() { - final MigrationDevCenter devCenter = new ConfigsDatabaseMigrationDevCenter(); - final String schemaDump = IOs.readFile(Path.of(DatabaseConstants.CONFIGS_SCHEMA_DUMP_PATH)); - assertEquals(schemaDump.trim(), devCenter.dumpSchema(false)); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigratorTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigratorTest.java deleted file mode 100644 index 63cbb17e17cdc..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigratorTest.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.configs; - -import io.airbyte.db.factory.FlywayFactory; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.db.instance.DatabaseMigrator; -import io.airbyte.db.instance.development.MigrationDevHelper; -import java.io.IOException; -import org.flywaydb.core.Flyway; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class ConfigsDatabaseMigratorTest extends AbstractConfigsDatabaseTest { - - @Test - void dumpSchema() throws IOException { - final Flyway flyway = FlywayFactory.create(getDataSource(), getClass().getSimpleName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, - ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); - final DatabaseMigrator migrator = new ConfigsDatabaseMigrator(database, flyway); - migrator.migrate(); - final String schema = migrator.dumpSchema(); - MigrationDevHelper.dumpSchema(schema, DatabaseConstants.CONFIGS_SCHEMA_DUMP_PATH, false); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/SetupForNormalizedTablesTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/SetupForNormalizedTablesTest.java deleted file mode 100644 index 2d7c731caece5..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/SetupForNormalizedTablesTest.java +++ /dev/null @@ -1,429 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import static org.jooq.impl.DSL.field; -import static org.jooq.impl.DSL.table; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.AirbyteConfig; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.config.Notification; -import io.airbyte.config.Notification.NotificationType; -import io.airbyte.config.OperatorDbt; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.OperatorNormalization.Option; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.Schedule; -import io.airbyte.config.Schedule.TimeUnit; -import io.airbyte.config.SlackNotificationConfiguration; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSourceDefinition.SourceType; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.Status; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncOperation.OperatorType; -import io.airbyte.config.StandardSyncState; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.State; -import io.airbyte.protocol.models.AirbyteCatalog; -import io.airbyte.protocol.models.AuthSpecification; -import io.airbyte.protocol.models.AuthSpecification.AuthType; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.protocol.models.DestinationSyncMode; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.SyncMode; -import java.net.URI; -import java.time.Instant; -import 
java.time.OffsetDateTime; -import java.time.ZoneOffset; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.UUID; -import javax.annotation.Nullable; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.Table; - -public class SetupForNormalizedTablesTest { - - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - private static final UUID WORKSPACE_CUSTOMER_ID = UUID.randomUUID(); - private static final UUID SOURCE_DEFINITION_ID_1 = UUID.randomUUID(); - private static final UUID SOURCE_DEFINITION_ID_2 = UUID.randomUUID(); - private static final UUID DESTINATION_DEFINITION_ID_1 = UUID.randomUUID(); - private static final UUID DESTINATION_DEFINITION_ID_2 = UUID.randomUUID(); - private static final UUID SOURCE_ID_1 = UUID.randomUUID(); - private static final UUID SOURCE_ID_2 = UUID.randomUUID(); - private static final UUID DESTINATION_ID_1 = UUID.randomUUID(); - private static final UUID DESTINATION_ID_2 = UUID.randomUUID(); - private static final UUID OPERATION_ID_1 = UUID.randomUUID(); - private static final UUID OPERATION_ID_2 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_1 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_2 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_3 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_4 = UUID.randomUUID(); - private static final UUID SOURCE_OAUTH_PARAMETER_ID_1 = UUID.randomUUID(); - private static final UUID SOURCE_OAUTH_PARAMETER_ID_2 = UUID.randomUUID(); - private static final UUID DESTINATION_OAUTH_PARAMETER_ID_1 = UUID.randomUUID(); - private static final UUID DESTINATION_OAUTH_PARAMETER_ID_2 = UUID.randomUUID(); - private static final Table AIRBYTE_CONFIGS = table("airbyte_configs"); - private static final Field CONFIG_ID = field("config_id", String.class); - private static final Field CONFIG_TYPE = field("config_type", String.class); - private static final Field CONFIG_BLOB = field("config_blob", JSONB.class); - private static final Field CREATED_AT = field("created_at", OffsetDateTime.class); - private static final Field UPDATED_AT = field("updated_at", OffsetDateTime.class); - private static final Instant NOW = Instant.parse("2021-12-15T20:30:40.00Z"); - private static final String CONNECTION_SPEC = "'{\"name\":\"John\", \"age\":30, \"car\":null}'"; - - public static void setup(final DSLContext context) { - createConfigInOldTable(context, standardWorkspace(), ConfigSchema.STANDARD_WORKSPACE); - - for (final StandardSourceDefinition standardSourceDefinition : standardSourceDefinitions()) { - createConfigInOldTable(context, standardSourceDefinition, ConfigSchema.STANDARD_SOURCE_DEFINITION); - } - - for (final StandardDestinationDefinition standardDestinationDefinition : standardDestinationDefinitions()) { - createConfigInOldTable(context, standardDestinationDefinition, ConfigSchema.STANDARD_DESTINATION_DEFINITION); - } - - for (final SourceConnection sourceConnection : sourceConnections()) { - createConfigInOldTable(context, sourceConnection, ConfigSchema.SOURCE_CONNECTION); - } - - for (final DestinationConnection destinationConnection : destinationConnections()) { - createConfigInOldTable(context, destinationConnection, ConfigSchema.DESTINATION_CONNECTION); - } - - for (final SourceOAuthParameter sourceOAuthParameter : sourceOauthParameters()) { - createConfigInOldTable(context, sourceOAuthParameter, ConfigSchema.SOURCE_OAUTH_PARAM); - } - - for (final 
DestinationOAuthParameter destinationOAuthParameter : destinationOauthParameters()) { - createConfigInOldTable(context, destinationOAuthParameter, ConfigSchema.DESTINATION_OAUTH_PARAM); - } - - for (final StandardSyncOperation standardSyncOperation : standardSyncOperations()) { - createConfigInOldTable(context, standardSyncOperation, ConfigSchema.STANDARD_SYNC_OPERATION); - } - - for (final StandardSync standardSync : standardSyncs()) { - createConfigInOldTable(context, standardSync, ConfigSchema.STANDARD_SYNC); - } - for (final StandardSyncState standardSyncState : standardSyncStates()) { - createConfigInOldTable(context, standardSyncState, ConfigSchema.STANDARD_SYNC_STATE); - } - } - - private static void createConfigInOldTable(final DSLContext context, final T config, final AirbyteConfig configType) { - insertConfigRecord( - context, - configType.name(), - Jsons.jsonNode(config), - configType.getIdFieldName()); - } - - private static void insertConfigRecord( - final DSLContext context, - final String configType, - final JsonNode configJson, - @Nullable final String idFieldName) { - final String configId = idFieldName == null ? UUID.randomUUID().toString() : configJson.get(idFieldName).asText(); - context.insertInto(AIRBYTE_CONFIGS) - .set(CONFIG_ID, configId) - .set(CONFIG_TYPE, configType) - .set(CONFIG_BLOB, JSONB.valueOf(Jsons.serialize(configJson))) - .set(CREATED_AT, OffsetDateTime.ofInstant(NOW, ZoneOffset.UTC)) - .set(UPDATED_AT, OffsetDateTime.ofInstant(NOW, ZoneOffset.UTC)) - .onConflict(CONFIG_TYPE, CONFIG_ID) - .doNothing() - .execute(); - } - - public static StandardWorkspace standardWorkspace() { - final Notification notification = new Notification() - .withNotificationType(NotificationType.SLACK) - .withSendOnFailure(true) - .withSendOnSuccess(true) - .withSlackConfiguration(new SlackNotificationConfiguration().withWebhook("webhook-url")); - return new StandardWorkspace() - .withWorkspaceId(WORKSPACE_ID) - .withCustomerId(WORKSPACE_CUSTOMER_ID) - .withName("test-workspace") - .withSlug("random-string") - .withEmail("abc@xyz.com") - .withInitialSetupComplete(true) - .withAnonymousDataCollection(true) - .withNews(true) - .withSecurityUpdates(true) - .withDisplaySetupWizard(true) - .withTombstone(false) - .withNotifications(Collections.singletonList(notification)) - .withFirstCompletedSync(true) - .withFeedbackDone(true); - } - - public static List standardSourceDefinitions() { - final ConnectorSpecification connectorSpecification = connectorSpecification(); - final StandardSourceDefinition standardSourceDefinition1 = new StandardSourceDefinition() - .withSourceDefinitionId(SOURCE_DEFINITION_ID_1) - .withSourceType(SourceType.API) - .withName("random-source-1") - .withDockerImageTag("tag-1") - .withDockerRepository("repository-1") - .withDocumentationUrl("documentation-url-1") - .withIcon("icon-1") - .withSpec(connectorSpecification); - final StandardSourceDefinition standardSourceDefinition2 = new StandardSourceDefinition() - .withSourceDefinitionId(SOURCE_DEFINITION_ID_2) - .withSourceType(SourceType.DATABASE) - .withName("random-source-2") - .withDockerImageTag("tag-2") - .withDockerRepository("repository-2") - .withDocumentationUrl("documentation-url-2") - .withIcon("icon-2"); - return Arrays.asList(standardSourceDefinition1, standardSourceDefinition2); - } - - private static ConnectorSpecification connectorSpecification() { - return new ConnectorSpecification() - .withAuthSpecification(new AuthSpecification().withAuthType(AuthType.OAUTH_2_0)) - 
.withConnectionSpecification(Jsons.jsonNode(CONNECTION_SPEC)) - .withDocumentationUrl(URI.create("whatever")) - .withAdvancedAuth(null) - .withChangelogUrl(URI.create("whatever")) - .withSupportedDestinationSyncModes(Arrays.asList(DestinationSyncMode.APPEND, DestinationSyncMode.OVERWRITE, DestinationSyncMode.APPEND_DEDUP)) - .withSupportsDBT(true) - .withSupportsIncremental(true) - .withSupportsNormalization(true); - } - - public static List standardDestinationDefinitions() { - final ConnectorSpecification connectorSpecification = connectorSpecification(); - final StandardDestinationDefinition standardDestinationDefinition1 = new StandardDestinationDefinition() - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_1) - .withName("random-destination-1") - .withDockerImageTag("tag-3") - .withDockerRepository("repository-3") - .withDocumentationUrl("documentation-url-3") - .withIcon("icon-3") - .withSpec(connectorSpecification); - final StandardDestinationDefinition standardDestinationDefinition2 = new StandardDestinationDefinition() - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_2) - .withName("random-destination-2") - .withDockerImageTag("tag-4") - .withDockerRepository("repository-4") - .withDocumentationUrl("documentation-url-4") - .withIcon("icon-4") - .withSpec(connectorSpecification); - return Arrays.asList(standardDestinationDefinition1, standardDestinationDefinition2); - } - - public static List sourceConnections() { - final SourceConnection sourceConnection1 = new SourceConnection() - .withName("source-1") - .withTombstone(false) - .withSourceDefinitionId(SOURCE_DEFINITION_ID_1) - .withWorkspaceId(WORKSPACE_ID) - .withConfiguration(Jsons.jsonNode(CONNECTION_SPEC)) - .withSourceId(SOURCE_ID_1); - final SourceConnection sourceConnection2 = new SourceConnection() - .withName("source-2") - .withTombstone(false) - .withSourceDefinitionId(SOURCE_DEFINITION_ID_2) - .withWorkspaceId(WORKSPACE_ID) - .withConfiguration(Jsons.jsonNode(CONNECTION_SPEC)) - .withSourceId(SOURCE_ID_2); - return Arrays.asList(sourceConnection1, sourceConnection2); - } - - public static List destinationConnections() { - final DestinationConnection destinationConnection1 = new DestinationConnection() - .withName("destination-1") - .withTombstone(false) - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_1) - .withWorkspaceId(WORKSPACE_ID) - .withConfiguration(Jsons.jsonNode(CONNECTION_SPEC)) - .withDestinationId(DESTINATION_ID_1); - final DestinationConnection destinationConnection2 = new DestinationConnection() - .withName("destination-2") - .withTombstone(false) - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_2) - .withWorkspaceId(WORKSPACE_ID) - .withConfiguration(Jsons.jsonNode(CONNECTION_SPEC)) - .withDestinationId(DESTINATION_ID_2); - return Arrays.asList(destinationConnection1, destinationConnection2); - } - - public static List sourceOauthParameters() { - final SourceOAuthParameter sourceOAuthParameter1 = new SourceOAuthParameter() - .withConfiguration(Jsons.jsonNode(CONNECTION_SPEC)) - .withWorkspaceId(null) - .withSourceDefinitionId(SOURCE_DEFINITION_ID_1) - .withOauthParameterId(SOURCE_OAUTH_PARAMETER_ID_1); - final SourceOAuthParameter sourceOAuthParameter2 = new SourceOAuthParameter() - .withConfiguration(Jsons.jsonNode(CONNECTION_SPEC)) - .withWorkspaceId(WORKSPACE_ID) - .withSourceDefinitionId(SOURCE_DEFINITION_ID_2) - .withOauthParameterId(SOURCE_OAUTH_PARAMETER_ID_2); - return Arrays.asList(sourceOAuthParameter1, sourceOAuthParameter2); - } - - public static List 
destinationOauthParameters() { - final DestinationOAuthParameter destinationOAuthParameter1 = new DestinationOAuthParameter() - .withConfiguration(Jsons.jsonNode(CONNECTION_SPEC)) - .withWorkspaceId(null) - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_1) - .withOauthParameterId(DESTINATION_OAUTH_PARAMETER_ID_1); - final DestinationOAuthParameter destinationOAuthParameter2 = new DestinationOAuthParameter() - .withConfiguration(Jsons.jsonNode(CONNECTION_SPEC)) - .withWorkspaceId(WORKSPACE_ID) - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID_2) - .withOauthParameterId(DESTINATION_OAUTH_PARAMETER_ID_2); - return Arrays.asList(destinationOAuthParameter1, destinationOAuthParameter2); - } - - public static List standardSyncOperations() { - final OperatorDbt operatorDbt = new OperatorDbt() - .withDbtArguments("dbt-arguments") - .withDockerImage("image-tag") - .withGitRepoBranch("git-repo-branch") - .withGitRepoUrl("git-repo-url"); - final StandardSyncOperation standardSyncOperation1 = new StandardSyncOperation() - .withName("operation-1") - .withTombstone(false) - .withOperationId(OPERATION_ID_1) - .withWorkspaceId(WORKSPACE_ID) - .withOperatorDbt(operatorDbt) - .withOperatorNormalization(null) - .withOperatorType(OperatorType.DBT); - final StandardSyncOperation standardSyncOperation2 = new StandardSyncOperation() - .withName("operation-1") - .withTombstone(false) - .withOperationId(OPERATION_ID_2) - .withWorkspaceId(WORKSPACE_ID) - .withOperatorDbt(null) - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)) - .withOperatorType(OperatorType.NORMALIZATION); - return Arrays.asList(standardSyncOperation1, standardSyncOperation2); - } - - public static List standardSyncs() { - final ResourceRequirements resourceRequirements = new ResourceRequirements() - .withCpuRequest("1") - .withCpuLimit("1") - .withMemoryRequest("1") - .withMemoryLimit("1"); - final Schedule schedule = new Schedule().withTimeUnit(TimeUnit.DAYS).withUnits(1L); - final StandardSync standardSync1 = new StandardSync() - .withOperationIds(Arrays.asList(OPERATION_ID_1, OPERATION_ID_2)) - .withConnectionId(CONNECTION_ID_1) - .withSourceId(SOURCE_ID_1) - .withDestinationId(DESTINATION_ID_1) - .withCatalog(getConfiguredCatalog()) - .withName("standard-sync-1") - .withManual(true) - .withNamespaceDefinition(NamespaceDefinitionType.CUSTOMFORMAT) - .withNamespaceFormat("") - .withPrefix("") - .withResourceRequirements(resourceRequirements) - .withStatus(Status.ACTIVE) - .withSchedule(schedule); - - final StandardSync standardSync2 = new StandardSync() - .withOperationIds(Arrays.asList(OPERATION_ID_1, OPERATION_ID_2)) - .withConnectionId(CONNECTION_ID_2) - .withSourceId(SOURCE_ID_1) - .withDestinationId(DESTINATION_ID_2) - .withCatalog(getConfiguredCatalog()) - .withName("standard-sync-2") - .withManual(true) - .withNamespaceDefinition(NamespaceDefinitionType.SOURCE) - .withNamespaceFormat("") - .withPrefix("") - .withResourceRequirements(resourceRequirements) - .withStatus(Status.ACTIVE) - .withSchedule(schedule); - - final StandardSync standardSync3 = new StandardSync() - .withOperationIds(Arrays.asList(OPERATION_ID_1, OPERATION_ID_2)) - .withConnectionId(CONNECTION_ID_3) - .withSourceId(SOURCE_ID_2) - .withDestinationId(DESTINATION_ID_1) - .withCatalog(getConfiguredCatalog()) - .withName("standard-sync-3") - .withManual(true) - .withNamespaceDefinition(NamespaceDefinitionType.DESTINATION) - .withNamespaceFormat("") - .withPrefix("") - .withResourceRequirements(resourceRequirements) - 
.withStatus(Status.ACTIVE) - .withSchedule(schedule); - - final StandardSync standardSync4 = new StandardSync() - .withOperationIds(Arrays.asList(OPERATION_ID_1, OPERATION_ID_2)) - .withConnectionId(CONNECTION_ID_4) - .withSourceId(SOURCE_ID_2) - .withDestinationId(DESTINATION_ID_2) - .withCatalog(getConfiguredCatalog()) - .withName("standard-sync-4") - .withManual(true) - .withNamespaceDefinition(NamespaceDefinitionType.CUSTOMFORMAT) - .withNamespaceFormat("") - .withPrefix("") - .withResourceRequirements(resourceRequirements) - .withStatus(Status.INACTIVE) - .withSchedule(schedule); - - return Arrays.asList(standardSync1, standardSync2, standardSync3, standardSync4); - } - - private static ConfiguredAirbyteCatalog getConfiguredCatalog() { - final AirbyteCatalog catalog = new AirbyteCatalog().withStreams(List.of( - CatalogHelpers.createAirbyteStream( - "models", - "models_schema", - io.airbyte.protocol.models.Field.of("id", JsonSchemaType.NUMBER), - io.airbyte.protocol.models.Field.of("make_id", JsonSchemaType.NUMBER), - io.airbyte.protocol.models.Field.of("model", JsonSchemaType.STRING)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(List.of(List.of("id"))))); - return CatalogHelpers.toDefaultConfiguredCatalog(catalog); - } - - public static List standardSyncStates() { - final StandardSyncState standardSyncState1 = new StandardSyncState() - .withConnectionId(CONNECTION_ID_1) - .withState(new State().withState(Jsons.jsonNode(CONNECTION_SPEC))); - final StandardSyncState standardSyncState2 = new StandardSyncState() - .withConnectionId(CONNECTION_ID_2) - .withState(new State().withState(Jsons.jsonNode(CONNECTION_SPEC))); - final StandardSyncState standardSyncState3 = new StandardSyncState() - .withConnectionId(CONNECTION_ID_3) - .withState(new State().withState(Jsons.jsonNode(CONNECTION_SPEC))); - final StandardSyncState standardSyncState4 = new StandardSyncState() - .withConnectionId(CONNECTION_ID_4) - .withState(new State().withState(Jsons.jsonNode(CONNECTION_SPEC))); - return Arrays.asList(standardSyncState1, standardSyncState2, standardSyncState3, standardSyncState4); - } - - public static Instant now() { - return NOW; - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java deleted file mode 100644 index 8c0f6270c8ff7..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java +++ /dev/null @@ -1,292 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
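The insertConfigRecord helper in SetupForNormalizedTablesTest above relies on jOOQ's ON CONFLICT support so that replaying the fixture is harmless. The same idiom in isolation (a sketch; the table and column names are the ones declared in that helper):

import static org.jooq.impl.DSL.field;
import static org.jooq.impl.DSL.table;

import org.jooq.DSLContext;
import org.jooq.JSONB;

final class ConfigUpserts {

  // INSERT ... ON CONFLICT (config_type, config_id) DO NOTHING: the first
  // write wins and replays are silently ignored.
  static int insertIfAbsent(final DSLContext ctx, final String type, final String id, final String blob) {
    return ctx.insertInto(table("airbyte_configs"))
        .set(field("config_type", String.class), type)
        .set(field("config_id", String.class), id)
        .set(field("config_blob", JSONB.class), JSONB.valueOf(blob))
        .onConflict(field("config_type", String.class), field("config_id", String.class))
        .doNothing()
        .execute();
  }

}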
- */ - -package io.airbyte.db.instance.configs.migrations; - -import static io.airbyte.db.instance.configs.migrations.V0_30_22_001__Store_last_sync_state.COLUMN_CONFIG_BLOB; -import static io.airbyte.db.instance.configs.migrations.V0_30_22_001__Store_last_sync_state.COLUMN_CONFIG_ID; -import static io.airbyte.db.instance.configs.migrations.V0_30_22_001__Store_last_sync_state.COLUMN_CONFIG_TYPE; -import static io.airbyte.db.instance.configs.migrations.V0_30_22_001__Store_last_sync_state.COLUMN_CREATED_AT; -import static io.airbyte.db.instance.configs.migrations.V0_30_22_001__Store_last_sync_state.COLUMN_UPDATED_AT; -import static io.airbyte.db.instance.configs.migrations.V0_30_22_001__Store_last_sync_state.TABLE_AIRBYTE_CONFIGS; -import static io.airbyte.db.instance.configs.migrations.V0_30_22_001__Store_last_sync_state.getStandardSyncState; -import static org.jooq.impl.DSL.field; -import static org.jooq.impl.DSL.table; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.Configs; -import io.airbyte.config.JobOutput; -import io.airbyte.config.JobOutput.OutputType; -import io.airbyte.config.StandardSyncOutput; -import io.airbyte.config.StandardSyncState; -import io.airbyte.config.State; -import io.airbyte.db.Database; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import io.airbyte.db.instance.jobs.JobsDatabaseTestProvider; -import java.io.IOException; -import java.sql.Connection; -import java.sql.SQLException; -import java.time.OffsetDateTime; -import java.util.Collections; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.TimeUnit; -import javax.annotation.Nullable; -import org.flywaydb.core.api.configuration.Configuration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.JSONB; -import org.jooq.Table; -import org.jooq.impl.SQLDataType; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.MethodOrderer; -import org.junit.jupiter.api.Order; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestMethodOrder; -import org.junit.jupiter.api.Timeout; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -@TestMethodOrder(MethodOrderer.OrderAnnotation.class) -class V0_30_22_001__Store_last_sync_state_test extends AbstractConfigsDatabaseTest { - - private static final OffsetDateTime TIMESTAMP = OffsetDateTime.now(); - - private static final Table JOBS_TABLE = table("jobs"); - private static final Field JOB_ID_FIELD = field("id", SQLDataType.BIGINT); - private static final Field JOB_SCOPE_FIELD = field("scope", SQLDataType.VARCHAR); - private static final Field JOB_CREATED_AT_FIELD = field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE); - - private static final Table ATTEMPTS_TABLE = table("attempts"); - private static final Field ATTEMPT_ID_FIELD = field("id", SQLDataType.BIGINT); - private static final Field ATTEMPT_JOB_ID_FIELD = field("job_id", SQLDataType.BIGINT); - private static final Field ATTEMPT_NUMBER_FIELD = field("attempt_number", SQLDataType.INTEGER); - private static final Field ATTEMPT_OUTPUT_FIELD = field("output", SQLDataType.JSONB); - private static 
final Field<OffsetDateTime> ATTEMPT_CREATED_AT_FIELD = field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE); - - private static final UUID CONNECTION_1_ID = UUID.randomUUID(); - private static final UUID CONNECTION_2_ID = UUID.randomUUID(); - private static final UUID CONNECTION_3_ID = UUID.randomUUID(); - - private static final State CONNECTION_2_STATE = Jsons.deserialize("{ \"state\": { \"cursor\": 2222 } }", State.class); - private static final State CONNECTION_3_STATE = Jsons.deserialize("{ \"state\": { \"cursor\": 3333 } }", State.class); - private static final State CONNECTION_OLD_STATE = Jsons.deserialize("{ \"state\": { \"cursor\": -1 } }", State.class); - - private static final StandardSyncState STD_CONNECTION_STATE_2 = getStandardSyncState(CONNECTION_2_ID, CONNECTION_2_STATE); - private static final StandardSyncState STD_CONNECTION_STATE_3 = getStandardSyncState(CONNECTION_3_ID, CONNECTION_3_STATE); - private static final Set<StandardSyncState> STD_CONNECTION_STATES = Set.of(STD_CONNECTION_STATE_2, STD_CONNECTION_STATE_3); - - private Database jobDatabase; - - @BeforeEach - @Timeout(value = 2, - unit = TimeUnit.MINUTES) - void setupJobDatabase() throws DatabaseInitializationException, IOException { - jobDatabase = new JobsDatabaseTestProvider(dslContext, null).create(false); - } - - @Test - @Order(10) - void testGetJobsDatabase() { - assertTrue(V0_30_22_001__Store_last_sync_state.getJobsDatabase("", "", "").isEmpty()); - - // when the database environment variables are set, the database is returned - final Configs configs = mock(Configs.class); - when(configs.getDatabaseUser()).thenReturn(container.getUsername()); - when(configs.getDatabasePassword()).thenReturn(container.getPassword()); - when(configs.getDatabaseUrl()).thenReturn(container.getJdbcUrl()); - - assertTrue(V0_30_22_001__Store_last_sync_state - .getJobsDatabase(configs.getDatabaseUser(), configs.getDatabasePassword(), configs.getDatabaseUrl()).isPresent()); - } - - @Test - @Order(20) - void testGetStandardSyncStates() throws Exception { - jobDatabase.query(ctx -> { - // Connection 1 has one job and no attempts. - // This is to test that a connection with no state is not returned. - createJob(ctx, CONNECTION_1_ID, 30); - - // Connection 2 has two jobs, each with one attempt. - // This is to test that only the state from the latest job is returned. - final long job21 = createJob(ctx, CONNECTION_2_ID, 10); - final long job22 = createJob(ctx, CONNECTION_2_ID, 20); - assertNotEquals(job21, job22); - createAttempt(ctx, job21, 1, createAttemptOutput(CONNECTION_OLD_STATE), 11); - createAttempt(ctx, job22, 1, createAttemptOutput(CONNECTION_2_STATE), 21); - - // Connection 3 has two jobs. - // The first job has multiple attempts. Its third attempt has the latest state. - // The second job has two attempts with no state. - // This is to test that only the state from the latest attempt is returned. 
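The fixture being built here encodes the selection rule under test: walk jobs newest-first and return the first attempt state found, preferring the newest stateful attempt within a job. As a plain-Java sketch of that rule (simplified in-memory types, not the migration's actual jOOQ query):

import java.util.Comparator;
import java.util.List;
import java.util.Optional;

record Attempt(long createdAt, Optional<String> state) {}

record Job(long createdAt, List<Attempt> attempts) {}

final class LatestState {

  // Newest job first; within each job, the newest attempt that carries state
  // wins. A newer job with only stateless attempts falls through to older jobs.
  static Optional<String> latestState(final List<Job> jobs) {
    return jobs.stream()
        .sorted(Comparator.comparingLong(Job::createdAt).reversed())
        .flatMap(job -> job.attempts().stream()
            .sorted(Comparator.comparingLong(Attempt::createdAt).reversed())
            .map(Attempt::state)
            .flatMap(Optional::stream))
        .findFirst();
  }

}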
- final long job31 = createJob(ctx, CONNECTION_3_ID, 5); - final long job32 = createJob(ctx, CONNECTION_3_ID, 15); - assertNotEquals(job31, job32); - createAttempt(ctx, job31, 1, createAttemptOutput(CONNECTION_OLD_STATE), 6); - createAttempt(ctx, job31, 2, null, 7); - createAttempt(ctx, job31, 3, createAttemptOutput(CONNECTION_3_STATE), 8); - createAttempt(ctx, job31, 4, null, 9); - createAttempt(ctx, job31, 5, null, 10); - createAttempt(ctx, job32, 1, null, 20); - createAttempt(ctx, job32, 2, null, 25); - - assertEquals(STD_CONNECTION_STATES, V0_30_22_001__Store_last_sync_state.getStandardSyncStates(jobDatabase)); - - return null; - }); - } - - @Test - @Order(30) - void testCopyData() throws SQLException { - - final Set<StandardSyncState> newConnectionStates = Collections.singleton( - new StandardSyncState() - .withConnectionId(CONNECTION_2_ID) - .withState(new State().withState(Jsons.deserialize("{ \"cursor\": 3 }")))); - - final OffsetDateTime timestampWithFullPrecision = OffsetDateTime.now(); - /* - * The AWS CI machines get a higher-precision value here (2021-12-07T19:56:28.967213187Z) than is - * retrievable on Postgres or on a local machine (2021-12-07T19:56:28.967213Z). Truncating the - * value to match. - */ - final OffsetDateTime timestamp = timestampWithFullPrecision.withNano(1000 * (timestampWithFullPrecision.getNano() / 1000)); - - jobDatabase.query(ctx -> { - V0_30_22_001__Store_last_sync_state.copyData(ctx, STD_CONNECTION_STATES, timestamp); - checkSyncStates(ctx, STD_CONNECTION_STATES, timestamp); - - // calling copyData again with different data must not affect the existing records - V0_30_22_001__Store_last_sync_state.copyData(ctx, newConnectionStates, OffsetDateTime.now()); - // the states remain the same as those in STD_CONNECTION_STATES - checkSyncStates(ctx, STD_CONNECTION_STATES, timestamp); - - return null; - }); - } - - /** - * Clear the table and test the migration end-to-end. - */ - @Test - @Order(40) - void testMigration() throws Exception { - jobDatabase.query(ctx -> ctx.deleteFrom(TABLE_AIRBYTE_CONFIGS) - .where(COLUMN_CONFIG_TYPE.eq(ConfigSchema.STANDARD_SYNC_STATE.name())) - .execute()); - - final var migration = new V0_30_22_001__Store_last_sync_state(); - // this Context is Flyway's migration context interface - final Context context = new Context() { - - @Override - public Configuration getConfiguration() { - final Configuration configuration = mock(Configuration.class); - when(configuration.getUser()).thenReturn(container.getUsername()); - when(configuration.getPassword()).thenReturn(container.getPassword()); - when(configuration.getUrl()).thenReturn(container.getJdbcUrl()); - return configuration; - } - - @Override - public Connection getConnection() { - try { - return dataSource.getConnection(); - } catch (final SQLException e) { - throw new RuntimeException(e); - } - } - - }; - migration.migrate(context); - jobDatabase.query(ctx -> { - checkSyncStates(ctx, STD_CONNECTION_STATES, null); - return null; - }); - } - - /** - * Create a job record whose scope equals the passed-in connection id, and return the job id. - * - * @param creationOffset Set the creation timestamp to {@code TIMESTAMP} plus this offset. 
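The nanosecond truncation in testCopyData above is worth naming: Postgres timestamptz keeps at most microsecond precision, while some JVM clocks report nanoseconds, so a value must be truncated before an equality round trip. The arithmetic withNano(1000 * (nano / 1000)) is equivalent to this sketch:

import java.time.OffsetDateTime;
import java.time.temporal.ChronoUnit;

final class PostgresTimestamps {

  // Drop sub-microsecond digits so a timestamp written to Postgres compares
  // equal after a read back; same result as withNano(1000 * (nano / 1000)).
  static OffsetDateTime truncateToMicros(final OffsetDateTime ts) {
    return ts.truncatedTo(ChronoUnit.MICROS);
  }

}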
- */ -private static long createJob(final DSLContext ctx, final UUID connectionId, final long creationOffset) { - final int insertCount = ctx.insertInto(JOBS_TABLE) - .set(JOB_SCOPE_FIELD, connectionId.toString()) - .set(JOB_CREATED_AT_FIELD, TIMESTAMP.plusDays(creationOffset)) - .execute(); - assertEquals(1, insertCount); - - return ctx.select(JOB_ID_FIELD) - .from(JOBS_TABLE) - .where(JOB_SCOPE_FIELD.eq(connectionId.toString())) - .orderBy(JOB_CREATED_AT_FIELD.desc()) - .limit(1) - .fetchOne() - .get(JOB_ID_FIELD); - } - - /** - * @param creationOffset Set the creation timestamp to {@code TIMESTAMP} plus this offset. - */ - private static void createAttempt(final DSLContext ctx, - final long jobId, - final int attemptNumber, - final JobOutput attemptOutput, - final long creationOffset) { - final int insertCount = ctx.insertInto(ATTEMPTS_TABLE) - .set(ATTEMPT_JOB_ID_FIELD, jobId) - .set(ATTEMPT_NUMBER_FIELD, attemptNumber) - .set(ATTEMPT_OUTPUT_FIELD, JSONB.valueOf(Jsons.serialize(attemptOutput))) - .set(ATTEMPT_CREATED_AT_FIELD, TIMESTAMP.plusDays(creationOffset)) - .execute(); - assertEquals(1, insertCount); - - // re-select the attempt to verify the insert landed - ctx.select(ATTEMPT_ID_FIELD) - .from(ATTEMPTS_TABLE) - .where(ATTEMPT_JOB_ID_FIELD.eq(jobId), ATTEMPT_NUMBER_FIELD.eq(attemptNumber)) - .fetchOne() - .get(ATTEMPT_ID_FIELD); - } - - /** - * Create a JobOutput object whose output type is StandardSyncOutput. - * - * @param state The state object within a StandardSyncOutput. - */ - private static JobOutput createAttemptOutput(final State state) { - final StandardSyncOutput standardSyncOutput = new StandardSyncOutput().withState(state); - return new JobOutput().withOutputType(OutputType.SYNC).withSync(standardSyncOutput); - } - - private static void checkSyncStates(final DSLContext ctx, - final Set<StandardSyncState> standardSyncStates, - @Nullable final OffsetDateTime expectedTimestamp) { - for (final StandardSyncState standardSyncState : standardSyncStates) { - final var record = ctx - .select(COLUMN_CONFIG_BLOB, - COLUMN_CREATED_AT, - COLUMN_UPDATED_AT) - .from(TABLE_AIRBYTE_CONFIGS) - .where(COLUMN_CONFIG_ID.eq(standardSyncState.getConnectionId().toString()), - COLUMN_CONFIG_TYPE.eq(ConfigSchema.STANDARD_SYNC_STATE.name())) - .fetchOne(); - assertEquals(standardSyncState, Jsons.deserialize(record.value1().data(), StandardSyncState.class)); - if (expectedTimestamp != null) { - assertEquals(expectedTimestamp, record.value2()); - assertEquals(expectedTimestamp, record.value3()); - } - } - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization_Test.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization_Test.java deleted file mode 100644 index 1924c3ad29026..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization_Test.java +++ /dev/null @@ -1,470 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
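A recurring pattern in the denormalization assertions that follow: a JSONB array column is first deserialized to a raw List, then each element is re-typed with Jsons.convertValue. In isolation (a sketch mirroring the calls used in assertDataForWorkspace below; the JsonbColumns class name is illustrative):

import io.airbyte.commons.json.Jsons;
import io.airbyte.config.Notification;
import java.util.ArrayList;
import java.util.List;

final class JsonbColumns {

  // JSONB comes back as an untyped list; convertValue re-types each element
  // without a per-element String round trip.
  static List<Notification> toNotifications(final String jsonbData) {
    final List<Notification> notifications = new ArrayList<>();
    for (final Object element : Jsons.deserialize(jsonbData, List.class)) {
      notifications.add(Jsons.convertValue(element, Notification.class));
    }
    return notifications;
  }

}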
- */ - -package io.airbyte.db.instance.configs.migrations; - -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.destinationConnections; -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.destinationOauthParameters; -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.now; -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.sourceConnections; -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.sourceOauthParameters; -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.standardDestinationDefinitions; -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.standardSourceDefinitions; -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.standardSyncOperations; -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.standardSyncStates; -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.standardSyncs; -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.standardWorkspace; -import static org.jooq.impl.DSL.asterisk; -import static org.jooq.impl.DSL.table; - -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.Notification; -import io.airbyte.config.OperatorDbt; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.Schedule; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncState; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.State; -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.NamespaceDefinitionType; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.OperatorType; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.SourceType; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.StatusType; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConnectorSpecification; -import java.io.IOException; -import java.sql.SQLException; -import java.time.OffsetDateTime; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.Result; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -class V0_32_8_001__AirbyteConfigDatabaseDenormalization_Test extends AbstractConfigsDatabaseTest { - - @Test - void testCompleteMigration() throws IOException, 
SQLException { - final DSLContext context = getDslContext(); - SetupForNormalizedTablesTest.setup(context); - - V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); - - assertDataForWorkspace(context); - assertDataForSourceDefinition(context); - assertDataForDestinationDefinition(context); - assertDataForSourceConnection(context); - assertDataForDestinationConnection(context); - assertDataForSourceOauthParams(context); - assertDataForDestinationOauthParams(context); - assertDataForOperations(context); - assertDataForConnections(context); - assertDataForStandardSyncStates(context); - } - - private void assertDataForWorkspace(final DSLContext context) { - final Result workspaces = context.select(asterisk()) - .from(table("workspace")) - .fetch(); - Assertions.assertEquals(1, workspaces.size()); - - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field name = DSL.field("name", SQLDataType.VARCHAR(256).nullable(false)); - final Field slug = DSL.field("slug", SQLDataType.VARCHAR(256).nullable(false)); - final Field initialSetupComplete = DSL.field("initial_setup_complete", SQLDataType.BOOLEAN.nullable(false)); - final Field customerId = DSL.field("customer_id", SQLDataType.UUID.nullable(true)); - final Field email = DSL.field("email", SQLDataType.VARCHAR(256).nullable(true)); - final Field anonymousDataCollection = DSL.field("anonymous_data_collection", SQLDataType.BOOLEAN.nullable(true)); - final Field sendNewsletter = DSL.field("send_newsletter", SQLDataType.BOOLEAN.nullable(true)); - final Field sendSecurityUpdates = DSL.field("send_security_updates", SQLDataType.BOOLEAN.nullable(true)); - final Field displaySetupWizard = DSL.field("display_setup_wizard", SQLDataType.BOOLEAN.nullable(true)); - final Field tombstone = DSL.field("tombstone", SQLDataType.BOOLEAN.nullable(true)); - final Field notifications = DSL.field("notifications", SQLDataType.JSONB.nullable(true)); - final Field firstSyncComplete = DSL.field("first_sync_complete", SQLDataType.BOOLEAN.nullable(true)); - final Field feedbackComplete = DSL.field("feedback_complete", SQLDataType.BOOLEAN.nullable(true)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - final Field updatedAt = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - - final Record workspace = workspaces.get(0); - - final List notificationList = new ArrayList<>(); - final List fetchedNotifications = Jsons.deserialize(workspace.get(notifications).data(), List.class); - for (final Object notification : fetchedNotifications) { - notificationList.add(Jsons.convertValue(notification, Notification.class)); - } - final StandardWorkspace workspaceFromNewTable = new StandardWorkspace() - .withWorkspaceId(workspace.get(id)) - .withName(workspace.get(name)) - .withSlug(workspace.get(slug)) - .withInitialSetupComplete(workspace.get(initialSetupComplete)) - .withCustomerId(workspace.get(customerId)) - .withEmail(workspace.get(email)) - .withAnonymousDataCollection(workspace.get(anonymousDataCollection)) - .withNews(workspace.get(sendNewsletter)) - .withSecurityUpdates(workspace.get(sendSecurityUpdates)) - .withDisplaySetupWizard(workspace.get(displaySetupWizard)) - .withTombstone(workspace.get(tombstone)) - .withNotifications(notificationList) - .withFirstCompletedSync(workspace.get(firstSyncComplete)) - .withFeedbackDone(workspace.get(feedbackComplete)); - Assertions.assertEquals(standardWorkspace(), workspaceFromNewTable); - Assertions.assertEquals(now(), 
workspace.get(createdAt).toInstant()); - Assertions.assertEquals(now(), workspace.get(updatedAt).toInstant()); - Assertions.assertFalse(V0_32_8_001__AirbyteConfigDatabaseDenormalization.workspaceDoesNotExist(standardWorkspace().getWorkspaceId(), context)); - Assertions.assertTrue(V0_32_8_001__AirbyteConfigDatabaseDenormalization.workspaceDoesNotExist(UUID.randomUUID(), context)); - } - - private void assertDataForSourceDefinition(final DSLContext context) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field name = DSL.field("name", SQLDataType.VARCHAR(256).nullable(false)); - final Field dockerRepository = DSL.field("docker_repository", SQLDataType.VARCHAR(256).nullable(false)); - final Field dockerImageTag = DSL.field("docker_image_tag", SQLDataType.VARCHAR(256).nullable(false)); - final Field documentationUrl = DSL.field("documentation_url", SQLDataType.VARCHAR(256).nullable(false)); - final Field spec = DSL.field("spec", SQLDataType.JSONB.nullable(false)); - final Field icon = DSL.field("icon", SQLDataType.VARCHAR(256).nullable(true)); - final Field actorType = DSL.field("actor_type", SQLDataType.VARCHAR.asEnumDataType(ActorType.class).nullable(false)); - final Field sourceType = DSL.field("source_type", SQLDataType.VARCHAR.asEnumDataType(SourceType.class).nullable(true)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - final Field updatedAt = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - - final Result sourceDefinitions = context.select(asterisk()) - .from(table("actor_definition")) - .where(actorType.eq(ActorType.source)) - .fetch(); - final List expectedDefinitions = standardSourceDefinitions(); - Assertions.assertEquals(expectedDefinitions.size(), sourceDefinitions.size()); - Assertions.assertTrue(V0_32_8_001__AirbyteConfigDatabaseDenormalization.actorDefinitionDoesNotExist(UUID.randomUUID(), context)); - for (final Record sourceDefinition : sourceDefinitions) { - final StandardSourceDefinition standardSourceDefinition = new StandardSourceDefinition() - .withSourceDefinitionId(sourceDefinition.get(id)) - .withDockerImageTag(sourceDefinition.get(dockerImageTag)) - .withIcon(sourceDefinition.get(icon)) - .withDockerRepository(sourceDefinition.get(dockerRepository)) - .withDocumentationUrl(sourceDefinition.get(documentationUrl)) - .withName(sourceDefinition.get(name)) - .withSourceType(Enums.toEnum(sourceDefinition.get(sourceType, String.class), StandardSourceDefinition.SourceType.class).orElseThrow()) - .withSpec(Jsons.deserialize(sourceDefinition.get(spec).data(), ConnectorSpecification.class)); - Assertions.assertTrue(expectedDefinitions.contains(standardSourceDefinition)); - Assertions.assertEquals(now(), sourceDefinition.get(createdAt).toInstant()); - Assertions.assertEquals(now(), sourceDefinition.get(updatedAt).toInstant()); - Assertions.assertFalse( - V0_32_8_001__AirbyteConfigDatabaseDenormalization.actorDefinitionDoesNotExist(standardSourceDefinition.getSourceDefinitionId(), context)); - } - } - - private void assertDataForDestinationDefinition(final DSLContext context) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field name = DSL.field("name", SQLDataType.VARCHAR(256).nullable(false)); - final Field dockerRepository = DSL.field("docker_repository", SQLDataType.VARCHAR(256).nullable(false)); - final Field dockerImageTag = DSL.field("docker_image_tag", SQLDataType.VARCHAR(256).nullable(false)); - final Field 
documentationUrl = DSL.field("documentation_url", SQLDataType.VARCHAR(256).nullable(false)); - final Field spec = DSL.field("spec", SQLDataType.JSONB.nullable(false)); - final Field icon = DSL.field("icon", SQLDataType.VARCHAR(256).nullable(true)); - final Field actorType = DSL.field("actor_type", SQLDataType.VARCHAR.asEnumDataType(ActorType.class).nullable(false)); - final Field sourceType = DSL.field("source_type", SQLDataType.VARCHAR.asEnumDataType(SourceType.class).nullable(true)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - final Field updatedAt = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - - final Result destinationDefinitions = context.select(asterisk()) - .from(table("actor_definition")) - .where(actorType.eq(ActorType.destination)) - .fetch(); - final List expectedDefinitions = standardDestinationDefinitions(); - Assertions.assertEquals(expectedDefinitions.size(), destinationDefinitions.size()); - Assertions.assertTrue(V0_32_8_001__AirbyteConfigDatabaseDenormalization.actorDefinitionDoesNotExist(UUID.randomUUID(), context)); - for (final Record record : destinationDefinitions) { - final StandardDestinationDefinition standardDestinationDefinition = new StandardDestinationDefinition() - .withDestinationDefinitionId(record.get(id)) - .withDockerImageTag(record.get(dockerImageTag)) - .withIcon(record.get(icon)) - .withDockerRepository(record.get(dockerRepository)) - .withDocumentationUrl(record.get(documentationUrl)) - .withName(record.get(name)) - .withSpec(Jsons.deserialize(record.get(spec).data(), ConnectorSpecification.class)); - Assertions.assertTrue(expectedDefinitions.contains(standardDestinationDefinition)); - Assertions.assertNull(record.get(sourceType)); - Assertions.assertEquals(now(), record.get(createdAt).toInstant()); - Assertions.assertEquals(now(), record.get(updatedAt).toInstant()); - Assertions.assertFalse(V0_32_8_001__AirbyteConfigDatabaseDenormalization - .actorDefinitionDoesNotExist(standardDestinationDefinition.getDestinationDefinitionId(), context)); - } - } - - private void assertDataForSourceConnection(final DSLContext context) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field name = DSL.field("name", SQLDataType.VARCHAR(256).nullable(false)); - final Field actorDefinitionId = DSL.field("actor_definition_id", SQLDataType.UUID.nullable(false)); - final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(false)); - final Field configuration = DSL.field("configuration", SQLDataType.JSONB.nullable(false)); - final Field actorType = DSL.field("actor_type", SQLDataType.VARCHAR.asEnumDataType(ActorType.class).nullable(false)); - final Field tombstone = DSL.field("tombstone", SQLDataType.BOOLEAN.nullable(false)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - final Field updatedAt = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - - final Result sourceConnections = context.select(asterisk()) - .from(table("actor")) - .where(actorType.eq(ActorType.source)) - .fetch(); - final List expectedDefinitions = sourceConnections(); - Assertions.assertEquals(expectedDefinitions.size(), sourceConnections.size()); - Assertions.assertTrue(V0_32_8_001__AirbyteConfigDatabaseDenormalization.actorDoesNotExist(UUID.randomUUID(), context)); - for (final Record record : sourceConnections) { - final SourceConnection sourceConnection = new SourceConnection() 
- .withSourceId(record.get(id)) - .withConfiguration(Jsons.deserialize(record.get(configuration).data())) - .withWorkspaceId(record.get(workspaceId)) - .withSourceDefinitionId(record.get(actorDefinitionId)) - .withTombstone(record.get(tombstone)) - .withName(record.get(name)); - - Assertions.assertTrue(expectedDefinitions.contains(sourceConnection)); - Assertions.assertEquals(now(), record.get(createdAt).toInstant()); - Assertions.assertEquals(now(), record.get(updatedAt).toInstant()); - Assertions.assertFalse(V0_32_8_001__AirbyteConfigDatabaseDenormalization.actorDoesNotExist(sourceConnection.getSourceId(), context)); - } - } - - private void assertDataForDestinationConnection(final DSLContext context) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field name = DSL.field("name", SQLDataType.VARCHAR(256).nullable(false)); - final Field actorDefinitionId = DSL.field("actor_definition_id", SQLDataType.UUID.nullable(false)); - final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(false)); - final Field configuration = DSL.field("configuration", SQLDataType.JSONB.nullable(false)); - final Field actorType = DSL.field("actor_type", SQLDataType.VARCHAR.asEnumDataType(ActorType.class).nullable(false)); - final Field tombstone = DSL.field("tombstone", SQLDataType.BOOLEAN.nullable(false)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - final Field updatedAt = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - - final Result destinationConnections = context.select(asterisk()) - .from(table("actor")) - .where(actorType.eq(ActorType.destination)) - .fetch(); - final List expectedDefinitions = destinationConnections(); - Assertions.assertEquals(expectedDefinitions.size(), destinationConnections.size()); - Assertions.assertTrue(V0_32_8_001__AirbyteConfigDatabaseDenormalization.actorDoesNotExist(UUID.randomUUID(), context)); - for (final Record record : destinationConnections) { - final DestinationConnection destinationConnection = new DestinationConnection() - .withDestinationId(record.get(id)) - .withConfiguration(Jsons.deserialize(record.get(configuration).data())) - .withWorkspaceId(record.get(workspaceId)) - .withDestinationDefinitionId(record.get(actorDefinitionId)) - .withTombstone(record.get(tombstone)) - .withName(record.get(name)); - - Assertions.assertTrue(expectedDefinitions.contains(destinationConnection)); - Assertions.assertEquals(now(), record.get(createdAt).toInstant()); - Assertions.assertEquals(now(), record.get(updatedAt).toInstant()); - Assertions.assertFalse(V0_32_8_001__AirbyteConfigDatabaseDenormalization.actorDoesNotExist(destinationConnection.getDestinationId(), context)); - } - } - - private void assertDataForSourceOauthParams(final DSLContext context) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field actorDefinitionId = DSL.field("actor_definition_id", SQLDataType.UUID.nullable(false)); - final Field configuration = DSL.field("configuration", SQLDataType.JSONB.nullable(false)); - final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(true)); - final Field actorType = DSL.field("actor_type", SQLDataType.VARCHAR.asEnumDataType(ActorType.class).nullable(false)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - final Field updatedAt = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - - final Result 
sourceOauthParams = context.select(asterisk()) - .from(table("actor_oauth_parameter")) - .where(actorType.eq(ActorType.source)) - .fetch(); - final List expectedDefinitions = sourceOauthParameters().stream().filter(c -> c.getWorkspaceId() != null).toList(); - Assertions.assertEquals(expectedDefinitions.size(), sourceOauthParams.size()); - - for (final Record record : sourceOauthParams) { - final SourceOAuthParameter sourceOAuthParameter = new SourceOAuthParameter() - .withOauthParameterId(record.get(id)) - .withConfiguration(Jsons.deserialize(record.get(configuration).data())) - .withWorkspaceId(record.get(workspaceId)) - .withSourceDefinitionId(record.get(actorDefinitionId)); - Assertions.assertTrue(expectedDefinitions.contains(sourceOAuthParameter)); - Assertions.assertEquals(now(), record.get(createdAt).toInstant()); - Assertions.assertEquals(now(), record.get(updatedAt).toInstant()); - } - } - - private void assertDataForDestinationOauthParams(final DSLContext context) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field actorDefinitionId = DSL.field("actor_definition_id", SQLDataType.UUID.nullable(false)); - final Field configuration = DSL.field("configuration", SQLDataType.JSONB.nullable(false)); - final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(true)); - final Field actorType = DSL.field("actor_type", SQLDataType.VARCHAR.asEnumDataType(ActorType.class).nullable(false)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - final Field updatedAt = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - - final Result destinationOauthParams = context.select(asterisk()) - .from(table("actor_oauth_parameter")) - .where(actorType.eq(ActorType.destination)) - .fetch(); - final List expectedDefinitions = - destinationOauthParameters().stream().filter(c -> c.getWorkspaceId() != null).toList(); - Assertions.assertEquals(expectedDefinitions.size(), destinationOauthParams.size()); - - for (final Record record : destinationOauthParams) { - final DestinationOAuthParameter destinationOAuthParameter = new DestinationOAuthParameter() - .withOauthParameterId(record.get(id)) - .withConfiguration(Jsons.deserialize(record.get(configuration).data())) - .withWorkspaceId(record.get(workspaceId)) - .withDestinationDefinitionId(record.get(actorDefinitionId)); - Assertions.assertTrue(expectedDefinitions.contains(destinationOAuthParameter)); - Assertions.assertEquals(now(), record.get(createdAt).toInstant()); - Assertions.assertEquals(now(), record.get(updatedAt).toInstant()); - } - } - - private void assertDataForOperations(final DSLContext context) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(false)); - final Field name = DSL.field("name", SQLDataType.VARCHAR(256).nullable(false)); - final Field operatorType = DSL.field("operator_type", SQLDataType.VARCHAR.asEnumDataType(OperatorType.class).nullable(false)); - final Field operatorNormalization = DSL.field("operator_normalization", SQLDataType.JSONB.nullable(true)); - final Field operatorDbt = DSL.field("operator_dbt", SQLDataType.JSONB.nullable(true)); - final Field tombstone = DSL.field("tombstone", SQLDataType.BOOLEAN.nullable(true)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - final Field updatedAt = DSL.field("updated_at", 
SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - - final Result standardSyncOperations = context.select(asterisk()) - .from(table("operation")) - .fetch(); - final List expectedDefinitions = standardSyncOperations(); - Assertions.assertEquals(expectedDefinitions.size(), standardSyncOperations.size()); - Assertions.assertTrue(V0_32_8_001__AirbyteConfigDatabaseDenormalization.operationDoesNotExist(UUID.randomUUID(), context)); - for (final Record record : standardSyncOperations) { - final StandardSyncOperation standardSyncOperation = new StandardSyncOperation() - .withOperationId(record.get(id)) - .withName(record.get(name)) - .withWorkspaceId(record.get(workspaceId)) - .withOperatorType(Enums.toEnum(record.get(operatorType, String.class), StandardSyncOperation.OperatorType.class).orElseThrow()) - .withOperatorNormalization(Jsons.deserialize(record.get(operatorNormalization).data(), OperatorNormalization.class)) - .withOperatorDbt(Jsons.deserialize(record.get(operatorDbt).data(), OperatorDbt.class)) - .withTombstone(record.get(tombstone)); - - Assertions.assertTrue(expectedDefinitions.contains(standardSyncOperation)); - Assertions.assertEquals(now(), record.get(createdAt).toInstant()); - Assertions.assertEquals(now(), record.get(updatedAt).toInstant()); - Assertions - .assertFalse(V0_32_8_001__AirbyteConfigDatabaseDenormalization.operationDoesNotExist(standardSyncOperation.getOperationId(), context)); - } - } - - private void assertDataForConnections(final DSLContext context) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field namespaceDefinition = DSL - .field("namespace_definition", SQLDataType.VARCHAR.asEnumDataType(NamespaceDefinitionType.class).nullable(false)); - final Field namespaceFormat = DSL.field("namespace_format", SQLDataType.VARCHAR(256).nullable(true)); - final Field prefix = DSL.field("prefix", SQLDataType.VARCHAR(256).nullable(true)); - final Field sourceId = DSL.field("source_id", SQLDataType.UUID.nullable(false)); - final Field destinationId = DSL.field("destination_id", SQLDataType.UUID.nullable(false)); - final Field name = DSL.field("name", SQLDataType.VARCHAR(256).nullable(false)); - final Field catalog = DSL.field("catalog", SQLDataType.JSONB.nullable(false)); - final Field status = DSL.field("status", SQLDataType.VARCHAR.asEnumDataType(StatusType.class).nullable(true)); - final Field schedule = DSL.field("schedule", SQLDataType.JSONB.nullable(true)); - final Field manual = DSL.field("manual", SQLDataType.BOOLEAN.nullable(false)); - final Field resourceRequirements = DSL.field("resource_requirements", SQLDataType.JSONB.nullable(true)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - final Field updatedAt = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - - final Result standardSyncs = context.select(asterisk()) - .from(table("connection")) - .fetch(); - final List expectedStandardSyncs = standardSyncs(); - Assertions.assertEquals(expectedStandardSyncs.size(), standardSyncs.size()); - Assertions.assertTrue(V0_32_8_001__AirbyteConfigDatabaseDenormalization.connectionDoesNotExist(UUID.randomUUID(), context)); - for (final Record record : standardSyncs) { - final StandardSync standardSync = new StandardSync() - .withConnectionId(record.get(id)) - .withNamespaceDefinition( - Enums.toEnum(record.get(namespaceDefinition, String.class), io.airbyte.config.JobSyncConfig.NamespaceDefinitionType.class) - .orElseThrow()) - 
.withNamespaceFormat(record.get(namespaceFormat)) - .withPrefix(record.get(prefix)) - .withSourceId(record.get(sourceId)) - .withDestinationId(record.get(destinationId)) - .withName(record.get(name)) - .withCatalog(Jsons.deserialize(record.get(catalog).data(), ConfiguredAirbyteCatalog.class)) - .withStatus(Enums.toEnum(record.get(status, String.class), StandardSync.Status.class).orElseThrow()) - .withSchedule(Jsons.deserialize(record.get(schedule).data(), Schedule.class)) - .withManual(record.get(manual)) - .withOperationIds(connectionOperationIds(record.get(id), context)) - .withResourceRequirements(Jsons.deserialize(record.get(resourceRequirements).data(), ResourceRequirements.class)); - - Assertions.assertTrue(expectedStandardSyncs.contains(standardSync)); - Assertions.assertEquals(now(), record.get(createdAt).toInstant()); - Assertions.assertEquals(now(), record.get(updatedAt).toInstant()); - Assertions.assertFalse(V0_32_8_001__AirbyteConfigDatabaseDenormalization.connectionDoesNotExist(standardSync.getConnectionId(), context)); - } - } - - private List connectionOperationIds(final UUID connectionIdTo, final DSLContext context) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field connectionId = DSL.field("connection_id", SQLDataType.UUID.nullable(false)); - final Field operationId = DSL.field("operation_id", SQLDataType.UUID.nullable(false)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - final Field updatedAt = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - - final Result connectionOperations = context.select(asterisk()) - .from(table("connection_operation")) - .where(connectionId.eq(connectionIdTo)) - .fetch(); - - final List ids = new ArrayList<>(); - - for (final Record record : connectionOperations) { - ids.add(record.get(operationId)); - Assertions.assertNotNull(record.get(id)); - Assertions.assertEquals(now(), record.get(createdAt).toInstant()); - Assertions.assertEquals(now(), record.get(updatedAt).toInstant()); - } - - return ids; - } - - private void assertDataForStandardSyncStates(final DSLContext context) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field connectionId = DSL.field("connection_id", SQLDataType.UUID.nullable(false)); - final Field state = DSL.field("state", SQLDataType.JSONB.nullable(true)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - final Field updatedAt = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - - final Result standardSyncStates = context.select(asterisk()) - .from(table("state")) - .fetch(); - final List expectedStandardSyncsStates = standardSyncStates(); - Assertions.assertEquals(expectedStandardSyncsStates.size(), standardSyncStates.size()); - - for (final Record record : standardSyncStates) { - final StandardSyncState standardSyncState = new StandardSyncState() - .withConnectionId(record.get(connectionId)) - .withState(Jsons.deserialize(record.get(state).data(), State.class)); - - Assertions.assertTrue(expectedStandardSyncsStates.contains(standardSyncState)); - Assertions.assertNotNull(record.get(id)); - Assertions.assertEquals(now(), record.get(createdAt).toInstant()); - Assertions.assertEquals(now(), record.get(updatedAt).toInstant()); - } - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_14_001__AddTombstoneToActorDefinitionTest.java 
b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_14_001__AddTombstoneToActorDefinitionTest.java deleted file mode 100644 index 3451a820628fe..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_14_001__AddTombstoneToActorDefinitionTest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; -import java.io.IOException; -import java.sql.SQLException; -import java.util.UUID; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class V0_35_14_001__AddTombstoneToActorDefinitionTest extends AbstractConfigsDatabaseTest { - - @Test - void test() throws SQLException, IOException { - final DSLContext context = getDslContext(); - - // necessary to add actor_definition table - V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); - - final UUID id = UUID.randomUUID(); - context.insertInto(DSL.table("actor_definition")) - .columns( - DSL.field("id"), - DSL.field("name"), - DSL.field("docker_repository"), - DSL.field("docker_image_tag"), - DSL.field("actor_type"), - DSL.field("spec")) - .values( - id, - "name", - "repo", - "1.0.0", - ActorType.source, - JSONB.valueOf("{}")) - .execute(); - - Assertions.assertFalse(tombstoneColumnExists(context)); - - V0_35_14_001__AddTombstoneToActorDefinition.addTombstoneColumn(context); - - Assertions.assertTrue(tombstoneColumnExists(context)); - Assertions.assertTrue(tombstoneDefaultsToFalse(context, id)); - } - - protected static boolean tombstoneColumnExists(final DSLContext ctx) { - return ctx.fetchExists(DSL.select() - .from("information_schema.columns") - .where(DSL.field("table_name").eq("actor_definition") - .and(DSL.field("column_name").eq("tombstone")))); - } - - protected static boolean tombstoneDefaultsToFalse(final DSLContext ctx, final UUID id) { - final Record record = ctx.fetchOne(DSL.select() - .from("actor_definition") - .where(DSL.field("id").eq(id))); - - return record.get("tombstone").equals(false); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition_Test.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition_Test.java deleted file mode 100644 index 533946eb6750a..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition_Test.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
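The tombstoneColumnExists helper above queries information_schema directly, and the same pattern recurs in several of the deleted tests below (release_stage, public, custom). A hedged generalization of that check; SchemaChecks and columnExists are hypothetical names, not part of the deleted code:

```java
import org.jooq.DSLContext;
import org.jooq.impl.DSL;

final class SchemaChecks {

  // True if the given column exists on the given table, checked against
  // information_schema the same way the per-column helpers in these tests do.
  static boolean columnExists(final DSLContext ctx, final String table, final String column) {
    return ctx.fetchExists(DSL.select()
        .from("information_schema.columns")
        .where(DSL.field("table_name").eq(table)
            .and(DSL.field("column_name").eq(column))));
  }

}
```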
- */ - -package io.airbyte.db.instance.configs.migrations; - -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; -import io.airbyte.db.instance.configs.migrations.V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition.ReleaseStage; -import java.io.IOException; -import java.sql.SQLException; -import java.util.UUID; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition_Test extends AbstractConfigsDatabaseTest { - - @Test - void test() throws SQLException, IOException { - final DSLContext context = getDslContext(); - - // necessary to add actor_definition table - V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); - - Assertions.assertFalse(releaseStageColumnExists(context)); - Assertions.assertFalse(releaseDateColumnExists(context)); - - V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition.createReleaseStageEnum(context); - V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition.addReleaseStageColumn(context); - V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition.addReleaseDateColumn(context); - - Assertions.assertTrue(releaseStageColumnExists(context)); - Assertions.assertTrue(releaseDateColumnExists(context)); - - assertReleaseStageEnumWorks(context); - } - - private static boolean releaseStageColumnExists(final DSLContext ctx) { - return ctx.fetchExists(DSL.select() - .from("information_schema.columns") - .where(DSL.field("table_name").eq("actor_definition") - .and(DSL.field("column_name").eq("release_stage")))); - } - - private static boolean releaseDateColumnExists(final DSLContext ctx) { - return ctx.fetchExists(DSL.select() - .from("information_schema.columns") - .where(DSL.field("table_name").eq("actor_definition") - .and(DSL.field("column_name").eq("release_date")))); - } - - private static void assertReleaseStageEnumWorks(final DSLContext ctx) { - Assertions.assertDoesNotThrow(() -> { - insertWithReleaseStage(ctx, ReleaseStage.alpha); - insertWithReleaseStage(ctx, ReleaseStage.beta); - insertWithReleaseStage(ctx, ReleaseStage.generally_available); - insertWithReleaseStage(ctx, ReleaseStage.custom); - }); - } - - private static void insertWithReleaseStage(final DSLContext ctx, final ReleaseStage releaseStage) { - ctx.insertInto(DSL.table("actor_definition")) - .columns( - DSL.field("id"), - DSL.field("name"), - DSL.field("docker_repository"), - DSL.field("docker_image_tag"), - DSL.field("actor_type"), - DSL.field("spec"), - DSL.field("release_stage")) - .values( - UUID.randomUUID(), - "name", - "repo", - "1.0.0", - ActorType.source, - JSONB.valueOf("{}"), - releaseStage) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_1_001__RemoveForeignKeyFromActorOauth_Test.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_1_001__RemoveForeignKeyFromActorOauth_Test.java deleted file mode 100644 index 6b8224c0cef39..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_1_001__RemoveForeignKeyFromActorOauth_Test.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
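insertWithReleaseStage above passes the ReleaseStage enum straight into a jOOQ insert. For that to render as a valid Postgres enum literal, migration enums of this kind typically implement org.jooq.EnumType. A sketch of that contract, assuming the Postgres type is named release_stage; the enum body is illustrative, not the deleted source:

```java
import org.jooq.EnumType;

// Illustrative mapping of a Java enum onto a Postgres enum type in jOOQ:
// getLiteral() must return the exact SQL enum label.
enum ReleaseStage implements EnumType {

  alpha, beta, generally_available, custom;

  @Override
  public String getLiteral() {
    return name();
  }

  @Override
  public String getName() {
    return "release_stage"; // assumed name of the Postgres enum type
  }

}
```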
- */ - -package io.airbyte.db.instance.configs.migrations; - -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.destinationOauthParameters; -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.now; -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.sourceOauthParameters; -import static org.jooq.impl.DSL.asterisk; -import static org.jooq.impl.DSL.table; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; -import java.io.IOException; -import java.sql.SQLException; -import java.time.OffsetDateTime; -import java.util.List; -import java.util.UUID; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.Result; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class V0_35_1_001__RemoveForeignKeyFromActorOauth_Test extends AbstractConfigsDatabaseTest { - - @Test - void testCompleteMigration() throws IOException, SQLException { - final DSLContext context = getDslContext(); - SetupForNormalizedTablesTest.setup(context); - - V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); - V0_35_1_001__RemoveForeignKeyFromActorOauth.migrate(context); - assertDataForSourceOauthParams(context); - assertDataForDestinationOauthParams(context); - } - - private void assertDataForSourceOauthParams(final DSLContext context) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field actorDefinitionId = DSL.field("actor_definition_id", SQLDataType.UUID.nullable(false)); - final Field configuration = DSL.field("configuration", SQLDataType.JSONB.nullable(false)); - final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(true)); - final Field actorType = DSL.field("actor_type", SQLDataType.VARCHAR.asEnumDataType(ActorType.class).nullable(false)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - final Field updatedAt = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - - final Result sourceOauthParams = context.select(asterisk()) - .from(table("actor_oauth_parameter")) - .where(actorType.eq(ActorType.source)) - .fetch(); - final List expectedDefinitions = sourceOauthParameters(); - Assertions.assertEquals(expectedDefinitions.size(), sourceOauthParams.size()); - - for (final Record record : sourceOauthParams) { - final SourceOAuthParameter sourceOAuthParameter = new SourceOAuthParameter() - .withOauthParameterId(record.get(id)) - .withConfiguration(Jsons.deserialize(record.get(configuration).data())) - .withWorkspaceId(record.get(workspaceId)) - .withSourceDefinitionId(record.get(actorDefinitionId)); - Assertions.assertTrue(expectedDefinitions.contains(sourceOAuthParameter)); - Assertions.assertEquals(now(), record.get(createdAt).toInstant()); - Assertions.assertEquals(now(), record.get(updatedAt).toInstant()); - } - } - - private void assertDataForDestinationOauthParams(final DSLContext context) { - final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); - final Field actorDefinitionId = DSL.field("actor_definition_id", 
SQLDataType.UUID.nullable(false)); - final Field configuration = DSL.field("configuration", SQLDataType.JSONB.nullable(false)); - final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(true)); - final Field actorType = DSL.field("actor_type", SQLDataType.VARCHAR.asEnumDataType(ActorType.class).nullable(false)); - final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - final Field updatedAt = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); - - final Result destinationOauthParams = context.select(asterisk()) - .from(table("actor_oauth_parameter")) - .where(actorType.eq(ActorType.destination)) - .fetch(); - final List expectedDefinitions = destinationOauthParameters(); - Assertions.assertEquals(expectedDefinitions.size(), destinationOauthParams.size()); - - for (final Record record : destinationOauthParams) { - final DestinationOAuthParameter destinationOAuthParameter = new DestinationOAuthParameter() - .withOauthParameterId(record.get(id)) - .withConfiguration(Jsons.deserialize(record.get(configuration).data())) - .withWorkspaceId(record.get(workspaceId)) - .withDestinationDefinitionId(record.get(actorDefinitionId)); - Assertions.assertTrue(expectedDefinitions.contains(destinationOAuthParameter)); - Assertions.assertEquals(now(), record.get(createdAt).toInstant()); - Assertions.assertEquals(now(), record.get(updatedAt).toInstant()); - } - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_26_001__PersistDiscoveredCatalogTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_26_001__PersistDiscoveredCatalogTest.java deleted file mode 100644 index d7f201b0ce7c7..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_26_001__PersistDiscoveredCatalogTest.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
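A note on the assertion style used throughout these deleted tests, including the OAuth-parameter checks above: each column is re-declared as a typed jOOQ Field so that Record.get returns the proper Java type without casts. A minimal sketch; TypedAccess and workspaceIdOf are hypothetical helpers, not code from this diff:

```java
import java.util.UUID;
import org.jooq.Record;
import org.jooq.impl.DSL;
import org.jooq.impl.SQLDataType;

final class TypedAccess {

  // DSL.field(name, dataType) yields a Field<UUID>, so get(...) returns a
  // UUID directly instead of an Object that would need casting.
  static UUID workspaceIdOf(final Record record) {
    return record.get(DSL.field("workspace_id", SQLDataType.UUID.nullable(true)));
  }

}
```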
- */ - -package io.airbyte.db.instance.configs.migrations; - -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; -import java.io.IOException; -import java.sql.SQLException; -import java.time.OffsetDateTime; -import java.util.UUID; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class V0_35_26_001__PersistDiscoveredCatalogTest extends AbstractConfigsDatabaseTest { - - private static final String NAME = "name"; - - @Test - void test() throws SQLException, IOException { - final DSLContext context = getDslContext(); - V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); - V0_35_26_001__PersistDiscoveredCatalog.migrate(context); - assertCanInsertData(context); - } - - private void assertCanInsertData(final DSLContext ctx) { - Assertions.assertDoesNotThrow(() -> { - final UUID catalogId = UUID.randomUUID(); - final UUID actorId = UUID.randomUUID(); - final UUID actorDefinitionId = UUID.randomUUID(); - final UUID workspaceId = UUID.randomUUID(); - - ctx.insertInto(DSL.table("workspace")) - .columns( - DSL.field("id"), - DSL.field(NAME), - DSL.field("slug"), - DSL.field("initial_setup_complete")) - .values( - workspaceId, - "default", - "default", - true) - .execute(); - ctx.insertInto(DSL.table("actor_definition")) - .columns( - DSL.field("id"), - DSL.field(NAME), - DSL.field("docker_repository"), - DSL.field("docker_image_tag"), - DSL.field("actor_type"), - DSL.field("spec")) - .values( - actorDefinitionId, - NAME, - "repo", - "1.0.0", - ActorType.source, - JSONB.valueOf("{}")) - .execute(); - ctx.insertInto(DSL.table("actor")) - .columns( - DSL.field("id"), - DSL.field("workspace_id"), - DSL.field("actor_definition_id"), - DSL.field(NAME), - DSL.field("configuration"), - DSL.field("actor_type"), - DSL.field("created_at"), - DSL.field("updated_at")) - .values( - actorId, - workspaceId, - actorDefinitionId, - "some actor", - JSONB.valueOf("{}"), - ActorType.source, - OffsetDateTime.now(), - OffsetDateTime.now()) - .execute(); - ctx.insertInto(DSL.table("actor_catalog")) - .columns( - DSL.field("id"), - DSL.field("catalog"), - DSL.field("catalog_hash"), - DSL.field("created_at")) - .values( - catalogId, - JSONB.valueOf("{}"), - "", - OffsetDateTime.now()) - .execute(); - ctx.insertInto(DSL.table("actor_catalog_fetch_event")) - .columns( - DSL.field("id"), - DSL.field("actor_catalog_id"), - DSL.field("actor_id"), - DSL.field("config_hash"), - DSL.field("actor_version")) - .values( - UUID.randomUUID(), - catalogId, - actorId, - "", - "2.0.1") - .execute(); - }); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_28_001__AddActorCatalogMetadataColumnsTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_28_001__AddActorCatalogMetadataColumnsTest.java deleted file mode 100644 index cdc96d29da73f..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_28_001__AddActorCatalogMetadataColumnsTest.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
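The fixture above inserts workspace and actor_definition before actor, and actor plus actor_catalog before actor_catalog_fetch_event, because each child row carries a foreign key to its parents. If such seed data grows, wrapping it in one jOOQ transaction keeps a failed insert from leaving a partial fixture behind. A sketch under that assumption, not how the deleted test was written:

```java
import java.util.UUID;
import org.jooq.DSLContext;
import org.jooq.impl.DSL;

final class Fixtures {

  // Seed rows in dependency order, atomically: parents strictly before
  // children so every foreign key resolves.
  static void seed(final DSLContext ctx) {
    ctx.transaction(configuration -> {
      final DSLContext tx = DSL.using(configuration);
      tx.insertInto(DSL.table("workspace"))
          .columns(DSL.field("id"), DSL.field("name"), DSL.field("slug"), DSL.field("initial_setup_complete"))
          .values(UUID.randomUUID(), "default", "default", true)
          .execute();
      // ... actor_definition, actor, actor_catalog, actor_catalog_fetch_event ...
    });
  }

}
```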
- */ - -package io.airbyte.db.instance.configs.migrations; - -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; -import java.io.IOException; -import java.sql.SQLException; -import java.time.OffsetDateTime; -import java.util.UUID; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class V0_35_28_001__AddActorCatalogMetadataColumnsTest extends AbstractConfigsDatabaseTest { - - @Test - void test() throws SQLException, IOException { - final DSLContext context = getDslContext(); - V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); - V0_35_26_001__PersistDiscoveredCatalog.migrate(context); - V0_35_28_001__AddActorCatalogMetadataColumns.migrate(context); - assertCanInsertSchemaDataWithMetadata(context); - } - - private void assertCanInsertSchemaDataWithMetadata(final DSLContext ctx) { - Assertions.assertDoesNotThrow(() -> { - final UUID catalogId = UUID.randomUUID(); - final UUID actorId = UUID.randomUUID(); - final UUID actorDefinitionId = UUID.randomUUID(); - final UUID workspaceId = UUID.randomUUID(); - - ctx.insertInto(DSL.table("workspace")) - .columns( - DSL.field("id"), - DSL.field("name"), - DSL.field("slug"), - DSL.field("initial_setup_complete")) - .values( - workspaceId, - "base workspace", - "base_workspace", - true) - .execute(); - ctx.insertInto(DSL.table("actor_definition")) - .columns( - DSL.field("id"), - DSL.field("name"), - DSL.field("docker_repository"), - DSL.field("docker_image_tag"), - DSL.field("actor_type"), - DSL.field("spec")) - .values( - actorDefinitionId, - "Jenkins", - "farosai/airbyte-jenkins-source", - "0.1.23", - ActorType.source, - JSONB.valueOf("{}")) - .execute(); - ctx.insertInto(DSL.table("actor")) - .columns( - DSL.field("id"), - DSL.field("workspace_id"), - DSL.field("actor_definition_id"), - DSL.field("name"), - DSL.field("configuration"), - DSL.field("actor_type"), - DSL.field("created_at"), - DSL.field("updated_at")) - .values( - actorId, - workspaceId, - actorDefinitionId, - "JenkinsConnection", - JSONB.valueOf("{}"), - ActorType.source, - OffsetDateTime.now(), - OffsetDateTime.now()) - .execute(); - ctx.insertInto(DSL.table("actor_catalog")) - .columns( - DSL.field("id"), - DSL.field("catalog"), - DSL.field("catalog_hash"), - DSL.field("created_at"), - DSL.field("modified_at")) - .values( - catalogId, - JSONB.valueOf("{}"), - "", - OffsetDateTime.now(), - OffsetDateTime.now()) - .execute(); - ctx.insertInto(DSL.table("actor_catalog_fetch_event")) - .columns( - DSL.field("id"), - DSL.field("actor_catalog_id"), - DSL.field("actor_id"), - DSL.field("config_hash"), - DSL.field("actor_version"), - DSL.field("created_at"), - DSL.field("modified_at")) - .values( - UUID.randomUUID(), - catalogId, - actorId, - "HASHVALUE", - "2.0.1", - OffsetDateTime.now(), - OffsetDateTime.now()) - .execute(); - }); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_3_001__DropAirbyteConfigsTableTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_3_001__DropAirbyteConfigsTableTest.java deleted file mode 100644 index bd43b77340157..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_3_001__DropAirbyteConfigsTableTest.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, 
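The test above only verifies that rows carrying created_at and modified_at can be inserted; the migration itself is not part of this diff. For context, a jOOQ migration adding such columns usually looks like the sketch below. The column names come from the test; the alterTable calls are an assumption about the migration's shape, not its actual source:

```java
import org.jooq.DSLContext;
import org.jooq.impl.DSL;
import org.jooq.impl.SQLDataType;

final class AddMetadataColumnsSketch {

  // Plausible shape of V0_35_28_001: add nullable timestamptz metadata
  // columns to the catalog tables touched by the test above.
  static void migrate(final DSLContext ctx) {
    ctx.alterTable("actor_catalog")
        .addColumnIfNotExists(DSL.field("modified_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(true)))
        .execute();
    ctx.alterTable("actor_catalog_fetch_event")
        .addColumnIfNotExists(DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(true)))
        .execute();
  }

}
```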
Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import static org.jooq.impl.DSL.select; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import java.io.IOException; -import java.sql.SQLException; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.Test; - -class V0_35_3_001__DropAirbyteConfigsTableTest extends AbstractConfigsDatabaseTest { - - @Test - void test() throws IOException, SQLException { - final DSLContext context = getDslContext(); - assertTrue(airbyteConfigsExists(context)); - V0_35_3_001__DropAirbyteConfigsTable.dropTable(context); - assertFalse(airbyteConfigsExists(context)); - } - - protected static boolean airbyteConfigsExists(final DSLContext ctx) { - return ctx.fetchExists(select() - .from("information_schema.tables") - .where(DSL.field("table_name").eq("airbyte_configs") - .and(DSL.field("table_schema").eq("public")))); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_001__AddPublicToActorDefinitionTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_001__AddPublicToActorDefinitionTest.java deleted file mode 100644 index e02c6a6260db5..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_001__AddPublicToActorDefinitionTest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; -import java.io.IOException; -import java.sql.SQLException; -import java.util.UUID; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class V0_35_59_001__AddPublicToActorDefinitionTest extends AbstractConfigsDatabaseTest { - - @Test - void test() throws SQLException, IOException { - final DSLContext context = getDslContext(); - - // necessary to add actor_definition table - V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); - - Assertions.assertFalse(publicColumnExists(context)); - - final UUID id = UUID.randomUUID(); - context.insertInto(DSL.table("actor_definition")) - .columns( - DSL.field("id"), - DSL.field("name"), - DSL.field("docker_repository"), - DSL.field("docker_image_tag"), - DSL.field("actor_type"), - DSL.field("spec")) - .values( - id, - "name", - "repo", - "1.0.0", - ActorType.source, - JSONB.valueOf("{}")) - .execute(); - - V0_35_59_001__AddPublicToActorDefinition.addPublicColumn(context); - - Assertions.assertTrue(publicColumnExists(context)); - Assertions.assertTrue(publicDefaultsToFalse(context, id)); - } - - protected static boolean publicColumnExists(final DSLContext ctx) { - return ctx.fetchExists(DSL.select() - .from("information_schema.columns") - .where(DSL.field("table_name").eq("actor_definition") - .and(DSL.field("column_name").eq("public")))); - } - - protected static boolean publicDefaultsToFalse(final DSLContext ctx, final UUID id) { - final Record record = ctx.fetchOne(DSL.select() - .from("actor_definition") - .where(DSL.field("id").eq(id))); - - return 
record.get("public").equals(false); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_002__AddActorDefinitionWorkspaceGrantTableTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_002__AddActorDefinitionWorkspaceGrantTableTest.java deleted file mode 100644 index fc70fbe9ab323..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_002__AddActorDefinitionWorkspaceGrantTableTest.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; -import java.io.IOException; -import java.sql.SQLException; -import java.util.UUID; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.jooq.exception.DataAccessException; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class V0_35_59_002__AddActorDefinitionWorkspaceGrantTableTest extends AbstractConfigsDatabaseTest { - - @Test - void test() throws SQLException, IOException { - final DSLContext context = getDslContext(); - V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); - - final UUID actorDefinitionId = new UUID(0L, 1L); - context.insertInto(DSL.table("actor_definition")) - .columns( - DSL.field("id"), - DSL.field("name"), - DSL.field("docker_repository"), - DSL.field("docker_image_tag"), - DSL.field("actor_type"), - DSL.field("spec")) - .values( - actorDefinitionId, - "name", - "repo", - "1.0.0", - ActorType.source, - JSONB.valueOf("{}")) - .execute(); - - final UUID workspaceId = new UUID(0L, 2L); - context.insertInto(DSL.table("workspace")) - .columns( - DSL.field("id"), - DSL.field("name"), - DSL.field("slug"), - DSL.field("initial_setup_complete")) - .values( - workspaceId, - "default", - "default", - true) - .execute(); - - V0_35_59_002__AddActorDefinitionWorkspaceGrantTable.createActorDefinitionWorkspaceGrant(context); - assertCanInsertActorDefinitionWorkspaceGrant(context, actorDefinitionId, workspaceId); - assertActorDefinitionWorkspaceGrantConstraints(context); - } - - private void assertCanInsertActorDefinitionWorkspaceGrant( - final DSLContext context, - final UUID actorDefinitionId, - final UUID workspaceId) { - Assertions.assertDoesNotThrow(() -> { - context.insertInto(DSL.table("actor_definition_workspace_grant")) - .columns( - DSL.field("actor_definition_id"), - DSL.field("workspace_id")) - .values( - actorDefinitionId, - workspaceId) - .execute(); - }); - } - - private void assertActorDefinitionWorkspaceGrantConstraints(final DSLContext context) { - final Exception e = Assertions.assertThrows(DataAccessException.class, () -> { - context.insertInto(DSL.table("actor_definition_workspace_grant")) - .columns( - DSL.field("actor_definition_id"), - DSL.field("workspace_id")) - .values( - new UUID(0L, 3L), - new UUID(0L, 4L)) - .execute(); - }); - Assertions.assertTrue(e.getMessage().contains("violates foreign key constraint")); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_003__AddCustomToActorDefinitionTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_003__AddCustomToActorDefinitionTest.java deleted file mode 100644 index 
9ca991034a4bc..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_003__AddCustomToActorDefinitionTest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; -import java.io.IOException; -import java.sql.SQLException; -import java.util.UUID; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class V0_35_59_003__AddCustomToActorDefinitionTest extends AbstractConfigsDatabaseTest { - - @Test - void test() throws SQLException, IOException { - final DSLContext context = getDslContext(); - - // necessary to add actor_definition table - V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); - - Assertions.assertFalse(customColumnExists(context)); - - final UUID id = UUID.randomUUID(); - context.insertInto(DSL.table("actor_definition")) - .columns( - DSL.field("id"), - DSL.field("name"), - DSL.field("docker_repository"), - DSL.field("docker_image_tag"), - DSL.field("actor_type"), - DSL.field("spec")) - .values( - id, - "name", - "repo", - "1.0.0", - ActorType.source, - JSONB.valueOf("{}")) - .execute(); - - V0_35_59_003__AddCustomToActorDefinition.addCustomColumn(context); - - Assertions.assertTrue(customColumnExists(context)); - Assertions.assertTrue(customDefaultsToFalse(context, id)); - } - - protected static boolean customColumnExists(final DSLContext ctx) { - return ctx.fetchExists(DSL.select() - .from("information_schema.columns") - .where(DSL.field("table_name").eq("actor_definition") - .and(DSL.field("column_name").eq("custom")))); - } - - protected static boolean customDefaultsToFalse(final DSLContext ctx, final UUID id) { - final Record record = ctx.fetchOne(DSL.select() - .from("actor_definition") - .where(DSL.field("id").eq(id))); - - return record.get("custom").equals(false); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_39_17_001__AddStreamDescriptorsToStateTableTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_39_17_001__AddStreamDescriptorsToStateTableTest.java deleted file mode 100644 index 7c79155c0aa07..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_39_17_001__AddStreamDescriptorsToStateTableTest.java +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
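Unlike the earlier tests, which call migrate(context) statically, the stream-descriptors test that follows runs the migration through a real Flyway migrator that is baselined first and then advanced. A condensed sketch of that pattern, using the same Airbyte helpers the deleted test imports; dataSource and database are assumed to be fields provided by the test base class:

```java
import io.airbyte.db.factory.FlywayFactory;
import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator;
import io.airbyte.db.instance.development.DevDatabaseMigrator;
import org.flywaydb.core.Flyway;

// Baseline at the previous schema version, seed data against the old
// schema, then apply the migration under test.
final Flyway flyway = FlywayFactory.create(dataSource, "migration-test",
    ConfigsDatabaseMigrator.DB_IDENTIFIER, ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION);
final DevDatabaseMigrator migrator = new DevDatabaseMigrator(new ConfigsDatabaseMigrator(database, flyway));
migrator.createBaseline();
// ... insert fixture rows here ...
migrator.migrate();
```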
- */ - -package io.airbyte.db.instance.configs.migrations; - -import io.airbyte.db.factory.FlywayFactory; -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; -import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.NamespaceDefinitionType; -import io.airbyte.db.instance.configs.migrations.V0_39_17_001__AddStreamDescriptorsToStateTable.StateType; -import io.airbyte.db.instance.development.DevDatabaseMigrator; -import java.util.UUID; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.jooq.exception.DataAccessException; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -class V0_39_17_001__AddStreamDescriptorsToStateTableTest extends AbstractConfigsDatabaseTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(V0_39_17_001__AddStreamDescriptorsToStateTableTest.class); - private final String STATE_TABLE = "State"; - - private UUID connection1; - private UUID connection2; - - @Test - void testSimpleMigration() { - final DSLContext context = getDslContext(); - - // Adding a couple of states - context.insertInto(DSL.table(STATE_TABLE)) - .columns( - DSL.field("id"), - DSL.field("connection_id")) - .values(UUID.randomUUID(), connection1) - .values(UUID.randomUUID(), connection2) - .execute(); - - // Preconditions check: we should have one row in state - Assertions.assertEquals(2, context.select().from(STATE_TABLE).execute()); - - // Applying the migration - devConfigsDbMigrator.migrate(); - - final UUID newState = UUID.randomUUID(); - context.insertInto(DSL.table(STATE_TABLE)) - .columns( - DSL.field("id"), - DSL.field("connection_id"), - DSL.field("stream_name")) - .values(newState, connection1, "new_stream") - .execute(); - - LOGGER.info(String.valueOf(context.selectFrom("connection").fetch())); - LOGGER.info(String.valueOf(context.selectFrom(STATE_TABLE).fetch())); - - // Our two initial rows and the new row should be LEGACY - Assertions.assertEquals(3, - context.select() - .from(STATE_TABLE) - .where(DSL.field("type").equal(StateType.LEGACY)) - .execute()); - - // There should be no STREAM or GLOBAL - Assertions.assertEquals(0, - context.select() - .from(STATE_TABLE) - .where(DSL.field("type").in(StateType.GLOBAL, StateType.STREAM)) - .execute()); - } - - @Test - void testUniquenessConstraint() { - devConfigsDbMigrator.migrate(); - - final DSLContext context = getDslContext(); - context.insertInto(DSL.table(STATE_TABLE)) - .columns( - DSL.field("id"), - DSL.field("connection_id"), - DSL.field("type"), - DSL.field("stream_name"), - DSL.field("namespace")) - .values(UUID.randomUUID(), connection1, StateType.GLOBAL, "stream1", "ns2") - .execute(); - - context.insertInto(DSL.table(STATE_TABLE)) - .columns( - DSL.field("id"), - DSL.field("connection_id"), - DSL.field("type"), - DSL.field("stream_name"), - DSL.field("namespace")) - .values(UUID.randomUUID(), connection1, StateType.GLOBAL, "stream1", "ns1") - .execute(); - - context.insertInto(DSL.table(STATE_TABLE)) - .columns( - DSL.field("id"), - DSL.field("connection_id"), - DSL.field("type"), - 
DSL.field("stream_name"), - DSL.field("namespace")) - .values(UUID.randomUUID(), connection1, StateType.GLOBAL, "stream2", "ns2") - .execute(); - - Assertions.assertThrows(DataAccessException.class, () -> { - context.insertInto(DSL.table(STATE_TABLE)) - .columns( - DSL.field("id"), - DSL.field("connection_id"), - DSL.field("type"), - DSL.field("stream_name"), - DSL.field("namespace")) - .values(UUID.randomUUID(), connection1, StateType.GLOBAL, "stream1", "ns2") - .execute(); - }); - } - - @BeforeEach - void beforeEach() { - final Flyway flyway = - FlywayFactory.create(dataSource, "V0_39_17_001__AddStreamDescriptorsToStateTableTest", ConfigsDatabaseMigrator.DB_IDENTIFIER, - ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); - final ConfigsDatabaseMigrator configsDbMigrator = new ConfigsDatabaseMigrator(database, flyway); - devConfigsDbMigrator = new DevDatabaseMigrator(configsDbMigrator); - - devConfigsDbMigrator.createBaseline(); - injectMockData(); - } - - @AfterEach - void afterEach() { - // Making sure we reset between tests - dslContext.dropSchemaIfExists("public").cascade().execute(); - dslContext.createSchema("public").execute(); - dslContext.setSchema("public").execute(); - } - - private void injectMockData() { - final DSLContext context = getDslContext(); - - final UUID workspaceId = UUID.randomUUID(); - final UUID actorId = UUID.randomUUID(); - final UUID actorDefinitionId = UUID.randomUUID(); - connection1 = UUID.randomUUID(); - connection2 = UUID.randomUUID(); - - context.insertInto(DSL.table("workspace")) - .columns( - DSL.field("id"), - DSL.field("name"), - DSL.field("slug"), - DSL.field("initial_setup_complete")) - .values( - workspaceId, - "base workspace", - "base_workspace", - true) - .execute(); - context.insertInto(DSL.table("actor_definition")) - .columns( - DSL.field("id"), - DSL.field("name"), - DSL.field("docker_repository"), - DSL.field("docker_image_tag"), - DSL.field("actor_type"), - DSL.field("spec")) - .values( - actorDefinitionId, - "Jenkins", - "farosai/airbyte-jenkins-source", - "0.1.23", - ActorType.source, - JSONB.valueOf("{}")) - .execute(); - context.insertInto(DSL.table("actor")) - .columns( - DSL.field("id"), - DSL.field("workspace_id"), - DSL.field("actor_definition_id"), - DSL.field("name"), - DSL.field("configuration"), - DSL.field("actor_type")) - .values( - actorId, - workspaceId, - actorDefinitionId, - "ActorName", - JSONB.valueOf("{}"), - ActorType.source) - .execute(); - - insertConnection(context, connection1, actorId); - insertConnection(context, connection2, actorId); - } - - private void insertConnection(final DSLContext context, final UUID connectionId, final UUID actorId) { - context.insertInto(DSL.table("connection")) - .columns( - DSL.field("id"), - DSL.field("namespace_definition"), - DSL.field("source_id"), - DSL.field("destination_id"), - DSL.field("name"), - DSL.field("catalog"), - DSL.field("manual")) - .values( - connectionId, - NamespaceDefinitionType.source, - actorId, - actorId, - "Connection" + connectionId.toString(), - JSONB.valueOf("{}"), - true) - .execute(); - } - - private DevDatabaseMigrator devConfigsDbMigrator; - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_40_18_002__AddActorDefinitionNormalizationAndDbtColumnsTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_40_18_002__AddActorDefinitionNormalizationAndDbtColumnsTest.java deleted file mode 100644 index 0f1cdfe13e2f3..0000000000000 --- 
a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_40_18_002__AddActorDefinitionNormalizationAndDbtColumnsTest.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.configs.migrations; - -import static org.junit.jupiter.api.Assertions.*; - -import io.airbyte.db.factory.FlywayFactory; -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; -import io.airbyte.db.instance.development.DevDatabaseMigrator; -import org.flywaydb.core.Flyway; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class V0_40_18_002__AddActorDefinitionNormalizationAndDbtColumnsTest extends AbstractConfigsDatabaseTest { - - @BeforeEach - void beforeEach() { - final Flyway flyway = - FlywayFactory.create(dataSource, "V0_40_18_001__AddInvalidProtocolFlagToConnections", ConfigsDatabaseMigrator.DB_IDENTIFIER, - ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); - final ConfigsDatabaseMigrator configsDbMigrator = new ConfigsDatabaseMigrator(database, flyway); - - final BaseJavaMigration previousMigration = new V0_40_18_001__AddInvalidProtocolFlagToConnections(); - final DevDatabaseMigrator devConfigsDbMigrator = new DevDatabaseMigrator(configsDbMigrator, previousMigration.getVersion()); - devConfigsDbMigrator.createBaseline(); - } - - @Test - void test() throws Exception { - final DSLContext context = getDslContext(); - assertFalse(columnExists(context, "normalization_repository")); - assertFalse(columnExists(context, "normalization_tag")); - assertFalse(columnExists(context, "supports_dbt")); - V0_40_18_002__AddActorDefinitionNormalizationAndDbtColumns.addNormalizationRepositoryColumn(context); - assertTrue(columnExists(context, "normalization_repository")); - V0_40_18_002__AddActorDefinitionNormalizationAndDbtColumns.addNormalizationTagColumn(context); - assertTrue(columnExists(context, "normalization_tag")); - V0_40_18_002__AddActorDefinitionNormalizationAndDbtColumns.addSupportsDbtColumn(context); - assertTrue(columnExists(context, "supports_dbt")); - } - - static boolean columnExists(final DSLContext ctx, final String columnName) { - return ctx.fetchExists(DSL.select() - .from("information_schema.columns") - .where(DSL.field("table_name").eq("actor_definition") - .and(DSL.field("column_name").eq(columnName)))); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_40_3_002__RemoveActorForeignKeyFromOauthParamsTableTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_40_3_002__RemoveActorForeignKeyFromOauthParamsTableTest.java deleted file mode 100644 index aa17bd7bb9ae1..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_40_3_002__RemoveActorForeignKeyFromOauthParamsTableTest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.configs.migrations; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.db.factory.FlywayFactory; -import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; -import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; -import io.airbyte.db.instance.development.DevDatabaseMigrator; -import java.io.IOException; -import java.sql.SQLException; -import org.flywaydb.core.Flyway; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class V0_40_3_002__RemoveActorForeignKeyFromOauthParamsTableTest extends AbstractConfigsDatabaseTest { - - @BeforeEach - void beforeEach() { - final Flyway flyway = - FlywayFactory.create(dataSource, "V0_40_3_002__RemoveActorForeignKeyFromOauthParamsTableTest", ConfigsDatabaseMigrator.DB_IDENTIFIER, - ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); - final ConfigsDatabaseMigrator configsDbMigrator = new ConfigsDatabaseMigrator(database, flyway); - - final BaseJavaMigration previousMigration = new V0_40_3_001__AddProtocolVersionToActorDefinition(); - final DevDatabaseMigrator devConfigsDbMigrator = new DevDatabaseMigrator(configsDbMigrator, previousMigration.getVersion()); - devConfigsDbMigrator.createBaseline(); - } - - @Test - void test() throws IOException, SQLException { - final DSLContext context = getDslContext(); - assertTrue(foreignKeyExists(context)); - V0_40_3_002__RemoveActorForeignKeyFromOauthParamsTable.removeActorDefinitionForeignKey(context); - assertFalse(foreignKeyExists(context)); - } - - protected static boolean foreignKeyExists(final DSLContext ctx) { - return ctx.fetchExists(DSL.select() - .from("information_schema.table_constraints") - .where(DSL.field("table_name").eq("actor_oauth_parameter") - .and(DSL.field("constraint_name").eq("actor_oauth_parameter_actor_definition_id_fkey")))); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/development/MigrationDevHelperTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/development/MigrationDevHelperTest.java deleted file mode 100644 index 93725e9c9b9e3..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/development/MigrationDevHelperTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.development; - -import static org.junit.jupiter.api.Assertions.*; - -import io.airbyte.commons.version.AirbyteVersion; -import java.util.Optional; -import org.flywaydb.core.api.MigrationVersion; -import org.junit.jupiter.api.Test; - -@SuppressWarnings({"PMD.AvoidUsingHardCodedIP", "PMD.JUnitTestsShouldIncludeAssert"}) -class MigrationDevHelperTest { - - private static final String VERSION_0113_ALPHA = "0.11.3-alpha"; - - @Test - void testGetCurrentAirbyteVersion() { - // Test that this method will not throw any exception. 
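// Context for the assertions that follow (inferred from this test, not from the
// helper's javadoc): MigrationDevHelper works with Flyway versions of the form
// <major>.<minor>.<patch>.<migration id>. "0.11.3.010" maps to AirbyteVersion
// "0.11.3" and migration id "010"; getNextMigrationVersion starts a fresh ".001"
// series when the Airbyte version is ahead of the latest migration, and otherwise
// increments the latest migration id ("0.11.3.003" -> "0.11.3.004").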
- MigrationDevHelper.getCurrentAirbyteVersion(); - } - - @Test - void testGetAirbyteVersion() { - final MigrationVersion migrationVersion = MigrationVersion.fromVersion("0.11.3.010"); - final AirbyteVersion airbyteVersion = MigrationDevHelper.getAirbyteVersion(migrationVersion); - assertEquals("0.11.3", airbyteVersion.serialize()); - } - - @Test - void testFormatAirbyteVersion() { - final AirbyteVersion airbyteVersion = new AirbyteVersion(VERSION_0113_ALPHA); - assertEquals("0_11_3", MigrationDevHelper.formatAirbyteVersion(airbyteVersion)); - } - - @Test - void testGetMigrationId() { - final MigrationVersion migrationVersion = MigrationVersion.fromVersion("0.11.3.010"); - assertEquals("010", MigrationDevHelper.getMigrationId(migrationVersion)); - } - - @Test - void testGetNextMigrationVersion() { - // Migration version does not exist - assertEquals("0.11.3.001", MigrationDevHelper.getNextMigrationVersion( - new AirbyteVersion(VERSION_0113_ALPHA), - Optional.empty()).getVersion()); - - // Airbyte version is greater - assertEquals("0.11.3.001", MigrationDevHelper.getNextMigrationVersion( - new AirbyteVersion(VERSION_0113_ALPHA), - Optional.of(MigrationVersion.fromVersion("0.10.9.003"))).getVersion()); - - // Airbyte version is equal to migration version - assertEquals("0.11.3.004", MigrationDevHelper.getNextMigrationVersion( - new AirbyteVersion(VERSION_0113_ALPHA), - Optional.of(MigrationVersion.fromVersion("0.11.3.003"))).getVersion()); - - // Migration version is greater - assertEquals("0.11.3.004", MigrationDevHelper.getNextMigrationVersion( - new AirbyteVersion("0.9.17-alpha"), - Optional.of(MigrationVersion.fromVersion("0.11.3.003"))).getVersion()); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/AbstractJobsDatabaseTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/AbstractJobsDatabaseTest.java deleted file mode 100644 index af8b7bba64eb0..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/AbstractJobsDatabaseTest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.jobs; - -import io.airbyte.db.Database; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.AbstractDatabaseTest; -import io.airbyte.db.instance.test.TestDatabaseProviders; -import java.io.IOException; -import javax.sql.DataSource; -import org.jooq.DSLContext; - -public abstract class AbstractJobsDatabaseTest extends AbstractDatabaseTest { - - @Override - public Database getDatabase(final DataSource dataSource, final DSLContext dslContext) throws IOException, DatabaseInitializationException { - return new TestDatabaseProviders(dataSource, dslContext).turnOffMigration().createNewJobsDatabase(); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrationDevCenterTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrationDevCenterTest.java deleted file mode 100644 index 5213edd27bdc4..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrationDevCenterTest.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.jobs; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.commons.io.IOs; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.db.instance.development.MigrationDevCenter; -import java.nio.file.Path; -import org.junit.jupiter.api.Test; - -class JobsDatabaseMigrationDevCenterTest { - - /** - * This test ensures that the dev center is working correctly end-to-end. If it fails, it means - * either the migration is not run properly, or the database initialization is incorrect. - */ - @Test - void testSchemaDump() { - final MigrationDevCenter devCenter = new JobsDatabaseMigrationDevCenter(); - final String schemaDump = IOs.readFile(Path.of(DatabaseConstants.JOBS_SCHEMA_DUMP_PATH)); - assertEquals(schemaDump.trim(), devCenter.dumpSchema(false)); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseMigratorTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseMigratorTest.java deleted file mode 100644 index af17e0d2ac0cf..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseMigratorTest.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.jobs; - -import io.airbyte.db.factory.FlywayFactory; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.db.instance.DatabaseMigrator; -import io.airbyte.db.instance.development.MigrationDevHelper; -import java.io.IOException; -import org.flywaydb.core.Flyway; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class JobsDatabaseMigratorTest extends AbstractJobsDatabaseTest { - - @Test - void dumpSchema() throws IOException { - final Flyway flyway = FlywayFactory.create(getDataSource(), getClass().getSimpleName(), JobsDatabaseMigrator.DB_IDENTIFIER, - JobsDatabaseMigrator.MIGRATION_FILE_LOCATION); - final DatabaseMigrator migrator = new JobsDatabaseMigrator(database, flyway); - migrator.migrate(); - final String schema = migrator.dumpSchema(); - MigrationDevHelper.dumpSchema(schema, DatabaseConstants.JOBS_SCHEMA_DUMP_PATH, false); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001_MigrateFailureReasonEnumValues_Test.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001_MigrateFailureReasonEnumValues_Test.java deleted file mode 100644 index c838b7543d80c..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001_MigrateFailureReasonEnumValues_Test.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.jobs.migrations; - -import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.NEW_CONFIG_ERROR; -import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.NEW_MANUAL_CANCELLATION; -import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.NEW_REPLICATION_ORIGIN; -import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.NEW_SYSTEM_ERROR; -import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.OLD_CONFIG_ERROR; -import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.OLD_MANUAL_CANCELLATION; -import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.OLD_REPLICATION_ORIGIN; -import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.OLD_SYSTEM_ERROR; -import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.OLD_UNKNOWN; -import static org.jooq.impl.DSL.asterisk; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.Metadata; -import io.airbyte.db.instance.jobs.AbstractJobsDatabaseTest; -import io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.AttemptFailureSummaryForMigration; -import io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.FailureReasonForMigration; -import java.util.List; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.junit.jupiter.api.Test; - -class V0_35_40_001_MigrateFailureReasonEnumValues_Test extends AbstractJobsDatabaseTest { - - private static int currJobId = 1; - private static final long timeNowMillis = System.currentTimeMillis(); - private static final String ORIGIN_SOURCE = "source"; - - // create pairs of old failure reasons and their fixed versions. 
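// The renames under test, per the static imports above: OLD_REPLICATION_ORIGIN ->
// NEW_REPLICATION_ORIGIN, OLD_MANUAL_CANCELLATION -> NEW_MANUAL_CANCELLATION,
// OLD_SYSTEM_ERROR -> NEW_SYSTEM_ERROR, OLD_CONFIG_ERROR -> NEW_CONFIG_ERROR;
// OLD_UNKNOWN origins/types are cleared to null, and values the migration does
// not recognize are left untouched.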
- private static final FailureReasonForMigration originReplicationWorker = baseFailureReason().withFailureOrigin(OLD_REPLICATION_ORIGIN); - private static final FailureReasonForMigration fixedOriginReplicationWorker = baseFailureReason().withFailureOrigin(NEW_REPLICATION_ORIGIN); - - private static final FailureReasonForMigration originUnknown = baseFailureReason().withFailureOrigin(OLD_UNKNOWN); - private static final FailureReasonForMigration fixedOriginUnknown = baseFailureReason().withFailureOrigin(null); - - private static final FailureReasonForMigration typeManualCancellation = baseFailureReason().withFailureType(OLD_MANUAL_CANCELLATION); - private static final FailureReasonForMigration fixedTypeManualCancellation = baseFailureReason().withFailureType(NEW_MANUAL_CANCELLATION); - - private static final FailureReasonForMigration typeSystemError = baseFailureReason().withFailureType(OLD_SYSTEM_ERROR); - private static final FailureReasonForMigration fixedTypeSystemError = baseFailureReason().withFailureType(NEW_SYSTEM_ERROR); - - private static final FailureReasonForMigration typeConfigError = baseFailureReason().withFailureType(OLD_CONFIG_ERROR); - private static final FailureReasonForMigration fixedTypeConfigError = baseFailureReason().withFailureType(NEW_CONFIG_ERROR); - - private static final FailureReasonForMigration typeUnknown = baseFailureReason().withFailureType(OLD_UNKNOWN); - private static final FailureReasonForMigration fixedTypeUnknown = baseFailureReason().withFailureType(null); - - // enum values that don't need updating, or aren't recognized at all, should be left untouched - private static final FailureReasonForMigration noChangeNeeded = baseFailureReason().withFailureOrigin(ORIGIN_SOURCE); - private static final FailureReasonForMigration unrecognizedValue = baseFailureReason().withFailureType("someUnrecognizedValue"); - - // create failure summaries containing failure reasons that need fixing. - // mixing in noChangeNeeded reasons in different spots to make sure the migration properly leaves - // those untouched. 
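// For example, summaryFixReplicationOrigin = getFailureSummary(noChangeNeeded,
// originReplicationWorker) is expected to come back as
// getFailureSummary(noChangeNeeded, fixedOriginReplicationWorker): only the stale
// reason is rewritten, and the surrounding reasons are preserved in place.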
- private static final AttemptFailureSummaryForMigration summaryFixReplicationOrigin = getFailureSummary(noChangeNeeded, originReplicationWorker); - private static final AttemptFailureSummaryForMigration summaryFixReplicationOriginAndManualCancellationType = - getFailureSummary(originReplicationWorker, typeManualCancellation, noChangeNeeded); - private static final AttemptFailureSummaryForMigration summaryFixUnknownOriginAndUnknownType = - getFailureSummary(originUnknown, noChangeNeeded, typeUnknown); - private static final AttemptFailureSummaryForMigration summaryFixMultipleSystemErrorType = getFailureSummary(typeSystemError, typeSystemError); - private static final AttemptFailureSummaryForMigration summaryFixConfigErrorType = getFailureSummary(typeConfigError); - private static final AttemptFailureSummaryForMigration summaryNoChangeNeeded = getFailureSummary(noChangeNeeded, noChangeNeeded); - private static final AttemptFailureSummaryForMigration summaryFixOriginAndLeaveUnrecognizedValue = - getFailureSummary(originReplicationWorker, unrecognizedValue); - - // define attempt ids corresponding to each summary above - private static final Long attemptIdForFixReplicationOrigin = 1L; - private static final Long attemptIdForFixReplicationOriginAndManualCancellationType = 2L; - private static final Long attemptIdForFixUnknownOriginAndUnknownType = 3L; - private static final Long attemptIdForFixMultipleSystemErrorType = 4L; - private static final Long attemptIdForFixConfigErrorType = 5L; - private static final Long attemptIdForNoChangeNeeded = 6L; - private static final Long attemptIdForFixOriginAndLeaveUnrecognizedValue = 7L; - - // create expected fixed failure summaries after migration. - private static final AttemptFailureSummaryForMigration expectedSummaryFixReplicationOrigin = - getFailureSummary(noChangeNeeded, fixedOriginReplicationWorker); - private static final AttemptFailureSummaryForMigration expectedSummaryFixReplicationOriginAndManualCancellationType = - getFailureSummary(fixedOriginReplicationWorker, fixedTypeManualCancellation, noChangeNeeded); - private static final AttemptFailureSummaryForMigration expectedSummaryFixUnknownOriginAndUnknownType = - getFailureSummary(fixedOriginUnknown, noChangeNeeded, fixedTypeUnknown); - private static final AttemptFailureSummaryForMigration expectedSummaryFixMultipleSystemErrorType = - getFailureSummary(fixedTypeSystemError, fixedTypeSystemError); - private static final AttemptFailureSummaryForMigration expectedSummaryFixConfigErrorType = - getFailureSummary(fixedTypeConfigError); - private static final AttemptFailureSummaryForMigration expectedSummaryNoChangeNeeded = - getFailureSummary(noChangeNeeded, noChangeNeeded); - private static final AttemptFailureSummaryForMigration expectedFixOriginAndLeaveUnrecognizedValue = - getFailureSummary(fixedOriginReplicationWorker, unrecognizedValue); - - @Test - void test() throws Exception { - final DSLContext ctx = getDslContext(); - - V0_35_5_001__Add_failureSummary_col_to_Attempts.migrate(ctx); - - addRecordsWithOldEnumValues(ctx); - - V0_35_40_001__MigrateFailureReasonEnumValues.updateRecordsWithNewEnumValues(ctx); - - verifyEnumValuesFixed(ctx); - } - - private static void addRecordsWithOldEnumValues(final DSLContext ctx) { - insertAttemptWithSummary(ctx, attemptIdForFixReplicationOrigin, summaryFixReplicationOrigin); - insertAttemptWithSummary(ctx, attemptIdForFixReplicationOriginAndManualCancellationType, summaryFixReplicationOriginAndManualCancellationType); - insertAttemptWithSummary(ctx, 
attemptIdForFixUnknownOriginAndUnknownType, summaryFixUnknownOriginAndUnknownType); - insertAttemptWithSummary(ctx, attemptIdForFixMultipleSystemErrorType, summaryFixMultipleSystemErrorType); - insertAttemptWithSummary(ctx, attemptIdForFixConfigErrorType, summaryFixConfigErrorType); - insertAttemptWithSummary(ctx, attemptIdForNoChangeNeeded, summaryNoChangeNeeded); - insertAttemptWithSummary(ctx, attemptIdForFixOriginAndLeaveUnrecognizedValue, summaryFixOriginAndLeaveUnrecognizedValue); - } - - private static void verifyEnumValuesFixed(final DSLContext ctx) { - assertEquals(expectedSummaryFixReplicationOrigin, fetchFailureSummary(ctx, attemptIdForFixReplicationOrigin)); - assertEquals(expectedSummaryFixReplicationOriginAndManualCancellationType, - fetchFailureSummary(ctx, attemptIdForFixReplicationOriginAndManualCancellationType)); - assertEquals(expectedSummaryFixUnknownOriginAndUnknownType, fetchFailureSummary(ctx, attemptIdForFixUnknownOriginAndUnknownType)); - assertEquals(expectedSummaryFixMultipleSystemErrorType, fetchFailureSummary(ctx, attemptIdForFixMultipleSystemErrorType)); - assertEquals(expectedSummaryFixConfigErrorType, fetchFailureSummary(ctx, attemptIdForFixConfigErrorType)); - assertEquals(expectedSummaryNoChangeNeeded, fetchFailureSummary(ctx, attemptIdForNoChangeNeeded)); - assertEquals(expectedFixOriginAndLeaveUnrecognizedValue, fetchFailureSummary(ctx, attemptIdForFixOriginAndLeaveUnrecognizedValue)); - } - - private static void insertAttemptWithSummary(final DSLContext ctx, final Long attemptId, final AttemptFailureSummaryForMigration summary) { - ctx.insertInto(DSL.table("attempts")) - .columns( - DSL.field("id"), - DSL.field("failure_summary"), - DSL.field("job_id"), - DSL.field("attempt_number")) - .values( - attemptId, - JSONB.valueOf(Jsons.serialize(summary)), - currJobId, - 1) - .execute(); - - currJobId++; - } - - private static AttemptFailureSummaryForMigration fetchFailureSummary(final DSLContext ctx, final Long attemptId) { - final Record record = ctx.fetchOne(DSL.select(asterisk()) - .from(DSL.table("attempts")) - .where(DSL.field("id").eq(attemptId))); - - return Jsons.deserialize( - record.get(DSL.field("failure_summary", SQLDataType.JSONB.nullable(true))).data(), - AttemptFailureSummaryForMigration.class); - - } - - private static FailureReasonForMigration baseFailureReason() { - return new FailureReasonForMigration() - .withInternalMessage("some internal message") - .withExternalMessage("some external message") - .withRetryable(false) - .withTimestamp(timeNowMillis) - .withStacktrace("some stacktrace") - .withMetadata(new Metadata().withAdditionalProperty("key1", "value1")); - } - - private static AttemptFailureSummaryForMigration getFailureSummary(final FailureReasonForMigration... failureReasons) { - return new AttemptFailureSummaryForMigration() - .withPartialSuccess(false) - .withFailures(List.of(failureReasons)); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_AttemptsTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_AttemptsTest.java deleted file mode 100644 index 9b784db3c0757..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_AttemptsTest.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.jobs.migrations; - -import io.airbyte.db.instance.jobs.AbstractJobsDatabaseTest; -import java.io.IOException; -import java.sql.SQLException; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class V0_35_5_001__Add_failureSummary_col_to_AttemptsTest extends AbstractJobsDatabaseTest { - - @Test - void test() throws SQLException, IOException { - final DSLContext context = getDslContext(); - Assertions.assertFalse(failureSummaryColumnExists(context)); - V0_35_5_001__Add_failureSummary_col_to_Attempts.addFailureSummaryColumn(context); - Assertions.assertTrue(failureSummaryColumnExists(context)); - } - - protected static boolean failureSummaryColumnExists(final DSLContext ctx) { - return ctx.fetchExists(DSL.select() - .from("information_schema.columns") - .where(DSL.field("table_name").eq("attempts") - .and(DSL.field("column_name").eq("failure_summary")))); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseAvailabilityCheck.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseAvailabilityCheck.java deleted file mode 100644 index 664c1c1d871d7..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseAvailabilityCheck.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.toys; - -import io.airbyte.db.check.DatabaseAvailabilityCheck; -import java.util.Optional; -import org.jooq.DSLContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ToysDatabaseAvailabilityCheck implements DatabaseAvailabilityCheck { - - private static final Logger LOGGER = LoggerFactory.getLogger(ToysDatabaseAvailabilityCheck.class); - - private final DSLContext dslContext; - - private final long timeoutMs; - - public ToysDatabaseAvailabilityCheck(final DSLContext dslContext, final long timeoutMs) { - this.dslContext = dslContext; - this.timeoutMs = timeoutMs; - } - - @Override - public String getDatabaseName() { - return ToysDatabaseConstants.DATABASE_LOGGING_NAME; - } - - @Override - public Optional getDslContext() { - return Optional.ofNullable(dslContext); - } - - @Override - public Logger getLogger() { - return LOGGER; - } - - @Override - public long getTimeoutMs() { - return timeoutMs; - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseConstants.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseConstants.java deleted file mode 100644 index c207f17c14349..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseConstants.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.toys; - -/** - * Collection of toys database related constants. - */ -public final class ToysDatabaseConstants { - - /** - * Logical name of the Toys database. - */ - public static final String DATABASE_LOGGING_NAME = "toys"; - - /** - * Expected table to be present in the Toys database after creation. - */ - public static final String TABLE_NAME = "toy_cars"; - - /** - * Path to the script that contains the initial schema definition for the Toys database. - */ - public static final String SCHEMA_PATH = "toys_database/schema.sql"; - - /** - * Private constructor to prevent instantiation. 
- */ - private ToysDatabaseConstants() {} - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseInitializer.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseInitializer.java deleted file mode 100644 index e11628882944a..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseInitializer.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.toys; - -import io.airbyte.db.check.DatabaseAvailabilityCheck; -import io.airbyte.db.init.DatabaseInitializer; -import java.util.Collection; -import java.util.Optional; -import java.util.Set; -import org.jooq.DSLContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ToysDatabaseInitializer implements DatabaseInitializer { - - private static final Logger LOGGER = LoggerFactory.getLogger(ToysDatabaseInitializer.class); - - private final DatabaseAvailabilityCheck databaseAvailablityCheck; - - private final DSLContext dslContext; - - private final String initialSchema; - - public ToysDatabaseInitializer(final DatabaseAvailabilityCheck databaseAvailablityCheck, - final DSLContext dslContext, - final String initialSchema) { - this.databaseAvailablityCheck = databaseAvailablityCheck; - this.dslContext = dslContext; - this.initialSchema = initialSchema; - } - - @Override - public Optional getDatabaseAvailabilityCheck() { - return Optional.ofNullable(databaseAvailablityCheck); - } - - @Override - public String getDatabaseName() { - return ToysDatabaseConstants.DATABASE_LOGGING_NAME; - } - - @Override - public Optional getDslContext() { - return Optional.ofNullable(dslContext); - } - - @Override - public String getInitialSchema() { - return initialSchema; - } - - @Override - public Logger getLogger() { - return LOGGER; - } - - @Override - public Optional> getTableNames() { - return Optional.of(Set.of(ToysDatabaseConstants.TABLE_NAME)); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigrator.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigrator.java deleted file mode 100644 index 9c61a6db7c2fb..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigrator.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.toys; - -import io.airbyte.db.Database; -import io.airbyte.db.instance.FlywayDatabaseMigrator; -import org.flywaydb.core.Flyway; - -/** - * A database migrator for testing purposes only. - */ -public class ToysDatabaseMigrator extends FlywayDatabaseMigrator { - - public static final String DB_IDENTIFIER = "toy"; - public static final String MIGRATION_FILE_LOCATION = "classpath:io/airbyte/db/instance/toys/migrations"; - - public ToysDatabaseMigrator(final Database database, final Flyway flyway) { - super(database, flyway); - } - - @Override - protected String getDisclaimer() { - return ""; - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigratorTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigratorTest.java deleted file mode 100644 index 5a7a64a54e9bf..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigratorTest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.toys; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.db.Database; -import io.airbyte.db.check.DatabaseAvailabilityCheck; -import io.airbyte.db.factory.FlywayFactory; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.AbstractDatabaseTest; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.db.instance.DatabaseMigrator; -import java.io.IOException; -import javax.sql.DataSource; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; -import org.junit.jupiter.api.Test; - -class ToysDatabaseMigratorTest extends AbstractDatabaseTest { - - private static final String PRE_MIGRATION_SCHEMA_DUMP = "toys_database/pre_migration_schema.txt"; - private static final String POST_MIGRATION_SCHEMA_DUMP = "toys_database/schema_dump.txt"; - - @Override - public Database getDatabase(final DataSource dataSource, final DSLContext dslContext) throws IOException { - return new Database(dslContext); - } - - @Test - void testMigration() throws Exception { - final DataSource dataSource = getDataSource(); - - initializeDatabase(getDslContext()); - - final Flyway flyway = FlywayFactory.create(dataSource, getClass().getSimpleName(), ToysDatabaseMigrator.DB_IDENTIFIER, - ToysDatabaseMigrator.MIGRATION_FILE_LOCATION); - final DatabaseMigrator migrator = new ToysDatabaseMigrator(database, flyway); - - // Compare pre migration baseline schema - migrator.createBaseline(); - final String preMigrationSchema = MoreResources.readResource(PRE_MIGRATION_SCHEMA_DUMP).strip(); - final String actualPreMigrationSchema = migrator.dumpSchema(); - assertEquals(preMigrationSchema, actualPreMigrationSchema, "The pre migration schema dump has changed"); - - // Compare post migration schema - migrator.migrate(); - final String postMigrationSchema = MoreResources.readResource(POST_MIGRATION_SCHEMA_DUMP).strip(); - final String actualPostMigrationSchema = migrator.dumpSchema(); - assertEquals(postMigrationSchema, actualPostMigrationSchema, "The post migration schema dump has changed"); - } - - private void initializeDatabase(final DSLContext dslContext) throws DatabaseInitializationException, IOException { - final String initialSchema = MoreResources.readResource(ToysDatabaseConstants.SCHEMA_PATH); - final DatabaseAvailabilityCheck availabilityCheck = new ToysDatabaseAvailabilityCheck(dslContext, - DatabaseConstants.DEFAULT_CONNECTION_TIMEOUT_MS); - new ToysDatabaseInitializer(availabilityCheck, dslContext, initialSchema).initialize(); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/migrations/V0_30_4_001__Add_timestamp_columns.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/migrations/V0_30_4_001__Add_timestamp_columns.java deleted file mode 100644 index 2514fb6e40c91..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/migrations/V0_30_4_001__Add_timestamp_columns.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.db.instance.toys.migrations; - -import static org.jooq.impl.DSL.currentTimestamp; -import static org.jooq.impl.DSL.field; - -import io.airbyte.db.instance.toys.ToysDatabaseConstants; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; - -public class V0_30_4_001__Add_timestamp_columns extends BaseJavaMigration { - - @Override - public void migrate(final Context context) { - final DSLContext dsl = DSL.using(context.getConnection()); - dsl.alterTable(ToysDatabaseConstants.TABLE_NAME) - .addColumn(field("created_at", SQLDataType.TIMESTAMP.defaultValue(currentTimestamp()).nullable(false))) - .execute(); - dsl.alterTable(ToysDatabaseConstants.TABLE_NAME) - .addColumn(field("updated_at", SQLDataType.TIMESTAMP.defaultValue(currentTimestamp()).nullable(false))) - .execute(); - } - -} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/migrations/V0_30_4_002__Remove_updated_at_column.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/migrations/V0_30_4_002__Remove_updated_at_column.java deleted file mode 100644 index d7483455073c0..0000000000000 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/migrations/V0_30_4_002__Remove_updated_at_column.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.db.instance.toys.migrations; - -import static org.jooq.impl.DSL.field; - -import io.airbyte.db.instance.toys.ToysDatabaseConstants; -import org.flywaydb.core.api.migration.BaseJavaMigration; -import org.flywaydb.core.api.migration.Context; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; - -public class V0_30_4_002__Remove_updated_at_column extends BaseJavaMigration { - - @Override - public void migrate(final Context context) { - final DSLContext dsl = DSL.using(context.getConnection()); - dsl.alterTable(ToysDatabaseConstants.TABLE_NAME) - .dropColumn(field("updated_at")) - .execute(); - } - -} diff --git a/airbyte-db/jooq/README.md b/airbyte-db/jooq/README.md deleted file mode 100644 index 32d762fe570cb..0000000000000 --- a/airbyte-db/jooq/README.md +++ /dev/null @@ -1,86 +0,0 @@ -# jOOQ Code Generation - -## How to Use -This module generates jOOQ code for the configs and jobs database. To use the generated code, add the following dependency: - -```gradle -dependencies { - implementation project(':airbyte-db:jooq') -} -``` - -The generated code exists in the package `io.airbyte.db.instance..jooq` under the directory `build/generated/Database/src/main/java`. - -## Code Generation -Gradle plugin `nu.studer.jooq` is used for jOOQ code generation. See [here](https://github.com/etiennestuder/gradle-jooq-plugin) for details. - -It is necessary to separate this module from the `lib` module, because we use a custom database (`FlywayMigrationDatabase`) that runs Flyway migration first for the code generator. This implementation needs to be compiled before it can be used. - -The code will be automatically generated when this module is compiled. 
To manually update the generated code, run the `compileJava` task: - -```sh -SUB_BUILD=PLATFORM ./gradlew :airbyte-db:jooq:compileJava -``` - -Or run the following tasks for individual database: - -```sh -# for configs database -SUB_BUILD=PLATFORM ./gradlew :airbyte-db:jooq:generateConfigsDatabaseJooq - -# for jobs database -SUB_BUILD=PLATFORM ./gradlew :airbyte-db:jooq:generateJobsDatabaseJooq -``` - -## How to Setup Code Generation for New Database -- In `build.gradle`, do the following. -- Add a new jOOQ configuration under `jooq.configuration`. - - This step will automatically create a `generateDatabaseJooq` task. -- Register the output of the code generation task in the main sourceSet. -- Setup caching for the code generation task. - -Template: - -```build.gradle -// add jooq configuration -jooq { - configurations { - Database { - generateSchemaSourceOnCompilation = true - generationTool { - generator { - name = 'org.jooq.codegen.DefaultGenerator' - database { - name = 'io.airbyte.db.instance.configs.ConfigsFlywayMigrationDatabase' - inputSchema = 'public' - excludes = 'airbyte_configs_migrations' - } - target { - packageName = 'io.airbyte.db.instance.configs.jooq' - directory = 'build/generated/configsDatabase/src/main/java' - } - } - } - } - } -} - -// register output as source set -sourceSets.main.java.srcDirs ( - tasks.named('generateDatabaseJooq').flatMap { it.outputDir } -) - -sourceSets { - main { - java { - srcDirs "$buildDir/generated/Database/src/main/java" - } - } -} - -// setup caching -tasks.named('generateDatabaseJooq').configure { - allInputsDeclared = true - outputs.cacheIf { true } -} -``` diff --git a/airbyte-db/jooq/build.gradle b/airbyte-db/jooq/build.gradle deleted file mode 100644 index 4ac3a2821cb4b..0000000000000 --- a/airbyte-db/jooq/build.gradle +++ /dev/null @@ -1,93 +0,0 @@ -plugins { - id 'java' - id 'nu.studer.jooq' version '6.0.1' -} - -dependencies { - implementation libs.jooq.meta - implementation libs.jooq - implementation libs.postgresql - implementation libs.flyway.core - implementation project(':airbyte-db:db-lib') - - // jOOQ code generation - implementation libs.jooq.codegen - implementation libs.platform.testcontainers.postgresql - // These are required because gradle might be using lower version of Jna from other - // library transitive dependency. Can be removed if we can figure out which library is the cause. 
- // Refer: https://github.com/testcontainers/testcontainers-java/issues/3834#issuecomment-825409079 - implementation 'net.java.dev.jna:jna:5.8.0' - implementation 'net.java.dev.jna:jna-platform:5.8.0' - - // The jOOQ code generator only has access to classes added to the jooqGenerator configuration - jooqGenerator project(':airbyte-db:db-lib') - jooqGenerator libs.platform.testcontainers.postgresql -} - -jooq { - version = '3.13.4' - edition = nu.studer.gradle.jooq.JooqEdition.OSS - - configurations { - configsDatabase { - generateSchemaSourceOnCompilation = true - generationTool { - generator { - name = 'org.jooq.codegen.DefaultGenerator' - database { - name = 'io.airbyte.db.instance.configs.ConfigsFlywayMigrationDatabase' - inputSchema = 'public' - excludes = 'airbyte_configs_migrations' - } - target { - packageName = 'io.airbyte.db.instance.configs.jooq.generated' - directory = 'build/generated/configsDatabase/src/main/java' - } - } - } - } - - jobsDatabase { - generateSchemaSourceOnCompilation = true - generationTool { - generator { - name = 'org.jooq.codegen.DefaultGenerator' - database { - name = 'io.airbyte.db.instance.jobs.JobsFlywayMigrationDatabase' - inputSchema = 'public' - excludes = 'airbyte_jobs_migrations' - } - target { - packageName = 'io.airbyte.db.instance.jobs.jooq.generated' - directory = 'build/generated/jobsDatabase/src/main/java' - } - } - } - } - } -} - -sourceSets.main.java.srcDirs ( - tasks.named('generateConfigsDatabaseJooq').flatMap { it.outputDir }, - tasks.named('generateJobsDatabaseJooq').flatMap { it.outputDir } -) - -sourceSets { - main { - java { - srcDirs "$buildDir/generated/configsDatabase/src/main/java", "$buildDir/generated/jobsDatabase/src/main/java" - } - } -} - -tasks.named('generateConfigsDatabaseJooq').configure { - allInputsDeclared = true - outputs.cacheIf { true } -} - -tasks.named('generateJobsDatabaseJooq').configure { - allInputsDeclared = true - outputs.cacheIf { true } -} - -Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project) diff --git a/airbyte-integrations/bases/base-normalization/build.gradle b/airbyte-integrations/bases/base-normalization/build.gradle index 4693b612adc32..7de626f4d40d5 100644 --- a/airbyte-integrations/bases/base-normalization/build.gradle +++ b/airbyte-integrations/bases/base-normalization/build.gradle @@ -8,12 +8,12 @@ airbytePython { } dependencies { - implementation project(':airbyte-commons-worker') + implementation project(':airbyte-connector-test-harnesses:acceptance-test-harness') } // we need to access the sshtunneling script from airbyte-workers for ssh support -task copySshScript(type: Copy, dependsOn: [project(':airbyte-commons-worker').processResources]) { - from "${project(':airbyte-commons-worker').buildDir}/resources/main" +task copySshScript(type: Copy, dependsOn: [project(':airbyte-connector-test-harnesses:acceptance-test-harness').processResources]) { + from "${project(':airbyte-connector-test-harnesses:acceptance-test-harness').buildDir}/resources/main" into "${buildDir}" include "sshtunneling.sh" } diff --git a/airbyte-integrations/bases/standard-destination-test/build.gradle b/airbyte-integrations/bases/standard-destination-test/build.gradle index a6ef45eb9abce..722df8b4a2bf7 100644 --- a/airbyte-integrations/bases/standard-destination-test/build.gradle +++ b/airbyte-integrations/bases/standard-destination-test/build.gradle @@ -3,7 +3,7 @@ plugins { } dependencies { implementation project(':airbyte-db:db-lib') - implementation 
project(':airbyte-commons-worker') + implementation project(':airbyte-connector-test-harnesses:acceptance-test-harness') implementation project(':airbyte-config:config-models') implementation project(':airbyte-config:init') implementation project(':airbyte-json-validation') diff --git a/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java b/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java index ca3e5ae8255df..e2f45f4cd59f8 100644 --- a/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java +++ b/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java @@ -26,7 +26,6 @@ import io.airbyte.commons.resources.MoreResources; import io.airbyte.commons.string.Strings; import io.airbyte.commons.util.MoreIterators; -import io.airbyte.config.EnvConfigs; import io.airbyte.config.JobGetSpecConfig; import io.airbyte.config.OperatorDbt; import io.airbyte.config.StandardCheckConnectionInput; @@ -53,11 +52,10 @@ import io.airbyte.protocol.models.v0.ConnectorSpecification; import io.airbyte.protocol.models.v0.DestinationSyncMode; import io.airbyte.protocol.models.v0.SyncMode; -import io.airbyte.workers.WorkerConfigs; -import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.exception.TestHarnessException; import io.airbyte.workers.general.DbtTransformationRunner; -import io.airbyte.workers.general.DefaultCheckConnectionWorker; -import io.airbyte.workers.general.DefaultGetSpecWorker; +import io.airbyte.workers.general.DefaultCheckConnectionTestHarness; +import io.airbyte.workers.general.DefaultGetSpecTestHarness; import io.airbyte.workers.helper.ConnectorConfigUpdater; import io.airbyte.workers.helper.EntrypointEnvChecker; import io.airbyte.workers.internal.AirbyteDestination; @@ -116,7 +114,6 @@ public abstract class DestinationAcceptanceTest { private Path jobRoot; private ProcessFactory processFactory; - private WorkerConfigs workerConfigs; private ConnectorConfigUpdater mConnectorConfigUpdater; protected Path localRoot; @@ -217,7 +214,7 @@ protected boolean implementsNamespaces() { * * @return - a boolean. */ - protected boolean implementsAppend() throws WorkerException { + protected boolean implementsAppend() throws TestHarnessException { final ConnectorSpecification spec = runSpec(); assertNotNull(spec); if (spec.getSupportsIncremental() != null) { @@ -255,7 +252,7 @@ protected String getNormalizationIntegrationType() { * * @return - a boolean. */ - protected boolean implementsAppendDedup() throws WorkerException { + protected boolean implementsAppendDedup() throws TestHarnessException { final ConnectorSpecification spec = runSpec(); assertNotNull(spec); if (spec.getSupportedDestinationSyncModes() != null) { @@ -271,7 +268,7 @@ protected boolean implementsAppendDedup() throws WorkerException { * * @return - a boolean. 
*/ - protected boolean implementsOverwrite() throws WorkerException { + protected boolean implementsOverwrite() throws TestHarnessException { final ConnectorSpecification spec = runSpec(); assertNotNull(spec); if (spec.getSupportedDestinationSyncModes() != null) { @@ -347,13 +344,16 @@ void setUpInternal() throws Exception { LOGGER.info("jobRoot: {}", jobRoot); LOGGER.info("localRoot: {}", localRoot); testEnv = new TestDestinationEnv(localRoot); - workerConfigs = new WorkerConfigs(new EnvConfigs()); mConnectorConfigUpdater = Mockito.mock(ConnectorConfigUpdater.class); setup(testEnv); - processFactory = new DockerProcessFactory(workerConfigs, workspaceRoot, - workspaceRoot.toString(), localRoot.toString(), "host"); + processFactory = new DockerProcessFactory( + workspaceRoot, + workspaceRoot.toString(), + localRoot.toString(), + "host", + Collections.emptyMap()); } @AfterEach @@ -365,7 +365,7 @@ void tearDownInternal() throws Exception { * Verify that when the integrations returns a valid spec. */ @Test - public void testGetSpec() throws WorkerException { + public void testGetSpec() throws TestHarnessException { assertNotNull(runSpec()); } @@ -948,12 +948,12 @@ public void testCustomDbtTransformations() throws Exception { // 1. First, it tests if connection to the destination works. dbtConfig.withDbtArguments("debug"); if (!runner.run(JOB_ID, JOB_ATTEMPT, transformationRoot, config, null, dbtConfig)) { - throw new WorkerException("dbt debug Failed."); + throw new TestHarnessException("dbt debug Failed."); } // 2. Install any dependencies packages, if any dbtConfig.withDbtArguments("deps"); if (!runner.transform(JOB_ID, JOB_ATTEMPT, transformationRoot, config, null, dbtConfig)) { - throw new WorkerException("dbt deps Failed."); + throw new TestHarnessException("dbt deps Failed."); } // 3. It contains seeds that includes some (fake) raw data from a fictional app as CSVs data sets. // This materializes the CSVs as tables in your target schema. @@ -961,19 +961,19 @@ public void testCustomDbtTransformations() throws Exception { // already in your warehouse. dbtConfig.withDbtArguments("seed"); if (!runner.transform(JOB_ID, JOB_ATTEMPT, transformationRoot, config, null, dbtConfig)) { - throw new WorkerException("dbt seed Failed."); + throw new TestHarnessException("dbt seed Failed."); } // 4. Run the models: // Note: If this steps fails, it might mean that you need to make small changes to the SQL in the // models folder to adjust for the flavor of SQL of your target database. dbtConfig.withDbtArguments("run"); if (!runner.transform(JOB_ID, JOB_ATTEMPT, transformationRoot, config, null, dbtConfig)) { - throw new WorkerException("dbt run Failed."); + throw new TestHarnessException("dbt run Failed."); } // 5. Test the output of the models and tables have been properly populated: dbtConfig.withDbtArguments("test"); if (!runner.transform(JOB_ID, JOB_ATTEMPT, transformationRoot, config, null, dbtConfig)) { - throw new WorkerException("dbt test Failed."); + throw new TestHarnessException("dbt test Failed."); } // 6. 
Generate dbt documentation for the project: // This step is commented out because it takes a long time, but is not vital for Airbyte @@ -1008,7 +1008,7 @@ void testCustomDbtTransformationsFailure() throws Exception { .withDockerImage("fishtownanalytics/dbt:0.19.1") .withDbtArguments("debug"); if (!runner.run(JOB_ID, JOB_ATTEMPT, transformationRoot, config, null, dbtConfig)) { - throw new WorkerException("dbt debug Failed."); + throw new TestHarnessException("dbt debug Failed."); } dbtConfig.withDbtArguments("test"); @@ -1243,16 +1243,16 @@ protected void assertNamespaceNormalization(final String testCaseId, testCaseId)); } - private ConnectorSpecification runSpec() throws WorkerException { + private ConnectorSpecification runSpec() throws TestHarnessException { return convertProtocolObject( - new DefaultGetSpecWorker( + new DefaultGetSpecTestHarness( new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, new EnvVariableFeatureFlags())) .run(new JobGetSpecConfig().withDockerImage(getImageName()), jobRoot).getSpec(), ConnectorSpecification.class); } - protected StandardCheckConnectionOutput runCheck(final JsonNode config) throws WorkerException { - return new DefaultCheckConnectionWorker( + protected StandardCheckConnectionOutput runCheck(final JsonNode config) throws TestHarnessException { + return new DefaultCheckConnectionTestHarness( new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, new EnvVariableFeatureFlags()), mConnectorConfigUpdater) .run(new StandardCheckConnectionInput().withConnectionConfiguration(config), jobRoot) @@ -1262,7 +1262,7 @@ protected StandardCheckConnectionOutput runCheck(final JsonNode config) throws W protected StandardCheckConnectionOutput.Status runCheckWithCatchedException( final JsonNode config) { try { - final StandardCheckConnectionOutput standardCheckConnectionOutput = new DefaultCheckConnectionWorker( + final StandardCheckConnectionOutput standardCheckConnectionOutput = new DefaultCheckConnectionTestHarness( new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, new EnvVariableFeatureFlags()), mConnectorConfigUpdater) .run(new StandardCheckConnectionInput().withConnectionConfiguration(config), jobRoot) @@ -1360,7 +1360,7 @@ private List runSync( if (!runner.normalize(JOB_ID, JOB_ATTEMPT, normalizationRoot, destinationConfig.getDestinationConnectionConfiguration(), destinationConfig.getCatalog(), null)) { - throw new WorkerException("Normalization Failed."); + throw new TestHarnessException("Normalization Failed."); } runner.close(); return destinationOutput; diff --git a/airbyte-integrations/bases/standard-source-test/build.gradle b/airbyte-integrations/bases/standard-source-test/build.gradle index 9d64af20a8349..0dd052dedb1f3 100644 --- a/airbyte-integrations/bases/standard-source-test/build.gradle +++ b/airbyte-integrations/bases/standard-source-test/build.gradle @@ -14,9 +14,8 @@ import org.jsoup.Jsoup; dependencies { implementation project(':airbyte-db:db-lib') implementation project(':airbyte-api') - implementation project(':airbyte-commons-worker') + implementation project(':airbyte-connector-test-harnesses:acceptance-test-harness') implementation project(':airbyte-config:config-models') - implementation project(':airbyte-config:config-persistence') implementation libs.airbyte.protocol implementation 'org.mockito:mockito-core:4.6.1' diff --git 
a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java index e245ec5df6df8..1df436953fdd1 100644 --- a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java +++ b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java @@ -18,9 +18,7 @@ import io.airbyte.api.client.model.generated.SourceDiscoverSchemaWriteRequestBody; import io.airbyte.commons.features.EnvVariableFeatureFlags; import io.airbyte.commons.json.Jsons; -import io.airbyte.config.EnvConfigs; import io.airbyte.config.JobGetSpecConfig; -import io.airbyte.config.ResourceRequirements; import io.airbyte.config.StandardCheckConnectionInput; import io.airbyte.config.StandardCheckConnectionOutput; import io.airbyte.config.StandardDiscoverCatalogInput; @@ -32,11 +30,10 @@ import io.airbyte.protocol.models.v0.AirbyteRecordMessage; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.v0.ConnectorSpecification; -import io.airbyte.workers.WorkerConfigs; -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.general.DefaultCheckConnectionWorker; -import io.airbyte.workers.general.DefaultDiscoverCatalogWorker; -import io.airbyte.workers.general.DefaultGetSpecWorker; +import io.airbyte.workers.exception.TestHarnessException; +import io.airbyte.workers.general.DefaultCheckConnectionTestHarness; +import io.airbyte.workers.general.DefaultDiscoverCatalogTestHarness; +import io.airbyte.workers.general.DefaultGetSpecTestHarness; import io.airbyte.workers.helper.CatalogClientConverters; import io.airbyte.workers.helper.ConnectorConfigUpdater; import io.airbyte.workers.helper.EntrypointEnvChecker; @@ -48,7 +45,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -117,8 +113,6 @@ public abstract class AbstractSourceConnectorTest { */ protected abstract void tearDown(TestDestinationEnv testEnv) throws Exception; - private WorkerConfigs workerConfigs; - private AirbyteApiClient mAirbyteApiClient; private SourceApi mSourceApi; @@ -142,7 +136,6 @@ public void setUpInternal() throws Exception { localRoot = Files.createTempDirectory(testDir, "output"); environment = new TestDestinationEnv(localRoot); setupEnvironment(environment); - workerConfigs = new WorkerConfigs(new EnvConfigs()); mAirbyteApiClient = mock(AirbyteApiClient.class); mSourceApi = mock(SourceApi.class); when(mAirbyteApiClient.getSourceApi()).thenReturn(mSourceApi); @@ -150,11 +143,11 @@ public void setUpInternal() throws Exception { .thenReturn(new DiscoverCatalogResult().catalogId(CATALOG_ID)); mConnectorConfigUpdater = mock(ConnectorConfigUpdater.class); processFactory = new DockerProcessFactory( - workerConfigs, workspaceRoot, workspaceRoot.toString(), localRoot.toString(), - "host"); + "host", + new TestEnvConfigs().getJobDefaultEnvMap()); postSetup(); } @@ -170,34 +163,34 @@ public void tearDownInternal() throws Exception { tearDown(environment); } - protected ConnectorSpecification runSpec() throws WorkerException { - final io.airbyte.protocol.models.ConnectorSpecification spec = new 
DefaultGetSpecWorker( - new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, workerConfigs.getResourceRequirements(), null, false, + protected ConnectorSpecification runSpec() throws TestHarnessException { + final io.airbyte.protocol.models.ConnectorSpecification spec = new DefaultGetSpecTestHarness( + new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, new EnvVariableFeatureFlags())) .run(new JobGetSpecConfig().withDockerImage(getImageName()), jobRoot).getSpec(); return convertProtocolObject(spec, ConnectorSpecification.class); } protected StandardCheckConnectionOutput runCheck() throws Exception { - return new DefaultCheckConnectionWorker( - new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, workerConfigs.getResourceRequirements(), null, false, + return new DefaultCheckConnectionTestHarness( + new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, new EnvVariableFeatureFlags()), mConnectorConfigUpdater) .run(new StandardCheckConnectionInput().withConnectionConfiguration(getConfig()), jobRoot).getCheckConnection(); } protected String runCheckAndGetStatusAsString(final JsonNode config) throws Exception { - return new DefaultCheckConnectionWorker( - new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, workerConfigs.getResourceRequirements(), null, false, + return new DefaultCheckConnectionTestHarness( + new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, new EnvVariableFeatureFlags()), mConnectorConfigUpdater) .run(new StandardCheckConnectionInput().withConnectionConfiguration(config), jobRoot).getCheckConnection().getStatus().toString(); } protected UUID runDiscover() throws Exception { - final UUID toReturn = new DefaultDiscoverCatalogWorker( + final UUID toReturn = new DefaultDiscoverCatalogTestHarness( mAirbyteApiClient, - new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, workerConfigs.getResourceRequirements(), null, false, + new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, new EnvVariableFeatureFlags()), mConnectorConfigUpdater) .run(new StandardDiscoverCatalogInput().withSourceId(SOURCE_ID.toString()).withConnectionConfiguration(getConfig()), jobRoot) @@ -232,7 +225,7 @@ protected List runRead(final ConfiguredAirbyteCatalog catalog, f final var featureFlags = new EnvVariableFeatureFlags(); final AirbyteSource source = new DefaultAirbyteSource( - new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, workerConfigs.getResourceRequirements(), null, false, + new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, featureFlags), featureFlags); final List messages = new ArrayList<>(); @@ -255,9 +248,7 @@ protected Map runReadVerifyNumberOfReceivedMsgs(final Configure .withState(state == null ? null : new State().withState(state)) .withCatalog(convertProtocolObject(catalog, io.airbyte.protocol.models.ConfiguredAirbyteCatalog.class)); - final Map mapOfResourceRequirementsParams = prepareResourceRequestMapBySystemProperties(); - final AirbyteSource source = - prepareAirbyteSource(!mapOfResourceRequirementsParams.isEmpty() ? 
prepareResourceRequirements(mapOfResourceRequirementsParams) : null); + final AirbyteSource source = prepareAirbyteSource(); source.start(sourceConfig, jobRoot); while (!source.isFinished()) { @@ -274,43 +265,20 @@ protected Map runReadVerifyNumberOfReceivedMsgs(final Configure return mapOfExpectedRecordsCount; } - protected ResourceRequirements prepareResourceRequirements(final Map mapOfResourceRequirementsParams) { - return new ResourceRequirements().withCpuRequest(mapOfResourceRequirementsParams.get(CPU_REQUEST_FIELD_NAME)) - .withCpuLimit(mapOfResourceRequirementsParams.get(CPU_LIMIT_FIELD_NAME)) - .withMemoryRequest(mapOfResourceRequirementsParams.get(MEMORY_REQUEST_FIELD_NAME)) - .withMemoryLimit(mapOfResourceRequirementsParams.get(MEMORY_LIMIT_FIELD_NAME)); - } - - private AirbyteSource prepareAirbyteSource(final ResourceRequirements resourceRequirements) { - final var workerConfigs = new WorkerConfigs(new EnvConfigs()); + private AirbyteSource prepareAirbyteSource() { final var featureFlags = new EnvVariableFeatureFlags(); - final var integrationLauncher = resourceRequirements == null - ? new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, workerConfigs.getResourceRequirements(), null, false, - featureFlags) - : new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, resourceRequirements, null, false, featureFlags); + final var integrationLauncher = new AirbyteIntegrationLauncher( + JOB_ID, + JOB_ATTEMPT, + getImageName(), + processFactory, + null, + null, + false, + featureFlags); return new DefaultAirbyteSource(integrationLauncher, featureFlags); } - private static Map prepareResourceRequestMapBySystemProperties() { - var cpuLimit = System.getProperty(CPU_LIMIT_FIELD_NAME); - var memoryLimit = System.getProperty(MEMORY_LIMIT_FIELD_NAME); - final var workerConfigs = new WorkerConfigs(new EnvConfigs()); - if (cpuLimit.isBlank() || cpuLimit.isEmpty()) { - cpuLimit = workerConfigs.getResourceRequirements().getCpuLimit(); - } - if (memoryLimit.isBlank() || memoryLimit.isEmpty()) { - memoryLimit = workerConfigs.getResourceRequirements().getMemoryLimit(); - } - LOGGER.info("Container CPU Limit = {}", cpuLimit); - LOGGER.info("Container Memory Limit = {}", memoryLimit); - final Map result = new HashMap<>(); - result.put(CPU_REQUEST_FIELD_NAME, workerConfigs.getResourceRequirements().getCpuRequest()); - result.put(CPU_LIMIT_FIELD_NAME, cpuLimit); - result.put(MEMORY_REQUEST_FIELD_NAME, workerConfigs.getResourceRequirements().getMemoryRequest()); - result.put(MEMORY_LIMIT_FIELD_NAME, memoryLimit); - return result; - } - private static V0 convertProtocolObject(final V1 v1, final Class klass) { return Jsons.object(Jsons.jsonNode(v1), klass); } diff --git a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/PythonSourceAcceptanceTest.java b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/PythonSourceAcceptanceTest.java index 70c716ee6262d..7ac0c4bf65244 100644 --- a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/PythonSourceAcceptanceTest.java +++ b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/PythonSourceAcceptanceTest.java @@ -15,7 +15,7 @@ import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; import 
io.airbyte.protocol.models.v0.ConnectorSpecification; -import io.airbyte.workers.WorkerUtils; +import io.airbyte.workers.TestHarnessUtils; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -131,7 +131,7 @@ private Path runExecutableInternal(final Command cmd) throws IOException { LineGobbler.gobble(process.getErrorStream(), LOGGER::error); LineGobbler.gobble(process.getInputStream(), LOGGER::info); - WorkerUtils.gentleClose(process, 1, TimeUnit.MINUTES); + TestHarnessUtils.gentleClose(process, 1, TimeUnit.MINUTES); final int exitCode = process.exitValue(); if (exitCode != 0) { diff --git a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/TestEnvConfigs.java b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/TestEnvConfigs.java new file mode 100644 index 0000000000000..0d223edc29d69 --- /dev/null +++ b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/TestEnvConfigs.java @@ -0,0 +1,135 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.standardtest.source; + +import com.google.common.base.Preconditions; +import io.airbyte.commons.lang.Exceptions; +import io.airbyte.commons.map.MoreMaps; +import io.airbyte.commons.version.AirbyteVersion; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This class passes environment variable to the DockerProcessFactory that runs the source in the + * SourceAcceptanceTest. + */ +// todo (cgardens) - this cloud_deployment implicit interface is going to bite us. 
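As a usage sketch (not part of the patch): the class introduced below is consumed by the setUpInternal() hunk earlier in this diff roughly as follows. This assumes DockerProcessFactory lives in io.airbyte.workers.process, and the paths are placeholders.

```java
// Hypothetical wiring sketch: how TestEnvConfigs feeds the env map into the
// trimmed-down DockerProcessFactory constructor (package assumed).
import io.airbyte.integrations.standardtest.source.TestEnvConfigs;
import io.airbyte.workers.process.DockerProcessFactory;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;

final class ProcessFactorySetupSketch {

  DockerProcessFactory create() throws Exception {
    final Path workspaceRoot = Files.createTempDirectory("acceptance_test");
    final Path localRoot = Files.createTempDirectory("output");
    // JOB_DEFAULT_ENV_*-prefixed variables (prefix stripped) merged with the
    // shared metadata envs such as AIRBYTE_VERSION and AIRBYTE_ROLE.
    final Map<String, String> jobEnv = new TestEnvConfigs().getJobDefaultEnvMap();
    // Same five-argument constructor used in the hunks above; the env map
    // replaces the old WorkerConfigs parameter.
    return new DockerProcessFactory(
        workspaceRoot,
        workspaceRoot.toString(),
        localRoot.toString(),
        "host",
        jobEnv);
  }
}
```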
+public class TestEnvConfigs { + + private static final Logger LOGGER = LoggerFactory.getLogger(TestEnvConfigs.class); + + // env variable names + public static final String AIRBYTE_ROLE = "AIRBYTE_ROLE"; + public static final String AIRBYTE_VERSION = "AIRBYTE_VERSION"; + public static final String WORKER_ENVIRONMENT = "WORKER_ENVIRONMENT"; + public static final String DEPLOYMENT_MODE = "DEPLOYMENT_MODE"; + public static final String JOB_DEFAULT_ENV_PREFIX = "JOB_DEFAULT_ENV_"; + + public static final Map<String, Function<TestEnvConfigs, String>> JOB_SHARED_ENVS = Map.of( + AIRBYTE_VERSION, (instance) -> instance.getAirbyteVersion().serialize(), + AIRBYTE_ROLE, TestEnvConfigs::getAirbyteRole, + DEPLOYMENT_MODE, (instance) -> instance.getDeploymentMode().name(), + WORKER_ENVIRONMENT, (instance) -> instance.getWorkerEnvironment().name()); + + enum DeploymentMode { + OSS, + CLOUD + } + + enum WorkerEnvironment { + DOCKER, + KUBERNETES + } + + private final Function<String, String> getEnv; + private final Supplier<Set<String>> getAllEnvKeys; + + public TestEnvConfigs() { + this(System.getenv()); + } + + private TestEnvConfigs(final Map<String, String> envMap) { + getEnv = envMap::get; + getAllEnvKeys = envMap::keySet; + } + + // CORE + // General + public String getAirbyteRole() { + return getEnv(AIRBYTE_ROLE); + } + + public AirbyteVersion getAirbyteVersion() { + return new AirbyteVersion(getEnsureEnv(AIRBYTE_VERSION)); + } + + public DeploymentMode getDeploymentMode() { + return getEnvOrDefault(DEPLOYMENT_MODE, DeploymentMode.OSS, s -> { + try { + return DeploymentMode.valueOf(s); + } catch (final IllegalArgumentException e) { + LOGGER.info(s + " not recognized, defaulting to " + DeploymentMode.OSS); + return DeploymentMode.OSS; + } + }); + } + + public WorkerEnvironment getWorkerEnvironment() { + return getEnvOrDefault(WORKER_ENVIRONMENT, WorkerEnvironment.DOCKER, s -> WorkerEnvironment.valueOf(s.toUpperCase())); + } + + /** + * There are two types of environment variables available to the job container: + * <ul>

+ * <li>Exclusive variables prefixed with JOB_DEFAULT_ENV_PREFIX</li> + * <li>Shared variables defined in JOB_SHARED_ENVS</li> + * </ul>
+ */ + public Map<String, String> getJobDefaultEnvMap() { + final Map<String, String> jobPrefixedEnvMap = getAllEnvKeys.get().stream() + .filter(key -> key.startsWith(JOB_DEFAULT_ENV_PREFIX)) + .collect(Collectors.toMap(key -> key.replace(JOB_DEFAULT_ENV_PREFIX, ""), getEnv)); + // This method assumes that these shared env variables are not critical to the execution + // of the jobs, and only serve as metadata. So any exception is swallowed and default to + // an empty string. Change this logic if this assumption no longer holds. + final Map<String, String> jobSharedEnvMap = JOB_SHARED_ENVS.entrySet().stream().collect(Collectors.toMap( + Entry::getKey, + entry -> Exceptions.swallowWithDefault(() -> Objects.requireNonNullElse(entry.getValue().apply(this), ""), ""))); + return MoreMaps.merge(jobPrefixedEnvMap, jobSharedEnvMap); + } + + public <T> T getEnvOrDefault(final String key, final T defaultValue, final Function<String, T> parser) { + return getEnvOrDefault(key, defaultValue, parser, false); + } + + public <T> T getEnvOrDefault(final String key, final T defaultValue, final Function<String, T> parser, final boolean isSecret) { + final String value = getEnv.apply(key); + if (value != null && !value.isEmpty()) { + return parser.apply(value); + } else { + LOGGER.info("Using default value for environment variable {}: '{}'", key, isSecret ? "*****" : defaultValue); + return defaultValue; + } + } + + public String getEnv(final String name) { + return getEnv.apply(name); + } + + public String getEnsureEnv(final String name) { + final String value = getEnv(name); + Preconditions.checkArgument(value != null, "'%s' environment variable cannot be null", name); + + return value; + } + +} diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java index bf20d32bfe969..050b24a678803 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java @@ -37,7 +37,6 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.json.Jsons; -import io.airbyte.config.WorkerEnvConstants; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.destination.gcs.GcsDestinationConfig; import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; @@ -458,7 +457,7 @@ public static HeaderProvider getHeaderProvider() { } private static String getConnectorNameOrDefault() { - return Optional.ofNullable(System.getenv(WorkerEnvConstants.WORKER_CONNECTOR_IMAGE)) + return Optional.ofNullable(System.getenv("WORKER_CONNECTOR_IMAGE")) .map(name -> name.replace("airbyte/", Strings.EMPTY).replace(":", "/")) .orElse("destination-bigquery"); } diff --git a/airbyte-integrations/connectors/destination-clickhouse/build.gradle b/airbyte-integrations/connectors/destination-clickhouse/build.gradle index 40b2ece6b7d0f..48c209f70ea19 100644 --- a/airbyte-integrations/connectors/destination-clickhouse/build.gradle +++ b/airbyte-integrations/connectors/destination-clickhouse/build.gradle @@ -25,7 +25,7 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation
project(':airbyte-integrations:connectors:destination-clickhouse') - integrationTestJavaImplementation project(':airbyte-commons-worker') + integrationTestJavaImplementation project(':airbyte-connector-test-harnesses:acceptance-test-harness') // https://mvnrepository.com/artifact/org.testcontainers/clickhouse integrationTestJavaImplementation libs.connectors.destination.testcontainers.clickhouse integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-normalization').airbyteDocker.outputs) diff --git a/airbyte-integrations/connectors/destination-elasticsearch-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-elasticsearch-strict-encrypt/build.gradle index 07ad7bf8cc369..aaf04b312bed5 100644 --- a/airbyte-integrations/connectors/destination-elasticsearch-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-elasticsearch-strict-encrypt/build.gradle @@ -33,7 +33,7 @@ dependencies { testImplementation libs.connectors.testcontainers.elasticsearch integrationTestJavaImplementation libs.connectors.testcontainers.elasticsearch - integrationTestJavaImplementation project(':airbyte-commons-worker') + integrationTestJavaImplementation project(':airbyte-connector-test-harnesses:acceptance-test-harness') integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-elasticsearch') } diff --git a/airbyte-integrations/connectors/destination-gcs/build.gradle b/airbyte-integrations/connectors/destination-gcs/build.gradle index a90eceeb8294d..71312d5350bda 100644 --- a/airbyte-integrations/connectors/destination-gcs/build.gradle +++ b/airbyte-integrations/connectors/destination-gcs/build.gradle @@ -43,5 +43,5 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-gcs') - integrationTestJavaImplementation project(':airbyte-commons-worker') + integrationTestJavaImplementation project(':airbyte-connector-test-harnesses:acceptance-test-harness') } diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle index 1e4704b3da540..e9c6c9a1b1426 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle @@ -23,6 +23,6 @@ dependencies { implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-normalization').airbyteDocker.outputs) integrationTestJavaImplementation project(':airbyte-config:config-models') - integrationTestJavaImplementation project(':airbyte-commons-worker') + integrationTestJavaImplementation project(':airbyte-connector-test-harnesses:acceptance-test-harness') } diff --git a/airbyte-integrations/connectors/destination-snowflake/build.gradle b/airbyte-integrations/connectors/destination-snowflake/build.gradle index c9287b5f2d89d..254c9e7c5d8de 100644 --- a/airbyte-integrations/connectors/destination-snowflake/build.gradle +++ b/airbyte-integrations/connectors/destination-snowflake/build.gradle @@ -47,7 +47,7 @@ dependencies { // this is a configuration to make mockito work with final classes 
testImplementation 'org.mockito:mockito-inline:2.13.0' - integrationTestJavaImplementation project(':airbyte-commons-worker') + integrationTestJavaImplementation project(':airbyte-connector-test-harnesses:acceptance-test-harness') integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-snowflake') integrationTestJavaImplementation 'org.apache.commons:commons-lang3:3.11' diff --git a/airbyte-test-utils/build.gradle b/airbyte-test-utils/build.gradle index 228304f11d2cc..3ec37f5393087 100644 --- a/airbyte-test-utils/build.gradle +++ b/airbyte-test-utils/build.gradle @@ -10,7 +10,7 @@ configurations.all { dependencies { api project(':airbyte-db:db-lib') implementation project(':airbyte-api') - implementation project(':airbyte-commons-worker') + implementation project(':airbyte-connector-test-harnesses:acceptance-test-harness') implementation 'io.fabric8:kubernetes-client:5.12.2' implementation libs.temporal.sdk diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/container/AirbyteTestContainer.java b/airbyte-test-utils/src/main/java/io/airbyte/test/container/AirbyteTestContainer.java deleted file mode 100644 index 7b17d1f800471..0000000000000 --- a/airbyte-test-utils/src/main/java/io/airbyte/test/container/AirbyteTestContainer.java +++ /dev/null @@ -1,269 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.test.container; - -import com.google.common.collect.Maps; -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.generated.HealthApi; -import io.airbyte.api.client.invoker.generated.ApiClient; -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.commons.concurrency.WaitingUtils; -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.lang.reflect.Field; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; -import java.util.Scanner; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Consumer; -import java.util.function.Supplier; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testcontainers.containers.DockerComposeContainer; -import org.testcontainers.containers.SocatContainer; -import org.testcontainers.containers.output.OutputFrame; - -/** - * The goal of this class is to make it easy to run the Airbyte docker-compose configuration from - * test containers. This helps make it easy to stop the test container without deleting the volumes - * { @link AirbyteTestContainer#stopRetainVolumes() }. It waits for Airbyte to be ready. It also - * handles the nuances of configuring the Airbyte docker-compose configuration in test containers. 
- */ -@SuppressWarnings("PMD.AvoidAccessibilityAlteration") -public class AirbyteTestContainer { - - private static final Logger LOGGER = LoggerFactory.getLogger(AirbyteTestContainer.class); - - private final File dockerComposeFile; - private final Map env; - private final Map> customServiceLogListeners; - - private DockerComposeContainer dockerComposeContainer; - - public AirbyteTestContainer(final File dockerComposeFile, - final Map env, - final Map> customServiceLogListeners) { - this.dockerComposeFile = dockerComposeFile; - this.env = env; - this.customServiceLogListeners = customServiceLogListeners; - } - - /** - * Starts Airbyte docker-compose configuration. Will block until the server is reachable or it times - * outs. - */ - public void startBlocking() throws IOException, InterruptedException { - startAsync(); - waitForAirbyte(); - } - - @SuppressWarnings({"unchecked", "rawtypes"}) - public void startAsync() throws IOException, InterruptedException { - final File cleanedDockerComposeFile = prepareDockerComposeFile(dockerComposeFile); - dockerComposeContainer = new DockerComposeContainer(cleanedDockerComposeFile) - .withEnv(env); - - // Only expose logs related to db migrations. - serviceLogConsumer(dockerComposeContainer, "init"); - serviceLogConsumer(dockerComposeContainer, "bootloader"); - serviceLogConsumer(dockerComposeContainer, "db"); - serviceLogConsumer(dockerComposeContainer, "seed"); - serviceLogConsumer(dockerComposeContainer, "server"); - - dockerComposeContainer.start(); - } - - /** - * TestContainers docker compose files cannot have container_names, so we filter them. - */ - private static File prepareDockerComposeFile(final File originalDockerComposeFile) throws IOException { - final File cleanedDockerComposeFile = Files.createTempFile(Path.of("/tmp"), "docker_compose", "acceptance_test").toFile(); - - try (final Scanner scanner = new Scanner(originalDockerComposeFile, StandardCharsets.UTF_8)) { - try (final FileWriter fileWriter = new FileWriter(cleanedDockerComposeFile, StandardCharsets.UTF_8)) { - while (scanner.hasNextLine()) { - final String s = scanner.nextLine(); - if (s.contains("container_name")) { - continue; - } - fileWriter.write(s); - fileWriter.write('\n'); - } - } - } - return cleanedDockerComposeFile; - } - - @SuppressWarnings("BusyWait") - private static void waitForAirbyte() throws InterruptedException { - // todo (cgardens) - assumes port 8001 which is misleading since we can start airbyte on other - // ports. need to make this configurable. - final AirbyteApiClient apiClient = new AirbyteApiClient( - new ApiClient().setScheme("http") - .setHost("localhost") - .setPort(8001) - .setBasePath("/api")); - final HealthApi healthApi = apiClient.getHealthApi(); - - final AtomicReference lastException = new AtomicReference<>(); - final AtomicInteger attempt = new AtomicInteger(); - final Supplier condition = () -> { - try { - healthApi.getHealthCheck(); - return true; - } catch (final ApiException e) { - lastException.set(e); - LOGGER.info("airbyte not ready yet. attempt: {}", attempt.incrementAndGet()); - return false; - } - }; - - if (!WaitingUtils.waitForCondition(Duration.ofSeconds(5), Duration.ofMinutes(2), condition)) { - throw new IllegalStateException("Airbyte took too long to start. 
Including last exception.", lastException.get()); - } - } - - private void serviceLogConsumer(final DockerComposeContainer composeContainer, final String service) { - composeContainer.withLogConsumer(service, logConsumer(service, customServiceLogListeners.get(service))); - } - - /** - * Exposes logs generated by docker containers in docker compose temporal test container. - * - * @param service - name of docker container from which log is emitted. - * @param customConsumer - each line output by the service in docker compose will be passed ot the - * consumer. if null do nothing. - * @return log consumer - */ - private Consumer logConsumer(final String service, final Consumer customConsumer) { - return c -> { - if (c != null && c.getBytes() != null) { - final String log = new String(c.getBytes(), StandardCharsets.UTF_8); - if (customConsumer != null) { - customConsumer.accept(log); - } - - final String message = prependService(service, log.replace("\n", "")); - switch (c.getType()) { - // prefer matching log levels from docker containers with log levels in logger. - case STDERR -> LOGGER.error(message); - // assumption that this is an empty frame that connotes the container exiting. - case END -> LOGGER.error(service + " stopped!!!"); - // default includes STDOUT and anything else - default -> LOGGER.info(message); - } - } - }; - } - - private String prependService(final String service, final String message) { - return service + " - " + message; - } - - /** - * This stop method will delete any underlying volumes for the docker compose setup. - */ - public void stop() { - if (dockerComposeContainer != null) { - dockerComposeContainer.stop(); - } - } - - /** - * This method is hacked from {@link org.testcontainers.containers.DockerComposeContainer#stop()} We - * needed to do this to avoid removing the volumes when the container is stopped so that the data - * persists and can be tested against in the second run - */ - public void stopRetainVolumes() { - if (dockerComposeContainer == null) { - return; - } - - try { - stopRetainVolumesInternal(); - } catch (final InvocationTargetException | IllegalAccessException | NoSuchMethodException | NoSuchFieldException e) { - throw new RuntimeException(e); - } - } - - @SuppressWarnings("rawtypes") - private void stopRetainVolumesInternal() throws InvocationTargetException, IllegalAccessException, NoSuchMethodException, NoSuchFieldException { - final Class dockerComposeContainerClass = dockerComposeContainer.getClass(); - try { - final Field ambassadorContainerField = dockerComposeContainerClass.getDeclaredField("ambassadorContainer"); - ambassadorContainerField.setAccessible(true); - final SocatContainer ambassadorContainer = (SocatContainer) ambassadorContainerField.get(dockerComposeContainer); - ambassadorContainer.stop(); - - final String cmd = "down "; - - final Method runWithComposeMethod = dockerComposeContainerClass.getDeclaredMethod("runWithCompose", String.class); - runWithComposeMethod.setAccessible(true); - runWithComposeMethod.invoke(dockerComposeContainer, cmd); - - } finally { - final Field projectField = dockerComposeContainerClass.getDeclaredField("project"); - projectField.setAccessible(true); - - final Method randomProjectId = dockerComposeContainerClass.getDeclaredMethod("randomProjectId"); - randomProjectId.setAccessible(true); - final String newProjectValue = (String) randomProjectId.invoke(dockerComposeContainer); - - projectField.set(dockerComposeContainer, newProjectValue); - } - } - - public static class Builder { - - private 
final File dockerComposeFile; - private final Map env; - private final Map> customServiceLogListeners; - - public Builder(final File dockerComposeFile) { - this.dockerComposeFile = dockerComposeFile; - this.customServiceLogListeners = new HashMap<>(); - this.env = new HashMap<>(); - } - - public Builder setEnv(final Properties env) { - this.env.putAll(Maps.fromProperties(env)); - return this; - } - - public Builder setEnv(final Map env) { - this.env.putAll(env); - return this; - } - - public Builder setEnvVariable(final String propertyName, final String propertyValue) { - this.env.put(propertyName, propertyValue); - return this; - } - - public Builder setLogListener(final String serviceName, final Consumer logConsumer) { - this.customServiceLogListeners.put(serviceName, logConsumer); - return this; - } - - public AirbyteTestContainer build() { - // override .env file so that we never report to segment while testing. - env.put("TRACKING_STRATEGY", "logging"); - - LOGGER.info("Using env: {}", env); - return new AirbyteTestContainer(dockerComposeFile, env, customServiceLogListeners); - } - - } - -} diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/SchemaTableNamePair.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/SchemaTableNamePair.java deleted file mode 100644 index fc35a6b288829..0000000000000 --- a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/SchemaTableNamePair.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.test.utils; - -public record SchemaTableNamePair(String schemaName, String tableName) { - - public String getFullyQualifiedTableName() { - return schemaName + "." + tableName; - } - -} diff --git a/build.gradle b/build.gradle index 36ef7ee9f00a9..cb836c4ca7aa2 100644 --- a/build.gradle +++ b/build.gradle @@ -506,11 +506,6 @@ subprojects { task listAllDependencies(type: DependencyReportTask) {} } -task('generate-docker') { - dependsOn(':airbyte-db:db-lib:assemble') - dependsOn(':airbyte-config:init:assemble') -} - // produce reproducible archives // (see https://docs.gradle.org/current/userguide/working_with_files.html#sec:reproducible_archives) tasks.withType(AbstractArchiveTask) { diff --git a/settings.gradle b/settings.gradle index e34004f613fbf..5c9a644bae09a 100644 --- a/settings.gradle +++ b/settings.gradle @@ -89,10 +89,7 @@ include ':airbyte-json-validation' include ':airbyte-test-utils' // airbyte-workers has a lot of dependencies. -include ':airbyte-commons-worker' -include ':airbyte-config:config-persistence' // transitively used by airbyte-workers. -include ':airbyte-db:jooq' // transitively used by airbyte-workers. -include ':airbyte-persistence:job-persistence' // transitively used by airbyte-workers. +include ':airbyte-connector-test-harnesses:acceptance-test-harness' // connectors base if (!System.getenv().containsKey("SUB_BUILD") || System.getenv().get("SUB_BUILD") == "CONNECTORS_BASE" || System.getenv().get("SUB_BUILD") == "ALL_CONNECTORS") { diff --git a/tools/bin/publish_docker.sh b/tools/bin/publish_docker.sh index caf04d1727f4a..8a9904660e8d3 100755 --- a/tools/bin/publish_docker.sh +++ b/tools/bin/publish_docker.sh @@ -1,14 +1,13 @@ #!/bin/bash set -e +# todo (cgardens) - remove this file. used in platform build script only. # List of directories without "airbyte-" prefix. projectDir=( "bootloader" - "config/init" "container-orchestrator" "cron" "connector-builder-server" - "db/db-lib" "metrics/reporter" "proxy" "server"
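Taken together, the net effect for connector tests is that spec, check, and discover run through the renamed harness classes, with null where workerConfigs.getResourceRequirements() used to be, and failures surfacing as TestHarnessException. A minimal sketch of the resulting spec flow, assuming the io.airbyte.workers.* package layout implied by the import hunks above and placeholder job id/attempt values:

```java
// Minimal sketch of the post-rename spec flow; not a verbatim excerpt.
import io.airbyte.commons.features.EnvVariableFeatureFlags;
import io.airbyte.config.JobGetSpecConfig;
import io.airbyte.protocol.models.ConnectorSpecification;
import io.airbyte.workers.exception.TestHarnessException;
import io.airbyte.workers.general.DefaultGetSpecTestHarness;
import io.airbyte.workers.process.AirbyteIntegrationLauncher;
import io.airbyte.workers.process.ProcessFactory;
import java.nio.file.Path;

final class SpecFlowSketch {

  private static final String JOB_ID = "0"; // placeholder, type assumed
  private static final int JOB_ATTEMPT = 0; // placeholder, type assumed

  ConnectorSpecification fetchSpec(final String imageName,
                                   final ProcessFactory processFactory,
                                   final Path jobRoot) throws TestHarnessException {
    // The first null stands where workerConfigs.getResourceRequirements()
    // appeared before this change; the remaining arguments are unchanged.
    return new DefaultGetSpecTestHarness(
        new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, imageName, processFactory,
            null, null, false, new EnvVariableFeatureFlags()))
        .run(new JobGetSpecConfig().withDockerImage(imageName), jobRoot)
        .getSpec();
  }
}
```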