diff --git a/api-service/src/configs/Config.ts b/api-service/src/configs/Config.ts index cd74c111..1b4ccb04 100644 --- a/api-service/src/configs/Config.ts +++ b/api-service/src/configs/Config.ts @@ -118,5 +118,6 @@ export const config = { "otel": { "enable": process.env.otel_enable || "false", "collector_endpoint": process.env.otel_collector_endpoint || "http://localhost:4318" - } + }, + "storage_types": process.env.storage_types || 'druid,datalake' } diff --git a/api-service/src/controllers/DatasetCopy/DatasetCopy.ts b/api-service/src/controllers/DatasetCopy/DatasetCopy.ts index 9015e9c7..7c53f538 100644 --- a/api-service/src/controllers/DatasetCopy/DatasetCopy.ts +++ b/api-service/src/controllers/DatasetCopy/DatasetCopy.ts @@ -4,7 +4,7 @@ import { ResponseHandler } from "../../helpers/ResponseHandler"; import * as _ from "lodash"; import { schemaValidation } from "../../services/ValidationService"; import validationSchema from "./RequestValidationSchema.json"; -import { datasetService, getLiveDatasetConfigs } from "../../services/DatasetService"; +import { datasetService, getLiveDatasetConfigs, validateStorageSupport } from "../../services/DatasetService"; import { updateRecords } from "./DatasetCopyHelper"; import { obsrvError } from "../../types/ObsrvError"; @@ -40,6 +40,7 @@ const datasetCopy = async (req: Request, res: Response) => { validateRequest(req); const newDatasetId = _.get(req, "body.request.destination.datasetId"); const dataset = await fetchDataset(req); + validateStorageSupport(dataset); const userID = (req as any)?.userID; _.set(dataset, "created_by", userID); _.set(dataset, "updated_by", userID); diff --git a/api-service/src/controllers/DatasetCreate/DatasetCreate.ts b/api-service/src/controllers/DatasetCreate/DatasetCreate.ts index bb96dac2..d91da0f5 100644 --- a/api-service/src/controllers/DatasetCreate/DatasetCreate.ts +++ b/api-service/src/controllers/DatasetCreate/DatasetCreate.ts @@ -1,7 +1,7 @@ import _ from "lodash"; import { Request, 
Response } from "express"; import httpStatus from "http-status"; -import { datasetService } from "../../services/DatasetService"; +import { datasetService, validateStorageSupport } from "../../services/DatasetService"; import DatasetCreate from "./DatasetCreateValidationSchema.json"; import { schemaValidation } from "../../services/ValidationService"; import { ResponseHandler } from "../../helpers/ResponseHandler"; @@ -28,6 +28,7 @@ const validateRequest = async (req: Request) => { throw obsrvError(datasetId, "DATASET_DUPLICATE_DENORM_KEY", "Duplicate denorm output fields found.", "BAD_REQUEST", 400, undefined, {duplicateKeys: duplicateDenormKeys}) } + validateStorageSupport(_.get(req, ["body", "request"])) } const datasetCreate = async (req: Request, res: Response) => { diff --git a/api-service/src/controllers/DatasetImport/DatasetImport.ts b/api-service/src/controllers/DatasetImport/DatasetImport.ts index 2d0312b5..de326bb9 100644 --- a/api-service/src/controllers/DatasetImport/DatasetImport.ts +++ b/api-service/src/controllers/DatasetImport/DatasetImport.ts @@ -2,7 +2,7 @@ import { Request, Response } from "express"; import { ResponseHandler } from "../../helpers/ResponseHandler"; import httpStatus from "http-status"; import _ from "lodash"; -import { datasetService } from "../../services/DatasetService"; +import { datasetService, validateStorageSupport } from "../../services/DatasetService"; import { datasetImportValidation, migrateExportedDatasetV1 } from "./DatasetImportHelper"; import { obsrvError } from "../../types/ObsrvError"; @@ -21,6 +21,7 @@ const datasetImport = async (req: Request, res: Response) => { const { updatedDataset, ignoredFields } = await datasetImportValidation({ ...requestBody, "request": datasetPayload }) const { successMsg, partialIgnored } = getResponseData(ignoredFields) + validateStorageSupport(updatedDataset); const dataset = await importDataset(updatedDataset, overwrite, userID); ResponseHandler.successResponse(req, res, { status: 
httpStatus.OK, data: { message: successMsg, data: dataset, ...(!_.isEmpty(partialIgnored) && { ignoredFields: partialIgnored }) } }); } diff --git a/api-service/src/controllers/DatasetStatusTransition/DatasetStatusTransition.ts b/api-service/src/controllers/DatasetStatusTransition/DatasetStatusTransition.ts index 0197d39e..ff1502d7 100644 --- a/api-service/src/controllers/DatasetStatusTransition/DatasetStatusTransition.ts +++ b/api-service/src/controllers/DatasetStatusTransition/DatasetStatusTransition.ts @@ -1,7 +1,7 @@ import { Request, Response } from "express"; import _ from "lodash"; import { ResponseHandler } from "../../helpers/ResponseHandler"; -import { datasetService } from "../../services/DatasetService"; +import { datasetService, validateStorageSupport } from "../../services/DatasetService"; import { schemaValidation } from "../../services/ValidationService"; import StatusTransitionSchema from "./RequestValidationSchema.json"; import ReadyToPublishSchema from "./ReadyToPublishSchema.json" @@ -88,6 +88,7 @@ const deleteDataset = async (dataset: Record) => { const readyForPublish = async (dataset: Record, updated_by: any) => { const draftDataset: any = await datasetService.getDraftDataset(dataset.dataset_id) + validateStorageSupport(draftDataset); let defaultConfigs: any = _.cloneDeep(defaultDatasetConfig) defaultConfigs = _.omit(defaultConfigs, ["router_config"]) defaultConfigs = _.omit(defaultConfigs, "dedup_config.dedup_key"); @@ -136,6 +137,7 @@ const readyForPublish = async (dataset: Record, updated_by: any) => const publishDataset = async (dataset: Record, userID: any) => { const draftDataset: Record = await datasetService.getDraftDataset(dataset.dataset_id) as unknown as Record + validateStorageSupport(draftDataset); _.set(draftDataset, ["created_by"], userID); _.set(draftDataset, ["updated_by"], userID); await validateAndUpdateDenormConfig(draftDataset); diff --git a/api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts 
b/api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts index e9fea051..c1ef0324 100644 --- a/api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts +++ b/api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts @@ -5,7 +5,7 @@ import Model from "sequelize/types/model"; import { DatasetStatus } from "../../types/DatasetModels"; import { ResponseHandler } from "../../helpers/ResponseHandler"; import { cipherService } from "../../services/CipherService"; -import { datasetService } from "../../services/DatasetService"; +import { datasetService, validateStorageSupport } from "../../services/DatasetService"; import { schemaValidation } from "../../services/ValidationService"; import DatasetUpdate from "./DatasetUpdateValidationSchema.json"; import { obsrvError } from "../../types/ObsrvError"; @@ -30,6 +30,7 @@ const validateRequest = async (req: Request) => { throw obsrvError(datasetId, "DATASET_UPDATE_NO_FIELDS", "Provide atleast one field in addition to the dataset_id to update the dataset", "BAD_REQUEST", 400) } + validateStorageSupport(_.get(req, ["body", "request"])) } const validateDataset = (dataset: Record | null, req: Request) => { diff --git a/api-service/src/services/DatasetService.ts b/api-service/src/services/DatasetService.ts index e8ce9a35..186e43b5 100644 --- a/api-service/src/services/DatasetService.ts +++ b/api-service/src/services/DatasetService.ts @@ -18,6 +18,7 @@ import { obsrvError } from "../types/ObsrvError"; import { druidHttpService } from "../connections/druidConnection"; import { tableGenerator } from "./TableGenerator"; import { deleteAlertByDataset, deleteMetricAliasByDataset } from "./managers"; +import { config } from "../configs/Config"; class DatasetService { @@ -411,4 +412,15 @@ export const getV1Connectors = async (datasetId: string) => { return modifiedV1Connectors; } +const storageTypes = _.split(config.storage_types, ",") +export const validateStorageSupport = (dataset: Record) => { + const { olap_store_enabled, 
lakehouse_enabled } = _.get(dataset, ["dataset_config", "indexing_config"]) || {} + if (olap_store_enabled && !_.includes(storageTypes, "druid")) { + throw obsrvError("", "DATASET_UNSUPPORTED_STORAGE_TYPE", `The storage type "druid" is not available. Please use one of the available storage types: ${storageTypes}`, "BAD_REQUEST", 400) + } + if (lakehouse_enabled && !_.includes(storageTypes, "datalake")) { + throw obsrvError("", "DATASET_UNSUPPORTED_STORAGE_TYPE", `The storage type "datalake" is not available. Please use one of the available storage types: ${storageTypes}`, "BAD_REQUEST", 400) + } + } + export const datasetService = new DatasetService(); \ No newline at end of file