Skip to content

Commit

Permalink
Merge pull request #303 from Sanketika-Obsrv/data-schema-fix
Browse files Browse the repository at this point in the history
fix: #OBS-I406 read supported storage types from environment variables
  • Loading branch information
HarishGangula authored Jan 2, 2025
2 parents 655bb5f + d107ebd commit b1e9744
Show file tree
Hide file tree
Showing 7 changed files with 25 additions and 6 deletions.
3 changes: 2 additions & 1 deletion api-service/src/configs/Config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -118,5 +118,6 @@ export const config = {
"otel": {
"enable": process.env.otel_enable || "false",
"collector_endpoint": process.env.otel_collector_endpoint || "http://localhost:4318"
}
},
"storage_types": process.env.storage_types || 'druid,datalake'
}
3 changes: 2 additions & 1 deletion api-service/src/controllers/DatasetCopy/DatasetCopy.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { ResponseHandler } from "../../helpers/ResponseHandler";
import * as _ from "lodash";
import { schemaValidation } from "../../services/ValidationService";
import validationSchema from "./RequestValidationSchema.json";
import { datasetService, getLiveDatasetConfigs } from "../../services/DatasetService";
import { datasetService, getLiveDatasetConfigs, validateStorageSupport } from "../../services/DatasetService";
import { updateRecords } from "./DatasetCopyHelper";
import { obsrvError } from "../../types/ObsrvError";

Expand Down Expand Up @@ -40,6 +40,7 @@ const datasetCopy = async (req: Request, res: Response) => {
validateRequest(req);
const newDatasetId = _.get(req, "body.request.destination.datasetId");
const dataset = await fetchDataset(req);
validateStorageSupport(dataset);
const userID = (req as any)?.userID;
_.set(dataset, "created_by", userID);
_.set(dataset, "updated_by", userID);
Expand Down
3 changes: 2 additions & 1 deletion api-service/src/controllers/DatasetCreate/DatasetCreate.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import _ from "lodash";
import { Request, Response } from "express";
import httpStatus from "http-status";
import { datasetService } from "../../services/DatasetService";
import { datasetService, validateStorageSupport } from "../../services/DatasetService";
import DatasetCreate from "./DatasetCreateValidationSchema.json";
import { schemaValidation } from "../../services/ValidationService";
import { ResponseHandler } from "../../helpers/ResponseHandler";
Expand All @@ -28,6 +28,7 @@ const validateRequest = async (req: Request) => {
throw obsrvError(datasetId, "DATASET_DUPLICATE_DENORM_KEY", "Duplicate denorm output fields found.", "BAD_REQUEST", 400, undefined, {duplicateKeys: duplicateDenormKeys})
}

validateStorageSupport(_.get(req, ["body", "request"]))
}

const datasetCreate = async (req: Request, res: Response) => {
Expand Down
3 changes: 2 additions & 1 deletion api-service/src/controllers/DatasetImport/DatasetImport.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { Request, Response } from "express";
import { ResponseHandler } from "../../helpers/ResponseHandler";
import httpStatus from "http-status";
import _ from "lodash";
import { datasetService } from "../../services/DatasetService";
import { datasetService, validateStorageSupport } from "../../services/DatasetService";
import { datasetImportValidation, migrateExportedDatasetV1 } from "./DatasetImportHelper";
import { obsrvError } from "../../types/ObsrvError";

Expand All @@ -21,6 +21,7 @@ const datasetImport = async (req: Request, res: Response) => {
const { updatedDataset, ignoredFields } = await datasetImportValidation({ ...requestBody, "request": datasetPayload })
const { successMsg, partialIgnored } = getResponseData(ignoredFields)

validateStorageSupport(updatedDataset);
const dataset = await importDataset(updatedDataset, overwrite, userID);
ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { message: successMsg, data: dataset, ...(!_.isEmpty(partialIgnored) && { ignoredFields: partialIgnored }) } });
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { Request, Response } from "express";
import _ from "lodash";
import { ResponseHandler } from "../../helpers/ResponseHandler";
import { datasetService } from "../../services/DatasetService";
import { datasetService, validateStorageSupport } from "../../services/DatasetService";
import { schemaValidation } from "../../services/ValidationService";
import StatusTransitionSchema from "./RequestValidationSchema.json";
import ReadyToPublishSchema from "./ReadyToPublishSchema.json"
Expand Down Expand Up @@ -88,6 +88,7 @@ const deleteDataset = async (dataset: Record<string, any>) => {
const readyForPublish = async (dataset: Record<string, any>, updated_by: any) => {

const draftDataset: any = await datasetService.getDraftDataset(dataset.dataset_id)
validateStorageSupport(draftDataset);
let defaultConfigs: any = _.cloneDeep(defaultDatasetConfig)
defaultConfigs = _.omit(defaultConfigs, ["router_config"])
defaultConfigs = _.omit(defaultConfigs, "dedup_config.dedup_key");
Expand Down Expand Up @@ -136,6 +137,7 @@ const readyForPublish = async (dataset: Record<string, any>, updated_by: any) =>
const publishDataset = async (dataset: Record<string, any>, userID: any) => {

const draftDataset: Record<string, any> = await datasetService.getDraftDataset(dataset.dataset_id) as unknown as Record<string, any>
validateStorageSupport(draftDataset);
_.set(draftDataset, ["created_by"], userID);
_.set(draftDataset, ["updated_by"], userID);
await validateAndUpdateDenormConfig(draftDataset);
Expand Down
3 changes: 2 additions & 1 deletion api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import Model from "sequelize/types/model";
import { DatasetStatus } from "../../types/DatasetModels";
import { ResponseHandler } from "../../helpers/ResponseHandler";
import { cipherService } from "../../services/CipherService";
import { datasetService } from "../../services/DatasetService";
import { datasetService, validateStorageSupport } from "../../services/DatasetService";
import { schemaValidation } from "../../services/ValidationService";
import DatasetUpdate from "./DatasetUpdateValidationSchema.json";
import { obsrvError } from "../../types/ObsrvError";
Expand All @@ -30,6 +30,7 @@ const validateRequest = async (req: Request) => {
throw obsrvError(datasetId, "DATASET_UPDATE_NO_FIELDS", "Provide atleast one field in addition to the dataset_id to update the dataset", "BAD_REQUEST", 400)
}

validateStorageSupport(_.get(req, ["body", "request"]))
}

const validateDataset = (dataset: Record<string, any> | null, req: Request) => {
Expand Down
12 changes: 12 additions & 0 deletions api-service/src/services/DatasetService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import { obsrvError } from "../types/ObsrvError";
import { druidHttpService } from "../connections/druidConnection";
import { tableGenerator } from "./TableGenerator";
import { deleteAlertByDataset, deleteMetricAliasByDataset } from "./managers";
import { config } from "../configs/Config";

class DatasetService {

Expand Down Expand Up @@ -411,4 +412,15 @@ export const getV1Connectors = async (datasetId: string) => {
return modifiedV1Connectors;
}

// Storage backends enabled for this deployment (from the `storage_types` env var,
// defaulting to "druid,datalake" in Config). Each entry is trimmed so values like
// "druid, datalake" — with spaces after the commas — are also recognized.
const storageTypes = _.map(_.split(config.storage_types, ","), _.trim)

/**
 * Validates that the storage backends requested by a dataset payload are
 * enabled in this deployment.
 *
 * Reads `dataset_config.indexing_config` from the payload (which may omit
 * `dataset_config` entirely — the check is then a no-op) and rejects the
 * request when a disabled backend is requested:
 *  - `olap_store_enabled` requires "druid" in the configured storage types
 *  - `lakehouse_enabled` requires "datalake" in the configured storage types
 *
 * @param dataset draft/live dataset payload or request body fragment
 * @throws obsrvError with code DATASET_UNSUPPORTED_STORAGE_TYPE (HTTP 400)
 *         when a requested storage type is not available
 */
export const validateStorageSupport = (dataset: Record<string, any>) => {
    const { olap_store_enabled, lakehouse_enabled } = _.get(dataset, ["dataset_config", "indexing_config"]) || {}
    if (olap_store_enabled && !_.includes(storageTypes, "druid")) {
        throw obsrvError("", "DATASET_UNSUPPORTED_STORAGE_TYPE", `The storage type "olap_store" is not available. Please use one of the available storage types: ${storageTypes}`, "BAD_REQUEST", 400)
    }
    if (lakehouse_enabled && !_.includes(storageTypes, "datalake")) {
        throw obsrvError("", "DATASET_UNSUPPORTED_STORAGE_TYPE", `The storage type "datalake" is not available. Please use one of the available storage types: ${storageTypes}`, "BAD_REQUEST", 400)
    }
}

export const datasetService = new DatasetService();

0 comments on commit b1e9744

Please sign in to comment.