diff --git a/api-service/src/app.ts b/api-service/src/app.ts index 962331e9..6615ad74 100644 --- a/api-service/src/app.ts +++ b/api-service/src/app.ts @@ -22,7 +22,7 @@ app.use("/v2/", v2Router); app.use("/", druidProxyRouter); app.use("/alerts/v1", alertsRouter); app.use("/", metricRouter); -app.use("*", ResponseHandler.routeNotFound); +app.use(/(.*)/, ResponseHandler.routeNotFound); app.use(obsrvErrorHandler); app.listen(config.api_port, () => { diff --git a/api-service/src/controllers/Alerts/Alerts.ts b/api-service/src/controllers/Alerts/Alerts.ts index 278ac586..76bd9c3a 100644 --- a/api-service/src/controllers/Alerts/Alerts.ts +++ b/api-service/src/controllers/Alerts/Alerts.ts @@ -13,6 +13,9 @@ const telemetryObject = { type: "alert", ver: "1.0.0" }; const createAlertHandler = async (req: Request, res: Response, next: NextFunction) => { try { const alertPayload = getAlertPayload(req.body); + const userID = (req as any)?.userID; + _.set(alertPayload, "created_by", userID); + _.set(alertPayload, "updated_by", userID); const response = await Alert.create(alertPayload); updateTelemetryAuditEvent({ request: req, object: { id: response?.dataValues?.id, ...telemetryObject } }); ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { id: response.dataValues.id } }); @@ -28,8 +31,11 @@ const createAlertHandler = async (req: Request, res: Response, next: NextFunctio const publishAlertHandler = async (req: Request, res: Response, next: NextFunction) => { try { const { alertId } = req.params; - const rulePayload: Record<string, any> | null = await getAlertRule(alertId); - if (!rulePayload) return next({ message: httpStatus[httpStatus.NOT_FOUND], statusCode: httpStatus.NOT_FOUND }); + const ruleModel: Record<string, any> | null = await getAlertRule(alertId); + if (!ruleModel) return next({ message: httpStatus[httpStatus.NOT_FOUND], statusCode: httpStatus.NOT_FOUND }); + const rulePayload = ruleModel.toJSON(); + const userID = (req as any)?.userID; + _.set(rulePayload, "updated_by", userID); if (rulePayload.status == "live") { await deleteAlertRule(rulePayload, false); } @@ -87,6 +93,8 @@ const deleteAlertHandler = async (req: Request, res: Response, next: NextFunctio return next({ message: httpStatus[httpStatus.NOT_FOUND], statusCode: httpStatus.NOT_FOUND }); } const rulePayload = ruleModel.toJSON(); + const userID = (req as any)?.userID || "SYSTEM"; + _.set(rulePayload, "updated_by", userID); await deleteAlertRule(rulePayload, hardDelete === "true"); updateTelemetryAuditEvent({ request: req, currentRecord: rulePayload, object: { id: alertId, ...telemetryObject } }); ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { id: alertId } }); @@ -103,12 +111,14 @@ const updateAlertHandler = async (req: Request, res: Response, next: NextFunctio const ruleModel = await getAlertRule(alertId); if (!ruleModel) { return next({ message: httpStatus[httpStatus.NOT_FOUND], statusCode: httpStatus.NOT_FOUND }) } const rulePayload = ruleModel.toJSON(); + const userID = (req as any)?.userID; if (rulePayload.status == "live") { + _.set(rulePayload, "updated_by", userID); await deleteAlertRule(rulePayload, false); await retireAlertSilence(alertId); } const updatedPayload = getAlertPayload({ ...req.body, manager: rulePayload?.manager }); - await Alert.update({ ...updatedPayload, status: "draft" }, { where: { id: alertId } }); + await Alert.update({ ...updatedPayload, status: "draft", updated_by: userID }, { where: { id: alertId } }); updateTelemetryAuditEvent({ request: req, currentRecord: rulePayload,
object: { id: alertId, ...telemetryObject } }); ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { id: alertId } }); } catch (error: any) { diff --git a/api-service/src/controllers/Alerts/Silence.ts b/api-service/src/controllers/Alerts/Silence.ts index 32e2c531..f424465a 100644 --- a/api-service/src/controllers/Alerts/Silence.ts +++ b/api-service/src/controllers/Alerts/Silence.ts @@ -20,12 +20,15 @@ const createHandler = async (request: Request, response: Response, next: NextFun const start_date = new Date(startDate); const end_date = new Date(endDate); + const userID = (request as any)?.userID; const silenceBody = { id: grafanaResponse.silenceId, manager: grafanaResponse.manager, alert_id: alertId, start_time: start_date, end_time: end_date, + created_by : userID, + updated_by : userID, } const sileneResponse = await Silence.create(silenceBody); updateTelemetryAuditEvent({ request, object: { id: sileneResponse?.dataValues?.id, ...telemetryObject } }); @@ -78,10 +81,12 @@ const updateHandler = async (request: Request, response: Response, next: NextFun await updateSilence(silenceObject, payload); const updatedStartTime = new Date(payload.startTime); const updatedEndTime = new Date(payload.endTime); + const userID = (request as any)?.userID; const updatedSilence = { ...silenceObject, start_time: updatedStartTime, - end_time: updatedEndTime + end_time: updatedEndTime, + updated_by: userID, } const silenceResponse = await Silence.update(updatedSilence, { where: { id } }) ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { silenceResponse } }) diff --git a/api-service/src/controllers/CreateQueryTemplate/CreateTemplateController.ts b/api-service/src/controllers/CreateQueryTemplate/CreateTemplateController.ts index 90808e7d..c7fffa29 100644 --- a/api-service/src/controllers/CreateQueryTemplate/CreateTemplateController.ts +++ b/api-service/src/controllers/CreateQueryTemplate/CreateTemplateController.ts @@ -42,6 +42,9 @@ export const createQueryTemplate = async (req: Request, res: Response) => { } const data = transformRequest(requestBody, templateName); + const userID = (req as any)?.userID; + _.set(data, "created_by", userID); + _.set(data, "updated_by", userID); await QueryTemplate.create(data) logger.info({ apiId, msgid, resmsgid, requestBody: req?.body, message: `Query template created successfully` }) return ResponseHandler.successResponse(req, res, { status: 200, data: { template_id: templateId, template_name: templateName, message: `The query template has been saved successfully` } }); diff --git a/api-service/src/controllers/DataOut/QueryValidator.ts b/api-service/src/controllers/DataOut/QueryValidator.ts index e569bdb3..4c47a90e 100644 --- a/api-service/src/controllers/DataOut/QueryValidator.ts +++ b/api-service/src/controllers/DataOut/QueryValidator.ts @@ -4,10 +4,11 @@ import * as _ from "lodash"; import moment from "moment"; import { getDatasourceList } from "../../services/DatasourceService"; import logger from "../../logger"; -import { getDatasourceListFromDruid } from "../../connections/druidConnection"; +import { druidHttpService, getDatasourceListFromDruid } from "../../connections/druidConnection"; import { apiId } from "./DataOutController"; import { ErrorObject } from "../../types/ResponseModel"; import { Parser } from "node-sql-parser"; +import { obsrvError } from "../../types/ObsrvError"; const parser = new Parser(); const momentFormat = "YYYY-MM-DD HH:MM:SS"; @@ -15,7 +16,7 @@ let dataset_id: string; let requestBody: 
any; let msgid: string; const errCode = { - notFound: "DATA_OUT_SOURCE_NOT_FOUND", + notFound: "DATASOURCE_NOT_FOUND", invalidDateRange: "DATA_OUT_INVALID_DATE_RANGE" } @@ -155,23 +156,41 @@ const validateQueryRules = (queryPayload: any, limits: any) => { : { message: "Invalid date range! the date range cannot be a null value", statusCode: 400, errCode: "BAD_REQUEST", code: errCode.invalidDateRange }; }; -const getDataSourceRef = async (datasetId: string, granularity?: string) => { +const getDataSourceRef = async (datasetId: string, requestGranularity?: string) => { const dataSources = await getDatasourceList(datasetId) if (_.isEmpty(dataSources)) { logger.error({ apiId, requestBody, msgid, dataset_id, message: `Datasource ${datasetId} not available in datasource live table`, code: errCode.notFound }) throw { message: `Datasource ${datasetId} not available for querying`, statusCode: 404, errCode: "NOT_FOUND", code: errCode.notFound } as ErrorObject; } - const record = dataSources.filter((record: any) => { - const aggregatedRecord = _.get(record, "dataValues.metadata.aggregated") - if (granularity) - return aggregatedRecord && _.get(record, "dataValues.metadata.granularity") === granularity; + const record = dataSources.find((record: any) => { + const metadata = _.get(record, "dataValues.metadata", {}); + const { aggregated, granularity } = metadata; + if (!aggregated) { + return true; + } + return aggregated && requestGranularity ? granularity === requestGranularity : false; }); - return record[0]?.dataValues?.datasource_ref + return _.get(record, ["dataValues", "datasource_ref"]) } + +const checkSupervisorAvailability = async (datasourceRef: string) => { + const { data } = await druidHttpService.get("/druid/coordinator/v1/loadstatus"); + const datasourceAvailability = _.get(data, datasourceRef) + if (_.isUndefined(datasourceAvailability)) { + throw obsrvError("", "DATASOURCE_NOT_AVAILABLE", "Datasource not available for querying", "NOT_FOUND", 404) + } + if (datasourceAvailability !== 100) { + throw obsrvError("", "DATASOURCE_NOT_FULLY_AVAILABLE", "Datasource not fully available for querying", "RANGE_NOT_SATISFIABLE", 416) + } } const setDatasourceRef = async (datasetId: string, payload: any): Promise<any> => { const granularity = _.get(payload, "context.aggregationLevel") const datasourceRef = await getDataSourceRef(datasetId, granularity); + if (!datasourceRef) { + throw obsrvError("", "DATASOURCE_NOT_FOUND", "Datasource not found to query", "NOT_FOUND", 404) + } + await checkSupervisorAvailability(datasourceRef) const existingDatasources = await getDatasourceListFromDruid(); if (!_.includes(existingDatasources.data, datasourceRef)) { diff --git a/api-service/src/controllers/DatasetCopy/DatasetCopy.ts b/api-service/src/controllers/DatasetCopy/DatasetCopy.ts index 9308dd71..9015e9c7 100644 --- a/api-service/src/controllers/DatasetCopy/DatasetCopy.ts +++ b/api-service/src/controllers/DatasetCopy/DatasetCopy.ts @@ -40,6 +40,9 @@ const datasetCopy = async (req: Request, res: Response) => { validateRequest(req); const newDatasetId = _.get(req, "body.request.destination.datasetId"); const dataset = await fetchDataset(req); + const userID = (req as any)?.userID; + _.set(dataset, "created_by", userID); + _.set(dataset, "updated_by", userID); updateRecords(dataset, newDatasetId) const response = await datasetService.createDraftDataset(dataset).catch(err => { if (err?.name === "SequelizeUniqueConstraintError") {
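The checkSupervisorAvailability guard added to QueryValidator.ts above keys off Druid's coordinator loadstatus API, which reports per-datasource segment load percentages. A minimal standalone sketch of the same contract, assuming a local coordinator URL and plain axios in place of the service's druidHttpService wrapper:

import axios from "axios";

// Hypothetical coordinator base URL; the real service goes through druidHttpService.
const druid = axios.create({ baseURL: "http://localhost:8081" });

const assertQueryable = async (datasourceRef: string): Promise<void> => {
    // GET /druid/coordinator/v1/loadstatus returns { [datasource]: percentLoaded }
    const { data } = await druid.get("/druid/coordinator/v1/loadstatus");
    const availability = data[datasourceRef];
    if (availability === undefined) {
        throw new Error("DATASOURCE_NOT_AVAILABLE"); // surfaced as 404 in the API
    }
    if (availability !== 100) {
        throw new Error("DATASOURCE_NOT_FULLY_AVAILABLE"); // surfaced as 416 in the API
    }
};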
diff --git a/api-service/src/controllers/DatasetCreate/DatasetCreate.ts b/api-service/src/controllers/DatasetCreate/DatasetCreate.ts index 2867457b..bb96dac2 100644 --- a/api-service/src/controllers/DatasetCreate/DatasetCreate.ts +++ b/api-service/src/controllers/DatasetCreate/DatasetCreate.ts @@ -34,6 +34,9 @@ const datasetCreate = async (req: Request, res: Response) => { await validateRequest(req) const draftDataset = getDraftDataset(req.body.request) + const userID = (req as any)?.userID; + _.set(draftDataset, "created_by", userID); + _.set(draftDataset, "updated_by", userID); const dataset = await datasetService.createDraftDataset(draftDataset); ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: dataset }); } diff --git a/api-service/src/controllers/DatasetImport/DatasetImport.ts b/api-service/src/controllers/DatasetImport/DatasetImport.ts index e390d8bd..2d0312b5 100644 --- a/api-service/src/controllers/DatasetImport/DatasetImport.ts +++ b/api-service/src/controllers/DatasetImport/DatasetImport.ts @@ -15,18 +15,22 @@ const datasetImport = async (req: Request, res: Response) => { const migratedConfigs = migrateExportedDatasetV1(requestBody) datasetPayload = migratedConfigs; } + const userID = (req as any)?.userID; + _.set(datasetPayload, "created_by", userID); + _.set(datasetPayload, "updated_by", userID); const { updatedDataset, ignoredFields } = await datasetImportValidation({ ...requestBody, "request": datasetPayload }) const { successMsg, partialIgnored } = getResponseData(ignoredFields) - const dataset = await importDataset(updatedDataset, overwrite); + const dataset = await importDataset(updatedDataset, overwrite, userID); ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { message: successMsg, data: dataset, ...(!_.isEmpty(partialIgnored) && { ignoredFields: partialIgnored }) } }); } -const importDataset = async (dataset: Record<string, any>, overwrite: string | any) => { +const importDataset = async (dataset: Record<string, any>, overwrite: string | any, userID : string) => { const dataset_id = _.get(dataset,"dataset_id") const response = await datasetService.createDraftDataset(dataset).catch(err => { return err }) if (response?.name === "SequelizeUniqueConstraintError") { if (overwrite === "true") { + _.set(dataset, "updated_by", userID); const overwriteRes = await datasetService.updateDraftDataset(dataset).catch(()=>{ throw obsrvError(dataset_id, "DATASET_IMPORT_FAILURE", `Failed to import dataset: ${dataset_id} as overwrite failed`, "INTERNAL_SERVER_ERROR", 500); })
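The import path above threads userID through to importDataset, which retries as an update when the draft already exists. A condensed sketch of that create-or-overwrite flow, with the dataset service injected as a plain interface so the snippet stands alone (the DATASET_EXISTS error is a placeholder for the service's duplicate-dataset handling):

type DraftService = {
    createDraftDataset: (dataset: Record<string, any>) => Promise<any>;
    updateDraftDataset: (dataset: Record<string, any>) => Promise<any>;
};

// Create the draft; on a unique-constraint collision, fall back to an update
// when overwrite === "true", stamping updated_by while created_by is left intact.
const importDraft = async (service: DraftService, dataset: Record<string, any>, overwrite: string, userID: string) => {
    const response = await service.createDraftDataset(dataset).catch((err: any) => err);
    if (response?.name === "SequelizeUniqueConstraintError") {
        if (overwrite !== "true") {
            throw new Error("DATASET_EXISTS"); // placeholder error
        }
        dataset.updated_by = userID;
        return service.updateDraftDataset(dataset);
    }
    return response;
};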
diff --git a/api-service/src/controllers/DatasetRead/DatasetRead.ts b/api-service/src/controllers/DatasetRead/DatasetRead.ts index e1853e57..d428b02a 100644 --- a/api-service/src/controllers/DatasetRead/DatasetRead.ts +++ b/api-service/src/controllers/DatasetRead/DatasetRead.ts @@ -30,8 +30,9 @@ const datasetRead = async (req: Request, res: Response) => { validateRequest(req); const { dataset_id } = req.params; const { fields, mode } = req.query; + const userID = (req as any)?.userID; const attributes = !fields ? defaultFields : _.split(fields, ","); - const dataset = (mode == "edit") ? await readDraftDataset(dataset_id, attributes) : await readDataset(dataset_id, attributes) + const dataset = (mode == "edit") ? await readDraftDataset(dataset_id, attributes, userID) : await readDataset(dataset_id, attributes) if (!dataset) { throw obsrvError(dataset_id, "DATASET_NOT_FOUND", `Dataset with the given dataset_id:${dataset_id} not found`, "NOT_FOUND", 404); } @@ -41,19 +42,19 @@ const datasetRead = async (req: Request, res: Response) => { ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: dataset }); } -const readDraftDataset = async (datasetId: string, attributes: string[]): Promise<any> => { +const readDraftDataset = async (datasetId: string, attributes: string[], userID: string): Promise<any> => { const attrs = _.union(attributes, ["dataset_config", "api_version", "type", "id"]) const draftDataset = await datasetService.getDraftDataset(datasetId, attrs); if (draftDataset) { // Contains a draft const apiVersion = _.get(draftDataset, ["api_version"]); - const dataset: any = (apiVersion === "v2") ? draftDataset : await datasetService.migrateDraftDataset(datasetId, draftDataset) + const dataset: any = (apiVersion === "v2") ? draftDataset : await datasetService.migrateDraftDataset(datasetId, draftDataset, userID) return _.pick(dataset, attributes); } const liveDataset = await datasetService.getDataset(datasetId, undefined, true); if (liveDataset) { - const dataset = await datasetService.createDraftDatasetFromLive(liveDataset) + const dataset = await datasetService.createDraftDatasetFromLive(liveDataset, userID) return _.pick(dataset, attributes); } diff --git a/api-service/src/controllers/DatasetStatusTransition/DatasetStatusTransition.ts b/api-service/src/controllers/DatasetStatusTransition/DatasetStatusTransition.ts index c2a18021..4de7a474 100644 --- a/api-service/src/controllers/DatasetStatusTransition/DatasetStatusTransition.ts +++ b/api-service/src/controllers/DatasetStatusTransition/DatasetStatusTransition.ts @@ -18,11 +18,9 @@ const allowedTransitions: Record<string, any> = { Delete: [DatasetStatus.Draft, DatasetStatus.ReadyToPublish], ReadyToPublish: [DatasetStatus.Draft], Live: [DatasetStatus.ReadyToPublish], - Retire: [DatasetStatus.Live], - Archive: [DatasetStatus.Retired], - Purge: [DatasetStatus.Archived] + Retire: [DatasetStatus.Live] } -const liveDatasetActions = ["Retire", "Archive", "Purge"] +const liveDatasetActions = ["Retire"] const validateRequest = (req: Request, datasetId: any) => { const isRequestValid: Record<string, any> = schemaValidation(req.body, StatusTransitionSchema) @@ -54,7 +52,8 @@ const datasetStatusTransition = async (req: Request, res: Response) => { const { dataset_id, status } = _.get(req.body, "request"); validateRequest(req, dataset_id); - const dataset: Record<string, any> = (_.includes(liveDatasetActions, status)) ? await datasetService.getDataset(dataset_id, ["id", "status", "type", "api_version"], true) : await datasetService.getDraftDataset(dataset_id, ["id", "dataset_id", "status", "type", "api_version"]) + const dataset: Record<string, any> = (_.includes(liveDatasetActions, status)) ?
await datasetService.getDataset(dataset_id, ["id", "status", "type", "api_version", "name"], true) : await datasetService.getDraftDataset(dataset_id, ["id", "dataset_id", "status", "type", "api_version"]) + const userID = (req as any)?.userID; validateDataset(dataset, dataset_id, status); switch (status) { @@ -62,20 +61,16 @@ const datasetStatusTransition = async (req: Request, res: Response) => { await deleteDataset(dataset); break; case "ReadyToPublish": - await readyForPublish(dataset); + await readyForPublish(dataset, userID); break; case "Live": - await publishDataset(dataset); + await publishDataset(dataset, userID); break; case "Retire": - await retireDataset(dataset); - break; - case "Archive": - await archiveDataset(dataset); - break; - case "Purge": - await purgeDataset(dataset); + await retireDataset(dataset, userID); break; + default: + throw obsrvError(dataset.id, "UNKNOWN_STATUS_TRANSITION", "Unknown status transition requested", "BAD_REQUEST", 400) } ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { message: `Dataset status transition to ${status} successful`, dataset_id } }); @@ -90,7 +85,7 @@ const deleteDataset = async (dataset: Record<string, any>) => { } -const readyForPublish = async (dataset: Record<string, any>) => { +const readyForPublish = async (dataset: Record<string, any>, updated_by: any) => { const draftDataset: any = await datasetService.getDraftDataset(dataset.dataset_id) let defaultConfigs: any = _.cloneDeep(defaultDatasetConfig) @@ -99,7 +94,14 @@ const readyForPublish = async (dataset: Record<string, any>) => { if (draftDataset?.type === "master") { defaultConfigs = _.omit(defaultConfigs, "dataset_config.keys_config.data_key"); } - _.mergeWith(draftDataset, defaultConfigs, draftDataset, (objValue, srcValue) => { + _.set(draftDataset, "updated_by", updated_by); + _.mergeWith(draftDataset, defaultConfigs, draftDataset, (objValue, srcValue ,key) => { + if (key === "created_by"|| key === "updated_by") { + if (objValue !== "SYSTEM") { + return objValue; + } + return srcValue; + } if (_.isBoolean(objValue) && _.isBoolean(srcValue)) { return objValue; } @@ -126,10 +128,11 @@ const readyForPublish = async (dataset: Record<string, any>) => { * * @param dataset */ -const publishDataset = async (dataset: Record<string, any>) => { +const publishDataset = async (dataset: Record<string, any>, userID: any) => { const draftDataset: Record<string, any> = await datasetService.getDraftDataset(dataset.dataset_id) as unknown as Record<string, any> - + _.set(draftDataset, ["created_by"], userID); + _.set(draftDataset, ["updated_by"], userID); await validateAndUpdateDenormConfig(draftDataset); await updateMasterDataConfig(draftDataset) await datasetService.publishDataset(draftDataset) @@ -208,10 +211,10 @@ const updateMasterDataConfig = async (draftDataset: Record<string, any>) => { } } -const retireDataset = async (dataset: Record<string, any>) => { +const retireDataset = async (dataset: Record<string, any>, updated_by: any) => { await canRetireIfMasterDataset(dataset); - await datasetService.retireDataset(dataset); + await datasetService.retireDataset(dataset, updated_by); await restartPipeline(dataset); } @@ -239,14 +242,4 @@ export const restartPipeline = async (dataset: Record<string, any>) => { return executeCommand(dataset.id, "RESTART_PIPELINE") } -const archiveDataset = async (dataset: Record<string, any>) => { - - throw obsrvError(dataset.id, "ARCHIVE_NOT_IMPLEMENTED", "Archive functionality is not implemented", "NOT_IMPLEMENTED", 501) -} - -const purgeDataset = async (dataset: Record<string, any>) => { - - throw obsrvError(dataset.id, "PURGE_NOT_IMPLEMENTED", "Purge functionality is not implemented", "NOT_IMPLEMENTED", 501) -} - export default datasetStatusTransition; \ No newline at end of file
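One subtlety in the readyForPublish change above: the _.mergeWith customizer now special-cases the audit columns so that merging in defaultDatasetConfig cannot overwrite a real user with "SYSTEM". A self-contained lodash sketch of that behaviour, with illustrative values:

import _ from "lodash";

const draft: Record<string, any> = { created_by: "user-123", updated_by: "user-123", validate: true };
const defaults: Record<string, any> = { created_by: "SYSTEM", updated_by: "SYSTEM", validate: false };

// Same customizer shape as readyForPublish: keep a non-SYSTEM audit value,
// otherwise take the incoming one; boolean fields always keep the draft's value.
_.mergeWith(draft, defaults, draft, (objValue, srcValue, key) => {
    if (key === "created_by" || key === "updated_by") {
        return objValue !== "SYSTEM" ? objValue : srcValue;
    }
    if (_.isBoolean(objValue) && _.isBoolean(srcValue)) {
        return objValue;
    }
});

// draft.created_by is still "user-123" and draft.validate is still true.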
diff --git a/api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts b/api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts index 274ad3c4..7277b1b8 100644 --- a/api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts +++ b/api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts @@ -59,6 +59,8 @@ const datasetUpdate = async (req: Request, res: Response) => { validateDataset(datasetModel, req) const draftDataset = mergeDraftDataset(datasetModel, datasetReq); + const userID = (req as any)?.userID; + _.set(draftDataset, "updated_by", userID ) const response = await datasetService.updateDraftDataset(draftDataset); ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: response }); } diff --git a/api-service/src/controllers/NotificationChannel/Notification.ts b/api-service/src/controllers/NotificationChannel/Notification.ts index f6db4fd4..d8850436 100644 --- a/api-service/src/controllers/NotificationChannel/Notification.ts +++ b/api-service/src/controllers/NotificationChannel/Notification.ts @@ -12,6 +12,9 @@ const telemetryObject = { type: "notificationChannel", ver: "1.0.0" }; const createHandler = async (request: Request, response: Response, next: NextFunction) => { try { const payload = request.body; + const userID = (request as any)?.userID; + _.set(payload, "created_by", userID); + _.set(payload, "updated_by", userID); const notificationBody = await Notification.create(payload); updateTelemetryAuditEvent({ request, object: { id: notificationBody?.dataValues?.id, ...telemetryObject } }); ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { id: notificationBody.dataValues.id } }) @@ -32,6 +35,8 @@ const updateHandler = async (request: Request, response: Response, next: NextFun if (_.get(notificationPayload, "status") === "live") { await updateNotificationChannel(notificationPayload); } + const userID = (request as any)?.userID; + _.set(updatedPayload, "updated_by", userID); await Notification.update({ ...updatedPayload, status: "draft" }, { where: { id } }); ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { id } }); } catch (err) { @@ -74,7 +79,8 @@ const retireHandler = async (request: Request, response: Response, next: NextFun if (!notificationPayload) return next({ message: httpStatus[httpStatus.NOT_FOUND], statusCode: httpStatus.NOT_FOUND }); updateTelemetryAuditEvent({ request, object: { id, ...telemetryObject }, currentRecord: notificationPayload }); await updateNotificationChannel(notificationPayload); - await Notification.update({ status: "retired" }, { where: { id } }) + const userID = (request as any)?.userID; + await Notification.update({ status: "retired", updated_by: userID }, { where: { id } }) ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { id } }); } catch (err) { const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) @@ -91,7 +97,8 @@ const publishHandler = async (request: Request, response: Response, next: NextFu if (notificationPayload.status === "live") throw new Error(httpStatus[httpStatus.CONFLICT]); updateTelemetryAuditEvent({ request, object: { id, ...telemetryObject }, currentRecord: notificationPayload }); await publishNotificationChannel(notificationPayload); - Notification.update({ status: "live" }, { where: { id } }); + const userID = (request as any)?.userID; + Notification.update({
status: "live", updated_by: userID }, { where: { id } }); ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { id, status: "published" } }); } catch (err) { const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) diff --git a/api-service/src/controllers/UpdateQueryTemplate/UpdateTemplateController.ts b/api-service/src/controllers/UpdateQueryTemplate/UpdateTemplateController.ts index 114d370e..350b25f7 100644 --- a/api-service/src/controllers/UpdateQueryTemplate/UpdateTemplateController.ts +++ b/api-service/src/controllers/UpdateQueryTemplate/UpdateTemplateController.ts @@ -38,7 +38,8 @@ export const updateQueryTemplate = async (req: Request, res: Response) => { logger.error({ apiId, msgid, resmsgid, templateId, requestBody: req?.body, message: `Invalid template provided, A template should consist of variables ${requiredVariables} and type of json,sql`, code: "QUERY_TEMPLATE_INVALID_INPUT" }) return ResponseHandler.errorResponse({ statusCode: 400, message: `Invalid template provided, A template should consist of variables ${requiredVariables} and type of json,sql`, errCode: "BAD_REQUEST", code: "QUERY_TEMPLATE_INVALID_INPUT" }, req, res) } - + const userID = (req as any)?.userID; + requestBody.request.updated_by = userID; await QueryTemplate.update(requestBody?.request, { where: { template_id: templateId } }) logger.info({ apiId, msgid, resmsgid, templateId, requestBody, message: `Query template updated successfully` }) ResponseHandler.successResponse(req, res, { status: 200, data: { message: "Query template updated successfully", templateId } }); diff --git a/api-service/src/middlewares/RBAC_middleware.ts b/api-service/src/middlewares/RBAC_middleware.ts index d36babf0..b8d4032d 100644 --- a/api-service/src/middlewares/RBAC_middleware.ts +++ b/api-service/src/middlewares/RBAC_middleware.ts @@ -3,120 +3,43 @@ import jwt from "jsonwebtoken"; import { ResponseHandler } from "../helpers/ResponseHandler"; import { config } from "../configs/Config"; import _ from "lodash"; - -enum roles { - Admin = "admin", - DatasetManager = "dataset_manager", - Viewer = "viewer", - DatasetCreator = "dataset_creator", - Ingestor = "ingestor", +import userPermissions from "./userPermissions.json"; +import httpStatus from "http-status"; +interface AccessControl { + apiGroups : { + [key: string]: string[]; + }, + roles : { + [key: string]: string[]; + } } -enum permissions { - DatasetCreate = "api.datasets.create", - DatasetUpdate = "api.datasets.update", - DatasetRead = "api.datasets.read", - DatasetList = "api.datasets.list", - DataIngest = "api.data.in", - DataOut = "api.data.out", - DataExhaust = "api.data.exhaust", - QueryTemplateCreate = "api.query.template.create", - QueryTemplateRead = "api.query.template.read", - QueryTemplateDelete = "api.query.template.delete", - QueryTemplateList = "api.query.template.list", - QueryTemplateUpdate = "api.query.template.update", - QueryTemplate = "api.query.template.query", - SchemaValidator = "api.schema.validator", - GenerateSignedUrl = "api.files.generate-url", - DatasetStatusTransition = "api.datasets.status-transition", - DatasetHealth = "api.dataset.health", - DatasetReset = "api.dataset.reset", - DatasetSchemaGenerator = "api.datasets.dataschema", - DatasetExport = "api.datasets.export", - DatasetCopy = "api.datasets.copy", - ConnectorList = "api.connectors.list", - ConnectorRead = "api.connectors.read", - DatasetImport = "api.datasets.import", - SQLQuery = 
"api.obsrv.data.sql-query", -} +const accessControl: AccessControl = userPermissions; -interface AccessControl { - [key: string]: string[]; -} +const errorHandler = (statusCode: number, message: string, req: Request, res: Response) => { + const errorMapping: Record = { + 401: { + errCode: httpStatus["401_NAME"], + code: "UNAUTHORIZED ACCESS", + }, + 403: { + errCode: httpStatus["403_NAME"], + code: "FORBIDDEN ACCESS", + }, + }; + + const { errCode, code } = errorMapping[statusCode]; -const accessControl: AccessControl = { - [roles.Ingestor]: [permissions.DataIngest], - [roles.Viewer]: [ - permissions.DatasetList, - permissions.DatasetRead, - permissions.DatasetExport, - permissions.ConnectorRead, - permissions.SQLQuery, - permissions.DataOut, - permissions.DataExhaust, - ], - [roles.DatasetCreator]: [ - permissions.DatasetList, - permissions.DatasetRead, - permissions.DatasetExport, - permissions.ConnectorRead, - permissions.SQLQuery, - permissions.DataOut, - permissions.DataExhaust, - permissions.DatasetImport, - permissions.DatasetCreate, - permissions.DatasetUpdate, - permissions.DatasetCopy, - permissions.QueryTemplateCreate, - permissions.QueryTemplateRead, - permissions.QueryTemplateDelete, - permissions.GenerateSignedUrl, - permissions.SchemaValidator, - permissions.DatasetSchemaGenerator, - ], - [roles.DatasetManager]: [ - permissions.DatasetList, - permissions.DatasetRead, - permissions.DatasetExport, - permissions.ConnectorRead, - permissions.SQLQuery, - permissions.DataOut, - permissions.DataExhaust, - permissions.DatasetImport, - permissions.DatasetCreate, - permissions.DatasetUpdate, - permissions.DatasetCopy, - permissions.QueryTemplateCreate, - permissions.QueryTemplateRead, - permissions.QueryTemplateDelete, - permissions.GenerateSignedUrl, - permissions.SchemaValidator, - permissions.DatasetSchemaGenerator, - permissions.DatasetReset, - permissions.DatasetStatusTransition, - ], - [roles.Admin]: [ - permissions.DatasetCreate, - permissions.DatasetList, - permissions.DatasetRead, - permissions.DatasetExport, - permissions.ConnectorRead, - permissions.SQLQuery, - permissions.DataOut, - permissions.DataExhaust, - permissions.DatasetImport, - permissions.DatasetCreate, - permissions.DatasetUpdate, - permissions.DatasetCopy, - permissions.QueryTemplateCreate, - permissions.QueryTemplateRead, - permissions.QueryTemplateDelete, - permissions.GenerateSignedUrl, - permissions.SchemaValidator, - permissions.DatasetSchemaGenerator, - permissions.DatasetReset, - permissions.DatasetStatusTransition, - ], + return ResponseHandler.errorResponse( + { + statusCode, + errCode, + message, + code, + }, + req, + res + ); }; export default { @@ -124,49 +47,46 @@ export default { handler: () => (req: Request, res: Response, next: NextFunction) => { try { if (_.lowerCase(config.is_RBAC_enabled) === "false") { + (req as any).userID = "SYSTEM"; next(); } else { const public_key = config.user_token_public_key; const token = req.get("x-user-token"); if (!token) { - return ResponseHandler.errorResponse( - { - statusCode: 403, - errCode: "FORBIDDEN", - message: "No token provided", - }, - req, - res - ); + return errorHandler(401, "No token provided", req, res); } jwt.verify(token as string, public_key, (err, decoded) => { if (err) { - return ResponseHandler.errorResponse( - { - statusCode: 403, - errCode: "FORBIDDEN", - message: "Token verification failed", - }, - req, - res - ); + return errorHandler(401, "Token verification failed", req, res); } if (decoded && _.isObject(decoded)) { + if 
(!decoded?.id) { + return errorHandler(401, "User ID is missing from the decoded token.", req, res); + } + (req as any).userID = decoded?.id; const action = (req as any).id; - const hasAccess = decoded?.roles?.some( - (role: string) => - accessControl[role] && accessControl[role].includes(action) - ); - if (!hasAccess) { - return ResponseHandler.errorResponse( - { - statusCode: 401, - errCode: "Unauthorized access", - message: "Access denied for the user", - }, - req, - res + const hasAccess = decoded?.roles?.some((role: string) => { + const apiGroups = accessControl.roles[role]; + + if (!apiGroups) return false; + + return apiGroups.some((apiGroup: string) => + accessControl.apiGroups[apiGroup]?.includes(action) ); + }); + if (!hasAccess) { + const rolesWithAccess = Object.keys(accessControl.roles).filter(role => { + const apiGroups = accessControl.roles[role]; + if (!apiGroups) return false; + return apiGroups.some(apiGroup => accessControl.apiGroups[apiGroup]?.includes(action)); + }); + const rolesMessage = rolesWithAccess.length > 0 + ? `The following roles have access to this action: ${rolesWithAccess.join(", ")}` + : "No roles have this action"; + + const errorMessage = `Access denied. User does not have permission to perform this action. ${rolesMessage}.`; + + return errorHandler(403, errorMessage, req, res); } next(); } diff --git a/api-service/src/middlewares/userPermissions.json b/api-service/src/middlewares/userPermissions.json new file mode 100644 index 00000000..400e218f --- /dev/null +++ b/api-service/src/middlewares/userPermissions.json @@ -0,0 +1,129 @@ +{ + "apiGroups": { + "general_access": [ + "api.datasets.list", + "api.datasets.read", + "api.datasets.export", + "api.data.out", + "api.data.exhaust", + "api.alert.list", + "api.alert.getAlertDetails", + "api.metric.list", + "api.alert.silence.list", + "api.alert.silence.get", + "api.alert.notification.list", + "api.alert.notification.get" + ], + "restricted_dataset_api": [ + "api.datasets.reset", + "api.datasets.status-transition" + ], + "alert": [ + "api.alert.create", + "api.alert.publish", + "api.alert.update", + "api.alert.delete" + ], + "metric": [ + "api.metric.add", + "api.metric.update", + "api.metric.remove" + ], + "silence": [ + "api.alert.silence.create", + "api.alert.silence.edit", + "api.alert.silence.delete" + ], + "notificationChannel": [ + "api.alert.notification.create", + "api.alert.notification.publish", + "api.alert.notification.test", + "api.alert.notification.update", + "api.alert.notification.retire" + ], + "dataset": [ + "api.datasets.create", + "api.datasets.update", + "api.datasets.import", + "api.datasets.copy", + "api.dataset.health", + "api.datasets.dataschema" + ], + "data": [ + "api.data.in" + ], + "queryTemplate": [ + "api.query.template.create", + "api.query.template.read", + "api.query.template.delete", + "api.query.template.update", + "api.query.template.query", + "api.query.template.list" + ], + "schema": [ + "api.schema.validator" + ], + "file": [ + "api.files.generate-url" + ], + "connector": [ + "api.connectors.list", + "api.connectors.read" + ], + "sqlQuery": [ + "api.obsrv.data.sql-query" + ] + }, + "roles": { + "ingestor": [ + "data" + ], + "viewer": [ + "general_access", + "connector", + "sqlQuery" + ], + "dataset_creator": [ + "general_access", + "connector", + "sqlQuery", + "dataset", + "queryTemplate", + "schema", + "file", + "connector", + "sqlQuery" + ], + "dataset_manager": [ + "general_access", + "connector", + "sqlQuery", + "dataset", + "queryTemplate", + "schema", + 
"file", + "connector", + "sqlQuery", + "restricted_dataset_api" + ], + "admin": [ + "general_access", + "connector", + "sqlQuery", + "dataset", + "queryTemplate", + "schema", + "file", + "connector", + "sqlQuery", + "restricted_dataset_api" + ], + "operations_admin": [ + "alert", + "metric", + "silence", + "notificationChannel", + "general_access" + ] + } +} \ No newline at end of file diff --git a/api-service/src/routes/AlertsRouter.ts b/api-service/src/routes/AlertsRouter.ts index 01c843dd..6044e9fe 100644 --- a/api-service/src/routes/AlertsRouter.ts +++ b/api-service/src/routes/AlertsRouter.ts @@ -4,38 +4,39 @@ import { setDataToRequestObject } from "../middlewares/setDataToRequestObject"; import customAlertHandler from "../controllers/Alerts/Alerts"; import metricAliasHandler from "../controllers/Alerts/Metric"; import silenceHandler from "../controllers/Alerts/Silence"; +import checkRBAC from "../middlewares/RBAC_middleware"; export const alertsRouter = express.Router(); // Notifications -alertsRouter.post("/notifications/search", setDataToRequestObject("api.alert.notification.list"), notificationHandler.listHandler); -alertsRouter.post("/notifications/create", setDataToRequestObject("api.alert.notification.create"), notificationHandler.createHandler); -alertsRouter.get("/notifications/publish/:id", setDataToRequestObject("api.alert.notification.publish"), notificationHandler.publishHandler); -alertsRouter.post("/notifications/test", setDataToRequestObject("api.alert.notification.test"), notificationHandler.testNotifationChannelHandler); -alertsRouter.patch("/notifications/update/:id", setDataToRequestObject("api.alert.notification.update"), notificationHandler.updateHandler); -alertsRouter.delete("/notifications/delete/:id", setDataToRequestObject("api.alert.notification.retire"), notificationHandler.retireHandler); -alertsRouter.get("/notifications/get/:id", setDataToRequestObject("api.alert.notification.get"), notificationHandler.fetchHandler); +alertsRouter.post("/notifications/search", setDataToRequestObject("api.alert.notification.list"), checkRBAC.handler(), notificationHandler.listHandler); +alertsRouter.post("/notifications/create", setDataToRequestObject("api.alert.notification.create"), checkRBAC.handler(), notificationHandler.createHandler); +alertsRouter.get("/notifications/publish/:id", setDataToRequestObject("api.alert.notification.publish"), checkRBAC.handler(), notificationHandler.publishHandler); +alertsRouter.post("/notifications/test", setDataToRequestObject("api.alert.notification.test"), checkRBAC.handler(), notificationHandler.testNotifationChannelHandler); +alertsRouter.patch("/notifications/update/:id", setDataToRequestObject("api.alert.notification.update"), checkRBAC.handler(), notificationHandler.updateHandler); +alertsRouter.delete("/notifications/delete/:id", setDataToRequestObject("api.alert.notification.retire"), checkRBAC.handler(), notificationHandler.retireHandler); +alertsRouter.get("/notifications/get/:id", setDataToRequestObject("api.alert.notification.get"), checkRBAC.handler(), notificationHandler.fetchHandler); // alerts -alertsRouter.post("/create", setDataToRequestObject("api.alert.create"), customAlertHandler.createAlertHandler); -alertsRouter.get("/publish/:alertId", setDataToRequestObject("api.alert.publish"), customAlertHandler.publishAlertHandler); -alertsRouter.post(`/search`, setDataToRequestObject("api.alert.list"), customAlertHandler.searchAlertHandler); -alertsRouter.get("/get/:alertId", 
setDataToRequestObject("api.alert.getAlertDetails"), customAlertHandler.alertDetailsHandler); -alertsRouter.delete("/delete/:alertId", setDataToRequestObject("api.alert.delete"), customAlertHandler.deleteAlertHandler); -alertsRouter.delete("/delete", setDataToRequestObject("api.alert.delete"), customAlertHandler.deleteSystemAlertsHandler); -alertsRouter.patch("/update/:alertId", setDataToRequestObject("api.alert.update"), customAlertHandler.updateAlertHandler); +alertsRouter.post("/create", setDataToRequestObject("api.alert.create"), checkRBAC.handler(), customAlertHandler.createAlertHandler); +alertsRouter.get("/publish/:alertId", setDataToRequestObject("api.alert.publish"), checkRBAC.handler(), customAlertHandler.publishAlertHandler); +alertsRouter.post(`/search`, setDataToRequestObject("api.alert.list"), checkRBAC.handler(), customAlertHandler.searchAlertHandler); +alertsRouter.get("/get/:alertId", setDataToRequestObject("api.alert.getAlertDetails"), checkRBAC.handler(), customAlertHandler.alertDetailsHandler); +alertsRouter.delete("/delete/:alertId", setDataToRequestObject("api.alert.delete"), checkRBAC.handler(), customAlertHandler.deleteAlertHandler); +alertsRouter.delete("/delete", setDataToRequestObject("api.alert.delete"), checkRBAC.handler(), customAlertHandler.deleteSystemAlertsHandler); +alertsRouter.patch("/update/:alertId", setDataToRequestObject("api.alert.update"), checkRBAC.handler(), customAlertHandler.updateAlertHandler); // metrics -alertsRouter.post("/metric/alias/create",setDataToRequestObject("api.metric.add"), metricAliasHandler.createMetricHandler); -alertsRouter.post("/metric/alias/search", setDataToRequestObject("api.metric.list"), metricAliasHandler.listMetricsHandler); -alertsRouter.patch("/metric/alias/update/:id", setDataToRequestObject("api.metric.update"),metricAliasHandler.updateMetricHandler); -alertsRouter.delete("/metric/alias/delete/:id", setDataToRequestObject("api.metric.remove"),metricAliasHandler.deleteMetricHandler); -alertsRouter.delete("/metric/alias/delete", setDataToRequestObject("api.metric.remove"), metricAliasHandler.deleteMultipleMetricHandler); +alertsRouter.post("/metric/alias/create",setDataToRequestObject("api.metric.add"), checkRBAC.handler(), metricAliasHandler.createMetricHandler); +alertsRouter.post("/metric/alias/search", setDataToRequestObject("api.metric.list"), checkRBAC.handler(), metricAliasHandler.listMetricsHandler); +alertsRouter.patch("/metric/alias/update/:id", setDataToRequestObject("api.metric.update"), checkRBAC.handler(), metricAliasHandler.updateMetricHandler); +alertsRouter.delete("/metric/alias/delete/:id", setDataToRequestObject("api.metric.remove"), checkRBAC.handler(), metricAliasHandler.deleteMetricHandler); +alertsRouter.delete("/metric/alias/delete", setDataToRequestObject("api.metric.remove"), checkRBAC.handler(), metricAliasHandler.deleteMultipleMetricHandler); // silence -alertsRouter.post("/silence/create",setDataToRequestObject("api.alert.silence.create"),silenceHandler.createHandler); -alertsRouter.get("/silence/search",setDataToRequestObject("api.alert.silence.list"),silenceHandler.listHandler); -alertsRouter.get("/silence/get/:id",setDataToRequestObject("api.alert.silence.get"),silenceHandler.fetchHandler); -alertsRouter.patch("/silence/update/:id",setDataToRequestObject("api.alert.silence.edit"),silenceHandler.updateHandler); -alertsRouter.delete("/silence/delete/:id",setDataToRequestObject("api.alert.silence.delete"),silenceHandler.deleteHandler); \ No newline at end of file 
+alertsRouter.post("/silence/create",setDataToRequestObject("api.alert.silence.create"), checkRBAC.handler(), silenceHandler.createHandler); +alertsRouter.get("/silence/search",setDataToRequestObject("api.alert.silence.list"), checkRBAC.handler(), silenceHandler.listHandler); +alertsRouter.get("/silence/get/:id",setDataToRequestObject("api.alert.silence.get"), checkRBAC.handler(), silenceHandler.fetchHandler); +alertsRouter.patch("/silence/update/:id",setDataToRequestObject("api.alert.silence.edit"), checkRBAC.handler(), silenceHandler.updateHandler); +alertsRouter.delete("/silence/delete/:id",setDataToRequestObject("api.alert.silence.delete"), checkRBAC.handler(), silenceHandler.deleteHandler); \ No newline at end of file diff --git a/api-service/src/routes/DruidProxyRouter.ts b/api-service/src/routes/DruidProxyRouter.ts index ca1aec91..fdf73315 100644 --- a/api-service/src/routes/DruidProxyRouter.ts +++ b/api-service/src/routes/DruidProxyRouter.ts @@ -9,15 +9,15 @@ import { ResponseHandler } from "../helpers/ResponseHandler"; export const druidProxyRouter = express.Router(); // Send a 410 Gone response to all V1 API calls -druidProxyRouter.all("/datasets/v1/*", ResponseHandler.goneResponse) -druidProxyRouter.all("/dataset/v1/*", ResponseHandler.goneResponse) -druidProxyRouter.all("/datasources/v1/*", ResponseHandler.goneResponse) -druidProxyRouter.all("/data/v1/*", ResponseHandler.goneResponse) -druidProxyRouter.all("/template/v1/*", ResponseHandler.goneResponse) +druidProxyRouter.all(/\/datasets\/v1(.*)/, ResponseHandler.goneResponse); +druidProxyRouter.all(/\/dataset\/v1(.*)/, ResponseHandler.goneResponse); +druidProxyRouter.all(/\/datasources\/v1(.*)/, ResponseHandler.goneResponse); +druidProxyRouter.all(/\/data\/v1(.*)/, ResponseHandler.goneResponse); +druidProxyRouter.all(/\/template\/v1(.*)/, ResponseHandler.goneResponse); // Druid Proxy APIs for Metabase integration -druidProxyRouter.post(/\/druid\/v2.*/, setDataToRequestObject("query.wrapper.native.post"), onRequest({ entity: Entity.DruidProxy }), wrapperService.forwardNative) -druidProxyRouter.get(/\/druid\/v2.*/, setDataToRequestObject("query.wrapper.native.get"), onRequest({ entity: Entity.DruidProxy }), wrapperService.forwardNativeGet) +druidProxyRouter.post(/\/druid\/v2(.*)/, setDataToRequestObject("query.wrapper.native.post"), onRequest({ entity: Entity.DruidProxy }), wrapperService.forwardNative); +druidProxyRouter.get(/\/druid\/v2(.*)/, setDataToRequestObject("query.wrapper.native.get"), onRequest({ entity: Entity.DruidProxy }), wrapperService.forwardNativeGet); druidProxyRouter.delete("/druid/v2/:queryId", setDataToRequestObject("query.wrapper.native.delete"), onRequest({ entity: Entity.DruidProxy }), wrapperService.forwardNativeDel) druidProxyRouter.get("/status", setDataToRequestObject("query.wrapper.status"), onRequest({ entity: Entity.DruidProxy }), wrapperService.nativeStatus) druidProxyRouter.get("/health", setDataToRequestObject("api.health"), onRequest({ entity: Entity.DruidProxy }), healthService.checkDruidHealth) \ No newline at end of file diff --git a/api-service/src/services/DatasetService.ts b/api-service/src/services/DatasetService.ts index 2e800212..1c864404 100644 --- a/api-service/src/services/DatasetService.ts +++ b/api-service/src/services/DatasetService.ts @@ -17,6 +17,7 @@ import { Datasource } from "../models/Datasource"; import { obsrvError } from "../types/ObsrvError"; import { druidHttpService } from "../connections/druidConnection"; import { tableGenerator } from "./TableGenerator"; +import { 
diff --git a/api-service/src/services/DatasetService.ts b/api-service/src/services/DatasetService.ts index 2e800212..1c864404 100644 --- a/api-service/src/services/DatasetService.ts +++ b/api-service/src/services/DatasetService.ts @@ -17,6 +17,7 @@ import { Datasource } from "../models/Datasource"; import { obsrvError } from "../types/ObsrvError"; import { druidHttpService } from "../connections/druidConnection"; import { tableGenerator } from "./TableGenerator"; +import { deleteAlertByDataset, deleteMetricAliasByDataset } from "./managers"; class DatasetService { @@ -92,9 +93,10 @@ return responseData; } - migrateDraftDataset = async (datasetId: string, dataset: Record<string, any>): Promise<any> => { + migrateDraftDataset = async (datasetId: string, dataset: Record<string, any>, userID: string): Promise<any> => { const dataset_id = _.get(dataset, "id") const draftDataset = await this.migrateDatasetV1(dataset_id, dataset); + _.set(draftDataset, "updated_by", userID); const transaction = await sequelize.transaction(); try { await DatasetDraft.update(draftDataset, { where: { id: dataset_id }, transaction }); @@ -166,7 +168,7 @@ } } - createDraftDatasetFromLive = async (dataset: Model) => { + createDraftDatasetFromLive = async (dataset: Model, userID: string) => { const draftDataset: any = _.omit(dataset, ["created_date", "updated_date", "published_date"]); const dataset_config: any = _.get(dataset, "dataset_config"); @@ -232,6 +234,7 @@ draftDataset["version_key"] = Date.now().toString() draftDataset["version"] = _.add(_.get(dataset, ["version"]), 1); // increment the dataset version draftDataset["status"] = DatasetStatus.Draft + draftDataset["created_by"] = userID; const result = await DatasetDraft.create(draftDataset); return _.get(result, "dataValues") } @@ -256,20 +259,27 @@ } } - retireDataset = async (dataset: Record<string, any>) => { + private deleteAlerts = async (dataset: any) => { + await deleteAlertByDataset(dataset); + await deleteMetricAliasByDataset(dataset); + } + + retireDataset = async (dataset: Record<string, any>, updatedBy: any) => { const transaction = await sequelize.transaction(); try { - await Dataset.update({ status: DatasetStatus.Retired }, { where: { id: dataset.id }, transaction }); - await DatasetSourceConfig.update({ status: DatasetStatus.Retired }, { where: { dataset_id: dataset.id }, transaction }); - await Datasource.update({ status: DatasetStatus.Retired }, { where: { dataset_id: dataset.id }, transaction }); - await DatasetTransformations.update({ status: DatasetStatus.Retired }, { where: { dataset_id: dataset.id }, transaction }); + await Dataset.update({ status: DatasetStatus.Retired, updated_by: updatedBy }, { where: { id: dataset.id }, transaction }); + await DatasetSourceConfig.update({ status: DatasetStatus.Retired, updated_by: updatedBy }, { where: { dataset_id: dataset.id }, transaction }); + await Datasource.update({ status: DatasetStatus.Retired, updated_by: updatedBy }, { where: { dataset_id: dataset.id }, transaction }); + await DatasetTransformations.update({ status: DatasetStatus.Retired, updated_by: updatedBy }, { where: { dataset_id: dataset.id }, transaction }); await transaction.commit(); - await this.deleteDruidSupervisors(dataset); } catch (err: any) { await transaction.rollback(); throw obsrvError(dataset.id, "FAILED_TO_RETIRE_DATASET", err.message, "SERVER_ERROR", 500, err); } + // Deleting dataset alerts and druid supervisors + await this.deleteDruidSupervisors(dataset); + await this.deleteAlerts(dataset); } findDatasources = async (where?: Record<string, any>, attributes?: string[], order?: any): Promise<any> => { @@ -321,31 +331,40 @@ private createDruidDataSource = async (draftDataset: Record<string, any>, transaction: Transaction) => { + const {created_by, updated_by} = draftDataset; const allFields = await tableGenerator.getAllFields(draftDataset, "druid"); const draftDatasource = this.createDraftDatasource(draftDataset, "druid"); const ingestionSpec =
tableGenerator.getDruidIngestionSpec(draftDataset, allFields, draftDatasource.datasource_ref); _.set(draftDatasource, "ingestion_spec", ingestionSpec) - await DatasourceDraft.create(draftDatasource, { transaction }) + _.set(draftDatasource, "created_by", created_by); + _.set(draftDatasource, "updated_by", updated_by); + await DatasourceDraft.upsert(draftDatasource, { transaction }) } private createHudiDataSource = async (draftDataset: Record<string, any>, transaction: Transaction) => { + const {created_by, updated_by} = draftDataset; const allFields = await tableGenerator.getAllFields(draftDataset, "hudi"); const draftDatasource = this.createDraftDatasource(draftDataset, "hudi"); const ingestionSpec = tableGenerator.getHudiIngestionSpecForCreate(draftDataset, allFields, draftDatasource.datasource_ref); _.set(draftDatasource, "ingestion_spec", ingestionSpec) - await DatasourceDraft.create(draftDatasource, { transaction }) + _.set(draftDatasource, "created_by", created_by); + _.set(draftDatasource, "updated_by", updated_by); + await DatasourceDraft.upsert(draftDatasource, { transaction }) } private updateHudiDataSource = async (draftDataset: Record<string, any>, transaction: Transaction) => { + const {created_by, updated_by} = draftDataset; const allFields = await tableGenerator.getAllFields(draftDataset, "hudi"); const draftDatasource = this.createDraftDatasource(draftDataset, "hudi"); const dsId = _.join([draftDataset.dataset_id, "events", "hudi"], "_") const liveDatasource = await Datasource.findOne({ where: { id: dsId }, attributes: ["ingestion_spec"], raw: true }) as unknown as Record<string, any> const ingestionSpec = tableGenerator.getHudiIngestionSpecForUpdate(draftDataset, liveDatasource?.ingestion_spec, allFields, draftDatasource?.datasource_ref); _.set(draftDatasource, "ingestion_spec", ingestionSpec) - await DatasourceDraft.create(draftDatasource, { transaction }) + _.set(draftDatasource, "created_by", created_by); + _.set(draftDatasource, "updated_by", updated_by); + await DatasourceDraft.upsert(draftDatasource, { transaction }) } private createDraftDatasource = (draftDataset: Record<string, any>, type: string): Record<string, any> => { @@ -368,7 +387,7 @@ export const getLiveDatasetConfigs = async (dataset_id: string) => { const transformations = await datasetService.getTransformations(dataset_id, ["field_key", "transformation_function", "mode"]) const connectorsV2 = await datasetService.getConnectors(dataset_id, ["id", "connector_id", "connector_config", "operations_config"]) const connectorsV1 = await getV1Connectors(dataset_id) - const connectors = _.concat(connectorsV1,connectorsV2) + const connectors = _.concat(connectorsV1, connectorsV2) if (!_.isEmpty(transformations)) { datasetRecord["transformations_config"] = transformations diff --git a/api-service/src/services/managers/index.ts b/api-service/src/services/managers/index.ts index 84b666d9..585b32b7 100644 --- a/api-service/src/services/managers/index.ts +++ b/api-service/src/services/managers/index.ts @@ -24,16 +24,16 @@ const getService = (manager: string) => { }; export const publishAlert = async (payload: Record<string, any>) => { - const { id, manager } = payload; + const { id, manager, updated_by } = payload; const service = getService(manager); const publishResponse = await service.publishAlert(payload) - await updateStatus(id, "live"); + await updateStatus(id, "live", updated_by); return publishResponse; }; -const updateStatus = (id: string, status: string) => { - return Alert.update({ status }, { where: { id } }); +const updateStatus = (id: string, status: string, updated_by: string) =>
{ + return Alert.update({ status, updated_by }, { where: { id } }); } const deleteRule = (id: string) => { @@ -41,7 +41,7 @@ } export const deleteAlertRule = async (payload: Record<string, any>, hardDelete: boolean) => { - const { id, manager, status } = payload; + const { id, manager, status, updated_by } = payload; if (status == "live") { try { @@ -56,7 +56,7 @@ export const deleteAlertRule = async (payload: Record<string, any>, hardDelete: return deleteRule(id); } - return updateStatus(id, "retired"); + return updateStatus(id, "retired", updated_by); } @@ -138,11 +138,7 @@ export const deleteAlertByDataset = async (payload: Record<string, any>) => { export const deleteMetricAliasByDataset = async (payload: Record<string, any>) => { try { const { name } = payload; - const metricAliasPayload = await Metrics.findAll({ where: { component: "datasets", subComponent: name } }) - if (!metricAliasPayload) throw new Error(constants.METRIC_ALIAS_NOT_FOUND) - for (const payload of metricAliasPayload) { - await payload.destroy() - } + await Metrics.destroy({ where: { component: "datasets", subComponent: name } }) return constants.METRIC_ALIAS_DELETED_SUCCESSFULLY; } catch (error: any) { throw new Error(constants.METRIC_ALIAS_NOT_DELETED); diff --git a/api-service/src/services/telemetry.ts b/api-service/src/services/telemetry.ts index d4eca61b..8408dfb3 100644 --- a/api-service/src/services/telemetry.ts +++ b/api-service/src/services/telemetry.ts @@ -10,15 +10,15 @@ const telemetryTopic = _.get(appConfig, "telemetry_dataset"); export enum OperationType { CREATE = 1, UPDATE, PUBLISH, RETIRE, LIST, GET } -const getDefaults = () => { +const getDefaults = (userID:any) => { return { eid: "AUDIT", ets: Date.now(), ver: "1.0.0", mid: v4(), actor: { - id: "SYSTEM", - type: "User" + id: userID || "SYSTEM", + type: "User", }, context: { env, @@ -128,7 +128,7 @@ export const processAuditEvents = (request: Request) => { _.set(auditEvent, "edata.transition.toState", toState); _.set(auditEvent, "edata.transition.fromState", fromState); } - const telemetryEvent = getDefaults(); + const telemetryEvent = getDefaults((request as any)?.userID); _.set(telemetryEvent, "edata", edata); _.set(telemetryEvent, "object", { ...(object.id && object.type && { ...object, ver: "1.0.0" }) }); sendTelemetryEvents(telemetryEvent); diff --git a/api-service/src/tests/DatasetManagement/DataOutTest/DataQueryTest.spec.ts b/api-service/src/tests/DatasetManagement/DataOutTest/DataQueryTest.spec.ts index 9f5cf5b3..ea905305 100644 --- a/api-service/src/tests/DatasetManagement/DataOutTest/DataQueryTest.spec.ts +++ b/api-service/src/tests/DatasetManagement/DataOutTest/DataQueryTest.spec.ts @@ -7,6 +7,7 @@ import { config } from "../../../configs/Config"; import chaiSpies from "chai-spies" import { describe, it } from "mocha"; import { Datasource } from "../../../models/Datasource"; +import { druidHttpService } from "../../../connections/druidConnection"; chai.use(chaiSpies) chai.should(); chai.use(chaiHttp); @@ -18,6 +19,7 @@ const nativeQueryEndpointDruid = config?.query_api?.druid?.native_query_path; const sqlQueryEndpoint = config?.query_api?.druid?.sql_query_path; const response = [{ dataValues: { datasource_ref: "test.1_rollup_week", metadata: { aggregated: true, granularity: "week" } } }] +const invalidResponse = [{ dataValues: { datasource_ref: "test.1_rollup_week", metadata: { aggregated: true, granularity: "n/a" } } }] const msgid = "e180ecac-8f41-4f21-9a21-0b3a1a368917"; describe("QUERY API TESTS", () => { @@ -33,6 +35,11 @@ describe("QUERY API
TESTS", () => { response ) }) + chai.spy.on(druidHttpService, "get", () => { + return Promise.resolve({ + data: { "test.1_rollup_week": 100 } + }) + }) nock(druidHost + ":" + druidPort) .get(listDruidDatasources) .reply(200, ["telemetry-events.1_rollup"]) @@ -46,7 +53,7 @@ describe("QUERY API TESTS", () => { res.body.params.msgid.should.be.eq(msgid); res.body.responseCode.should.be.eq("NOT_FOUND"); res.body.error.message.should.be.eq("Dataset telemetry-events with table week is not available for querying"); - res.body.error.code.should.be.eq("DATA_OUT_SOURCE_NOT_FOUND"); + res.body.error.code.should.be.eq("DATASOURCE_NOT_FOUND"); done(); }); }); @@ -65,7 +72,80 @@ describe("QUERY API TESTS", () => { res.body.responseCode.should.be.eq("NOT_FOUND"); res.body.params.msgid.should.be.eq(msgid); res.body.error.message.should.be.eq("Datasource telemetry-events not available for querying"); - res.body.error.code.should.be.eq("DATA_OUT_SOURCE_NOT_FOUND"); + res.body.error.code.should.be.eq("DATASOURCE_NOT_FOUND"); + done(); + }); + }); + + it("Query api failure: Datasource not available in druid", (done) => { + chai.spy.on(Datasource, "findAll", () => { + return Promise.resolve( + response + ) + }) + chai.spy.on(druidHttpService, "get", () => { + return Promise.resolve({ + data: { "telemetry_events": 100 } + }) + }) + chai + .request(app) + .post("/v2/data/query/telemetry-events") + .send(JSON.parse(TestQueries.VALID_QUERY)) + .end((err, res) => { + res.should.have.status(404); + res.body.params.status.should.be.eq("FAILED"); + res.body.responseCode.should.be.eq("NOT_FOUND"); + res.body.params.msgid.should.be.eq(msgid); + res.body.error.message.should.be.eq("Datasource not available for querying"); + res.body.error.code.should.be.eq("DATASOURCE_NOT_AVAILABLE"); + done(); + }); + }); + + it("Query api failure: Datasource not fully loaded in druid", (done) => { + chai.spy.on(Datasource, "findAll", () => { + return Promise.resolve( + response + ) + }) + chai.spy.on(druidHttpService, "get", () => { + return Promise.resolve({ + data: { "test.1_rollup_week": 20 } + }) + }) + chai + .request(app) + .post("/v2/data/query/telemetry-events") + .send(JSON.parse(TestQueries.VALID_QUERY)) + .end((err, res) => { + res.should.have.status(416); + res.body.params.status.should.be.eq("FAILED"); + res.body.responseCode.should.be.eq("RANGE_NOT_SATISFIABLE"); + res.body.params.msgid.should.be.eq(msgid); + res.body.error.message.should.be.eq("Datasource not fully available for querying"); + res.body.error.code.should.be.eq("DATASOURCE_NOT_FULLY_AVAILABLE"); + done(); + }); + }); + + it("Query api failure: Datasource not found", (done) => { + chai.spy.on(Datasource, "findAll", () => { + return Promise.resolve( + invalidResponse + ) + }) + chai + .request(app) + .post("/v2/data/query/telemetry-events") + .send(JSON.parse(TestQueries.VALID_QUERY)) + .end((err, res) => { + res.should.have.status(404); + res.body.params.status.should.be.eq("FAILED"); + res.body.responseCode.should.be.eq("NOT_FOUND"); + res.body.params.msgid.should.be.eq(msgid); + res.body.error.message.should.be.eq("Datasource not found to query"); + res.body.error.code.should.be.eq("DATASOURCE_NOT_FOUND"); done(); }); }); @@ -74,6 +154,11 @@ describe("QUERY API TESTS", () => { chai.spy.on(Datasource, "findAll", () => { return Promise.resolve(response) }) + chai.spy.on(druidHttpService, "get", () => { + return Promise.resolve({ + data: { "test.1_rollup_week": 100 } + }) + }) nock(druidHost + ":" + druidPort) .get(listDruidDatasources) .reply(200, 
["test.1_rollup_week"]) @@ -100,6 +185,11 @@ describe("QUERY API TESTS", () => { chai.spy.on(Datasource, "findAll", () => { return Promise.resolve(response) }) + chai.spy.on(druidHttpService, "get", () => { + return Promise.resolve({ + data: { "test.1_rollup_week": 100 } + }) + }) nock(druidHost + ":" + druidPort) .get(listDruidDatasources) .reply(200, ["test.1_rollup_week"]) @@ -126,6 +216,11 @@ describe("QUERY API TESTS", () => { chai.spy.on(Datasource, "findAll", () => { return Promise.resolve(response) }) + chai.spy.on(druidHttpService, "get", () => { + return Promise.resolve({ + data: { "test.1_rollup_week": 100 } + }) + }) nock(druidHost + ":" + druidPort) .get(listDruidDatasources) .reply(200, ["test.1_rollup_week"]) @@ -153,6 +248,11 @@ describe("QUERY API TESTS", () => { chai.spy.on(Datasource, "findAll", () => { return Promise.resolve(response) }) + chai.spy.on(druidHttpService, "get", () => { + return Promise.resolve({ + data: { "test.1_rollup_week": 100 } + }) + }) nock(druidHost + ":" + druidPort) .get(listDruidDatasources) .reply(200, ["test.1_rollup_week"]) @@ -180,6 +280,11 @@ describe("QUERY API TESTS", () => { chai.spy.on(Datasource, "findAll", () => { return Promise.resolve(response) }) + chai.spy.on(druidHttpService, "get", () => { + return Promise.resolve({ + data: { "test.1_rollup_week": 100 } + }) + }) nock(druidHost + ":" + druidPort) .get(listDruidDatasources) .reply(200, ["test.1_rollup_week"]) @@ -223,6 +328,11 @@ describe("QUERY API TESTS", () => { chai.spy.on(Datasource, "findAll", () => { return Promise.resolve(response) }) + chai.spy.on(druidHttpService, "get", () => { + return Promise.resolve({ + data: { "test.1_rollup_week": 100 } + }) + }) nock(druidHost + ":" + druidPort) .get(listDruidDatasources) .reply(200, ["test.1_rollup_week"]) diff --git a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetLive.spec.ts b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetLive.spec.ts index e26ddafe..38dced09 100644 --- a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetLive.spec.ts +++ b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetLive.spec.ts @@ -38,7 +38,7 @@ describe("DATASET STATUS TRANSITION LIVE", () => { chai.spy.on(Dataset, "findOne", () => { return Promise.resolve({ "data_schema": { "email": { "data_type": "string", "arrival_format": "string" } } }) }) - chai.spy.on(DatasourceDraft, "create", () => { + chai.spy.on(DatasourceDraft, "upsert", () => { return Promise.resolve({}) }) const t = chai.spy.on(sequelize, "transaction", () => { @@ -80,7 +80,7 @@ describe("DATASET STATUS TRANSITION LIVE", () => { chai.spy.on(Dataset, "findOne", () => { return Promise.resolve({ "data_schema": { "email": { "data_type": "string", "arrival_format": "string" } } }) }) - chai.spy.on(DatasourceDraft, "create", () => { + chai.spy.on(DatasourceDraft, "upsert", () => { return Promise.resolve({}) }) const t = chai.spy.on(sequelize, "transaction", () => { @@ -122,7 +122,7 @@ describe("DATASET STATUS TRANSITION LIVE", () => { chai.spy.on(Dataset, "findOne", () => { return Promise.resolve({ "api_version":"v2", "data_schema": { "email": { "data_type": "string", "arrival_format": "string" } } }) }) - chai.spy.on(DatasourceDraft, "create", () => { + chai.spy.on(DatasourceDraft, "upsert", () => { return Promise.resolve({}) }) chai.spy.on(Datasource, "findOne", () => { @@ -289,7 +289,7 @@ describe("DATASET STATUS TRANSITION LIVE", () => { chai.spy.on(Dataset, "findOne", () => { return 
Promise.resolve({ "data_schema": { "email": { "data_type": "string", "arrival_format": "string" } } }) }) - chai.spy.on(DatasourceDraft, "create", () => { + chai.spy.on(DatasourceDraft, "upsert", () => { return Promise.resolve({}) }) const t = chai.spy.on(sequelize, "transaction", () => { diff --git a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetRetire.spec.ts b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetRetire.spec.ts index 1a0428af..e09dcc41 100644 --- a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetRetire.spec.ts +++ b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetRetire.spec.ts @@ -14,6 +14,7 @@ import { Datasource } from "../../../models/Datasource"; import { commandHttpService } from "../../../connections/commandServiceConnection"; import { druidHttpService } from "../../../connections/druidConnection"; import { sequelize } from "../../../connections/databaseConnection"; +import { datasetService } from "../../../services/DatasetService"; chai.use(spies); chai.should(); @@ -46,6 +47,9 @@ describe("DATASET STATUS TRANSITION RETIRE", () => { chai.spy.on(Datasource, "findAll", () => { return Promise.resolve([{ datasource_ref: "telemetry" }]) }) + chai.spy.on(datasetService, "deleteAlerts", () => { + return Promise.resolve({}) + }) chai.spy.on(druidHttpService, "post", () => { return Promise.resolve({}) }) @@ -91,6 +95,9 @@ describe("DATASET STATUS TRANSITION RETIRE", () => { chai.spy.on(Dataset, "update", () => { return Promise.resolve({}) }) + chai.spy.on(datasetService, "deleteAlerts", () => { + return Promise.resolve({}) + }) chai.spy.on(commandHttpService, "post", () => { return Promise.resolve({}) }) @@ -130,6 +137,9 @@ describe("DATASET STATUS TRANSITION RETIRE", () => { chai.spy.on(Datasource, "update", () => { return Promise.resolve({}) }) + chai.spy.on(datasetService, "deleteAlerts", () => { + return Promise.resolve({}) + }) chai.spy.on(Dataset, "update", () => { return Promise.resolve({}) }) diff --git a/api-service/src/tests/QueryTemplates/TemplateQuerying/TemplateQuerying.spec.ts b/api-service/src/tests/QueryTemplates/TemplateQuerying/TemplateQuerying.spec.ts index 8da1818b..55368903 100644 --- a/api-service/src/tests/QueryTemplates/TemplateQuerying/TemplateQuerying.spec.ts +++ b/api-service/src/tests/QueryTemplates/TemplateQuerying/TemplateQuerying.spec.ts @@ -8,6 +8,7 @@ import { Datasource } from "../../../models/Datasource"; import nock from "nock"; import { config } from "../../../configs/Config"; import { templateQueryApiFixtures } from "./Fixtures"; +import { druidHttpService } from "../../../connections/druidConnection"; const apiId = "api.query.template.query"; const msgid = "4a7f14c3-d61e-4d4f-be78-181834eeff6d" @@ -50,6 +51,11 @@ describe("QUERY TEMPLATE API", () => { return Promise.resolve(response) }) + chai.spy.on(druidHttpService, "get", () => { + return Promise.resolve({ + data: { "test.1_rollup_month": 100 } + }) + }) nock(druidHost + ":" + druidPort) .get(listDruidDatasources) .reply(200, ["test.1_rollup_month"]) @@ -93,6 +99,11 @@ describe("QUERY TEMPLATE API", () => { return Promise.resolve(response) }) + chai.spy.on(druidHttpService, "get", () => { + return Promise.resolve({ + data: { "test.1_rollup_month": 100 } + }) + }) nock(druidHost + ":" + druidPort) .get(listDruidDatasources) .reply(200, ["test.1_rollup_month"]) diff --git a/command-service/src/command/alert_manager_command.py b/command-service/src/command/alert_manager_command.py index 
diff --git a/command-service/src/command/alert_manager_command.py b/command-service/src/command/alert_manager_command.py
index 1c989a5c..6d478c3e 100644
--- a/command-service/src/command/alert_manager_command.py
+++ b/command-service/src/command/alert_manager_command.py
@@ -176,7 +176,7 @@ def create_alert_rule(self, payload: dict) -> ActionResponse:
                 "subComponent": dataset_name,
                 "metric": prom_metric,
                 "operator": operator,
-                "threshold": threshold,
+                "threshold": [+threshold],
                 "metricAlias": metric_alias,
             }
         },
diff --git a/command-service/src/command/db_command.py b/command-service/src/command/db_command.py
index 89db3392..20e9c527 100644
--- a/command-service/src/command/db_command.py
+++ b/command-service/src/command/db_command.py
@@ -48,7 +48,6 @@ def _change_dataset_to_active(self, command_payload: CommandPayload):
             self._insert_datasource_record(dataset_id, draft_dataset_id)
             self._insert_connector_instances(dataset_id, draft_dataset_record)
             self._insert_dataset_transformations(dataset_id, draft_dataset_record)
-            self._delete_draft_dataset(dataset_id, draft_dataset_id)
             return ActionResponse(status="OK", status_code=200)
         else:
             return ActionResponse(
@@ -412,18 +411,4 @@ def _insert_dataset_transformations(self, dataset_id, draft_dataset_record):
         """
             result = self.db_service.execute_upsert(sql=insert_query, params=params)
             print(f"Dataset Transformation {dataset_id + '_' + transformation.field_key} record inserted successfully...")
-        return result
-
-    def _delete_draft_dataset(self, dataset_id, draft_dataset_id):
-
-        self.db_service.execute_delete(sql=f"""DELETE from datasources_draft where dataset_id = %s""", params=(draft_dataset_id,))
-        print(f"Draft datasources/tables for {dataset_id} are deleted successfully...")
-
-        self.db_service.execute_delete(sql=f"""DELETE from dataset_transformations_draft where dataset_id = %s""", params=(draft_dataset_id,))
-        print(f"Draft transformations/tables for {dataset_id} are deleted successfully...")
-
-        self.db_service.execute_delete(sql=f"""DELETE from dataset_source_config_draft where dataset_id = %s""", params=(draft_dataset_id,))
-        print(f"Draft source config/tables for {dataset_id} are deleted successfully...")
-
-        self.db_service.execute_delete(sql=f"""DELETE from datasets_draft where id = %s""", params=(draft_dataset_id,))
-        print(f"Draft Dataset for {dataset_id} is deleted successfully...")
\ No newline at end of file
+        return result
\ No newline at end of file
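Removing `_delete_draft_dataset` from `_change_dataset_to_active` defers draft cleanup until Druid ingestion is confirmed (see druid_command.py below), so a failed supervisor submission leaves the draft rows intact for a retry. A sketch of that ordering, written in TypeScript purely for illustration (the real implementation is the Python shown below; all names here are invented):

    // Delete drafts only after every ingestion task was accepted by Druid.
    const submitIngestionTasks = async (
        specs: object[],
        post: (spec: object) => Promise<{ status: number }>,
        deleteDrafts: () => Promise<void>
    ): Promise<boolean> => {
        for (const spec of specs) {
            const response = await post(spec);
            if (response.status !== 200) return false; // keep drafts for a retry
        }
        await deleteDrafts();
        return true;
    };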
diff --git a/command-service/src/command/druid_command.py b/command-service/src/command/druid_command.py
index f43222b4..73d5bf86 100644
--- a/command-service/src/command/druid_command.py
+++ b/command-service/src/command/druid_command.py
@@ -34,6 +34,7 @@ def _submit_ingestion_task(self, dataset_id):
             print(
                 f"Invoking SUBMIT_INGESTION_TASKS command for dataset_id {dataset_id}..."
             )
+            task_submitted = 1
             for record in datasources_records:
                 if record["dataset_type"] == "event" and record["type"] == "druid":
                     print(f"Submitting ingestion task for datasource ...")
@@ -41,8 +42,15 @@ def _submit_ingestion_task(self, dataset_id):
                     response = self.http_service.post(
                         url=f"{self.router_url}/{self.supervisor_endpoint}",
                         body=ingestion_spec,
-                        headers={"Content-Type": "application/json"},
+                        headers={"Content-Type": "application/json"}
                     )
+                    if response.status != 200:
+                        task_submitted = 0
+                        break
+            if task_submitted:
+                query = f"SELECT id FROM datasets_draft WHERE dataset_id= %s"
+                response = self.db_service.execute_select_one(sql=query, params=(dataset_id,))
+                self._delete_draft_dataset(dataset_id, response[0])
             return ActionResponse(status="OK", status_code=200)
         else:
             print(
@@ -51,3 +59,17 @@ def _submit_ingestion_task(self, dataset_id):
             return ActionResponse(
                 status="ERROR", status_code=404, error_message="DATASET_ID_NOT_FOUND"
             )
+
+    def _delete_draft_dataset(self, dataset_id, draft_dataset_id):
+
+        self.db_service.execute_delete(sql=f"""DELETE from datasources_draft where dataset_id = %s""", params=(draft_dataset_id,))
+        print(f"Draft datasources/tables for {dataset_id} are deleted successfully...")
+
+        self.db_service.execute_delete(sql=f"""DELETE from dataset_transformations_draft where dataset_id = %s""", params=(draft_dataset_id,))
+        print(f"Draft transformations/tables for {dataset_id} are deleted successfully...")
+
+        self.db_service.execute_delete(sql=f"""DELETE from dataset_source_config_draft where dataset_id = %s""", params=(draft_dataset_id,))
+        print(f"Draft source config/tables for {dataset_id} are deleted successfully...")
+
+        self.db_service.execute_delete(sql=f"""DELETE from datasets_draft where id = %s""", params=(draft_dataset_id,))
+        print(f"Draft Dataset for {dataset_id} is deleted successfully...")
diff --git a/command-service/src/config/service_config.yml b/command-service/src/config/service_config.yml
index 2c432ba0..8b9f2bdc 100644
--- a/command-service/src/config/service_config.yml
+++ b/command-service/src/config/service_config.yml
@@ -117,7 +117,7 @@ config_service:
   port: 4000
 
 druid:
-  router_host: localhost
+  router_host: http://localhost
   router_port: 8888
   supervisor_endpoint: indexer/v1/supervisor
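Finally, `router_host` now carries its scheme, so a composed supervisor URL such as the one posted in druid_command.py above resolves to a valid absolute address. A quick illustration (values from the config above; the exact composition inside command-service is assumed):

    const routerHost = "http://localhost";
    const routerPort = 8888;
    const supervisorEndpoint = "indexer/v1/supervisor";
    const supervisorUrl = `${routerHost}:${routerPort}/${supervisorEndpoint}`;
    // => "http://localhost:8888/indexer/v1/supervisor"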