Commit
Merge remote-tracking branch 'origin/release-1.5.0' into dataset-aliasing
Showing 23 changed files with 936 additions and 92 deletions.
26 changes: 26 additions & 0 deletions
api-service/src/controllers/DataAnalyzePII/DataAnalyzePIIController.ts
@@ -0,0 +1,26 @@
import { Request, Response } from "express";
import { ResponseHandler } from "../../helpers/ResponseHandler";
import httpStatus from "http-status";
import * as _ from "lodash";
import logger from "../../logger";
import { detectPII } from "../../connections/commandServiceConnection";

const code = "FAILED_TO_DETECT_PII";
export const dataAnalyzePII = async (req: Request, res: Response) => {
    const apiId = _.get(req, 'id')
    try {
        const userToken = req.get('authorization') as string;
        const piiSuggestionsResponse = await detectPII(_.get(req, ['body', 'data']), userToken);
        logger.info({ apiId, message: `Detected PII successfully` })
        ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: piiSuggestionsResponse?.data })
    } catch (error: any) {
        const errMessage = _.get(error, "response.data.detail")
        logger.error(error, apiId, code);
        let errorMessage = error;
        const statusCode = _.get(error, "status")
        if (!statusCode || statusCode == 500) {
            errorMessage = { code, message: errMessage || "Failed to detect pii" }
        }
        ResponseHandler.errorResponse(errorMessage, req, res);
    }
}
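For context, a minimal sketch of how this new controller might be mounted; the router file and route path are assumptions for illustration and are not part of this commit.

// Hypothetical wiring; the actual route path is not shown in this commit.
import express from "express";
import { dataAnalyzePII } from "./controllers/DataAnalyzePII/DataAnalyzePIIController";

const router = express.Router();

// dataAnalyzePII reads the sample payload from req.body.data and forwards the
// caller's authorization header to the command service's detectPII call.
router.post("/data/v1/analyze/pii", dataAnalyzePII);

export default router;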
38 changes: 38 additions & 0 deletions
api-service/src/controllers/DataMetrics/DataMetricsController.ts
@@ -0,0 +1,38 @@
import { Request, Response } from "express";
import _ from "lodash";
import { executeNativeQuery } from "../../connections/druidConnection";
import { ResponseHandler } from "../../helpers/ResponseHandler";
import vaidationSchema from "./DataMetricsValidationSchema.json"
import { schemaValidation } from "../../services/ValidationService";
import logger from "../../logger";
import { obsrvError } from "../../types/ObsrvError";
import axios from "axios";
import { config } from "../../configs/Config";

const getBaseUrl = (url: string) => {
    if (_.startsWith(url, "/prom")) return config.query_api.prometheus.url + _.replace(url, "/prom", "")
}

const dataMetrics = async (req: Request, res: Response) => {
    const isValidSchema = schemaValidation(req.body, vaidationSchema);
    if (!isValidSchema?.isValid) {
        logger.error({ message: isValidSchema?.message, code: "INVALID_QUERY" })
        throw obsrvError("", "INVALID_QUERY", isValidSchema.message, "BAD_REQUEST", 400)
    }
    const { query } = req.body || {};
    const endpoint = query.url;
    if (_.startsWith(endpoint, "/prom")) {
        query.url = getBaseUrl(endpoint)
        const { url, method, headers = {}, body = {}, params = {}, ...rest } = query;
        const apiResponse = await axios.request({ url, method, headers, params, data: body, ...rest })
        const data = _.get(apiResponse, "data");
        return res.json(data);
    }
    else {
        const query = _.get(req, ["body", "query", "body", "query"]);
        const response = await executeNativeQuery(query);
        ResponseHandler.successResponse(req, res, { status: 200, data: _.get(response, "data") });
    }
}

export default dataMetrics;
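The handler above accepts two request shapes, both read directly from the code: a Prometheus proxy query (url starting with "/prom") and a Druid native query nested under query.body.query. A sketch of each, with illustrative values only:

// Prometheus branch: getBaseUrl rewrites the "/prom" prefix against
// config.query_api.prometheus.url and the request is forwarded via axios.
const promRequest = {
    query: {
        url: "/prom/api/v1/query",   // illustrative Prometheus endpoint
        method: "GET",
        params: { query: "up" }      // illustrative PromQL expression
    }
};

// Druid branch: anything else falls through to executeNativeQuery, which
// receives the native query found at body.query.body.query.
const druidRequest = {
    query: {
        body: {
            query: { queryType: "timeseries" } // illustrative native Druid query
        }
    }
};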
File renamed without changes.
61 changes: 61 additions & 0 deletions
api-service/src/controllers/DatasetMetrics/DatasetMetrics.json
@@ -0,0 +1,61 @@
{
    "type": "object",
    "properties": {
        "id": {
            "type": "string",
            "enum": [
                "api.dataset.metrics"
            ]
        },
        "ver": {
            "type": "string"
        },
        "ts": {
            "type": "string"
        },
        "params": {
            "type": "object",
            "properties": {
                "msgid": {
                    "type": "string"
                }
            },
            "required": [
                "msgid"
            ],
            "additionalProperties": false
        },
        "request": {
            "type": "object",
            "properties": {
                "dataset_id": {
                    "type": "string"
                },
                "category": {
                    "type": "array",
                    "items": {
                        "type": "string",
                        "enum": [
                            "data_freshness",
                            "data_observability",
                            "data_volume",
                            "data_lineage",
                            "connectors",
                            "data_quality"
                        ]
                    },
                    "minItems": 1
                },
                "query_time_period": {
                    "type": "integer",
                    "minimum": 1
                }
            },
            "required": [
                "category",
                "dataset_id"
            ]
        }
    },
    "required": ["id", "ver", "ts", "params", "request"]
}
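For reference, a request body that satisfies this schema; the identifier values are illustrative:

// Illustrative payload that validates against the schema above.
const datasetMetricsRequest = {
    id: "api.dataset.metrics",
    ver: "v1",
    ts: new Date().toISOString(),
    params: { msgid: "4a7f14c3-d61e-4d4f-be78-181834eeff6d" }, // any string msgid
    request: {
        dataset_id: "telemetry-events",              // illustrative dataset id
        category: ["data_freshness", "data_volume"], // at least one category
        query_time_period: 7                         // optional; defaults via config
    }
};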
93 changes: 68 additions & 25 deletions
api-service/src/controllers/DatasetMetrics/DatasetMetricsController.ts
@@ -1,38 +1,81 @@
 import { Request, Response } from "express";
-import _ from "lodash";
-import { executeNativeQuery } from "../../connections/druidConnection";
+import * as _ from "lodash"
 import { ResponseHandler } from "../../helpers/ResponseHandler";
-import vaidationSchema from "./DatasetMetricsValidationSchema.json"
-import { schemaValidation } from "../../services/ValidationService";
+import dayjs from 'dayjs';
 import logger from "../../logger";
-import { obsrvError } from "../../types/ObsrvError";
-import axios from "axios";
+import { schemaValidation } from "../../services/ValidationService";
+import validationSchema from "./DatasetMetrics.json";
 import { config } from "../../configs/Config";
+import { datasetService } from "../../services/DatasetService";
+import { getConnectors, getDataFreshness, getDataLineage, getDataObservability, getDataQuality, getDataVolume } from "../../services/DatasetMetricsService";
 
-const getBaseUrl = (url: string) => {
-    if (_.startsWith(url, "/prom")) return config.query_api.prometheus.url + _.replace(url, "/prom", "")
-}
-
+const apiId = "api.dataset.metrics";
 const datasetMetrics = async (req: Request, res: Response) => {
-    const isValidSchema = schemaValidation(req.body, vaidationSchema);
+    const msgid = _.get(req, "body.params.msgid");
+    const requestBody = req.body;
+    const dataset_id = _.get(req, "body.request.dataset_id");
+    const timePeriod = _.get(req, "body.request.query_time_period") || config?.data_observability?.default_query_time_period;
+
+    const { category }: any = req.body.request;
+    const defaultThreshold = (typeof config?.data_observability?.default_freshness_threshold === 'number' ? config?.data_observability?.default_freshness_threshold : 5) * 60 * 1000; // 5 minutes in milliseconds
+    const dateFormat = 'YYYY-MM-DDTHH:mm:ss';
+    const endDate = dayjs().add(1, 'day').format(dateFormat);
+    const startDate = dayjs(endDate).subtract(timePeriod, 'day').format(dateFormat);
+    const intervals = `${startDate}/${endDate}`;
+    const isValidSchema = schemaValidation(requestBody, validationSchema);
+    const results = [];
+
     if (!isValidSchema?.isValid) {
-        logger.error({ message: isValidSchema?.message, code: "INVALID_QUERY" })
-        throw obsrvError("", "INVALID_QUERY", isValidSchema.message, "BAD_REQUEST", 400)
+        logger.error({ apiId, datasetId: dataset_id, msgid, requestBody, message: isValidSchema?.message, code: "DATA_OUT_INVALID_INPUT" })
+        return ResponseHandler.errorResponse({ message: isValidSchema?.message, statusCode: 400, errCode: "BAD_REQUEST", code: "DATA_OUT_INVALID_INPUT" }, req, res);
     }
-    const { query } = req.body || {};
-    const endpoint = query.url;
-    if (_.startsWith(endpoint, "/prom")) {
-        query.url = getBaseUrl(endpoint)
-        const { url, method, headers = {}, body = {}, params = {}, ...rest } = query;
-        const apiResponse = await axios.request({ url, method, headers, params, data: body, ...rest })
-        const data = _.get(apiResponse, "data");
-        return res.json(data);
+
+    const dataset = await datasetService.getDataset(dataset_id, ["id"], true)
+    if (!dataset) {
+        logger.error({ apiId, message: `Dataset with id ${dataset_id} not found in live table`, code: "DATASET_NOT_FOUND" })
+        return ResponseHandler.errorResponse({ message: `Dataset with id ${dataset_id} not found in live table`, code: "DATASET_NOT_FOUND", statusCode: 404, errCode: "NOT_FOUND" }, req, res);
     }
-    else {
-        const query = _.get(req, ["body", "query", "body", "query"]);
-        const response = await executeNativeQuery(query);
-        ResponseHandler.successResponse(req, res, { status: 200, data: _.get(response, "data") });
+
+    try {
+        if (!category || category.includes("data_freshness")) {
+            const dataFreshnessResult = await getDataFreshness(dataset_id, intervals, defaultThreshold);
+            results.push(dataFreshnessResult);
+        }
+
+        if (!category || category.includes("data_observability")) {
+            const dataObservabilityResult = await getDataObservability(dataset_id, intervals);
+            results.push(dataObservabilityResult);
+        }
+
+        if (!category || category.includes("data_volume")) {
+            const dataVolumeResult = await getDataVolume(dataset_id, timePeriod, dateFormat);
+            results.push(dataVolumeResult);
+        }
+
+        if (!category || category.includes("data_lineage")) {
+            const dataLineageResult = await getDataLineage(dataset_id, intervals);
+            results.push(dataLineageResult);
+        }
+
+        if (!category || category.includes("connectors")) {
+            const connectorsResult = await getConnectors(dataset_id, intervals);
+            results.push(connectorsResult);
+        }
+
+        if (!category || category.includes("data_quality")) {
+            const connectorsResult = await getDataQuality(dataset_id, intervals);
+            results.push(connectorsResult);
+        }
+
+        logger.info({ apiId, msgid, requestBody, datasetId: dataset_id, message: "Metrics fetched successfully" })
+        return ResponseHandler.successResponse(req, res, { status: 200, data: results });
+
     }
+    catch (error: any) {
+        logger.error({ apiId, msgid, requestBody: req?.body, datasetId: dataset_id, message: "Error while fetching metrics", code: "FAILED_TO_FETCH_METRICS", error });
+        return ResponseHandler.errorResponse({ message: "Error while fetching metrics", statusCode: 500, errCode: "FAILED", code: "FAILED_TO_FETCH_METRICS" }, req, res);
+    }
+
 }
 
 export default datasetMetrics;
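Putting the pieces together, a client call might look like the sketch below; the endpoint path is an assumption (the router change is not in this excerpt), and the per-category result shapes are defined in DatasetMetricsService, which is also outside this diff.

// Hypothetical client call; the endpoint path is assumed, not shown here.
import axios from "axios";

async function fetchDatasetMetrics() {
    const { data } = await axios.post("http://localhost:3000/v2/data/metrics", {
        id: "api.dataset.metrics",
        ver: "v1",
        ts: new Date().toISOString(),
        params: { msgid: "4a7f14c3-d61e-4d4f-be78-181834eeff6d" },
        request: { dataset_id: "telemetry-events", category: ["data_freshness"] }
    });
    // On success the handler returns one result entry per requested category.
    console.log(data);
}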