diff --git a/.gitignore b/.gitignore index 3e5ebb51..f94d1b12 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ datadog_sync/version.py **/*.pyc **/*.pyo +**/*~ .vscode/ .idea/ .coverage diff --git a/README.md b/README.md index 7ba73798..41b27ca1 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,79 @@ +# Deepomatic fork +Dump of additional hackish import/sync/cleanup_org for extra resources not supported by upstream, used by Deepomatic when migrating region. Maybe it will help somebody. + +Inspiration: https://careers.wolt.com/en/blog/tech/datadog-migration-wolt + +For some resources: it uses non-official api (from web frontend), using `dogweb` cookie and `x-csrf-token` header +``` +source_cookie_dogweb="xxx" +destination_cookie_dogweb="xxx" +destination_csrf_token="xxx" +``` +Warning: it's a hack, with shortcuts: +- it is *not* endorsed by Datadog (or supported by Deepomatic) +- authentication is either/or: cookie_dogweb config are required for those resources, and datadog-cli switches to cookie dogweb mode if config set, it *will not* work for other resources +- web frontend api is not documented, it could break at any time + + +## extra resources +### logs_facets +how to use: +- edit hardcoded `sourceid` in `datadog_sync/model/logs_facets.py` for your organizations, by getting the values in URLs with manual update facet on the web ui. +- setup dogweb cookie mode, cf above + +### logs_views +how to use: +- setup dogweb cookie mode, cf above + +### metric_metadatas +create metric metadata is *not* supported by datadog api, we can just update it on already existing metric. +- first push data-points on metric, then rerun the script when new metrics are populated + +### incidents +The supported scenario is importing all incidents (in order) so `public_id` (1, 2, etc.) are identical in source & destination organizations: never create new incidents in the destination organization before finishing the migration with datadog-sync-cli. 
+ +Only the base incident data is supported, related resources (integrations(slack), todos(remediations), attachments) may be done later with dedicated resources. + +The import is lossy: for example the creation date is reset on sync, timeline is lost, etc. + +'notifications' explicitly not-sync'ed to avoid spamming people during import (although later tests seem to conclude 'inactive' users (invitation pending: sync'ed users, but they never connected to the destination region) are *not* notified) + +### incidents_integrations +- api bug: it url-escapes slack `redirect_url` `&` query-string separator character before saving: this leads to a forever diff: datadog-sync-cli tries to PATCH the correct value on each sync, the server saves a wrong value. + +### incidents_todos +- creation date & author is lost, as usual + +### incident_org_settings +- undocumented api, but standard v2 api used by web frontend, works with API/APP key +- just one resource per org, forcing update, ignoring ids, etc. + +### incidents_config_fields +- perpetual diff: on 'metadata' for ootb service & team: + - PATCH ok (maybe ignores metadata?) + - but PATCH response contains `metadata: null` + => `diffs` always shows it; it's ok, we can ignore those + +### incidents_config_notifications_templates + +### incidents_config_integrations_workflows +Covers General>Integrations & Notifications>Rules +- (api inconsistency: `attributes.triggers.variables.severity_values` and `attributes.triggers.variables.status_values` are `null` in read calls, and require an array in write calls) +- errors (probably because some workflows are hardcoded, not duplicable, but no obvious attribute to distinguish them) + - Error: 400 Bad Request - {"errors":["a workflow like that already exists"]} + - Error: 400 Bad Request - {"errors":["Invalid payload: 'name' is invalid"]} + => ignoring those errors for now, and manually fixed `Send all incident updates to a global channel` via web frontend. 
+ +### integrations_slack_channels +how to use: +- supports only *one* slack account +- api doesn't support `muting` option +- manually create the slack integration in destination organization, with *same name* as in source +- edit hardcoded `slack_account_name` in `datadog_sync/model/integrations_slack_channels.py` for your organizations +- run import & diffs & sync as usual + +--- + # datadog-sync-cli Datadog cli tool to sync resources across organizations. diff --git a/datadog_sync/commands/shared/options.py b/datadog_sync/commands/shared/options.py index 8a30ff95..7aa2a7a9 100644 --- a/datadog_sync/commands/shared/options.py +++ b/datadog_sync/commands/shared/options.py @@ -51,6 +51,13 @@ def handle_parse_result(self, ctx: Context, opts: Dict[Any, Any], args: List[Any help="Datadog source organization API url.", cls=CustomOptionClass, ), + option( + "--source-cookie-dogweb", + envvar=constants.DD_SOURCE_COOKIE_DOGWEB, + required=False, + help="Datadog source organization 'dogweb' cookie.", + cls=CustomOptionClass, + ), ] _destination_auth_options = [ @@ -77,6 +84,20 @@ def handle_parse_result(self, ctx: Context, opts: Dict[Any, Any], args: List[Any help="Datadog destination organization API url.", cls=CustomOptionClass, ), + option( + "--destination-cookie-dogweb", + envvar=constants.DD_DESTINATION_COOKIE_DOGWEB, + required=False, + help="Datadog destination organization 'dogweb' cookie.", + cls=CustomOptionClass, + ), + option( + "--destination-csrf-token", + envvar=constants.DD_DESTINATION_CSRF_TOKEN, + required=False, + help="Datadog destination organization 'x-csrf-token' header.", + cls=CustomOptionClass, + ), ] diff --git a/datadog_sync/constants.py b/datadog_sync/constants.py index 17117f03..70b5a6b9 100644 --- a/datadog_sync/constants.py +++ b/datadog_sync/constants.py @@ -7,9 +7,12 @@ DD_SOURCE_API_URL = "DD_SOURCE_API_URL" DD_SOURCE_API_KEY = "DD_SOURCE_API_KEY" DD_SOURCE_APP_KEY = "DD_SOURCE_APP_KEY" +DD_SOURCE_COOKIE_DOGWEB = 
"DD_SOURCE_COOKIE_DOGWEB" DD_DESTINATION_API_URL = "DD_DESTINATION_API_URL" DD_DESTINATION_API_KEY = "DD_DESTINATION_API_KEY" DD_DESTINATION_APP_KEY = "DD_DESTINATION_APP_KEY" +DD_DESTINATION_COOKIE_DOGWEB = "DD_DESTINATION_COOKIE_DOGWEB" +DD_DESTINATION_CSRF_TOKEN = "DD_DESTINATION_CSRF_TOKEN" DD_HTTP_CLIENT_RETRY_TIMEOUT = "DD_HTTP_CLIENT_RETRY_TIMEOUT" DD_HTTP_CLIENT_TIMEOUT = "DD_HTTP_CLIENT_TIMEOUT" DD_RESOURCES = "DD_RESOURCES" @@ -30,6 +33,7 @@ SOURCE_ORIGIN = "source" DESTINATION_ORIGIN = "destination" VALIDATE_ENDPOINT = "/api/v1/validate" +VALIDATE_ENDPOINT_COOKIEAUTH = "/api/v1/settings/favorite/list" # Commands CMD_IMPORT = "import" diff --git a/datadog_sync/model/incident_org_settings.py b/datadog_sync/model/incident_org_settings.py new file mode 100644 index 00000000..be5f2b9c --- /dev/null +++ b/datadog_sync/model/incident_org_settings.py @@ -0,0 +1,63 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. 
+ +from __future__ import annotations +from typing import TYPE_CHECKING, Optional, List, Dict, cast + +from datadog_sync.utils.base_resource import BaseResource, ResourceConfig + +if TYPE_CHECKING: + from datadog_sync.utils.custom_client import CustomClient + + +class IncidentOrgSettings(BaseResource): + resource_type = "incident_org_settings" + resource_config = ResourceConfig( + base_path="/api/v2/incidents/config/org/settings", + excluded_attributes=[ + "id", + "attributes.modified", + ] + ) + # Additional Incidents specific attributes + + def get_resources(self, client: CustomClient) -> List[Dict]: + resp = client.get(self.resource_config.base_path).json()["data"] + return [ resp ] + + def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> None: + if _id: + # there is only one settings, ignoring id + source_client = self.config.source_client + resource = source_client.get(self.resource_config.base_path).json()["data"] + + resource = cast(dict, resource) + self.resource_config.source_resources[resource["id"]] = resource + + def pre_resource_action_hook(self, _id, resource: Dict) -> None: + pass + + def pre_apply_hook(self) -> None: + pass + + def create_resource(self, _id: str, resource: Dict) -> None: + # the settings is always there, just update + self.update_resource(_id, resource) + + def update_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + payload = {"data": resource} + resp = destination_client.patch( + self.resource_config.base_path, + payload, + ).json()["data"] + + self.resource_config.destination_resources[_id] = resp + + def delete_resource(self, _id: str) -> None: + raise Exception("deleting incident_org_settings is not supported") + + def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]: + pass diff --git a/datadog_sync/model/incidents.py b/datadog_sync/model/incidents.py new file mode 100644 index 00000000..19b04a76 
--- /dev/null +++ b/datadog_sync/model/incidents.py @@ -0,0 +1,130 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. + +from __future__ import annotations +from typing import TYPE_CHECKING, Optional, List, Dict, cast + +from datadog_sync.utils.base_resource import BaseResource, ResourceConfig +from datadog_sync.utils.custom_client import PaginationConfig + +if TYPE_CHECKING: + from datadog_sync.utils.custom_client import CustomClient + + +class Incidents(BaseResource): + resource_type = "incidents" + resource_config = ResourceConfig( + resource_connections={ + "users": [ + "relationships.commander_user.data.id", + ] + }, + base_path="/api/v2/incidents", + excluded_attributes=[ + "id", + "attributes.public_id", + "attributes.commander", # somehow returned by create or update, not by get + "attributes.last_modified_by", # somehow returned by create or update, not by get + "attributes.last_modified_by_uuid", + "attributes.created", + "attributes.modified", + "attributes.created_by", # somehow returned by create or update, not by get + "attributes.created_by_uuid", + "attributes.notification_handles", # too hard to support properly, also, it gives wrong dates, and possibly spams people, we don't want that; ok to loose that info + "attributes.time_to_resolve", + "attributes.customer_impact_duration", # computed field + "relationships.created_by_user", + "relationships.last_modified_by_user", + "relationships.user_defined_fields", + "relationships.integrations", + "relationships.attachments", + "relationships.responders", + "relationships.impacts", + ], + non_nullable_attr=[ + "attributes.creation_idempotency_key", + "attributes.customer_impact_scope", + ], + + ) + # Additional Incidents specific attributes + pagination_config = PaginationConfig( + page_size=100, + 
page_number_param="page[offset]", + page_size_param="page[size]", + # this endpoint uses offset (number of items) instead of page number, workaround the paginated client by reusing `page_number` to store offset instead (computed here because we don't have `resp`) + page_number_func=lambda idx, page_size, page_number: page_size * (idx + 1), + # just return 1, the pagination loop already handles breaking when a page is smaller than page size + remaining_func=lambda *args: 1, + ) + + def get_resources(self, client: CustomClient) -> List[Dict]: + # we return the incidents in public_id order, so creating them on a fresh organizations will gives us the same public_id in source & destination organizations + + resp = client.paginated_request(client.get)( + self.resource_config.base_path, pagination_config=self.pagination_config + ) + + return resp + + def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> None: + if _id: + source_client = self.config.source_client + resource = source_client.get(self.resource_config.base_path + f"/{_id}").json()["data"] + + resource = cast(dict, resource) + + # it's the new default imposed by the api; forcing it here so we don't have a forever-diff + if "visibility" in resource["attributes"] and resource["attributes"]["visibility"] is None: + resource["attributes"]["visibility"] = "organization" + + # let's do some deepomatic-specific incidents fields migrations: + if "Namespace" in resource["attributes"]["fields"] and resource["attributes"]["fields"]["Namespace"]["value"] is not None and "kube_namespace" in resource["attributes"]["fields"] and resource["attributes"]["fields"]["kube_namespace"]["value"] is None: + resource["attributes"]["fields"]["kube_namespace"]["value"] = resource["attributes"]["fields"]["Namespace"]["value"] + resource["attributes"]["fields"]["Namespace"]["value"] = None + + self.resource_config.source_resources[resource["id"]] = resource + + def pre_resource_action_hook(self, _id, 
resource: Dict) -> None: + pass + + def pre_apply_hook(self) -> None: + pass + + def create_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + payload = {"data": resource} + # the datadog api documentation says only a subset of accepted fields for creation; in practice it does handles only a subset, and ignores the others + resp = destination_client.post( + self.resource_config.base_path, + payload, + ).json() + + self.resource_config.destination_resources[_id] = resp["data"] + + # create doesn't accept everything right away, e.g. attributes.resolved; follow the create by an update to sync more data + self.update_resource(_id, resource) + + def update_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + payload = {"data": resource} + + resp = destination_client.patch( + self.resource_config.base_path + + f"/{self.resource_config.destination_resources[_id]['id']}", + payload, + ).json() + + self.resource_config.destination_resources[_id] = resp["data"] + + def delete_resource(self, _id: str) -> None: + destination_client = self.config.destination_client + destination_client.delete( + self.resource_config.base_path + + f"/{self.resource_config.destination_resources[_id]['id']}" + ) + + def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]: + return super(Incidents, self).connect_id(key, r_obj, resource_to_connect) diff --git a/datadog_sync/model/incidents_config_fields.py b/datadog_sync/model/incidents_config_fields.py new file mode 100644 index 00000000..c626c8b0 --- /dev/null +++ b/datadog_sync/model/incidents_config_fields.py @@ -0,0 +1,113 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. 
+ +from __future__ import annotations +from typing import TYPE_CHECKING, Optional, List, Dict, cast + +from datadog_sync.utils.base_resource import BaseResource, ResourceConfig +from datadog_sync.utils.custom_client import PaginationConfig + +if TYPE_CHECKING: + from datadog_sync.utils.custom_client import CustomClient + + +class IncidentsConfigFields(BaseResource): + resource_type = "incidents_config_fields" + resource_config = ResourceConfig( + base_path="/api/v2/incidents/config/fields", + excluded_attributes=[ + "attributes.created_by", + "attributes.created_by_uuid", + "attributes.last_modified_by", + "attributes.last_modified_by_uuid", + "attributes.created", + "attributes.modified", + "relationships.created_by_user", + "relationships.last_modified_by_user", + "id", + ] + ) + # Additional Incidents specific attributes + pagination_config = PaginationConfig( + page_size=1000, + page_number_param="page[offset]", + page_size_param="page[limit]", + # this endpoint uses offset (number of items) instead of page number, workaround the paginated client by reusing `page_number` to store offset instead (computed here because we don't have `resp`) + page_number_func=lambda idx, page_size, page_number: page_size * (idx + 1), + # just return 1, the pagination loop already handles breaking when a page is smaller than page size + remaining_func=lambda *args: 1, + ) + # key: (unique) attributes.name + destination_incidents_config_fields: Dict[str, Dict] = dict() + + def get_resources(self, client: CustomClient) -> List[Dict]: + resp = client.paginated_request(client.get)( + self.resource_config.base_path, + pagination_config=self.pagination_config + ) + + return resp + + def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> None: + if _id: + source_client = self.config.source_client + resource = source_client.get(self.resource_config.base_path + f"/{_id}").json()["data"] + + resource = cast(dict, resource) + 
self.resource_config.source_resources[resource["id"]] = resource + + def pre_resource_action_hook(self, _id, resource: Dict) -> None: + pass + + def pre_apply_hook(self) -> None: + self.destination_incidents_config_fields = self.get_destination_incidents_config_fields() + + def create_resource(self, _id: str, resource: Dict) -> None: + # names are unique: patching existing ones instead of create + name = resource["attributes"]["name"] + if name in self.destination_incidents_config_fields: + self.resource_config.destination_resources[_id] = self.destination_incidents_config_fields[name] + self.update_resource(_id, resource) + return + + destination_client = self.config.destination_client + payload = {"data": resource} + resp = destination_client.post( + self.resource_config.base_path, + payload, + ).json() + + self.resource_config.destination_resources[_id] = resp["data"] + + def update_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + payload = {"data": resource} + resp = destination_client.patch( + self.resource_config.base_path + + f"/{self.resource_config.destination_resources[_id]['id']}", + payload, + ).json() + + self.resource_config.destination_resources[_id] = resp["data"] + + def delete_resource(self, _id: str) -> None: + destination_client = self.config.destination_client + destination_client.delete( + self.resource_config.base_path + + f"/{self.resource_config.destination_resources[_id]['id']}" + ) + + def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]: + pass + + def get_destination_incidents_config_fields(self) -> Dict[str, Dict]: + destination_incidents_config_fields = {} + destination_client = self.config.destination_client + + resp = self.get_resources(destination_client) + for log_facet in resp: + destination_incidents_config_fields[log_facet["attributes"]["name"]] = log_facet + + return destination_incidents_config_fields diff --git 
a/datadog_sync/model/incidents_config_integrations_workflows.py b/datadog_sync/model/incidents_config_integrations_workflows.py new file mode 100644 index 00000000..5a26dfe2 --- /dev/null +++ b/datadog_sync/model/incidents_config_integrations_workflows.py @@ -0,0 +1,81 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. + +from __future__ import annotations +from typing import TYPE_CHECKING, Optional, List, Dict, cast + +from datadog_sync.utils.base_resource import BaseResource, ResourceConfig + +if TYPE_CHECKING: + from datadog_sync.utils.custom_client import CustomClient + + +class IncidentsConfigIntegrationsWorkflows(BaseResource): + resource_type = "incidents_config_integrations_workflows" + resource_config = ResourceConfig( + resource_connections={ + "incidents_config_notifications_templates": [ + "attributes.steps.variables.notification_template.id" + ] + }, + base_path="/api/v2/incidents/config/integrations/workflows", + excluded_attributes=[ + "id", + ], + non_nullable_attr=[ + "attributes.triggers.variables.severity_values", + "attributes.triggers.variables.status_values", + ], + ) + # Additional IncidentsConfigIntegrationsWorkflows specific attributes + + def get_resources(self, client: CustomClient) -> List[Dict]: + resp = client.get(self.resource_config.base_path).json() + return resp["data"] + + def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> None: + if _id: + source_client = self.config.source_client + resource = source_client.get(self.resource_config.base_path + f"/{_id}").json()["data"] + + resource = cast(dict, resource) + self.resource_config.source_resources[resource["id"]] = resource + + def pre_resource_action_hook(self, _id, resource: Dict) -> None: + pass + + def pre_apply_hook(self) -> None: + pass + + 
def create_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + payload = {"data": resource} + resp = destination_client.post( + self.resource_config.base_path, + payload, + ).json() + + self.resource_config.destination_resources[_id] = resp["data"] + + def update_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + payload = {"data": resource} + resp = destination_client.patch( + self.resource_config.base_path + + f"/{self.resource_config.destination_resources[_id]['id']}", + payload, + ).json() + + self.resource_config.destination_resources[_id] = resp["data"] + + def delete_resource(self, _id: str) -> None: + destination_client = self.config.destination_client + destination_client.delete( + self.resource_config.base_path + + f"/{self.resource_config.destination_resources[_id]['id']}" + ) + + def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]: + return super(IncidentsConfigIntegrationsWorkflows, self).connect_id(key, r_obj, resource_to_connect) diff --git a/datadog_sync/model/incidents_config_notifications_templates.py b/datadog_sync/model/incidents_config_notifications_templates.py new file mode 100644 index 00000000..642d6b2d --- /dev/null +++ b/datadog_sync/model/incidents_config_notifications_templates.py @@ -0,0 +1,76 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. 
+ +from __future__ import annotations +from typing import TYPE_CHECKING, Optional, List, Dict, cast + +from datadog_sync.utils.base_resource import BaseResource, ResourceConfig + +if TYPE_CHECKING: + from datadog_sync.utils.custom_client import CustomClient + + +class IncidentsConfigNotificationsTemplates(BaseResource): + resource_type = "incidents_config_notifications_templates" + resource_config = ResourceConfig( + base_path="/api/v2/incidents/config/notifications/templates", + excluded_attributes=[ + "attributes.created_by_uuid", + "attributes.last_modified_by_uuid", + "attributes.created", + "attributes.modified", + "id", + ] + ) + # Additional IncidentsConfigNotificationsTemplates specific attributes + + def get_resources(self, client: CustomClient) -> List[Dict]: + resp = client.get(self.resource_config.base_path).json() + return resp["data"] + + def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> None: + if _id: + source_client = self.config.source_client + resource = source_client.get(self.resource_config.base_path + f"/{_id}").json()["data"] + + resource = cast(dict, resource) + self.resource_config.source_resources[resource["id"]] = resource + + def pre_resource_action_hook(self, _id, resource: Dict) -> None: + pass + + def pre_apply_hook(self) -> None: + pass + + def create_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + payload = {"data": resource} + resp = destination_client.post( + self.resource_config.base_path, + payload, + ).json() + + self.resource_config.destination_resources[_id] = resp["data"] + + def update_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + payload = {"data": resource} + resp = destination_client.patch( + self.resource_config.base_path + + f"/{self.resource_config.destination_resources[_id]['id']}", + payload, + ).json() + + self.resource_config.destination_resources[_id] = 
resp["data"] + + def delete_resource(self, _id: str) -> None: + destination_client = self.config.destination_client + destination_client.delete( + self.resource_config.base_path + + f"/{self.resource_config.destination_resources[_id]['id']}" + ) + + def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]: + pass diff --git a/datadog_sync/model/incidents_integrations.py b/datadog_sync/model/incidents_integrations.py new file mode 100644 index 00000000..19aa1587 --- /dev/null +++ b/datadog_sync/model/incidents_integrations.py @@ -0,0 +1,117 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. + +from __future__ import annotations +from typing import TYPE_CHECKING, Optional, List, Dict, cast + +from datadog_sync.utils.base_resource import BaseResource, ResourceConfig +from datadog_sync.utils.custom_client import PaginationConfig + +if TYPE_CHECKING: + from datadog_sync.utils.custom_client import CustomClient + + +class IncidentsIntegrations(BaseResource): + resource_type = "incidents_integrations" + resource_config = ResourceConfig( + resource_connections={ + "incidents": [ + "attributes.incident_id", + ] + }, + base_path="/api/v2/incidents", + excluded_attributes=[ + "id", + "attributes.last_modified_by", # somehow returned by create or update, not by get + "attributes.last_modified_by_uuid", + "attributes.created", + "attributes.modified", + "attributes.created_by", # somehow returned by create or update, not by get + "attributes.created_by_uuid", + "relationships.created_by_user", + "relationships.last_modified_by_user", + "attributes.status", # after create, it's always `4`: `indicates manually updated` + "attributes.metadata.channels.org_id", + "attributes.metadata.channels.incident_uuid", + ] + ) + # Additional 
IncidentsIntegrations specific attributes + pagination_config = PaginationConfig( + page_size=100, + page_number_param="page[offset]", + page_size_param="page[size]", + # this endpoint uses offset (number of items) instead of page number, workaround the paginated client by reusing `page_number` to store offset instead (computed here because we don't have `resp`) + page_number_func=lambda idx, page_size, page_number: page_size * (idx + 1), + # just return 1, the pagination loop already handles breaking when a page is smaller than page size + remaining_func=lambda *args: 1, + ) + integrations_path: str = "/api/v2/incidents/{incident_id}/relationships/integrations" + + def get_resources(self, client: CustomClient) -> List[Dict]: + # first, get all incidents, then for each incidents, get all incidents integrations + resp_incidents = client.paginated_request(client.get)( + self.resource_config.base_path, + pagination_config=self.pagination_config + ) + + resp = [] + for incident in resp_incidents: + resp += client.paginated_request(client.get)( + # use public id, to avoid connecting manually the resource here (we are in the get_resources, it's not usually done there, so not free); this assumes the public IDs between source & destination are in sync, which should be the case if importing incidents via datadog-sync-cli, cf comments in that resource + self.integrations_path.format(incident_id=incident["attributes"]["public_id"]), + pagination_config=self.pagination_config + ) + return resp + + def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> None: + if _id: + raise Exception("importing incidents_integrations by id is not supported: we need not only the incidents_integrations id (which we have) but also the parent incident id, which we do not have.") + + resource = cast(dict, resource) + self.resource_config.source_resources[resource["id"]] = resource + + def pre_resource_action_hook(self, _id, resource: Dict) -> None: + pass + + def 
pre_apply_hook(self) -> None: + pass + + def create_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + destination_incident_id = resource["attributes"].pop("incident_id") + payload = {"data": resource} + + resp = destination_client.post( + # incidents api works both with public_id and id, here we use the connected (converted to the destination incident) uuid id + self.integrations_path.format(incident_id=destination_incident_id), + payload, + ).json() + + self.resource_config.destination_resources[_id] = resp["data"] + + def update_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + destination_incident_id = resource["attributes"].pop("incident_id") + payload = {"data": resource} + resp = destination_client.patch( + # incidents api works both with public_id and id, here we use the connected (converted to the destination incident) uuid id + self.integrations_path.format(incident_id=destination_incident_id) + + f"/{self.resource_config.destination_resources[_id]['id']}", + payload, + ).json() + + self.resource_config.destination_resources[_id] = resp["data"] + + def delete_resource(self, _id: str) -> None: + destination_client = self.config.destination_client + destination_incident_id = resource["attributes"].pop("incident_id") + destination_client.delete( + # incidents api works both with public_id and id, here we use the connected (converted to the destination incident) uuid id + self.integrations_path.format(incident_id=destination_incident_id) + + f"/{self.resource_config.destination_resources[_id]['id']}" + ) + + def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]: + return super(IncidentsIntegrations, self).connect_id(key, r_obj, resource_to_connect) diff --git a/datadog_sync/model/incidents_todos.py b/datadog_sync/model/incidents_todos.py new file mode 100644 index 00000000..76a36206 --- /dev/null +++ 
b/datadog_sync/model/incidents_todos.py @@ -0,0 +1,114 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. + +from __future__ import annotations +from typing import TYPE_CHECKING, Optional, List, Dict, cast + +from datadog_sync.utils.base_resource import BaseResource, ResourceConfig +from datadog_sync.utils.custom_client import PaginationConfig + +if TYPE_CHECKING: + from datadog_sync.utils.custom_client import CustomClient + + +class IncidentsTodos(BaseResource): + resource_type = "incidents_todos" + resource_config = ResourceConfig( + resource_connections={ + "incidents": [ + "attributes.incident_id", + ] + }, + base_path="/api/v2/incidents", + excluded_attributes=[ + "id", + "attributes.last_modified_by", # somehow returned by create or update, not by get + "attributes.last_modified_by_uuid", + "attributes.created", + "attributes.modified", + "attributes.created_by", # somehow returned by create or update, not by get + "attributes.created_by_uuid", + "relationships.created_by_user", + "relationships.last_modified_by_user", + ] + ) + # Additional IncidentsTodos specific attributes + pagination_config = PaginationConfig( + page_size=100, + page_number_param="page[offset]", + page_size_param="page[size]", + # this endpoint uses offset (number of items) instead of page number, workaround the paginated client by reusing `page_number` to store offset instead (computed here because we don't have `resp`) + page_number_func=lambda idx, page_size, page_number: page_size * (idx + 1), + # just return 1, the pagination loop already handles breaking when a page is smaller than page size + remaining_func=lambda *args: 1, + ) + todos_path: str = "/api/v2/incidents/{incident_id}/relationships/todos" + + def get_resources(self, client: CustomClient) -> List[Dict]: + # first, get 
all incidents, then for each incidents, get all incidents todos + resp_incidents = client.paginated_request(client.get)( + self.resource_config.base_path, + pagination_config=self.pagination_config + ) + + resp = [] + for incident in resp_incidents: + resp += client.paginated_request(client.get)( + # use public id, to avoid connecting manually the resource here (we are in the get_resources, it's not usually done there, so not free); this assumes the public IDs between source & destination are in sync, which should be the case if importing incidents via datadog-sync-cli, cf comments in that resource + self.todos_path.format(incident_id=incident["attributes"]["public_id"]), + pagination_config=self.pagination_config + ) + return resp + + def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> None: + if _id: + raise Exception("importing incidents_todos by id is not supported: we need not only the incidents_todos id (which we have) but also the parent incident id, which we do not have.") + + resource = cast(dict, resource) + self.resource_config.source_resources[resource["id"]] = resource + + def pre_resource_action_hook(self, _id, resource: Dict) -> None: + pass + + def pre_apply_hook(self) -> None: + pass + + def create_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + destination_incident_id = resource["attributes"].pop("incident_id") + payload = {"data": resource} + + resp = destination_client.post( + # incidents api works both with public_id and id, here we use the connected (converted to the destination incident) uuid id + self.todos_path.format(incident_id=destination_incident_id), + payload, + ).json() + + self.resource_config.destination_resources[_id] = resp["data"] + + def update_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + destination_incident_id = resource["attributes"].pop("incident_id") + payload = 
{"data": resource} + resp = destination_client.patch( + # incidents api works both with public_id and id, here we use the connected (converted to the destination incident) uuid id + self.todos_path.format(incident_id=destination_incident_id) + + f"/{self.resource_config.destination_resources[_id]['id']}", + payload, + ).json() + + self.resource_config.destination_resources[_id] = resp["data"] + + def delete_resource(self, _id: str) -> None: + destination_client = self.config.destination_client + destination_incident_id = resource["attributes"].pop("incident_id") + destination_client.delete( + # incidents api works both with public_id and id, here we use the connected (converted to the destination incident) uuid id + self.todos_path.format(incident_id=destination_incident_id) + + f"/{self.resource_config.destination_resources[_id]['id']}" + ) + + def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]: + return super(IncidentsTodos, self).connect_id(key, r_obj, resource_to_connect) diff --git a/datadog_sync/model/integrations_slack_channels.py b/datadog_sync/model/integrations_slack_channels.py new file mode 100644 index 00000000..6d1bc776 --- /dev/null +++ b/datadog_sync/model/integrations_slack_channels.py @@ -0,0 +1,83 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. 
+ +from __future__ import annotations +from typing import TYPE_CHECKING, Optional, List, Dict, cast + +from datadog_sync.utils.base_resource import BaseResource, ResourceConfig + +if TYPE_CHECKING: + from datadog_sync.utils.custom_client import CustomClient + + +class IntegrationsSlackChannels(BaseResource): + resource_type = "integrations_slack_channels" + resource_config = ResourceConfig( + base_path="/api/v1/integration/slack/configuration/accounts/{account_name}/channels", + excluded_attributes=[ + "id", + ] + ) + # Additional Incidents specific attributes + slack_account_name = "deepo" # <-- to edit + + def get_resources(self, client: CustomClient) -> List[Dict]: + resp = client.get( + self.resource_config.base_path.format(account_name=self.slack_account_name) + ).json() + # fabricate id == channel name as required by datadog_sync + return [{"id": r["name"].strip('#'), **r} for r in resp] + + def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> None: + if _id: + # there is only one settings, ignoring id + source_client = self.config.source_client + resource = source_client.get( + self.resource_config.base_path.format(account_name=self.slack_account_name) + + f"/{_id}" + ).json() + + resource = cast(dict, resource) + self.resource_config.source_resources[resource["id"]] = resource + + def pre_resource_action_hook(self, _id, resource: Dict) -> None: + pass + + def pre_apply_hook(self) -> None: + pass + + def create_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + payload = resource + resp = destination_client.post( + self.resource_config.base_path.format(account_name=self.slack_account_name), + payload, + ).json() + + self.resource_config.destination_resources[_id] = resp + + def update_resource(self, _id: str, resource: Dict) -> None: + destination_client = self.config.destination_client + payload = resource + resp = destination_client.post( + # same id in source & 
destination: the channel name + self.resource_config.base_path.format(account_name=self.slack_account_name) + + f"/{_id}", + payload, + ).json() + + self.resource_config.destination_resources[_id] = resp + + def delete_resource(self, _id: str) -> None: + destination_client = self.config.destination_client + destination_client.delete( + # same id in source & destination: the channel name + self.resource_config.base_path.format(account_name=self.slack_account_name) + + f"/{_id}" + ) + + + def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]: + pass diff --git a/datadog_sync/model/logs_facets.py b/datadog_sync/model/logs_facets.py new file mode 100644 index 00000000..280437c4 --- /dev/null +++ b/datadog_sync/model/logs_facets.py @@ -0,0 +1,103 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. 
from __future__ import annotations
from copy import deepcopy
from typing import TYPE_CHECKING, Optional, List, Dict, cast

from datadog_sync.utils.base_resource import BaseResource, ResourceConfig

if TYPE_CHECKING:
    from datadog_sync.utils.custom_client import CustomClient


class LogsFacets(BaseResource):
    """Sync logs facets between organizations.

    Uses the non-official web-frontend API (dogweb cookie + csrf token, see
    README): facets are addressed per hardcoded scope id, and every write
    carries an ``_authentication_token`` in the payload.
    """

    resource_type = "logs_facets"
    resource_config = ResourceConfig(
        base_path="/api/v1/logs",
        excluded_attributes=["bounded", "bundledAndUsed"],
    )
    # Additional LogsFacets specific attributes
    # cache of destination facets keyed by facet id, filled by pre_apply_hook
    destination_logs_facets: Dict[str, Dict] = dict()

    # TODO stop hardcoding those; see what the web frontend does
    source_scopeid = "1762986"
    destination_scopeid = "1000288307"


    def get_resources(self, client: CustomClient) -> List[Dict]:
        # list every logs facet of the org via the frontend facet_lists endpoint
        resp = client.get(self.resource_config.base_path + "/facet_lists?type=logs").json()

        return resp["facets"]["logs"]

    def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> None:
        # fetch a single facet when only an id is given (uses the hardcoded source scope)
        if _id:
            source_client = self.config.source_client
            resource = source_client.get(self.resource_config.base_path + f"/scopes/{self.source_scopeid}/facets/{_id}").json()

        resource = cast(dict, resource)
        # non-editable facets are built-in: skip them, they cannot be recreated
        if not resource["editable"]:
            return
        self.resource_config.source_resources[resource["id"]] = resource

    def pre_resource_action_hook(self, _id, resource: Dict) -> None:
        pass

    def pre_apply_hook(self) -> None:
        # snapshot existing destination facets so create can fall back to update
        self.destination_logs_facets = self.get_destination_logs_facets()

    def create_resource(self, _id: str, resource: Dict) -> None:
        # if the facet already exists in the destination org, update it instead
        if _id in self.destination_logs_facets:
            self.resource_config.destination_resources[_id] = self.destination_logs_facets[_id]
            self.update_resource(_id, resource)
            return

        destination_client = self.config.destination_client
        payload = deepcopy(resource)
        # frontend API requires the csrf token inside the payload
        payload["_authentication_token"] = destination_client.csrf_token
        resp = destination_client.post(
            self.resource_config.base_path
            + f"/scopes/{self.destination_scopeid}/facets?type=logs",
            payload,
        ).json()

        self.resource_config.destination_resources[_id] = resp

    def update_resource(self, _id: str, resource: Dict) -> None:
        # the frontend API updates facets with POST on the facet path (not PUT/PATCH)
        destination_client = self.config.destination_client
        payload = deepcopy(resource)
        payload["_authentication_token"] = destination_client.csrf_token
        resp = destination_client.post(
            self.resource_config.base_path
            + f"/scopes/{self.destination_scopeid}/facets/"
            + f"{self.resource_config.destination_resources[_id]['id']}?type=logs",
            payload,
        ).json()

        self.resource_config.destination_resources[_id] = resp

    def delete_resource(self, _id: str) -> None:
        destination_client = self.config.destination_client
        # DELETE still needs the csrf token in a body for the frontend API
        payload = {}
        payload["_authentication_token"] = destination_client.csrf_token
        # NOTE(review): calling .json() on the DELETE response assumes the
        # frontend API returns a JSON body on delete — confirm; an empty body
        # would make this raise.
        destination_client.delete(
            self.resource_config.base_path
            + f"/scopes/{self.destination_scopeid}/facets/"
            + f"{self.resource_config.destination_resources[_id]['id']}?type=logs",
            payload,
        ).json()

    def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]:
        # no cross-resource connections for this resource
        pass

    def get_destination_logs_facets(self) -> Dict[str, Dict]:
        # index all facets currently present in the destination org by id
        destination_logs_facets = {}
        destination_client = self.config.destination_client

        resp = self.get_resources(destination_client)
        for log_facet in resp:
            destination_logs_facets[log_facet["id"]] = log_facet

        return destination_logs_facets
from __future__ import annotations
from typing import TYPE_CHECKING, Optional, List, Dict, cast

from datadog_sync.utils.base_resource import BaseResource, ResourceConfig

if TYPE_CHECKING:
    from datadog_sync.utils.custom_client import CustomClient


class LogsViews(BaseResource):
    """Sync logs saved views between organizations.

    Uses the non-official web-frontend API (dogweb cookie + csrf token, see
    README); writes carry an ``_authentication_token`` in the payload.
    """

    resource_type = "logs_views"
    resource_config = ResourceConfig(
        resource_connections={"logs_indexes": ["index"]},
        base_path="/api/v1/logs/views",
        excluded_attributes=[
            "modified_at",
            "author",
            "id",
            "integration_id",
            "integration_short_name",
            "is_favorite"
        ]
    )
    # Additional LogsViews specific attributes

    def get_resources(self, client: CustomClient) -> List[Dict]:
        # list every saved view of the org
        resp = client.get(self.resource_config.base_path).json()

        return resp["logs_views"]

    def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> None:
        # fetch a single view when only an id is given
        if _id:
            source_client = self.config.source_client
            resource = source_client.get(self.resource_config.base_path + f"/{_id}").json()["logs_view"]

        resource = cast(dict, resource)
        # skip integrations saved views
        if resource["integration_id"]:
            return
        self.resource_config.source_resources[resource["id"]] = resource

    def pre_resource_action_hook(self, _id, resource: Dict) -> None:
        pass

    def pre_apply_hook(self) -> None:
        pass

    def create_resource(self, _id: str, resource: Dict) -> None:
        destination_client = self.config.destination_client
        payload = resource
        # frontend API requires the csrf token inside the payload
        payload["_authentication_token"] = destination_client.csrf_token
        resp = destination_client.post(
            self.resource_config.base_path,
            payload,
        ).json()

        self.resource_config.destination_resources[_id] = resp

    def update_resource(self, _id: str, resource: Dict) -> None:
        destination_client = self.config.destination_client
        payload = resource
        payload["_authentication_token"] = destination_client.csrf_token
        resp = destination_client.put(
            self.resource_config.base_path
            + f"/{self.resource_config.destination_resources[_id]['id']}",
            payload,
        ).json()

        self.resource_config.destination_resources[_id] = resp

    def delete_resource(self, _id: str) -> None:
        destination_client = self.config.destination_client
        # DELETE still needs the csrf token in a body for the frontend API
        payload = {}
        payload["_authentication_token"] = destination_client.csrf_token
        # NOTE(review): the "?type=logs" query string looks copy-pasted from
        # logs_facets — create/update above do not use it; confirm whether the
        # views endpoint expects it. Also .json() assumes a JSON delete
        # response — verify.
        destination_client.delete(
            self.resource_config.base_path
            + f"/{self.resource_config.destination_resources[_id]['id']}?type=logs",
            payload,
        ).json()

    def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]:
        # connections are declared in resource_connections; nothing custom here
        pass
+ +from __future__ import annotations +from typing import TYPE_CHECKING, Optional, List, Dict, cast + +from datadog_sync.utils.base_resource import BaseResource, ResourceConfig + +if TYPE_CHECKING: + from datadog_sync.utils.custom_client import CustomClient + + +class MetricMetadatas(BaseResource): + resource_type = "metric_metadatas" + resource_config = ResourceConfig( + base_path="/api/v1/metrics", + excluded_attributes=["integration"], + ) + # Additional MetricMetadatas specific attributes + destination_metric_metadatas: Dict[str, Dict] = dict() + + def get_resources(self, client: CustomClient) -> List[Dict]: + resp = client.get("/api/v2/metrics").json()["data"] + + # cleanup "type": "metrics", + for metric in resp: + del metric['type'] + + # return objects with only "id" field + return resp + + def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> None: + if resource.keys() == {'id'}: + # we get only the id from the metrics list, force getting metric metadata individually + _id = resource['id'] + if _id: + source_client = self.config.source_client + resource = source_client.get(self.resource_config.base_path + f"/{_id}").json() + resource['id'] = _id + + resource = cast(dict, resource) + self.resource_config.source_resources[resource["id"]] = resource + + def pre_resource_action_hook(self, _id, resource: Dict) -> None: + pass + + def pre_apply_hook(self) -> None: + self.destination_metric_metadatas = self.get_destination_metric_metadatas() + + def create_resource(self, _id: str, resource: Dict) -> None: + if _id in self.destination_metric_metadatas: + self.resource_config.destination_resources[_id] = self.destination_metric_metadatas[_id] + self.update_resource(_id, resource) + return + + raise Exception("creating metric_metadatas is not supported: push data-points to it and the rerun (it will then update it instead of trying to create)") + + def update_resource(self, _id: str, resource: Dict) -> None: + destination_client = 
self.config.destination_client + payload = resource + resp = destination_client.put( + self.resource_config.base_path + f"/{self.resource_config.destination_resources[_id]['id']}", + payload, + ).json() + resp['id'] = self.resource_config.destination_resources[_id]['id'] + + self.resource_config.destination_resources[_id] = resp + + def delete_resource(self, _id: str) -> None: + raise Exception("deleting metric_metadatas is not supported") + + def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]: + pass + + def get_destination_metric_metadatas(self) -> Dict[str, Dict]: + destination_metric_metadatas = {} + destination_client = self.config.destination_client + + resp = self.get_resources(destination_client) + for metric_metadata in resp: + destination_metric_metadatas[metric_metadata["id"]] = metric_metadata + + return destination_metric_metadatas diff --git a/datadog_sync/models/__init__.py b/datadog_sync/models/__init__.py index adc0d140..5a9686c6 100644 --- a/datadog_sync/models/__init__.py +++ b/datadog_sync/models/__init__.py @@ -21,7 +21,18 @@ from datadog_sync.model.notebooks import Notebooks from datadog_sync.model.logs_metrics import LogsMetrics from datadog_sync.model.host_tags import HostTags +from datadog_sync.model.metric_metadatas import MetricMetadatas from datadog_sync.model.metric_tag_configurations import MetricTagConfigurations from datadog_sync.model.logs_indexes import LogsIndexes from datadog_sync.model.logs_restriction_queries import LogsRestrictionQueries from datadog_sync.model.spans_metrics import SpansMetrics +from datadog_sync.model.logs_facets import LogsFacets +from datadog_sync.model.logs_views import LogsViews +from datadog_sync.model.incidents import Incidents +from datadog_sync.model.incidents_integrations import IncidentsIntegrations +from datadog_sync.model.incidents_todos import IncidentsTodos +from datadog_sync.model.incident_org_settings import IncidentOrgSettings +from 
datadog_sync.model.incidents_config_fields import IncidentsConfigFields +from datadog_sync.model.incidents_config_notifications_templates import IncidentsConfigNotificationsTemplates +from datadog_sync.model.incidents_config_integrations_workflows import IncidentsConfigIntegrationsWorkflows +from datadog_sync.model.integrations_slack_channels import IntegrationsSlackChannels diff --git a/datadog_sync/utils/configuration.py b/datadog_sync/utils/configuration.py index dccf2cfd..0065fc19 100644 --- a/datadog_sync/utils/configuration.py +++ b/datadog_sync/utils/configuration.py @@ -27,6 +27,7 @@ RESOURCE_FILE_PATH, TRUE, VALIDATE_ENDPOINT, + VALIDATE_ENDPOINT_COOKIEAUTH, ) from datadog_sync.utils.resource_utils import CustomClientHTTPError @@ -63,12 +64,15 @@ def build_config(cmd: str, **kwargs: Optional[Any]) -> Configuration: source_auth = { "apiKeyAuth": kwargs.get("source_api_key", ""), "appKeyAuth": kwargs.get("source_app_key", ""), + "cookieDogWeb": kwargs.get("source_cookie_dogweb", ""), } source_client = CustomClient(source_api_url, source_auth, retry_timeout, timeout) destination_auth = { "apiKeyAuth": kwargs.get("destination_api_key", ""), "appKeyAuth": kwargs.get("destination_app_key", ""), + "cookieDogWeb": kwargs.get("destination_cookie_dogweb", ""), + "x-csrf-token": kwargs.get("destination_csrf_token", ""), } destination_client = CustomClient(destination_api_url, destination_auth, retry_timeout, timeout) @@ -171,7 +175,10 @@ def init_resources(cfg: Configuration) -> Dict[str, BaseResource]: def _validate_client(client: CustomClient) -> None: logger = logging.getLogger(LOGGER_NAME) try: - client.get(VALIDATE_ENDPOINT).json() + if client.cookieauth: + client.get(VALIDATE_ENDPOINT_COOKIEAUTH).json() + else: + client.get(VALIDATE_ENDPOINT).json() except CustomClientHTTPError as e: logger.error(f"invalid api key: {e}") exit(1) diff --git a/datadog_sync/utils/custom_client.py b/datadog_sync/utils/custom_client.py index cb2ac530..ec748c90 100644 --- 
def build_default_headers(auth_obj: Dict[str, str], is_cookieauth_mode: bool) -> Dict[str, str]:
    """Assemble the default HTTP headers for a client session.

    Content type and user agent are always set; in cookie-auth mode the
    dogweb session cookie replaces the API/APP key headers (frontend-API
    hack, see README).
    """
    common = {
        "Content-Type": "application/json",
        "User-Agent": _get_user_agent(),
    }
    if is_cookieauth_mode:
        auth_headers = {"Cookie": "dogweb=" + auth_obj["cookieDogWeb"]}
    else:
        auth_headers = {
            "DD-API-KEY": auth_obj["apiKeyAuth"],
            "DD-APPLICATION-KEY": auth_obj["appKeyAuth"],
        }
    return common | auth_headers
keys_list = keys_list_str.split(".", 1) @@ -103,6 +105,10 @@ def remove_non_nullable_attributes(resource_config, resource): def del_attr(k_list, resource): + if isinstance(resource, list): + for r in resource: + del_attr(k_list, r) + return if len(k_list) == 1: resource.pop(k_list[0], None) else: @@ -112,6 +118,10 @@ def del_attr(k_list, resource): def del_null_attr(k_list, resource): + if isinstance(resource, list): + for r in resource: + del_null_attr(k_list, r) + return if len(k_list) == 1 and k_list[0] in resource and resource[k_list[0]] is None: resource.pop(k_list[0], None) elif len(k_list) > 1 and resource[k_list[0]] is not None: diff --git a/scripts/cleanup_org.py b/scripts/cleanup_org.py index b68aa3d1..13632082 100644 --- a/scripts/cleanup_org.py +++ b/scripts/cleanup_org.py @@ -18,7 +18,7 @@ class Cleanup: def __init__(self): self.headers = get_headers() - self.base_url = os.getenv("DD_DESTINATION_API_URL") + self.base_url = os.environ["DESTINATION_API_URL"] # Validate test org self.validate_org() @@ -36,11 +36,11 @@ def __init__(self): self.cleanup_logs_custom_pipelines() self.cleanup_monitors() self.cleanup_notebooks() - self.cleanup_users() + # self.cleanup_users() self.cleanup_roles() self.cleanup_logs_metrics() self.cleanup_metric_tag_configurations() - self.cleanup_host_tags() + # self.cleanup_host_tags() self.cleanup_logs_restriction_queries() # self.cleanup_integrations_aws() @@ -229,7 +229,7 @@ def get_resources(self, path, *args, **kwargs): resp = requests.get(url, headers=self.headers, timeout=60, *args, **kwargs) resp.raise_for_status() except requests.exceptions.HTTPError as e: - print("Error getting url %s: %s", url, e) + print("Error getting url %s: %s, %s", url, e, e.response.headers) return return resp.json() @@ -240,13 +240,13 @@ def delete_resource(self, _id, path, **kwargs): resp.raise_for_status() print("deleted resource ", url, _id) except requests.exceptions.HTTPError as e: - print("Error deleting resource: %s", e) + print("Error 
def get_headers():
    """Build the API headers for the destination org.

    Requires DESTINATION_API_KEY and DESTINATION_APP_KEY to be set in the
    environment (a missing variable raises KeyError, failing fast instead of
    sending empty credentials).
    """
    env = os.environ
    return {
        "DD-API-KEY": env["DESTINATION_API_KEY"],
        "DD-APPLICATION-KEY": env["DESTINATION_APP_KEY"],
        "Content-Type": "application/json",
    }