From f41b22935d01ad367e87de45705b2ee5b0f0b5d0 Mon Sep 17 00:00:00 2001
From: Emanuela Mitreva
Date: Thu, 25 Jul 2024 15:52:48 +0300
Subject: [PATCH 1/4] remove dead code

---
 bin/cbapi-defense                             |   94 --
 bin/cbapi-psc                                 |   97 --
 .../DellBiosVerification/BiosVerification.py  |   97 --
 .../cblr/DellBiosVerification/README.md       |   82 --
 .../cblr/DellBiosVerification/dellbios.bat    |    4 -
 .../DEPRECATED_defense/cblr/examplejob.py     |   10 -
 examples/DEPRECATED_defense/cblr/jobrunner.py |   70 --
 examples/DEPRECATED_defense/cblr_cli.py       |   44 -
 examples/DEPRECATED_defense/event_export.py   |   76 --
 examples/DEPRECATED_defense/list_devices.py   |   32 -
 examples/DEPRECATED_defense/list_events.py    |   71 --
 .../list_events_with_cmdline_csv.py           |   86 --
 examples/DEPRECATED_defense/move_device.py    |   40 -
 examples/DEPRECATED_defense/notifications.py  |   21 -
 .../DEPRECATED_defense/policy_operations.py   |  206 ---
 .../alert_search_suggestions.py               |   22 -
 examples/DEPRECATED_psc/bulk_update_alerts.py |   50 -
 .../bulk_update_cbanalytics_alerts.py         |   50 -
 .../bulk_update_threat_alerts.py              |   47 -
 .../bulk_update_vmware_alerts.py              |   50 -
 .../bulk_update_watchlist_alerts.py           |   50 -
 examples/DEPRECATED_psc/device_control.py     |   73 --
 .../DEPRECATED_psc/download_device_list.py    |   50 -
 examples/DEPRECATED_psc/helpers/alertsv6.py   |  159 ---
 examples/DEPRECATED_psc/list_alert_facets.py  |   34 -
 examples/DEPRECATED_psc/list_alerts.py        |   33 -
 .../list_cbanalytics_alert_facets.py          |   34 -
 .../DEPRECATED_psc/list_cbanalytics_alerts.py |   33 -
 examples/DEPRECATED_psc/list_devices.py       |   45 -
 .../list_vmware_alert_facets.py               |   34 -
 examples/DEPRECATED_psc/list_vmware_alerts.py |   33 -
 .../list_watchlist_alert_facets.py            |   34 -
 .../DEPRECATED_psc/list_watchlist_alerts.py   |   33 -
 .../DEPRECATED_threathunter/create_feed.py    |   78 --
 examples/DEPRECATED_threathunter/events.py    |   33 -
 .../events_exporter.py                        |   50 -
 .../feed_operations.py                        |  268 ----
 .../import_response_feeds.py                  |  144 ---
 .../DEPRECATED_threathunter/modify_feed.py    |   51 -
 .../process_exporter.py                       |   60 -
 .../DEPRECATED_threathunter/process_query.py  |   40 -
 .../DEPRECATED_threathunter/process_tree.py   |   29 -
 .../process_tree_exporter.py                  |   44 -
 examples/DEPRECATED_threathunter/search.py    |   78 --
 .../threat_intelligence/README.md             |  129 --
 .../threat_intelligence/Taxii_README.md       |   41 -
 .../threat_intelligence/config.yml            |   78 --
 .../threat_intelligence/feed_helper.py        |   45 -
 .../threat_intelligence/get_feed_ids.py       |   21 -
 .../threat_intelligence/requirements.txt      |   10 -
 .../threat_intelligence/results.py            |   81 --
 .../threat_intelligence/schemas.py            |   44 -
 .../threat_intelligence/stix_parse.py         |  466 -------
 .../threat_intelligence/stix_taxii.py         |  392 ------
 .../threat_intelligence/threatintel.py        |   82 --
 .../watchlist_operations.py                   |  327 -----
 setup.py                                      |    8 +-
 src/cbapi/__init__.py                         |    5 -
 src/cbapi/defense.py                          |    2 -
 src/cbapi/example_helpers.py                  |   30 -
 src/cbapi/psc/__init__.py                     |    2 +-
 src/cbapi/psc/alerts_query.py                 |  704 -----------
 src/cbapi/psc/cblr.py                         |  250 ----
 src/cbapi/psc/defense/__init__.py             |    6 -
 src/cbapi/psc/defense/models.py               |  164 ---
 src/cbapi/psc/defense/models/deviceInfo.yaml  |  221 ----
 src/cbapi/psc/defense/models/policyInfo.yaml  |   25 -
 src/cbapi/psc/defense/rest_api.py             |  194 ---
 src/cbapi/psc/devices_query.py                |    2 +-
 src/cbapi/psc/models.py                       |  190 ---
 src/cbapi/psc/models/base_alert.yaml          |  139 --
 src/cbapi/psc/models/workflow.yaml            |   23 -
 src/cbapi/psc/models/workflow_status.yaml     |   56 -
 src/cbapi/psc/rest_api.py                     |   12 -
 src/cbapi/psc/threathunter/__init__.py        |    9 -
 src/cbapi/psc/threathunter/models.py          | 1117 -----------------
src/cbapi/psc/threathunter/models/binary.yaml | 79 -- src/cbapi/psc/threathunter/models/feed.yaml | 33 - src/cbapi/psc/threathunter/models/ioc_v2.yaml | 23 - src/cbapi/psc/threathunter/models/iocs.yaml | 32 - src/cbapi/psc/threathunter/models/report.yaml | 45 - .../threathunter/models/report_severity.yaml | 12 - .../psc/threathunter/models/watchlist.yaml | 43 - src/cbapi/psc/threathunter/query.py | 654 ---------- src/cbapi/psc/threathunter/rest_api.py | 115 -- test/cbapi/psc/test_alertsv6_api.py | 535 -------- test/cbapi/psc/test_models.py | 136 +- tests/test_defense_policy.py | 53 - 88 files changed, 5 insertions(+), 9399 deletions(-) delete mode 100644 bin/cbapi-defense delete mode 100644 bin/cbapi-psc delete mode 100755 examples/DEPRECATED_defense/cblr/DellBiosVerification/BiosVerification.py delete mode 100644 examples/DEPRECATED_defense/cblr/DellBiosVerification/README.md delete mode 100644 examples/DEPRECATED_defense/cblr/DellBiosVerification/dellbios.bat delete mode 100755 examples/DEPRECATED_defense/cblr/examplejob.py delete mode 100755 examples/DEPRECATED_defense/cblr/jobrunner.py delete mode 100644 examples/DEPRECATED_defense/cblr_cli.py delete mode 100644 examples/DEPRECATED_defense/event_export.py delete mode 100644 examples/DEPRECATED_defense/list_devices.py delete mode 100644 examples/DEPRECATED_defense/list_events.py delete mode 100644 examples/DEPRECATED_defense/list_events_with_cmdline_csv.py delete mode 100644 examples/DEPRECATED_defense/move_device.py delete mode 100644 examples/DEPRECATED_defense/notifications.py delete mode 100644 examples/DEPRECATED_defense/policy_operations.py delete mode 100755 examples/DEPRECATED_psc/alert_search_suggestions.py delete mode 100755 examples/DEPRECATED_psc/bulk_update_alerts.py delete mode 100755 examples/DEPRECATED_psc/bulk_update_cbanalytics_alerts.py delete mode 100755 examples/DEPRECATED_psc/bulk_update_threat_alerts.py delete mode 100755 examples/DEPRECATED_psc/bulk_update_vmware_alerts.py delete mode 100755 examples/DEPRECATED_psc/bulk_update_watchlist_alerts.py delete mode 100755 examples/DEPRECATED_psc/device_control.py delete mode 100755 examples/DEPRECATED_psc/download_device_list.py delete mode 100755 examples/DEPRECATED_psc/helpers/alertsv6.py delete mode 100755 examples/DEPRECATED_psc/list_alert_facets.py delete mode 100755 examples/DEPRECATED_psc/list_alerts.py delete mode 100755 examples/DEPRECATED_psc/list_cbanalytics_alert_facets.py delete mode 100755 examples/DEPRECATED_psc/list_cbanalytics_alerts.py delete mode 100755 examples/DEPRECATED_psc/list_devices.py delete mode 100755 examples/DEPRECATED_psc/list_vmware_alert_facets.py delete mode 100755 examples/DEPRECATED_psc/list_vmware_alerts.py delete mode 100755 examples/DEPRECATED_psc/list_watchlist_alert_facets.py delete mode 100755 examples/DEPRECATED_psc/list_watchlist_alerts.py delete mode 100644 examples/DEPRECATED_threathunter/create_feed.py delete mode 100644 examples/DEPRECATED_threathunter/events.py delete mode 100644 examples/DEPRECATED_threathunter/events_exporter.py delete mode 100644 examples/DEPRECATED_threathunter/feed_operations.py delete mode 100644 examples/DEPRECATED_threathunter/import_response_feeds.py delete mode 100644 examples/DEPRECATED_threathunter/modify_feed.py delete mode 100644 examples/DEPRECATED_threathunter/process_exporter.py delete mode 100644 examples/DEPRECATED_threathunter/process_query.py delete mode 100644 examples/DEPRECATED_threathunter/process_tree.py delete mode 100644 
examples/DEPRECATED_threathunter/process_tree_exporter.py delete mode 100644 examples/DEPRECATED_threathunter/search.py delete mode 100644 examples/DEPRECATED_threathunter/threat_intelligence/README.md delete mode 100644 examples/DEPRECATED_threathunter/threat_intelligence/Taxii_README.md delete mode 100644 examples/DEPRECATED_threathunter/threat_intelligence/config.yml delete mode 100644 examples/DEPRECATED_threathunter/threat_intelligence/feed_helper.py delete mode 100644 examples/DEPRECATED_threathunter/threat_intelligence/get_feed_ids.py delete mode 100644 examples/DEPRECATED_threathunter/threat_intelligence/requirements.txt delete mode 100644 examples/DEPRECATED_threathunter/threat_intelligence/results.py delete mode 100644 examples/DEPRECATED_threathunter/threat_intelligence/schemas.py delete mode 100644 examples/DEPRECATED_threathunter/threat_intelligence/stix_parse.py delete mode 100644 examples/DEPRECATED_threathunter/threat_intelligence/stix_taxii.py delete mode 100644 examples/DEPRECATED_threathunter/threat_intelligence/threatintel.py delete mode 100644 examples/DEPRECATED_threathunter/watchlist_operations.py delete mode 100644 src/cbapi/defense.py delete mode 100755 src/cbapi/psc/alerts_query.py delete mode 100644 src/cbapi/psc/cblr.py delete mode 100644 src/cbapi/psc/defense/__init__.py delete mode 100644 src/cbapi/psc/defense/models.py delete mode 100644 src/cbapi/psc/defense/models/deviceInfo.yaml delete mode 100644 src/cbapi/psc/defense/models/policyInfo.yaml delete mode 100644 src/cbapi/psc/defense/rest_api.py delete mode 100755 src/cbapi/psc/models/base_alert.yaml delete mode 100755 src/cbapi/psc/models/workflow.yaml delete mode 100755 src/cbapi/psc/models/workflow_status.yaml delete mode 100644 src/cbapi/psc/threathunter/__init__.py delete mode 100644 src/cbapi/psc/threathunter/models.py delete mode 100644 src/cbapi/psc/threathunter/models/binary.yaml delete mode 100644 src/cbapi/psc/threathunter/models/feed.yaml delete mode 100644 src/cbapi/psc/threathunter/models/ioc_v2.yaml delete mode 100644 src/cbapi/psc/threathunter/models/iocs.yaml delete mode 100644 src/cbapi/psc/threathunter/models/report.yaml delete mode 100644 src/cbapi/psc/threathunter/models/report_severity.yaml delete mode 100644 src/cbapi/psc/threathunter/models/watchlist.yaml delete mode 100644 src/cbapi/psc/threathunter/query.py delete mode 100644 src/cbapi/psc/threathunter/rest_api.py delete mode 100755 test/cbapi/psc/test_alertsv6_api.py delete mode 100644 tests/test_defense_policy.py diff --git a/bin/cbapi-defense b/bin/cbapi-defense deleted file mode 100644 index d5b0b398..00000000 --- a/bin/cbapi-defense +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python - -import argparse -import contextlib - -from cbapi.six import iteritems -from cbapi.six.moves import input -import os -import sys -import cbapi.six as six -if six.PY3: - from io import StringIO as StringIO -else: - from cStringIO import StringIO - -from cbapi.six.moves.configparser import RawConfigParser - - -@contextlib.contextmanager -def temp_umask(umask): - oldmask = os.umask(umask) - try: - yield - finally: - os.umask(oldmask) - - -def configure(opts): - credential_path = os.path.join(os.path.expanduser("~"), ".carbonblack") - credential_file = os.path.join(credential_path, "credentials.defense") - - print("Welcome to the CbAPI.") - if os.path.exists(credential_file): - print("An existing credential file exists at {0}.".format(credential_file)) - resp = input("Do you want to continue and overwrite the existing configuration? 
[Y/N] ") - if resp.strip().upper() != "Y": - print("Exiting.") - return 1 - - if not os.path.exists(credential_path): - os.makedirs(credential_path, 0o700) - - url = input("URL to the Cb Defense API server (do not include '/integrationServices') [https://hostname]: ") - - ssl_verify = True - - connector_id = input("Connector ID: ") - token = input("API key: ") - - config = RawConfigParser() - config.read_file(StringIO('[default]')) - config.set("default", "url", url) - config.set("default", "token", "{0}/{1}".format(token, connector_id)) - config.set("default", "ssl_verify", ssl_verify) - with temp_umask(0): - with os.fdopen(os.open(credential_file, os.O_WRONLY|os.O_CREAT|os.O_TRUNC, 0o600), 'w') as fp: - os.chmod(credential_file, 0o600) - config.write(fp) - print("Successfully wrote credentials to {0}.".format(credential_file)) - - -command_map = { - "configure": { - "extra_args": {}, - "help": "Configure CbAPI", - "method": configure - } -} - - -def main(args): - parser = argparse.ArgumentParser() - commands = parser.add_subparsers(dest="command_name", help="CbAPI subcommand") - - for cmd_name, cmd_config in iteritems(command_map): - cmd_parser = commands.add_parser(cmd_name, help=cmd_config.get("help", None)) - for cmd_arg_name, cmd_arg_config in iteritems(cmd_config.get("extra_args", {})): - cmd_parser.add_argument(cmd_arg_name, **cmd_arg_config) - - opts = parser.parse_args(args) - command = command_map.get(opts.command_name) - if not command: - parser.print_usage() - return - - command_method = command.get("method", None) - if command_method: - return command_method(opts) - else: - parser.print_usage() - - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/bin/cbapi-psc b/bin/cbapi-psc deleted file mode 100644 index af7b1764..00000000 --- a/bin/cbapi-psc +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python - -import argparse -import contextlib - -from cbapi.six import iteritems -from cbapi.six.moves import input -import os -import sys -import cbapi.six as six -if six.PY3: - from io import StringIO as StringIO -else: - from cStringIO import StringIO - -from cbapi.six.moves.configparser import RawConfigParser - - -@contextlib.contextmanager -def temp_umask(umask): - oldmask = os.umask(umask) - try: - yield - finally: - os.umask(oldmask) - - -def configure(opts): - credential_path = os.path.join(os.path.expanduser("~"), ".carbonblack") - credential_file = os.path.join(credential_path, "credentials.psc") - - print("Welcome to the CbAPI.") - if os.path.exists(credential_file): - print("An existing credential file exists at {0}.".format(credential_file)) - resp = input("Do you want to continue and overwrite the existing configuration? 
[Y/N] ") - if resp.strip().upper() != "Y": - print("Exiting.") - return 1 - - if not os.path.exists(credential_path): - os.makedirs(credential_path, 0o700) - - url = input("URL to the Carbon Black Cloud API server (do not include '/integrationServices') [https://hostname]: ") - - ssl_verify = True - - connector_id = input("Connector ID: ") - token = input("API key: ") - - org_key = input("Org Key: ") - - config = RawConfigParser() - config.read_file(StringIO('[default]')) - config.set("default", "url", url) - config.set("default", "token", "{0}/{1}".format(token, connector_id)) - config.set("default", "org_key", org_key) - config.set("default", "ssl_verify", ssl_verify) - with temp_umask(0): - with os.fdopen(os.open(credential_file, os.O_WRONLY|os.O_CREAT|os.O_TRUNC, 0o600), 'w') as fp: - os.chmod(credential_file, 0o600) - config.write(fp) - print("Successfully wrote credentials to {0}.".format(credential_file)) - - -command_map = { - "configure": { - "extra_args": {}, - "help": "Configure CbAPI", - "method": configure - } -} - - -def main(args): - parser = argparse.ArgumentParser() - commands = parser.add_subparsers(dest="command_name", help="CbAPI subcommand") - - for cmd_name, cmd_config in iteritems(command_map): - cmd_parser = commands.add_parser(cmd_name, help=cmd_config.get("help", None)) - for cmd_arg_name, cmd_arg_config in iteritems(cmd_config.get("extra_args", {})): - cmd_parser.add_argument(cmd_arg_name, **cmd_arg_config) - - opts = parser.parse_args(args) - command = command_map.get(opts.command_name) - if not command: - parser.print_usage() - return - - command_method = command.get("method", None) - if command_method: - return command_method(opts) - else: - parser.print_usage() - - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/examples/DEPRECATED_defense/cblr/DellBiosVerification/BiosVerification.py b/examples/DEPRECATED_defense/cblr/DellBiosVerification/BiosVerification.py deleted file mode 100755 index 5d55ca73..00000000 --- a/examples/DEPRECATED_defense/cblr/DellBiosVerification/BiosVerification.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python3 - -# Carbon Black Cloud -Dell Bios Verification LiveResponse -# Copyright VMware 2020 -# May 2020 -# Version 0.1 -# pdrapeau [at] vmware . 
com -# -# usage: BiosVerification.py [-h] [-m MACHINENAME] [-g] [-o ORGPROFILE] -# -# optional arguments: -# -h, --help show this help message and exit -# -m MACHINENAME, --machinename MACHINENAME -# machinename to run host bios forensics on -# -g, --get Get BIOS images -# -# -o ORGPROFILE, --orgprofile ORGPROFILE -# Select your cbapi credential profile - -import os, sys, time, argparse -from cbapi.defense import * - -def live_response(cb, host=None, response=None): - - print ("") - - #Select the device you want to gather forensic data from - query_hostname = "hostNameExact:%s" % host - print ("[ * ] Establishing LiveResponse Session with Remote Host:") - - #Create a new device object to launch LR on - device = cb.select(Device).where(query_hostname).first() - print(" - Hostname: {}".format(device.name)) - print(" - OS Version: {}".format(device.osVersion)) - print(" - Sensor Version: {}".format(device.sensorVersion)) - print(" - AntiVirus Status: {}".format(device.avStatus)) - print(" - Internal IP Address: {}".format(device.lastInternalIpAddress)) - print(" - External IP Address: {}".format(device.lastExternalIpAddress)) - print ("") - - #Execute our LR session - with device.lr_session() as lr_session: - print ("[ * ] Uploading scripts to the remote host") - lr_session.put_file(open("dellbios.bat", "rb"), "C:\\Program Files\\Confer\\temp\\dellbios.bat") - - if response == "get": - print ("[ * ] Getting the images") - result = lr_session.create_process("cmd.exe /c .\\dellbios.bat", wait_for_output=True, remote_output_file_name=None, working_directory="C:\\Program Files\\Confer\\temp\\", wait_timeout=120, wait_for_completion=True).decode("utf-8") - print ("") - print("{}".format(result)) - - print ("[ * ] Removing scripts") - lr_session.create_process("powershell.exe del .\\dellbios.bat", wait_for_output=False, remote_output_file_name=None, working_directory="C:\\Program Files\\Confer\\temp\\", wait_timeout=30, wait_for_completion=False) - - - print ("[ * ] Downloading images") - zipdata = lr_session.get_file("C:\\Program Files\\Confer\\temp\\BiosImages.zip") - - print ("[ * ] Writing out " + host + "-BiosImages.zip") - zipfile = open(host + "-BiosImages.zip","wb") - zipfile.write(zipdata) - - print ("") - - - - else: - print ("[ * ] Nothing to do") - - - print ("[ * ] Cleaning up") - lr_session.create_process("powershell.exe del .\\BiosImages.zip", wait_for_output=False, remote_output_file_name=None, working_directory="C:\\Program Files\\Confer\\temp\\", wait_timeout=30, wait_for_completion=False) - lr_session.create_process("powershell.exe del C:\\tmpbios\\*.*", wait_for_output=False, remote_output_file_name=None, working_directory="C:\\Program Files\\Confer\\temp\\", wait_timeout=30, wait_for_completion=False) - - - print ("") - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument("-m", "--machinename", help = "machinename to run host forensics recon on") - parser.add_argument("-g", "--get", help = "Get the Dell BIOS Verification images", action = "store_true") - parser.add_argument('-o', '--orgprofile', help = "Select your cbapi credential profile", dest = "orgprofile", default = "default") - args = parser.parse_args() - - #Create the CbD LR API object - cb = CbDefenseAPI(profile="{}".format(args.orgprofile)) - - if args.machinename: - if args.get: - live_response(cb, host=args.machinename, response="get") - else: - print ("Nothing to do...") - else: - print ("[ ! ] You must specify a machinename with a --machinename parameter. 
IE ./BiosVerification.py --machinename cheese") - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/examples/DEPRECATED_defense/cblr/DellBiosVerification/README.md b/examples/DEPRECATED_defense/cblr/DellBiosVerification/README.md deleted file mode 100644 index 1ef0fa35..00000000 --- a/examples/DEPRECATED_defense/cblr/DellBiosVerification/README.md +++ /dev/null @@ -1,82 +0,0 @@ -# Dell BiosVerification.py Live Response API Script - -## References - -Dell Trusted Device Product Information: https://www.delltechnologies.com/endpointsecurity - -Dell Trusted Device Installation Instructions: https://www.dell.com/support/manuals/us/en/04/trusted-device/trusted_device/installation?guid=guid-b9217d4f-6932-47d2-8db5-50633eb47691&lang=en-us - -Troubleshooting: https://www.dell.com/support/manuals/us/en/04/trusted-device/trusted_device/results-troubleshooting-and-remediation?guid=guid-240f1964-167a-41b0-9fb3-687dddbdb71f&lang=en-us - - - -## Summary - -This set of tools uses the VMware Carbon Black Security Cloud Live Response APIs to retrieve -artifacts generated by the Dell Trusted Device SafeBIOS verification service. The Dell Trusted -Device agent saves BIOS image files to the filesystem when a verification failure event is -detected. - -Incident responders can use this set of scripts to retrieve the BIOS image files for forensic -analysis. - - -## Instructions - -Usage: - -To retrieve the BIOS image files from a device in a failed verification state via the Live Response API: - - -1. Copy the BiosVerification.py and dellbios.bat files to the same directory on the administrator system. -2. Install the cbapi Python bindings: https://github.com/carbonblack/cbapi-python -3. Create a Live Response API key https://developer.carbonblack.com/reference/carbon-black-cloud/authentication/ -4. Configure credentials on the administrator system: https://cbapi.readthedocs.io/en/latest/getting-started.html -5. Run the provided BiosVerification.py utility with the following command line to target the failed system: -``` -BiosVerification.py --get --machinename -``` - -If failed BIOS image files are found the script will retrieve the image files to the local administrator system in a compressed archive named -``` --BiosImages.zip -``` - -## Example - -``` -$ ./BiosVerification.py --get --machinename "x\LT-7400" - -[ * ] Establishing LiveResponse Session with Remote Host: - - Hostname: x\LT-7400 - - OS Version: Windows 10 x64 - - Sensor Version: 3.6.0.1201 - - AntiVirus Status: ['AV_ACTIVE', 'ONDEMAND_SCAN_DISABLED'] - - Internal IP Address: 172.16.0.196 - - External IP Address: x.x.x.x - -[ * ] Uploading scripts to the remote host -[ * ] Getting the images - - -c:\program files\confer\temp>mkdir c:\tmpbios -A subdirectory or file c:\tmpbios already exists. - -c:\program files\confer\temp>del BiosImages.zip -Could Not Find c:\program files\confer\temp\BiosImages.zip - -c:\program files\confer\temp>"C:\Program Files\Dell\BiosVerification\Dell.TrustedDevice.Service.Console.exe" -exportall -export c:\tmpbios -Wrote image to c:\tmpbios\BIOSImageCaptureBVS06092020_120255.bv - -c:\program files\confer\temp>powershell.exe -ExecutionPolicy Bypass Compress-Archive -Path c:\tmpbios\*.* -DestinationPath BiosImages.zip -Force - -[ * ] Removing scripts -[ * ] Downloading images -[ * ] Writing out x\LT-7400-BiosImages.zip - -[ * ] Cleaning up - -``` - - -This script is compatible with the full VMware Carbon Black Cloud API and requires the python cbapi. 
\ No newline at end of file diff --git a/examples/DEPRECATED_defense/cblr/DellBiosVerification/dellbios.bat b/examples/DEPRECATED_defense/cblr/DellBiosVerification/dellbios.bat deleted file mode 100644 index b4d8e57a..00000000 --- a/examples/DEPRECATED_defense/cblr/DellBiosVerification/dellbios.bat +++ /dev/null @@ -1,4 +0,0 @@ -mkdir c:\tmpbios -del BiosImages.zip -"C:\Program Files\Dell\BiosVerification\Dell.TrustedDevice.Service.Console.exe" -exportall -export c:\tmpbios -powershell.exe -ExecutionPolicy Bypass Compress-Archive -Path c:\tmpbios\*.* -DestinationPath BiosImages.zip -Force \ No newline at end of file diff --git a/examples/DEPRECATED_defense/cblr/examplejob.py b/examples/DEPRECATED_defense/cblr/examplejob.py deleted file mode 100755 index b5bf0eec..00000000 --- a/examples/DEPRECATED_defense/cblr/examplejob.py +++ /dev/null @@ -1,10 +0,0 @@ -class GetFileJob(object): - def __init__(self, file_name): - self._file_name = file_name - - def run(self, session): - return session.get_file(self._file_name) - - -def getjob(): - return GetFileJob("c:\\test.txt") diff --git a/examples/DEPRECATED_defense/cblr/jobrunner.py b/examples/DEPRECATED_defense/cblr/jobrunner.py deleted file mode 100755 index 6ec0c062..00000000 --- a/examples/DEPRECATED_defense/cblr/jobrunner.py +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/env python - -from cbapi.defense import Device -from cbapi.example_helpers import build_cli_parser, get_cb_defense_object -from concurrent.futures import as_completed -import sys -from datetime import datetime, timedelta - - -def main(): - parser = build_cli_parser() - parser.add_argument("--job", action="store", default="examplejob", required=True) - - args = parser.parse_args() - - cb = get_cb_defense_object(args) - - sensor_query = cb.select(Device) - - # Retrieve the list of sensors that are online - # calculate based on sensors that have checked in during the last five minutes - now = datetime.utcnow() - delta = timedelta(minutes=5) - - online_sensors = [] - offline_sensors = [] - for sensor in sensor_query: - if now - sensor.lastContact < delta: - online_sensors.append(sensor) - else: - offline_sensors.append(sensor) - - print("The following sensors are offline and will not be queried:") - for sensor in offline_sensors: - print(" {0}: {1}".format(sensor.deviceId, sensor.name)) - - print("The following sensors are online and WILL be queried:") - for sensor in online_sensors: - print(" {0}: {1}".format(sensor.deviceId, sensor.name)) - - # import our job object from the jobfile - job = __import__(args.job) - jobobject = job.getjob() - - completed_sensors = [] - futures = {} - - # collect 'future' objects for all jobs - for sensor in online_sensors: - f = cb.live_response.submit_job(jobobject.run, sensor) - futures[f] = sensor.deviceId - - # iterate over all the futures - for f in as_completed(futures.keys(), timeout=100): - if f.exception() is None: - print("Sensor {0} had result:".format(futures[f])) - print(f.result()) - completed_sensors.append(futures[f]) - else: - print("Sensor {0} had error:".format(futures[f])) - print(f.exception()) - - still_to_do = set([s.deviceId for s in online_sensors]) - set(completed_sensors) - print("The following sensors were attempted but not completed or errored out:") - for sensor in still_to_do: - print(" {0}".format(still_to_do)) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/examples/DEPRECATED_defense/cblr_cli.py b/examples/DEPRECATED_defense/cblr_cli.py deleted file mode 100644 index 5e2594e7..00000000 --- 
a/examples/DEPRECATED_defense/cblr_cli.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -import sys - -import logging - -from cbapi.example_helpers import build_cli_parser, get_cb_defense_object, CblrCli -from cbapi.psc.defense import Device - -log = logging.getLogger(__name__) - - -def connect_callback(cb, line): - try: - sensor_id = int(line) - except ValueError: - sensor_id = None - - if not sensor_id: - q = cb.select(Device).where("hostNameExact:{0}".format(line)) - sensor = q.first() - else: - sensor = cb.select(Device, sensor_id) - - return sensor - - -def main(): - parser = build_cli_parser("Cb Defense Live Response CLI") - parser.add_argument("--log", help="Log activity to a file", default='') - args = parser.parse_args() - cb = get_cb_defense_object(args) - - if args.log: - file_handler = logging.FileHandler(args.log) - file_handler.setLevel(logging.DEBUG) - log.addHandler(file_handler) - - cli = CblrCli(cb, connect_callback) - cli.cmdloop() - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_defense/event_export.py b/examples/DEPRECATED_defense/event_export.py deleted file mode 100644 index 55053b0a..00000000 --- a/examples/DEPRECATED_defense/event_export.py +++ /dev/null @@ -1,76 +0,0 @@ -""" -Event Export Tool - -usage: event_export.py [-h] [--appName APPNAME] startTime endTime fileName -""" - -import requests -import argparse -import json -from datetime import datetime, timedelta - - -parser = argparse.ArgumentParser() -parser.add_argument("startTime", help="Start Time (2020-08-04T00:00:00.000Z)") -parser.add_argument("endTime", help="End Time (2020-08-05T00:00:00.000Z)") -parser.add_argument("fileName", help="The name of the json file ie. events.json") -parser.add_argument("--appName", "-a", help="The app name to limit events") -args = parser.parse_args() - -with open(args.fileName, "a") as file: - - hostname = "!!REPLACE WITH HOSTNAME!!" 
- - url_with_app = '{}/integrationServices/v3/event?startTime={}&endTime={}&applicationName={}&rows=10000' - url_without_app = '{}/integrationServices/v3/event?startTime={}&endTime={}&rows=10000' - - headers = {'x-auth-token': '!!REPLACE WITH API SECRET KEY!!/!!REPLACE WITH API ID!!'} # key/id - - orig_end = datetime.strptime(args.endTime, '%Y-%m-%dT%H:%M:%S.%fZ') - orig_start = datetime.strptime(args.startTime, '%Y-%m-%dT%H:%M:%S.%fZ') - start = orig_end - timedelta(days=1) - end = orig_end - triggerEnd = False - file.write('[') - - while True: - print("Next End Event Time: {}".format(end.strftime('%Y-%m-%dT%H:%M:%S.%fZ'))) - if start == orig_start: - triggerEnd = True - - if args.appName: - resp = requests.get(url_with_app.format(hostname, - start.strftime('%Y-%m-%dT%H:%M:%S.%fZ'), - end.strftime('%Y-%m-%dT%H:%M:%S.%fZ'), - args.appName), headers=headers) - else: - resp = requests.get(url_without_app.format(hostname, - start.strftime('%Y-%m-%dT%H:%M:%S.%fZ'), - end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')), headers=headers) - - resp_json = resp.json() - if resp_json["success"]: - results = resp_json['results'] - - end = datetime.fromtimestamp(((results[-1]["eventTime"] + 1) / 1000)) - start = end - timedelta(days=1) - - if start < orig_start: - start = orig_start - - file.write(json.dumps(results)[1:-2]) - - if resp_json["totalResults"] >= 10000: - triggerEnd = False - elif triggerEnd or end < start: - print("Events have been exported") - file.write(']') - break - file.write(',') - - else: - breakpoint() - print("API Call Failed!") - print(resp.content) - break - file.close() diff --git a/examples/DEPRECATED_defense/list_devices.py b/examples/DEPRECATED_defense/list_devices.py deleted file mode 100644 index 1c8a5406..00000000 --- a/examples/DEPRECATED_defense/list_devices.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python - -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_defense_object -from cbapi.psc.defense import Device - - -def main(): - parser = build_cli_parser("List devices") - device_options = parser.add_mutually_exclusive_group(required=False) - device_options.add_argument("-i", "--id", type=int, help="Device ID of sensor") - device_options.add_argument("-n", "--hostname", help="Hostname") - - args = parser.parse_args() - cb = get_cb_defense_object(args) - - if args.id: - devices = [cb.select(Device, args.id)] - elif args.hostname: - devices = list(cb.select(Device).where("hostNameExact:{0}".format(args.hostname))) - else: - devices = list(cb.select(Device)) - - print("{0:9} {1:40}{2:18}{3}".format("ID", "Hostname", "IP Address", "Last Checkin Time")) - for device in devices: - print("{0:9} {1:40s}{2:18s}{3}".format(device.deviceId, device.name or "None", - device.lastInternalIpAddress or "Unknown", device.lastContact)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_defense/list_events.py b/examples/DEPRECATED_defense/list_events.py deleted file mode 100644 index 882650c0..00000000 --- a/examples/DEPRECATED_defense/list_events.py +++ /dev/null @@ -1,71 +0,0 @@ -#!/usr/bin/env python -# Example of using cbapi to get event data -# usage: -# python list_events.py --hostname --start --end - - -# Notes on this script: -# - script can only pull up to 2 weeks of events at one time ( this is an API limitation) -# - if no data exists between the start and end range, the script will pull no data. 
-# - script can be run with no arguments, it will return events from all endpoints for the past 2 weeks - -import sys -import re -from datetime import datetime -from cbapi.psc.defense.models import Event -from cbapi.example_helpers import build_cli_parser, get_cb_defense_object - - -# Function to format epoch time -def convert_time(epoch_time): - converted_time = datetime.fromtimestamp(int(epoch_time / 1000.0)).strftime(' %b %d %Y %H:%M:%S') - return converted_time - - -# Function to strip HTML from a string. -def strip_html(string): - p = re.compile(r'<.*?>') - return p.sub('', string) - - -def main(): - parser = build_cli_parser("List Events for a device") - event_options = parser.add_mutually_exclusive_group(required=False) - event_date_options = parser.add_argument_group("Date Range Arguments") - event_date_options.add_argument("--start", help="start time") - event_date_options.add_argument("--end", help="end time") - event_options.add_argument("-n", "--hostname", help="Hostname") - - args = parser.parse_args() - cb = get_cb_defense_object(args) - - if args.hostname: - events = list(cb.select(Event).where("hostNameExact:{0}".format(args.hostname))) - elif args.start and args.end: - # flipped the start and end arguments around so script can be called with the start date being - # the earliest date. it's just easier on the eyes for most folks. - - events = list(cb.select(Event).where("startTime:{0}".format(args.end))) and ( - cb.select(Event).where("endTime:{0}".format(args.start))) - else: - events = list(cb.select(Event)) - - for event in events: - # convert event and create times - event_time = str(convert_time(event.createTime)) - create_time = str(convert_time(event.eventTime)) - - # stripping HTML tags out of the long description - long_description = strip_html(event.longDescription) - - # format and print out the event time, Event ID, Creation time, Event type and Description - print("{0:^25}{1:^25}{2:^32}{3}".format("Event Time", "Event ID", "Create Time", "Event Type")) - print("{0} | {1} | {2} | {3}".format(event_time, event.eventId, create_time, event.eventType)) - print("{0:50}".format(" ")) - print("{0} {1}".format("Description: ", long_description)) - print("{0:50}".format("------------------------------------")) - print("{0:50}".format(" ")) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_defense/list_events_with_cmdline_csv.py b/examples/DEPRECATED_defense/list_events_with_cmdline_csv.py deleted file mode 100644 index ab0f13fa..00000000 --- a/examples/DEPRECATED_defense/list_events_with_cmdline_csv.py +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -# Example of using cbapi to get event data -# usage: -# python list_events.py --hostname --start --end - - -# Notes on this script: -# - based on https://github.com/carbonblack/cbapi-python/blob/master/examples/defense/list_events.py -# with 2 primary changes -# 1. this script outputs the command line of the main process process -# 2. this script places a '|' delimiter between fields so it can be read into a spreadsheet -# - can only pull up to 2 weeks of events at one time ( this is an API limitation) -# - if no data exists between the start and end range, the script will pull no data. 
-# - script can be run with no arguments, it will return events from all endpoints for the past 2 weeks - -import sys -import re -import unicodedata -from datetime import datetime -from cbapi.psc.defense.models import Event -from cbapi.example_helpers import build_cli_parser, get_cb_defense_object - - -# Function to format epoch time -def convert_time(epoch_time): - converted_time = datetime.fromtimestamp(int(epoch_time / 1000.0)).strftime(' %b %d %Y %H:%M:%S') - return converted_time - - -# Function to strip HTML from a string. -def strip_html(string): - p = re.compile(r'<.*?>') - return p.sub('', string) - - -def main(): - parser = build_cli_parser("List Events for a device") - event_options = parser.add_mutually_exclusive_group(required=False) - event_date_options = parser.add_argument_group("Date Range Arguments") - event_date_options.add_argument("--start", help="start time") - event_date_options.add_argument("--end", help="end time") - event_options.add_argument("-n", "--hostname", help="Hostname") - - args = parser.parse_args() - cb = get_cb_defense_object(args) - - if args.hostname: - events = list(cb.select(Event).where("hostNameExact:{0}".format(args.hostname))) - elif args.start and args.end: - # flipped the start and end arguments around so script can be called with the start date - # being the earliest date. it's just easier on the eyes for most folks. - - events = list(cb.select(Event).where("startTime:{0}".format(args.end))) and ( - cb.select(Event).where("endTime:{0}".format(args.start))) - else: - events = list(cb.select(Event)) - - # print the column headers - print("Event Time|Event ID|Create Time|Event Type|Description|Command Line") - - for event in events: - # convert event and create times - event_time = str(convert_time(event.createTime)) - create_time = str(convert_time(event.eventTime)) - - # stripping HTML tags out of the long description - long_description = unicodedata.normalize('NFD', strip_html(event.longDescription)) - - if event.processDetails: - # stripping out the command line arguments from the processDetails field - processDetails = str(event.processDetails) - start_cmdline = processDetails.find("u'commandLine'") - end_cmdline = processDetails.find(", u'parentName'") - commandline = processDetails[start_cmdline + 18: end_cmdline - 1] - print("{0}|{1}|{2}|{3}|{4}|{5}".format(event_time, event.eventId, create_time, event.eventType, - long_description, commandline)) - else: - print("{0}|{1}|{2}|{3}|{4}".format(event_time, event.eventId, create_time, event.eventType, - long_description)) - # format and print out the event time, Event ID, Creation time, Event type and Description - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_defense/move_device.py b/examples/DEPRECATED_defense/move_device.py deleted file mode 100644 index f28289bb..00000000 --- a/examples/DEPRECATED_defense/move_device.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python - -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_defense_object -from cbapi.psc.defense import Device - - -def main(): - parser = build_cli_parser("Move a device into a new security policy") - device_options = parser.add_mutually_exclusive_group(required=True) - device_options.add_argument("-i", "--id", type=int, help="Device ID of sensor to move") - device_options.add_argument("-n", "--hostname", help="Hostname to move") - - policy_options = parser.add_mutually_exclusive_group(required=True) - policy_options.add_argument("--policyid", type=int, 
help="Policy ID") - policy_options.add_argument("--policyname", help="Policy name") - - args = parser.parse_args() - cb = get_cb_defense_object(args) - - if args.id: - devices = [cb.select(Device, args.id)] - else: - devices = list(cb.select(Device).where("hostNameExact:{0}".format(args.hostname))) - - for device in devices: - if args.policyid: - destpolicy = int(args.policyid) - device.policyId = int(args.policyid) - else: - destpolicy = args.policyname - device.policyName = args.policyname - - device.save() - print("Moved device id {0} (hostname {1}) into policy {2}".format(device.deviceId, device.name, destpolicy)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_defense/notifications.py b/examples/DEPRECATED_defense/notifications.py deleted file mode 100644 index e414b05f..00000000 --- a/examples/DEPRECATED_defense/notifications.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -import sys -from cbapi.example_helpers import build_cli_parser, get_cb_defense_object -import json - - -def main(): - parser = build_cli_parser("Listen to real-time notifications") - parser.add_argument("-s", type=int, help="# of seconds to sleep between polls", default=30) - - args = parser.parse_args() - cb = get_cb_defense_object(args) - - while True: - for notification in cb.notification_listener(args.s): - print(json.dumps(notification, indent=2)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_defense/policy_operations.py b/examples/DEPRECATED_defense/policy_operations.py deleted file mode 100644 index 6f5b2199..00000000 --- a/examples/DEPRECATED_defense/policy_operations.py +++ /dev/null @@ -1,206 +0,0 @@ -#!/usr/bin/env python -# - -import sys - -import json -import logging - -from cbapi.errors import ServerError -from cbapi.example_helpers import build_cli_parser, get_cb_defense_object -from cbapi.psc.defense import Policy - -log = logging.getLogger(__name__) - - -def get_policy_by_name_or_id(cb, policy_id=None, name=None, return_all_if_none=False): - policies = [] - - try: - if policy_id: - if isinstance(policy_id, list): - attempted_to_find = "IDs of {0}".format(", ".join([str(pid) for pid in policy_id])) - policies = [p for p in cb.select(Policy) if p.id in policy_id] - else: - attempted_to_find = "ID of {0}".format(policy_id) - policies = [cb.select(Policy, policy_id, force_init=True)] - elif name: - if isinstance(name, list): - attempted_to_find = "names of {0}".format(", ".join(name)) - policies = [p for p in cb.select(Policy) if p.name in name] - else: - attempted_to_find = "name {0}".format(name) - policies = [p for p in cb.select(Policy) if p.name == name] - elif return_all_if_none: - attempted_to_find = "all policies" - policies = list(cb.select(Policy)) - except Exception as e: - print("Could not find policy with {0}: {1}".format(attempted_to_find, str(e))) - - return policies - - -def list_policies(cb, parser, args): - for p in cb.select(Policy): - print(u"Policy id {0}: {1} {2}".format(p.id, p.name, "({0})".format(p.description) if p.description else "")) - print("Rules:") - for r in p.rules.values(): - print(" {0}: {1} when {2} {3} is {4}".format(r.get('id'), r.get("action"), - r.get("application", {}).get("type"), - r.get("application", {}).get("value"), r.get("operation"))) - - -def import_policy(cb, parser, args): - p = cb.create(Policy) - - p.policy = json.load(open(args.policyfile, "r")) - p.description = args.description - p.name = args.name - p.priorityLevel = args.prioritylevel - p.version = 2 - - try: 
- p.save() - except ServerError as se: - print("Could not add policy: {0}".format(str(se))) - except Exception as e: - print("Could not add policy: {0}".format(str(e))) - else: - print("Added policy. New policy ID is {0}".format(p.id)) - - -def delete_policy(cb, parser, args): - policies = get_policy_by_name_or_id(cb, args.id, args.name) - if len(policies) == 0: - return - - num_matching_policies = len(policies) - if num_matching_policies > 1 and not args.force: - print("{0:d} policies match and --force not specified. No action taken.".format(num_matching_policies)) - return - - for p in policies: - try: - p.delete() - except Exception as e: - print("Could not delete policy: {0}".format(str(e))) - else: - print("Deleted policy id {0} with name {1}".format(p.id, p.name)) - - -def export_policy(cb, parser, args): - policies = get_policy_by_name_or_id(cb, args.id, args.name, return_all_if_none=True) - - for p in policies: - json.dump(p.policy, open("policy-{0}.json".format(p.id), "w"), indent=2) - print("Wrote policy {0} {1} to file policy-{0}.json".format(p.id, p.name)) - - -def add_rule(cb, parser, args): - policies = get_policy_by_name_or_id(cb, args.id, args.name) - - num_matching_policies = len(policies) - if num_matching_policies < 1: - print("No policies match. No action taken.".format(num_matching_policies)) - - for policy in policies: - policy.add_rule(json.load(open(args.rulefile, "r"))) - print("Added rule from {0} to policy {1}.".format(args.rulefile, policy.name)) - - -def del_rule(cb, parser, args): - policies = get_policy_by_name_or_id(cb, args.id, args.name) - - num_matching_policies = len(policies) - if num_matching_policies != 1: - print("{0:d} policies match. No action taken.".format(num_matching_policies)) - - policy = policies[0] - policy.delete_rule(args.ruleid) - - print("Removed rule id {0} from policy {1}.".format(args.ruleid, policy.name)) - - -def replace_rule(cb, parser, args): - policies = get_policy_by_name_or_id(cb, args.id, args.name) - - num_matching_policies = len(policies) - if num_matching_policies != 1: - print("{0:d} policies match. 
No action taken.".format(num_matching_policies)) - - policy = policies[0] - policy.replace_rule(args.ruleid, json.load(open(args.rulefile, "r"))) - - print("Replaced rule id {0} from policy {1} with rule from file {2}.".format(args.ruleid, policy.name, - args.rulefile)) - - -def main(): - parser = build_cli_parser("Policy operations") - commands = parser.add_subparsers(help="Policy commands", dest="command_name") - - commands.add_parser("list", help="List all configured policies") - - import_policy_command = commands.add_parser("import", help="Import policy from JSON file") - import_policy_command.add_argument("-N", "--name", help="Name of new policy", required=True) - import_policy_command.add_argument("-d", "--description", help="Description of new policy", required=True) - import_policy_command.add_argument("-p", "--prioritylevel", help="Priority level (HIGH, MEDIUM, LOW)", - default="LOW") - import_policy_command.add_argument("-f", "--policyfile", help="Filename containing the JSON policy description", - required=True) - - export_policy_command = commands.add_parser("export", help="Export policy to JSON file") - export_policy_specifier = export_policy_command.add_mutually_exclusive_group(required=False) - export_policy_specifier.add_argument("-i", "--id", type=int, help="ID of policy") - export_policy_specifier.add_argument("-N", "--name", help="Name of policy") - - del_command = commands.add_parser("delete", help="Delete policies") - del_policy_specifier = del_command.add_mutually_exclusive_group(required=True) - del_policy_specifier.add_argument("-i", "--id", type=int, help="ID of policy to delete") - del_policy_specifier.add_argument("-N", "--name", help="Name of policy to delete. Specify --force to delete" - " multiple policies that have the same name") - del_command.add_argument("--force", help="If NAME matches multiple policies, delete all matching policies", - action="store_true", default=False) - - add_rule_command = commands.add_parser("add-rule", help="Add rule to existing policy from JSON rule file") - add_rule_specifier = add_rule_command.add_mutually_exclusive_group(required=True) - add_rule_specifier.add_argument("-i", "--id", type=int, help="ID of policy (can specify multiple)", - action="append", metavar="POLICYID") - add_rule_specifier.add_argument("-N", "--name", help="Name of policy (can specify multiple)", - action="append", metavar="POLICYNAME") - add_rule_command.add_argument("-f", "--rulefile", help="Filename containing the JSON rule", required=True) - - del_rule_command = commands.add_parser("del-rule", help="Delete rule from existing policy") - del_rule_specifier = del_rule_command.add_mutually_exclusive_group(required=True) - del_rule_specifier.add_argument("-i", "--id", type=int, help="ID of policy") - del_rule_specifier.add_argument("-N", "--name", help="Name of policy") - del_rule_command.add_argument("-r", "--ruleid", type=int, help="ID of rule", required=True) - - replace_rule_command = commands.add_parser("replace-rule", help="Replace existing rule with a new one") - replace_rule_specifier = replace_rule_command.add_mutually_exclusive_group(required=True) - replace_rule_specifier.add_argument("-i", "--id", type=int, help="ID of policy") - replace_rule_specifier.add_argument("-N", "--name", help="Name of policy") - replace_rule_command.add_argument("-r", "--ruleid", type=int, help="ID of rule", required=True) - replace_rule_command.add_argument("-f", "--rulefile", help="Filename containing the JSON rule", required=True) - - args = parser.parse_args() - cb 
= get_cb_defense_object(args) - - if args.command_name == "list": - return list_policies(cb, parser, args) - elif args.command_name == "import": - return import_policy(cb, parser, args) - elif args.command_name == "export": - return export_policy(cb, parser, args) - elif args.command_name == "delete": - return delete_policy(cb, parser, args) - elif args.command_name == "add-rule": - return add_rule(cb, parser, args) - elif args.command_name == "del-rule": - return del_rule(cb, parser, args) - elif args.command_name == "replace-rule": - return replace_rule(cb, parser, args) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/alert_search_suggestions.py b/examples/DEPRECATED_psc/alert_search_suggestions.py deleted file mode 100755 index cd871e19..00000000 --- a/examples/DEPRECATED_psc/alert_search_suggestions.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python - -import sys -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object - - -def main(): - parser = build_cli_parser("Get suggestions for searching alerts") - parser.add_argument("-q", "--query", default="", help="Query string for looking for alerts") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - suggestions = cb.alert_search_suggestions(args.query) - for suggestion in suggestions: - print("Search term: '{0}'".format(suggestion["term"])) - print("\tWeight: {0}".format(suggestion["weight"])) - print("\tAvailable with products: {0}".format(", ".join(suggestion["required_skus_some"]))) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/bulk_update_alerts.py b/examples/DEPRECATED_psc/bulk_update_alerts.py deleted file mode 100755 index 2c3ec81c..00000000 --- a/examples/DEPRECATED_psc/bulk_update_alerts.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python - -import sys -from time import sleep -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc.models import BaseAlert, WorkflowStatus -from helpers.alertsv6 import setup_parser_with_basic_criteria, load_basic_criteria - - -def main(): - parser = build_cli_parser("Bulk update the status of alerts") - setup_parser_with_basic_criteria(parser) - parser.add_argument("-R", "--remediation", help="Remediation message to store for the selected alerts") - parser.add_argument("-C", "--comment", help="Comment message to store for the selected alerts") - operation = parser.add_mutually_exclusive_group(required=True) - operation.add_argument("--dismiss", action="store_true", help="Dismiss all selected alerts") - operation.add_argument("--undismiss", action="store_true", help="Undismiss all selected alerts") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(BaseAlert) - load_basic_criteria(query, args) - - if args.dismiss: - reqid = query.dismiss(args.remediation, args.comment) - elif args.undismiss: - reqid = query.update(args.remediation, args.comment) - else: - raise NotImplementedError("one of --dismiss or --undismiss must be specified") - - print("Submitted query with ID {0}".format(reqid)) - statobj = cb.select(WorkflowStatus, reqid) - while not statobj.finished: - print("Waiting...") - sleep(1) - if statobj.errors: - print("Errors encountered:") - for err in statobj.errors: - print("\t{0}".format(err)) - if statobj.failed_ids: - print("Failed alert IDs:") - for i in statobj.failed_ids: - print("\t{0}".format(err)) - print("{0} total alert(s) found, of which {1} were successfully changed" - .format(statobj.num_hits, 
statobj.num_success)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/bulk_update_cbanalytics_alerts.py b/examples/DEPRECATED_psc/bulk_update_cbanalytics_alerts.py deleted file mode 100755 index 147558c5..00000000 --- a/examples/DEPRECATED_psc/bulk_update_cbanalytics_alerts.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python - -import sys -from time import sleep -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc.models import CBAnalyticsAlert, WorkflowStatus -from helpers.alertsv6 import setup_parser_with_cbanalytics_criteria, load_cbanalytics_criteria - - -def main(): - parser = build_cli_parser("Bulk update the status of CB Analytics alerts") - setup_parser_with_cbanalytics_criteria(parser) - parser.add_argument("-R", "--remediation", help="Remediation message to store for the selected alerts") - parser.add_argument("-C", "--comment", help="Comment message to store for the selected alerts") - operation = parser.add_mutually_exclusive_group(required=True) - operation.add_argument("--dismiss", action="store_true", help="Dismiss all selected alerts") - operation.add_argument("--undismiss", action="store_true", help="Undismiss all selected alerts") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(CBAnalyticsAlert) - load_cbanalytics_criteria(query, args) - - if args.dismiss: - reqid = query.dismiss(args.remediation, args.comment) - elif args.undismiss: - reqid = query.update(args.remediation, args.comment) - else: - raise NotImplementedError("one of --dismiss or --undismiss must be specified") - - print("Submitted query with ID {0}".format(reqid)) - statobj = cb.select(WorkflowStatus, reqid) - while not statobj.finished: - print("Waiting...") - sleep(1) - if statobj.errors: - print("Errors encountered:") - for err in statobj.errors: - print("\t{0}".format(err)) - if statobj.failed_ids: - print("Failed alert IDs:") - for i in statobj.failed_ids: - print("\t{0}".format(err)) - print("{0} total alert(s) found, of which {1} were successfully changed" - .format(statobj.num_hits, statobj.num_success)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/bulk_update_threat_alerts.py b/examples/DEPRECATED_psc/bulk_update_threat_alerts.py deleted file mode 100755 index b3922390..00000000 --- a/examples/DEPRECATED_psc/bulk_update_threat_alerts.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python - -import sys -from time import sleep -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc.models import WorkflowStatus - - -def main(): - parser = build_cli_parser("Bulk update the status of alerts by threat ID") - parser.add_argument("-T", "--threatid", action="append", type=str, required=True, - help="Threat IDs to update the alerts for") - parser.add_argument("-R", "--remediation", help="Remediation message to store for the selected alerts") - parser.add_argument("-C", "--comment", help="Comment message to store for the selected alerts") - operation = parser.add_mutually_exclusive_group(required=True) - operation.add_argument("--dismiss", action="store_true", help="Dismiss all selected alerts") - operation.add_argument("--undismiss", action="store_true", help="Undismiss all selected alerts") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - if args.dismiss: - reqid = cb.bulk_threat_dismiss(args.threatid, args.remediation, args.comment) - elif args.undismiss: - reqid = cb.bulk_threat_update(args.threatid, 
args.remediation, args.comment) - else: - raise NotImplementedError("one of --dismiss or --undismiss must be specified") - - print("Submitted query with ID {0}".format(reqid)) - statobj = cb.select(WorkflowStatus, reqid) - while not statobj.finished: - print("Waiting...") - sleep(1) - if statobj.errors: - print("Errors encountered:") - for err in statobj.errors: - print("\t{0}".format(err)) - if statobj.failed_ids: - print("Failed alert IDs:") - for i in statobj.failed_ids: - print("\t{0}".format(err)) - print("{0} total alert(s) found, of which {1} were successfully changed" - .format(statobj.num_hits, statobj.num_success)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/bulk_update_vmware_alerts.py b/examples/DEPRECATED_psc/bulk_update_vmware_alerts.py deleted file mode 100755 index a1cb0b58..00000000 --- a/examples/DEPRECATED_psc/bulk_update_vmware_alerts.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python - -import sys -from time import sleep -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc.models import VMwareAlert, WorkflowStatus -from helpers.alertsv6 import setup_parser_with_vmware_criteria, load_vmware_criteria - - -def main(): - parser = build_cli_parser("Bulk update the status of VMware alerts") - setup_parser_with_vmware_criteria(parser) - parser.add_argument("-R", "--remediation", help="Remediation message to store for the selected alerts") - parser.add_argument("-C", "--comment", help="Comment message to store for the selected alerts") - operation = parser.add_mutually_exclusive_group(required=True) - operation.add_argument("--dismiss", action="store_true", help="Dismiss all selected alerts") - operation.add_argument("--undismiss", action="store_true", help="Undismiss all selected alerts") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(VMwareAlert) - load_vmware_criteria(query, args) - - if args.dismiss: - reqid = query.dismiss(args.remediation, args.comment) - elif args.undismiss: - reqid = query.update(args.remediation, args.comment) - else: - raise NotImplementedError("one of --dismiss or --undismiss must be specified") - - print("Submitted query with ID {0}".format(reqid)) - statobj = cb.select(WorkflowStatus, reqid) - while not statobj.finished: - print("Waiting...") - sleep(1) - if statobj.errors: - print("Errors encountered:") - for err in statobj.errors: - print("\t{0}".format(err)) - if statobj.failed_ids: - print("Failed alert IDs:") - for i in statobj.failed_ids: - print("\t{0}".format(err)) - print("{0} total alert(s) found, of which {1} were successfully changed" - .format(statobj.num_hits, statobj.num_success)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/bulk_update_watchlist_alerts.py b/examples/DEPRECATED_psc/bulk_update_watchlist_alerts.py deleted file mode 100755 index bef036c5..00000000 --- a/examples/DEPRECATED_psc/bulk_update_watchlist_alerts.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python - -import sys -from time import sleep -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc.models import WatchlistAlert, WorkflowStatus -from helpers.alertsv6 import setup_parser_with_watchlist_criteria, load_watchlist_criteria - - -def main(): - parser = build_cli_parser("Bulk update the status of watchlist alerts") - setup_parser_with_watchlist_criteria(parser) - parser.add_argument("-R", "--remediation", help="Remediation message to store for the selected alerts") 
- parser.add_argument("-C", "--comment", help="Comment message to store for the selected alerts") - operation = parser.add_mutually_exclusive_group(required=True) - operation.add_argument("--dismiss", action="store_true", help="Dismiss all selected alerts") - operation.add_argument("--undismiss", action="store_true", help="Undismiss all selected alerts") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(WatchlistAlert) - load_watchlist_criteria(query, args) - - if args.dismiss: - reqid = query.dismiss(args.remediation, args.comment) - elif args.undismiss: - reqid = query.update(args.remediation, args.comment) - else: - raise NotImplementedError("one of --dismiss or --undismiss must be specified") - - print("Submitted query with ID {0}".format(reqid)) - statobj = cb.select(WorkflowStatus, reqid) - while not statobj.finished: - print("Waiting...") - sleep(1) - if statobj.errors: - print("Errors encountered:") - for err in statobj.errors: - print("\t{0}".format(err)) - if statobj.failed_ids: - print("Failed alert IDs:") - for i in statobj.failed_ids: - print("\t{0}".format(err)) - print("{0} total alert(s) found, of which {1} were successfully changed" - .format(statobj.num_hits, statobj.num_success)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/device_control.py b/examples/DEPRECATED_psc/device_control.py deleted file mode 100755 index bb07228a..00000000 --- a/examples/DEPRECATED_psc/device_control.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python - -import sys -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc import Device - - -def toggle_value(args): - if args.on: - return True - if args.off: - return False - raise Exception("Unknown toggle value") - - -def main(): - parser = build_cli_parser("Send control messages to device") - parser.add_argument("-d", "--device_id", type=int, required=True, help="The ID of the device to be controlled") - subparsers = parser.add_subparsers(dest="command", help="Device command help") - - bgscan_p = subparsers.add_parser("background_scan", help="Set background scanning status") - toggle = bgscan_p.add_mutually_exclusive_group(required=True) - toggle.add_argument("--on", action="store_true", help="Turn background scanning on") - toggle.add_argument("--off", action="store_true", help="Turn background scanning off") - - bypass_p = subparsers.add_parser("bypass", help="Set bypass mode") - toggle = bypass_p.add_mutually_exclusive_group(required=True) - toggle.add_argument("--on", action="store_true", help="Enable bypass mode") - toggle.add_argument("--off", action="store_true", help="Disable bypass mode") - - subparsers.add_parser("delete", help="Delete sensor") - subparsers.add_parser("uninstall", help="Uninstall sensor") - - quarantine_p = subparsers.add_parser("quarantine", help="Set quarantine mode") - toggle = quarantine_p.add_mutually_exclusive_group(required=True) - toggle.add_argument("--on", action="store_true", help="Enable quarantine mode") - toggle.add_argument("--off", action="store_true", help="Disable quarantine mode") - - policy_p = subparsers.add_parser("policy", help="Update policy for node") - policy_p.add_argument("-p", "--policy_id", type=int, required=True, help="New policy ID to set for node") - - sensorv_p = subparsers.add_parser("sensor_version", help="Update sensor version for node") - sensorv_p.add_argument("-o", "--os", required=True, help="Operating system for sensor") - sensorv_p.add_argument("-V", "--version", 
required=True, help="Version number of sensor") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - dev = cb.select(Device, args.device_id) - - if args.command: - if args.command == "background_scan": - dev.background_scan(toggle_value(args)) - elif args.command == "bypass": - dev.bypass(toggle_value(args)) - elif args.command == "delete": - dev.delete_sensor() - elif args.command == "uninstall": - dev.uninstall_sensor() - elif args.command == "quarantine": - dev.quarantine(toggle_value(args)) - elif args.command == "policy": - dev.update_policy(args.policy_id) - elif args.command == "sensor_version": - dev.update_sensor_version({args.os: args.version}) - else: - raise NotImplementedError("Unknown command") - print("OK") - else: - print(dev) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/download_device_list.py b/examples/DEPRECATED_psc/download_device_list.py deleted file mode 100755 index 25be3a43..00000000 --- a/examples/DEPRECATED_psc/download_device_list.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python - -import sys -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc import Device - -import logging -logging.basicConfig(level=logging.DEBUG) - - -def main(): - parser = build_cli_parser("Download device list in CSV format") - parser.add_argument("-q", "--query", help="Query string for looking for devices") - parser.add_argument("-A", "--ad_group_id", action="append", type=int, help="Active Directory Group ID") - parser.add_argument("-p", "--policy_id", action="append", type=int, help="Policy ID") - parser.add_argument("-s", "--status", action="append", help="Status of device") - parser.add_argument("-P", "--priority", action="append", help="Target priority of device") - parser.add_argument("-S", "--sort_by", help="Field to sort the output by") - parser.add_argument("-R", "--reverse", action="store_true", help="Reverse order of sort") - parser.add_argument("-O", "--output", help="File to save output to (default stdout)") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(Device) - if args.query: - query = query.where(args.query) - if args.ad_group_id: - query = query.set_ad_group_ids(args.ad_group_id) - if args.policy_id: - query = query.set_policy_ids(args.policy_id) - if args.status: - query = query.set_status(args.status) - if args.priority: - query = query.set_target_priorities(args.priority) - if args.sort_by: - direction = "DESC" if args.reverse else "ASC" - query = query.sort_by(args.sort_by, direction) - - data = query.download() - if args.output: - file = open(args.output, "w") - file.write(data) - file.close() - else: - print(data) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/helpers/alertsv6.py b/examples/DEPRECATED_psc/helpers/alertsv6.py deleted file mode 100755 index 3d789669..00000000 --- a/examples/DEPRECATED_psc/helpers/alertsv6.py +++ /dev/null @@ -1,159 +0,0 @@ -def setup_parser_with_basic_criteria(parser): - parser.add_argument("-q", "--query", help="Query string for looking for alerts") - parser.add_argument("--category", action="append", choices=["THREAT", "MONITORED", "INFO", - "MINOR", "SERIOUS", "CRITICAL"], - help="Restrict search to the specified categories") - parser.add_argument("--deviceid", action="append", type=int, help="Restrict search to the specified device IDs") - parser.add_argument("--devicename", action="append", type=str, help="Restrict search to the specified device names") - 
parser.add_argument("--os", action="append", choices=["WINDOWS", "ANDROID", "MAC", "IOS", "LINUX", "OTHER"], - help="Restrict search to the specified device operating systems") - parser.add_argument("--osversion", action="append", type=str, - help="Restrict search to the specified device operating system versions") - parser.add_argument("--username", action="append", type=str, help="Restrict search to the specified user names") - parser.add_argument("--group", action="store_true", help="Group results") - parser.add_argument("--alertid", action="append", type=str, help="Restrict search to the specified alert IDs") - parser.add_argument("--legacyalertid", action="append", type=str, - help="Restrict search to the specified legacy alert IDs") - parser.add_argument("--severity", type=int, help="Restrict search to the specified minimum severity level") - parser.add_argument("--policyid", action="append", type=int, help="Restrict search to the specified policy IDs") - parser.add_argument("--policyname", action="append", type=str, help="Restrict search to the specified policy names") - parser.add_argument("--processname", action="append", type=str, - help="Restrict search to the specified process names") - parser.add_argument("--processhash", action="append", type=str, - help="Restrict search to the specified process SHA-256 hash values") - parser.add_argument("--reputation", action="append", choices=["KNOWN_MALWARE", "SUSPECT_MALWARE", "PUP", - "NOT_LISTED", "ADAPTIVE_WHITE_LIST", - "COMMON_WHITE_LIST", "TRUSTED_WHITE_LIST", - "COMPANY_BLACK_LIST"], - help="Restrict search to the specified reputation values") - parser.add_argument("--tag", action="append", type=str, help="Restrict search to the specified tag values") - parser.add_argument("--priority", action="append", choices=["LOW", "MEDIUM", "HIGH", "MISSION_CRITICAL"], - help="Restrict search to the specified priority values") - parser.add_argument("--threatid", action="append", type=str, help="Restrict search to the specified threat IDs") - parser.add_argument("--type", action="append", choices=["CB_ANALYTICS", "VMWARE", "WATCHLIST"], - help="Restrict search to the specified alert types") - parser.add_argument("--workflow", action="append", choices=["OPEN", "DISMISSED"], - help="Restrict search to the specified workflow statuses") - - -def setup_parser_with_cbanalytics_criteria(parser): - setup_parser_with_basic_criteria(parser) - parser.add_argument("--blockedthreat", action="append", choices=["UNKNOWN", "NON_MALWARE", "NEW_MALWARE", - "KNOWN_MALWARE", "RISKY_PROGRAM"], - help="Restrict search to the specified threat categories that were blocked") - parser.add_argument("--location", action="append", choices=["ONSITE", "OFFSITE", "UNKNOWN"], - help="Restrict search to the specified device locations") - parser.add_argument("--killchain", action="append", choices=["RECONNAISSANCE", "WEAPONIZE", "DELIVER_EXPLOIT", - "INSTALL_RUN", "COMMAND_AND_CONTROL", "EXECUTE_GOAL", - "BREACH"], - help="Restrict search to the specified kill chain status values") - parser.add_argument("--notblockedthreat", action="append", choices=["UNKNOWN", "NON_MALWARE", "NEW_MALWARE", - "KNOWN_MALWARE", "RISKY_PROGRAM"], - help="Restrict search to the specified threat categories that were NOT blocked") - parser.add_argument("--policyapplied", action="append", choices=["APPLIED", "NOT_APPLIED"], - help="Restrict search to the specified policy-application status values") - parser.add_argument("--reason", action="append", type=str, help="Restrict search to the specified 
reason codes") - parser.add_argument("--runstate", action="append", choices=["DID_NOT_RUN", "RAN", "UNKNOWN"], - help="Restrict search to the specified run states") - parser.add_argument("--sensoraction", action="append", choices=["POLICY_NOT_APPLIED", "ALLOW", "ALLOW_AND_LOG", - "TERMINATE", "DENY"], - help="Restrict search to the specified sensor actions") - parser.add_argument("--vector", action="append", choices=["EMAIL", "WEB", "GENERIC_SERVER", "GENERIC_CLIENT", - "REMOTE_DRIVE", "REMOVABLE_MEDIA", "UNKNOWN", - "APP_STORE", "THIRD_PARTY"], - help="Restrict search to the specified threat cause vectors") - - -def setup_parser_with_vmware_criteria(parser): - setup_parser_with_basic_criteria(parser) - parser.add_argument("--groupid", action="append", type=int, - help="Restrict search to the specified AppDefense alarm group IDs") - - -def setup_parser_with_watchlist_criteria(parser): - setup_parser_with_basic_criteria(parser) - parser.add_argument("--watchlistid", action="append", type=str, - help="Restrict search to the specified watchlists by ID") - parser.add_argument("--watchlistname", action="append", type=str, - help="Restrict search to the specified watchlists by name") - - -def load_basic_criteria(query, args): - if args.query: - query = query.where(args.query) - if args.category: - query = query.set_categories(args.category) - if args.deviceid: - query = query.set_device_ids(args.deviceid) - if args.devicename: - query = query.set_device_names(args.devicename) - if args.os: - query = query.set_device_os(args.os) - if args.osversion: - query = query.set_device_os_versions(args.osversion) - if args.username: - query = query.set_device_username(args.username) - if args.group: - query = query.set_group_results(True) - if args.alertid: - query = query.set_alert_ids(args.alertid) - if args.legacyalertid: - query = query.set_legacy_alert_ids(args.legacyalertid) - if args.severity: - query = query.set_minimum_severity(args.severity) - if args.policyid: - query = query.set_policy_ids(args.policyid) - if args.policyname: - query = query.set_policy_names(args.policyname) - if args.processname: - query = query.set_process_names(args.processname) - if args.processhash: - query = query.set_process_sha256(args.processhash) - if args.reputation: - query = query.set_reputations(args.reputation) - if args.tag: - query = query.set_tags(args.tag) - if args.priority: - query = query.set_target_priorities(args.priority) - if args.threatid: - query = query.set_threat_ids(args.threatid) - if args.type: - query = query.set_types(args.type) - if args.workflow: - query = query.set_workflows(args.workflow) - - -def load_cbanalytics_criteria(query, args): - load_basic_criteria(query, args) - if args.blockedthreat: - query = query.set_blocked_threat_categories(args.blockedthreat) - if args.location: - query = query.set_device_locations(args.location) - if args.killchain: - query = query.set_kill_chain_statuses(args.killchain) - if args.notblockedthreat: - query = query.set_not_blocked_threat_categories(args.notblockedthreat) - if args.policyapplied: - query = query.set_policy_applied(args.policyapplied) - if args.reason: - query = query.set_reason_code(args.reason) - if args.runstate: - query = query.set_run_states(args.runstate) - if args.sensoraction: - query = query.set_sensor_actions(args.sensoraction) - if args.vector: - query = query.set_threat_cause_vectors(args.vector) - - -def load_vmware_criteria(query, args): - load_basic_criteria(query, args) - if args.groupid: - query = 
query.set_group_ids(args.groupid) - - -def load_watchlist_criteria(query, args): - load_basic_criteria(query, args) - if args.watchlistid: - query = query.set_watchlist_ids(args.watchlistid) - if args.watchlistname: - query = query.set_watchlist_names(args.watchlistname) diff --git a/examples/DEPRECATED_psc/list_alert_facets.py b/examples/DEPRECATED_psc/list_alert_facets.py deleted file mode 100755 index 957e92da..00000000 --- a/examples/DEPRECATED_psc/list_alert_facets.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -import sys -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc.models import BaseAlert -from helpers.alertsv6 import setup_parser_with_basic_criteria, load_basic_criteria - - -def main(): - parser = build_cli_parser("List alert facets") - setup_parser_with_basic_criteria(parser) - parser.add_argument("-F", "--facet", action="append", choices=["ALERT_TYPE", "CATEGORY", "REPUTATION", "WORKFLOW", - "TAG", "POLICY_ID", "POLICY_NAME", "DEVICE_ID", - "DEVICE_NAME", "APPLICATION_HASH", - "APPLICATION_NAME", "STATUS", "RUN_STATE", - "POLICY_APPLIED_STATE", "POLICY_APPLIED", - "SENSOR_ACTION"], - required=True, help="Retrieve these fields as facet information") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(BaseAlert) - load_basic_criteria(query, args) - - facetinfos = query.facets(args.facet) - for facetinfo in facetinfos: - print("For field '{0}':".format(facetinfo["field"])) - for facetval in facetinfo["values"]: - print("\tValue {0}: {1} occurrences".format(facetval["id"], facetval["total"])) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/list_alerts.py b/examples/DEPRECATED_psc/list_alerts.py deleted file mode 100755 index ff2fd079..00000000 --- a/examples/DEPRECATED_psc/list_alerts.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -import sys -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc.models import BaseAlert -from helpers.alertsv6 import setup_parser_with_basic_criteria, load_basic_criteria - - -def main(): - parser = build_cli_parser("List alerts") - setup_parser_with_basic_criteria(parser) - parser.add_argument("-S", "--sort_by", help="Field to sort the output by") - parser.add_argument("-R", "--reverse", action="store_true", help="Reverse order of sort") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(BaseAlert) - load_basic_criteria(query, args) - if args.sort_by: - direction = "DESC" if args.reverse else "ASC" - query = query.sort_by(args.sort_by, direction) - - alerts = list(query) - print("{0:40} {1:40s} {2:40s} {3}".format("ID", "Hostname", "Threat ID", "Last Updated")) - for alert in alerts: - print("{0:40} {1:40s} {2:40s} {3}".format(alert.id, alert.device_name or "None", - alert.threat_id or "Unknown", - alert.last_update_time)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/list_cbanalytics_alert_facets.py b/examples/DEPRECATED_psc/list_cbanalytics_alert_facets.py deleted file mode 100755 index d654671b..00000000 --- a/examples/DEPRECATED_psc/list_cbanalytics_alert_facets.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -import sys -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc.models import CBAnalyticsAlert -from helpers.alertsv6 import setup_parser_with_cbanalytics_criteria, load_cbanalytics_criteria - - -def main(): - parser = build_cli_parser("List CB Analytics 
alert facets") - setup_parser_with_cbanalytics_criteria(parser) - parser.add_argument("-F", "--facet", action="append", choices=["ALERT_TYPE", "CATEGORY", "REPUTATION", "WORKFLOW", - "TAG", "POLICY_ID", "POLICY_NAME", "DEVICE_ID", - "DEVICE_NAME", "APPLICATION_HASH", - "APPLICATION_NAME", "STATUS", "RUN_STATE", - "POLICY_APPLIED_STATE", "POLICY_APPLIED", - "SENSOR_ACTION"], - required=True, help="Retrieve these fields as facet information") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(CBAnalyticsAlert) - load_cbanalytics_criteria(query, args) - - facetinfos = query.facets(args.facet) - for facetinfo in facetinfos: - print("For field '{0}':".format(facetinfo["field"])) - for facetval in facetinfo["values"]: - print("\tValue {0}: {1} occurrences".format(facetval["id"], facetval["total"])) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/list_cbanalytics_alerts.py b/examples/DEPRECATED_psc/list_cbanalytics_alerts.py deleted file mode 100755 index a45c62d8..00000000 --- a/examples/DEPRECATED_psc/list_cbanalytics_alerts.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -import sys -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc.models import CBAnalyticsAlert -from helpers.alertsv6 import setup_parser_with_cbanalytics_criteria, load_cbanalytics_criteria - - -def main(): - parser = build_cli_parser("List CB Analytics alerts") - setup_parser_with_cbanalytics_criteria(parser) - parser.add_argument("-S", "--sort_by", help="Field to sort the output by") - parser.add_argument("-R", "--reverse", action="store_true", help="Reverse order of sort") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(CBAnalyticsAlert) - load_cbanalytics_criteria(query, args) - if args.sort_by: - direction = "DESC" if args.reverse else "ASC" - query = query.sort_by(args.sort_by, direction) - - alerts = list(query) - print("{0:40} {1:40s} {2:40s} {3}".format("ID", "Hostname", "Threat ID", "Last Updated")) - for alert in alerts: - print("{0:40} {1:40s} {2:40s} {3}".format(alert.id, alert.device_name or "None", - alert.threat_id or "Unknown", - alert.last_update_time)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/list_devices.py b/examples/DEPRECATED_psc/list_devices.py deleted file mode 100755 index 24a5bb18..00000000 --- a/examples/DEPRECATED_psc/list_devices.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python - -import sys -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc import Device - - -def main(): - parser = build_cli_parser("List devices") - parser.add_argument("-q", "--query", help="Query string for looking for devices") - parser.add_argument("-A", "--ad_group_id", action="append", type=int, help="Active Directory Group ID") - parser.add_argument("-p", "--policy_id", action="append", type=int, help="Policy ID") - parser.add_argument("-s", "--status", action="append", help="Status of device") - parser.add_argument("-P", "--priority", action="append", help="Target priority of device") - parser.add_argument("-S", "--sort_by", help="Field to sort the output by") - parser.add_argument("-R", "--reverse", action="store_true", help="Reverse order of sort") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(Device) - if args.query: - query = query.where(args.query) - if args.ad_group_id: - query = query.set_ad_group_ids(args.ad_group_id) - if 
args.policy_id: - query = query.set_policy_ids(args.policy_id) - if args.status: - query = query.set_status(args.status) - if args.priority: - query = query.set_target_priorities(args.priority) - if args.sort_by: - direction = "DESC" if args.reverse else "ASC" - query = query.sort_by(args.sort_by, direction) - - devices = list(query) - print("{0:9} {1:40}{2:18}{3}".format("ID", "Hostname", "IP Address", "Last Checkin Time")) - for device in devices: - print("{0:9} {1:40s}{2:18s}{3}".format(device.id, device.name or "None", - device.last_internal_ip_address or "Unknown", - device.last_contact_time)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/list_vmware_alert_facets.py b/examples/DEPRECATED_psc/list_vmware_alert_facets.py deleted file mode 100755 index c8420037..00000000 --- a/examples/DEPRECATED_psc/list_vmware_alert_facets.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -import sys -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc.models import VMwareAlert -from helpers.alertsv6 import setup_parser_with_vmware_criteria, load_vmware_criteria - - -def main(): - parser = build_cli_parser("List VMware alert facets") - setup_parser_with_vmware_criteria(parser) - parser.add_argument("-F", "--facet", action="append", choices=["ALERT_TYPE", "CATEGORY", "REPUTATION", "WORKFLOW", - "TAG", "POLICY_ID", "POLICY_NAME", "DEVICE_ID", - "DEVICE_NAME", "APPLICATION_HASH", - "APPLICATION_NAME", "STATUS", "RUN_STATE", - "POLICY_APPLIED_STATE", "POLICY_APPLIED", - "SENSOR_ACTION"], - required=True, help="Retrieve these fields as facet information") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(VMwareAlert) - load_vmware_criteria(query, args) - - facetinfos = query.facets(args.facet) - for facetinfo in facetinfos: - print("For field '{0}':".format(facetinfo["field"])) - for facetval in facetinfo["values"]: - print("\tValue {0}: {1} occurrences".format(facetval["id"], facetval["total"])) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/list_vmware_alerts.py b/examples/DEPRECATED_psc/list_vmware_alerts.py deleted file mode 100755 index ee0fb44e..00000000 --- a/examples/DEPRECATED_psc/list_vmware_alerts.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -import sys -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc.models import VMwareAlert -from helpers.alertsv6 import setup_parser_with_vmware_criteria, load_vmware_criteria - - -def main(): - parser = build_cli_parser("List VMware alerts") - setup_parser_with_vmware_criteria(parser) - parser.add_argument("-S", "--sort_by", help="Field to sort the output by") - parser.add_argument("-R", "--reverse", action="store_true", help="Reverse order of sort") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(VMwareAlert) - load_vmware_criteria(query, args) - if args.sort_by: - direction = "DESC" if args.reverse else "ASC" - query = query.sort_by(args.sort_by, direction) - - alerts = list(query) - print("{0:40} {1:40s} {2:40s} {3}".format("ID", "Hostname", "Threat ID", "Last Updated")) - for alert in alerts: - print("{0:40} {1:40s} {2:40s} {3}".format(alert.id, alert.device_name or "None", - alert.threat_id or "Unknown", - alert.last_update_time)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/list_watchlist_alert_facets.py b/examples/DEPRECATED_psc/list_watchlist_alert_facets.py deleted 
file mode 100755 index 35776ef1..00000000 --- a/examples/DEPRECATED_psc/list_watchlist_alert_facets.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -import sys -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc.models import WatchlistAlert -from helpers.alertsv6 import setup_parser_with_watchlist_criteria, load_watchlist_criteria - - -def main(): - parser = build_cli_parser("List watchlist alert facets") - setup_parser_with_watchlist_criteria(parser) - parser.add_argument("-F", "--facet", action="append", choices=["ALERT_TYPE", "CATEGORY", "REPUTATION", "WORKFLOW", - "TAG", "POLICY_ID", "POLICY_NAME", "DEVICE_ID", - "DEVICE_NAME", "APPLICATION_HASH", - "APPLICATION_NAME", "STATUS", "RUN_STATE", - "POLICY_APPLIED_STATE", "POLICY_APPLIED", - "SENSOR_ACTION"], - required=True, help="Retrieve these fields as facet information") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(WatchlistAlert) - load_watchlist_criteria(query, args) - - facetinfos = query.facets(args.facet) - for facetinfo in facetinfos: - print("For field '{0}':".format(facetinfo["field"])) - for facetval in facetinfo["values"]: - print("\tValue {0}: {1} occurrences".format(facetval["id"], facetval["total"])) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_psc/list_watchlist_alerts.py b/examples/DEPRECATED_psc/list_watchlist_alerts.py deleted file mode 100755 index 708efa07..00000000 --- a/examples/DEPRECATED_psc/list_watchlist_alerts.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -import sys -from cbapi.example_helpers import build_cli_parser, get_cb_psc_object -from cbapi.psc.models import WatchlistAlert -from helpers.alertsv6 import setup_parser_with_watchlist_criteria, load_watchlist_criteria - - -def main(): - parser = build_cli_parser("List watchlist alerts") - setup_parser_with_watchlist_criteria(parser) - parser.add_argument("-S", "--sort_by", help="Field to sort the output by") - parser.add_argument("-R", "--reverse", action="store_true", help="Reverse order of sort") - - args = parser.parse_args() - cb = get_cb_psc_object(args) - - query = cb.select(WatchlistAlert) - load_watchlist_criteria(query, args) - if args.sort_by: - direction = "DESC" if args.reverse else "ASC" - query = query.sort_by(args.sort_by, direction) - - alerts = list(query) - print("{0:40} {1:40s} {2:40s} {3}".format("ID", "Hostname", "Threat ID", "Last Updated")) - for alert in alerts: - print("{0:40} {1:40s} {2:40s} {3}".format(alert.id, alert.device_name or "None", - alert.threat_id or "Unknown", - alert.last_update_time)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_threathunter/create_feed.py b/examples/DEPRECATED_threathunter/create_feed.py deleted file mode 100644 index 619c7166..00000000 --- a/examples/DEPRECATED_threathunter/create_feed.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python - -import sys -import time - -from cbapi.example_helpers import read_iocs, build_cli_parser, get_cb_threathunter_object -from cbapi.psc.threathunter import Feed - - -def main(): - parser = build_cli_parser("Create a CbTH feed and, optionally, a report from a stream of IOCs") - - # Feed metadata arguments. 
- parser.add_argument("--name", type=str, help="Feed name", required=True) - parser.add_argument("--owner", type=str, help="Feed owner", required=True) - parser.add_argument("--url", type=str, help="Feed provider url", required=True) - parser.add_argument("--summary", type=str, help="Feed summary", required=True) - parser.add_argument("--category", type=str, help="Feed category", required=True) - parser.add_argument("--source_label", type=str, help="Feed source label", default=None) - parser.add_argument("--access", type=str, help="Feed access scope", default="private") - - # Report metadata arguments. - parser.add_argument("--read_report", action="store_true", help="Read a report from stdin") - parser.add_argument("--rep_timestamp", type=int, help="Report timestamp", default=int(time.time())) - parser.add_argument("--rep_title", type=str, help="Report title") - parser.add_argument("--rep_desc", type=str, help="Report description") - parser.add_argument("--rep_severity", type=int, help="Report severity", default=1) - parser.add_argument("--rep_link", type=str, help="Report link") - parser.add_argument("--rep_tags", type=str, help="Report tags, comma separated") - parser.add_argument("--rep_visibility", type=str, help="Report visibility") - - args = parser.parse_args() - cb = get_cb_threathunter_object(args) - - feed_info = { - "name": args.name, - "owner": args.owner, - "provider_url": args.url, - "summary": args.summary, - "category": args.category, - "access": args.access, - } - - reports = [] - if args.read_report: - rep_tags = [] - if args.rep_tags: - rep_tags = args.rep_tags.split(",") - - report = { - "timestamp": args.rep_timestamp, - "title": args.rep_title, - "description": args.rep_desc, - "severity": args.rep_severity, - "link": args.rep_link, - "tags": rep_tags, - "iocs_v2": [], # NOTE(ww): The feed server will convert IOCs to v2s for us. 
- } - - report_id, iocs = read_iocs(cb) - - report["id"] = report_id - report["iocs"] = iocs - reports.append(report) - - feed = { - "feedinfo": feed_info, - "reports": reports - } - - feed = cb.create(Feed, feed) - feed.save() - - print(feed) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_threathunter/events.py b/examples/DEPRECATED_threathunter/events.py deleted file mode 100644 index 244fd7cd..00000000 --- a/examples/DEPRECATED_threathunter/events.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_threathunter_object -from cbapi.psc.threathunter import Event - - -def main(): - parser = build_cli_parser("Query processes") - parser.add_argument("-p", type=str, help="process guid", default=None) - parser.add_argument("-n", type=int, help="only output N events", default=None) - - args = parser.parse_args() - cb = get_cb_threathunter_object(args) - - if not args.p: - print("Error: Missing Process GUID to search for events with") - sys.exit(1) - - events = cb.select(Event).where(process_guid=args.p) - - if args.n: - events = events[0:args.n] - - for event in events: - print("Event type: {}".format(event.event_type)) - print("\tEvent GUID: {}".format(event.event_guid)) - print("\tEvent Timestamp: {}".format(event.event_timestamp)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_threathunter/events_exporter.py b/examples/DEPRECATED_threathunter/events_exporter.py deleted file mode 100644 index 2f31c9f4..00000000 --- a/examples/DEPRECATED_threathunter/events_exporter.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python - -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_threathunter_object -from cbapi.psc.threathunter import Event -import json -import csv - - -def main(): - parser = build_cli_parser("Query processes") - parser.add_argument("-p", type=str, help="process guid", default=None) - parser.add_argument("-s", type=bool, help="silent mode", default=False) - parser.add_argument("-n", type=int, help="only output N events", default=None) - parser.add_argument("-f", type=str, help="output file name", default=None) - parser.add_argument("-of", type=str, help="output file format: csv or json", default="json") - - args = parser.parse_args() - cb = get_cb_threathunter_object(args) - - if not args.p: - print("Error: Missing Process GUID to search for events with") - sys.exit(1) - - events = cb.select(Event).where(process_guid=args.p) - - if args.n: - events = events[0:args.n] - - if not args.s: - for event in events: - print("Event type: {}".format(event.event_type)) - print("\tEvent GUID: {}".format(event.event_guid)) - print("\tEvent Timestamp: {}".format(event.event_timestamp)) - - if args.f is not None: - if args.of == "json": - with open(args.f, 'w') as outfile: - for event in events: - json.dump(event.original_document, outfile) - else: - with open(args.f, 'w') as outfile: - csvwriter = csv.writer(outfile) - for event in events: - csvwriter.writerows(event) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_threathunter/feed_operations.py b/examples/DEPRECATED_threathunter/feed_operations.py deleted file mode 100644 index 9ab4469c..00000000 --- a/examples/DEPRECATED_threathunter/feed_operations.py +++ /dev/null @@ -1,268 +0,0 @@ -#!/usr/bin/env python -# - -import sys -from cbapi.psc.threathunter.models import Feed, Report -from cbapi.example_helpers import eprint, 
build_cli_parser, get_cb_threathunter_object -import logging -import json - -log = logging.getLogger(__name__) - - -def get_feed(cb, feed_id=None, feed_name=None): - if feed_id: - return cb.select(Feed, feed_id) - elif feed_name: - feeds = [feed for feed in cb.select(Feed) if feed.name == feed_name] - - if not feeds: - eprint("No feeds named '{}'".format(feed_name)) - sys.exit(1) - elif len(feeds) > 1: - eprint("More than one feed named '{}'".format(feed_name)) - sys.exit(1) - - return feeds[0] - else: - raise ValueError("expected either feed_id or feed_name") - - -def get_report(feed, report_id=None, report_name=None): - if report_id: - reports = [report for report in feed.reports if report.id == report_id] - - if not reports: - eprint("No reports with ID '{}'".format(report_id)) - sys.exit(1) - elif len(reports) > 1: - eprint("More than one report with ID '{}'".format(report_id)) - sys.exit(1) - elif report_name: - reports = [report for report in feed.reports if report.title == report_name] - - if not reports: - eprint("No reports named '{}'".format(report_name)) - sys.exit(1) - elif len(reports) > 1: - eprint("More than one report named '{}'".format(report_name)) - sys.exit(1) - else: - raise ValueError("expected either report_id or report_name") - - return reports[0] - - -def list_feeds(cb, parser, args): - if args.iocs and not args.reports: - eprint("--iocs specified without --reports") - sys.exit(1) - - feeds = cb.select(Feed).where(include_public=args.public) - - for feed in feeds: - print(feed) - if args.reports: - for report in feed.reports: - print(report) - if args.iocs: - for ioc in report.iocs_: - print(ioc) - - -def list_iocs(cb, parser, args): - feed = get_feed(cb, feed_id=args.id, feed_name=args.feedname) - - for report in feed.reports: - for ioc in report.iocs_: - print(ioc) - - -def export_feed(cb, parser, args): - feed = get_feed(cb, feed_id=args.id, feed_name=args.feedname) - - exported = {} - - exported["feedinfo"] = feed._info - exported["reports"] = [report._info for report in feed.reports] - print(json.dumps(exported)) - - -def import_feed(cb, parser, args): - imported = json.loads(sys.stdin.read()) - - if args.feedname: - imported["feedinfo"]["name"] = args.feedname - - feed = cb.create(Feed, imported) - feed.save(public=args.public) - - -def delete_feed(cb, parser, args): - feed = get_feed(cb, feed_id=args.id, feed_name=args.feedname) - feed.delete() - - -def export_report(cb, parser, args): - feed = get_feed(cb, feed_id=args.id, feed_name=args.feedname) - report = get_report(feed, report_id=args.reportid, report_name=args.reportname) - - print(json.dumps(report._info)) - - -def import_report(cb, parser, args): - feed = get_feed(cb, feed_id=args.id, feed_name=args.feedname) - - imp_dict = json.loads(sys.stdin.read()) - - reports = feed.reports - existing_report = next( - (report for report in reports if imp_dict["id"] == report.id), None - ) - - if existing_report: - eprint("Report already exists; use replace-report.") - sys.exit(1) - else: - imp_report = cb.create(Report, imp_dict) - feed.append_reports([imp_report]) - - -def delete_report(cb, parser, args): - feed = get_feed(cb, feed_id=args.id, feed_name=args.feedname) - report = get_report(feed, report_id=args.reportid, report_name=args.reportname) - report.delete() - - -def replace_report(cb, parser, args): - feed = get_feed(cb, feed_id=args.id, feed_name=args.feedname) - - imported = json.loads(sys.stdin.read()) - - reports = feed.reports - existing_report = next( - (report for report in reports if 
imported["id"] == report.id), None - ) - - if existing_report: - existing_report.update(**imported) - else: - eprint("No existing report to replace") - sys.exit(1) - - -def main(): - parser = build_cli_parser() - commands = parser.add_subparsers(help="Feed commands", dest="command_name") - - list_command = commands.add_parser("list", help="List all configured feeds") - list_command.add_argument( - "-P", - "--public", - help="Include public feeds", - action="store_true", - default=False, - ) - list_command.add_argument( - "-r", - "--reports", - help="Include reports for each feed", - action="store_true", - default=False, - ) - list_command.add_argument( - "-i", - "--iocs", - help="Include IOCs for each feed's reports", - action="store_true", - default=False, - ) - - list_iocs_command = commands.add_parser( - "list-iocs", help="List all IOCs for a feed" - ) - specifier = list_iocs_command.add_mutually_exclusive_group(required=True) - specifier.add_argument("-i", "--id", type=str, help="Feed ID") - specifier.add_argument("-f", "--feedname", type=str, help="Feed Name") - - export_command = commands.add_parser( - "export", help="Export a feed into an importable format" - ) - specifier = export_command.add_mutually_exclusive_group(required=True) - specifier.add_argument("-i", "--id", type=str, help="Feed ID") - specifier.add_argument("-f", "--feedname", type=str, help="Feed Name") - - import_command = commands.add_parser( - "import", help="Import a previously exported feed" - ) - import_command.add_argument( - "-f", "--feedname", type=str, help="Renames the imported feed" - ) - import_command.add_argument( - "-P", "--public", help="Make the feed public", action="store_true" - ) - - del_command = commands.add_parser("delete", help="Delete feed") - specifier = del_command.add_mutually_exclusive_group(required=True) - specifier.add_argument("-i", "--id", type=str, help="Feed ID") - specifier.add_argument("-f", "--feedname", type=str, help="Feed Name") - - export_report_command = commands.add_parser( - "export-report", help="Export a feed's report into an importable format" - ) - specifier = export_report_command.add_mutually_exclusive_group(required=True) - specifier.add_argument("-i", "--id", type=str, help="Feed ID") - specifier.add_argument("-f", "--feedname", type=str, help="Feed Name") - specifier = export_report_command.add_mutually_exclusive_group(required=True) - specifier.add_argument("-I", "--reportid", type=str, help="Report ID") - specifier.add_argument("-r", "--reportname", type=str, help="Report Name") - - import_report_command = commands.add_parser( - "import-report", help="Import a previously exported report" - ) - specifier = import_report_command.add_mutually_exclusive_group(required=True) - specifier.add_argument("-i", "--id", type=str, help="Feed ID") - specifier.add_argument("-f", "--feedname", type=str, help="Feed Name") - - delete_report_command = commands.add_parser( - "delete-report", help="Delete a report from a feed" - ) - specifier = delete_report_command.add_mutually_exclusive_group(required=True) - specifier.add_argument("-i", "--id", type=str, help="Feed ID") - specifier.add_argument("-f", "--feedname", type=str, help="Feed Name") - specifier = delete_report_command.add_mutually_exclusive_group(required=True) - specifier.add_argument("-I", "--reportid", type=str, help="Report ID") - specifier.add_argument("-r", "--reportname", type=str, help="Report Name") - - replace_report_command = commands.add_parser( - "replace-report", help="Replace a feed's report" - ) - 
specifier = replace_report_command.add_mutually_exclusive_group(required=True) - specifier.add_argument("-i", "--id", type=str, help="Feed ID") - specifier.add_argument("-f", "--feedname", type=str, help="Feed Name") - - args = parser.parse_args() - cb = get_cb_threathunter_object(args) - - if args.command_name == "list": - return list_feeds(cb, parser, args) - elif args.command_name == "list-iocs": - return list_iocs(cb, parser, args) - elif args.command_name == "export": - return export_feed(cb, parser, args) - elif args.command_name == "import": - return import_feed(cb, parser, args) - elif args.command_name == "delete": - return delete_feed(cb, parser, args) - elif args.command_name == "export-report": - return export_report(cb, parser, args) - elif args.command_name == "import-report": - return import_report(cb, parser, args) - elif args.command_name == "delete-report": - return delete_report(cb, parser, args) - elif args.command_name == "replace-report": - return replace_report(cb, parser, args) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_threathunter/import_response_feeds.py b/examples/DEPRECATED_threathunter/import_response_feeds.py deleted file mode 100644 index 85146ea3..00000000 --- a/examples/DEPRECATED_threathunter/import_response_feeds.py +++ /dev/null @@ -1,144 +0,0 @@ -#!/usr/bin/env python -# -import sys -from cbapi.psc.threathunter import CbThreatHunterAPI -from cbapi.psc.threathunter.models import Feed as FeedTH -from cbapi.response.models import Feed -from cbapi.example_helpers import build_cli_parser, get_cb_response_object -from cbapi.errors import ServerError -from urllib.parse import unquote -import logging - -log = logging.getLogger(__name__) - - -def list_feeds(cb, parser, args): - """ - Lists the feeds in CB Response - """ - for f in cb.select(Feed): - for fieldname in ["id", "category", "display_name", "enabled", "provider_url", "summary", "tech_data", - "feed_url", "use_proxy", "validate_server_cert"]: - print("%-20s: %s" % (fieldname, getattr(f, fieldname, ""))) - - if f.username: - for fieldname in ["username", "password"]: - print("%-20s: %s" % (fieldname, getattr(f, fieldname, ""))) - - if f.ssl_client_crt: - for fieldname in ["ssl_client_crt", "ssl_client_key"]: - print("%-20s: %s" % (fieldname, getattr(f, fieldname, ""))) - - print("\n") - - -def list_reports(cb, parser, args): - """ - Lists the reports in a feed from CB Response - :param: id - The ID of a feed - """ - feed = cb.select(Feed, args.id, force_init=True) - for report in feed.reports: - print(report) - print("\n") - - -def convert_feed(cb, cb_th, parser, args): - """ - Converts and copies a feed from CB Response to CB Threat Hunter - :param: id - The ID of a feed from CB Response - - Requires a credentials profile for both CB Response and CB Threat Hunter - Ensure that your credentials for CB Threat Hunter have permissions to the Feed Manager APIs - """ - th_feed = {"feedinfo": {}, "reports": []} - # Fetches the CB Response feed - feed = cb.select(Feed, args.id, force_init=True) - - th_feed["feedinfo"]["name"] = feed.name - th_feed["feedinfo"]["provider_url"] = feed.provider_url - th_feed["feedinfo"]["summary"] = feed.summary - th_feed["feedinfo"]["category"] = feed.category - th_feed["feedinfo"]["access"] = "private" - - # Temporary values until refresh - th_feed["feedinfo"]["owner"] = "org_key" - th_feed["feedinfo"]["id"] = "id" - - # Iterates the reports in the CB Response feed - for report in feed.reports: - th_report = {} - th_report["id"] = 
report.id - th_report["timestamp"] = report.timestamp - th_report["title"] = report.title - th_report["severity"] = (report.score % 10) + 1 - if hasattr(report, "description"): - th_report["description"] = report.description - else: - th_report["description"] = "" - if hasattr(report, "link"): - th_report["link"] = report.link - th_report["iocs"] = {} - if report.iocs: - if "md5" in report.iocs: - th_report["iocs"]["md5"] = report.iocs["md5"] - if "ipv4" in report.iocs: - th_report["iocs"]["ipv4"] = report.iocs["ipv4"] - if "ipv6" in report.iocs: - th_report["iocs"]["ipv6"] = report.iocs["ipv6"] - if "dns" in report.iocs: - th_report["iocs"]["dns"] = report.iocs["dns"] - - if "query" in report.iocs: - th_report["iocs"]["query"] = [] - for query in report.iocs.get("query", []): - try: - search = query.get('search_query', "") - if "q=" in search: - params = search.split('&') - for p in params: - if "q=" in p: - search = unquote(p[2:]) - # Converts the CB Response query to CB Threat Hunter - th_query = cb_th.convert_query(search) - if th_query: - query["search_query"] = th_query - th_report["iocs"]["query"].append(query) - except ServerError: - print('Invalid query {}'.format(query.get('search_query', ""))) - - th_feed["reports"].append(th_report) - - # Pushes the new feed to CB Threat Hunter - new_feed = cb_th.create(FeedTH, th_feed) - new_feed.save() - print("{}\n".format(new_feed)) - - -def main(): - parser = build_cli_parser() - parser.add_argument("-thp", "--threatprofile", help="Threat Hunter profile", default="default") - commands = parser.add_subparsers(help="Feed commands", dest="command_name") - - commands.add_parser("list", help="List all configured feeds") - - list_reports_command = commands.add_parser("list-reports", help="List all configured reports for a feed") - list_reports_command.add_argument("-i", "--id", type=str, help="Feed ID") - - convert_feed_command = commands.add_parser("convert", help="Convert feed from CB Response to CB Threat Hunter") - convert_feed_command.add_argument("-i", "--id", type=str, help="Feed ID") - - args = parser.parse_args() - cb = get_cb_response_object(args) - cb_th = CbThreatHunterAPI(profile=args.threatprofile) - - if args.command_name == "list": - return list_feeds(cb, parser, args) - if args.command_name == "list-reports": - return list_reports(cb, parser, args) - if args.command_name == "convert": - return convert_feed(cb, cb_th, parser, args) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_threathunter/modify_feed.py b/examples/DEPRECATED_threathunter/modify_feed.py deleted file mode 100644 index 8f543b8a..00000000 --- a/examples/DEPRECATED_threathunter/modify_feed.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python - -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_threathunter_feed_object - - -def main(): - parser = build_cli_parser("Modify a CbTH feed") - parser.add_argument("--id", type=str, help="Feed ID", default=None) - parser.add_argument("--name", type=str, help="Feed name", default=None) - parser.add_argument("--owner", type=str, help="Feed owner", default=None) - parser.add_argument("--url", type=str, help="Feed provider url", default="https://example.com") - parser.add_argument("--summary", type=str, help="Feed summary", default=None) - parser.add_argument("--category", type=str, help="Feed category", default="Partner") - parser.add_argument("--access", type=str, help="Feed access scope", default="private") - - args = parser.parse_args() - cb = 
get_cb_threathunter_feed_object(args) - - feed = cb.feed(args.id) - - print("Before modification:") - print("=" * 80) - print(feed) - print("=" * 80) - - metadata = {} - if args.name: - metadata["name"] = args.name - if args.owner: - metadata["owner"] = args.owner - if args.url: - metadata["provider_url"] = args.url - if args.summary: - metadata["summary"] = args.summary - if args.category: - metadata["category"] = args.category - if args.access: - metadata["access"] = args.access - - feed.feedinfo.update(**metadata) - - print("After modification:") - print("=" * 80) - print(feed) - print("=" * 80) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_threathunter/process_exporter.py b/examples/DEPRECATED_threathunter/process_exporter.py deleted file mode 100644 index b0d017e7..00000000 --- a/examples/DEPRECATED_threathunter/process_exporter.py +++ /dev/null @@ -1,60 +0,0 @@ -#!/usr/bin/env python - -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_threathunter_object -from cbapi.psc.threathunter import Process -import json -import csv - - -def main(): - parser = build_cli_parser("Query processes") - parser.add_argument("-p", type=str, help="process guid", default=None) - parser.add_argument("-q", type=str, help="query string", default=None) - parser.add_argument("-s", type=bool, help="silent mode", default=False) - parser.add_argument("-n", type=int, help="only output N events", default=None) - parser.add_argument("-f", type=str, help="output file name", default=None) - parser.add_argument("-of", type=str, help="output file format: csv or json", default="json") - - args = parser.parse_args() - cb = get_cb_threathunter_object(args) - - if not args.p and not args.q: - print("Error: Missing Process GUID to search for events with") - sys.exit(1) - - if args.q: - processes = cb.select(Process).where(args.q) - else: - processes = cb.select(Process).where(process_guid=args.p) - - if args.n: - processes = [p for p in processes[0:args.n]] - - if not args.s: - for process in processes: - print("Process: {}".format(process.process_name)) - print("\tPIDs: {}".format(process.process_pids)) - print("\tSHA256: {}".format(process.process_sha256)) - print("\tGUID: {}".format(process.process_guid)) - - if args.f is not None: - if args.of == "json": - with open(args.f, 'w') as outfile: - for p in processes: - json.dump(p.original_document, outfile) - print(p.original_document) - else: - headers = set() - headers.update(*(d.original_document.keys() for d in processes)) - with open(args.f, 'w') as outfile: - csvwriter = csv.DictWriter(outfile, fieldnames=headers) - csvwriter.writeheader() - for p in processes: - csvwriter.writerow(p.original_document) - - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_threathunter/process_query.py b/examples/DEPRECATED_threathunter/process_query.py deleted file mode 100644 index 1b3bfbd8..00000000 --- a/examples/DEPRECATED_threathunter/process_query.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python - -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_threathunter_object -from cbapi.psc.threathunter import Process, Binary -from cbapi.errors import ObjectNotFoundError - - -def main(): - parser = build_cli_parser("Query processes") - parser.add_argument("-q", type=str, help="process query", default="process_name:notepad.exe") - parser.add_argument("-n", type=int, help="only output N processes", default=None) - parser.add_argument("-b", action="store_true", 
help="show binary information", default=False) - - args = parser.parse_args() - cb = get_cb_threathunter_object(args) - - processes = cb.select(Process).where(args.q) - - if args.n: - processes = processes[0:args.n] - - for process in processes: - print("Process: {}".format(process.process_name)) - print("\tPIDs: {}".format(process.process_pids)) - print("\tSHA256: {}".format(process.process_sha256)) - print("\tGUID: {}".format(process.process_guid)) - - if args.b: - try: - binary = cb.select(Binary, process.process_sha256) - print(binary) - print(binary.summary) - except ObjectNotFoundError: - print("No binary found for process with hash: {}".format(process.process_sha256)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_threathunter/process_tree.py b/examples/DEPRECATED_threathunter/process_tree.py deleted file mode 100644 index df2e64f2..00000000 --- a/examples/DEPRECATED_threathunter/process_tree.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_threathunter_object -from cbapi.psc.threathunter import Process - - -def main(): - parser = build_cli_parser("Query processes") - parser.add_argument("-p", type=str, help="process guid", default=None) - - args = parser.parse_args() - cb = get_cb_threathunter_object(args) - - if not args.p: - print("Error: Missing Process GUID to query the process tree with") - sys.exit(1) - - tree = cb.select(Process).where(process_guid=args.p)[0].tree() - for idx, child in enumerate(tree.children): - print("Child #{}".format(idx)) - print("\tName: {}".format(child.process_name)) - print("\tGUID: {}".format(child.process_guid)) - print("\tNumber of children: {}".format(len(child.children))) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_threathunter/process_tree_exporter.py b/examples/DEPRECATED_threathunter/process_tree_exporter.py deleted file mode 100644 index 2eef7326..00000000 --- a/examples/DEPRECATED_threathunter/process_tree_exporter.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_threathunter_object -from cbapi.psc.threathunter import Process -import csv -import json - - -def main(): - parser = build_cli_parser("Query processes") - parser.add_argument("-p", type=str, help="process guid", default=None) - parser.add_argument("-f", type=str, help="output file name", default=None) - parser.add_argument("-of", type=str, help="output file format: csv or json", default="json") - - args = parser.parse_args() - cb = get_cb_threathunter_object(args) - - if not args.p: - print("Error: Missing Process GUID to query the process tree with") - sys.exit(1) - - tree = cb.select(Process).where(process_guid=args.p)[0].tree() - - for idx, child in enumerate(tree.children): - print("Child #{}".format(idx)) - print("\tName: {}".format(child.process_name)) - print("\tNumber of children: {}".format(len(child.children))) - - if args.f is not None: - if args.of == "json": - with open(args.f, 'w') as outfile: - for idx, child in enumerate(tree.children): - json.dump(child.original_document, outfile) - else: - with open(args.f, 'w') as outfile: - csvwriter = csv.writer(outfile) - for idx, child in enumerate(tree.children): - csvwriter.writerow(child.original_document) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_threathunter/search.py b/examples/DEPRECATED_threathunter/search.py deleted file mode 100644 
index d757aaf7..00000000 --- a/examples/DEPRECATED_threathunter/search.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python - -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_threathunter_object -from cbapi.psc.threathunter import Process - - -def main(): - parser = build_cli_parser("Search processes") - parser.add_argument("-q", type=str, help="process query", default="process_name:notepad.exe") - parser.add_argument("-f", help="show full objects", action="store_true", default=False) - parser.add_argument("-n", type=int, help="only output N processes", default=None) - parser.add_argument("-e", help="show events for query results", action="store_true", default=False) - parser.add_argument("-c", help="show children for query results", action="store_true", default=False) - parser.add_argument("-p", help="show parents for query results", action="store_true", default=False) - parser.add_argument("-t", help="show tree for query results", action="store_true", default=False) - parser.add_argument("-S", type=str, help="sort by this field", required=False) - parser.add_argument("-D", help="return results in descending order", action="store_true") - - args = parser.parse_args() - cb = get_cb_threathunter_object(args) - - processes = cb.select(Process).where(args.q) - - direction = "ASC" - if args.D: - direction = "DESC" - - if args.S: - processes.sort_by(args.S, direction=direction) - - print("Number of processes: {}".format(len(processes))) - - if args.n: - processes = processes[0:args.n] - - for process in processes: - if args.f: - print(process) - else: - print("{} ({}): {}".format(process.process_name, process.process_guid, process.process_sha256)) - - if args.e: - print("=========== events ===========") - for event in process.events(): - if args.f: - print(event) - else: - print("\t{}".format(event.event_type)) - - if args.c: - print("========== children ==========") - for child in process.children: - if args.f: - print(child) - else: - print("\t{}: {}".format(child.process_name, child.process_sha256)) - - if args.p: - print("========== parents ==========") - for parent in process.parents: - if args.f: - print(parent) - else: - print("\t{}: {}".format(parent.process_name, parent.process_sha256)) - - if args.t: - print("=========== tree =============") - tree = process.tree() - print(tree) - print(tree.nodes) - - print("===========================") - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/DEPRECATED_threathunter/threat_intelligence/README.md b/examples/DEPRECATED_threathunter/threat_intelligence/README.md deleted file mode 100644 index fa862284..00000000 --- a/examples/DEPRECATED_threathunter/threat_intelligence/README.md +++ /dev/null @@ -1,129 +0,0 @@ -# ThreatIntel Module -Python3 module that can be used in the development of Threat Intelligence Connectors for the Carbon Black Cloud. - -## Requirements - -The file `requirements.txt` contains a list of dependencies for this project. After cloning this repository, run the following command from the `examples/threathunter/threat_intelligence` directory: - -```python -pip3 install -r ./requirements.txt -``` - - -## Introduction -This document describes how to use the ThreatIntel Python3 module for development of connectors that retrieve Threat Intelligence and import it into a Carbon Black Cloud instance. - -Throughout this document, there are references to Carbon Black ThreatHunter Feed and Report formats. 
Documentation on Feed and Report definitions is [available here.](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-threathunter/latest/feed-api/#definitions) - -## Example - -An example of implementing this ThreatIntel module is [available here](Taxii_README.md). The example uses cabby to connect to a TAXII server, collect threat intelligence, and send it to a ThreatHunter Feed. - - -## Usage - -`threatintel.py` has two main uses: - -1. Report Validation with `schemas.ReportSchema` -2. Pushing Reports to a Carbon Black ThreatHunter Feed with `threatintel.push_to_cb()` - -### Report validation - -Each Report to be sent to the Carbon Black Cloud should be validated -before sending. The [ThreatHunter Report format](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-threathunter/latest/feed-api/#definitions) is a JSON object with -five required and five optional values. - -|Required|Type|Optional|Type| -|---|---|---|---| -|`id`|string|`link`|string| -|`timestamp`|integer|`[tags]`|[str]| -|`title`|string|`iocs`|[IOC Format](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-threathunter/latest/feed-api/#definitions)| -|`description`|string|`[iocs_v2]`|[[IOCv2 Format](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-threathunter/latest/feed-api/#definitions)]| -|`severity`|integer|`visibility`|string| - -The `push_to_cb` function checks for the existence and type of the five -required values, and (if applicable) checks the optional values, through a Schema. -See `schemas.py` for the definitions. - -### Pushing Reports to a Carbon Black ThreatHunter Feed - -The `push_to_cb` function takes a list of `AnalysisResult` objects (or objects of your own custom class) and a Carbon -Black ThreatHunter Feed ID as input, and writes output to the console. -The `AnalysisResult` class is defined in `results.py`, and requirements for a custom class are outlined in the Customization section below. - -`AnalysisResult` objects are expected to have the same properties as -ThreatHunter Reports (listed in the table above in Report Validation), with the addition of `iocs_v2`. The -`push_to_cb` function will convert `AnalysisResult` objects into -Report dictionaries, and then those dictionaries into ThreatHunter -Report objects. - -Any improperly formatted report dictionaries are saved to a file called `malformed_reports.json`. - -Upon successful sending of reports to a ThreatHunter Feed, you should -see something similar to the following INFO message in the console: - -`INFO:threatintel:Appended 1000 reports to ThreatHunter Feed AbCdEfGhIjKlMnOp` - - -### Using Validation and Pushing to ThreatHunter in your own code - -Import the module and supporting classes like any other python package, and instantiate a ThreatIntel object: - - ```python - from threatintel import ThreatIntel - from results import IOC_v2, AnalysisResult - ti = ThreatIntel() -``` - -Take the threat intelligence data from your source, and convert it into ``AnalysisResult`` objects. Then, attach the indicators of compromise, and store your data in a list. - -```python - myResults = [] - for intel in myThreatIntelligenceData: - result = AnalysisResult(analysis_name=intel.name, scan_time=intel.scan_time, score=intel.score, title=intel.title, description=intel.description) - #ioc_dict could be a collection of md5 hashes, dns values, file hashes, etc. 
- for ioc_key, ioc_val in intel.ioc_dict.items(): - result.attach_ioc_v2(values=ioc_val, field=ioc_key, link=link) - myResults.append(result) -``` - -Finally, push your threat intelligence data to a ThreatHunter Feed. -```python - ti.push_to_cb(feed_id='AbCdEfGhIjKlMnOp', results=myResults) -``` - -`ti.push_to_cb` automatically validates your input to ensure it has the values required for ThreatHunter. Validated reports will be sent to your specified ThreatHunter Feed, and any malformed reports will be available for review locally at `malformed_reports.json`. - - - -## Customization - -Although the `AnalysisResult` class is provided in `results.py` as an example, you may create your own custom class to use with `push_to_cb`. The class must have the following attributes to work with the provided `push_to_cb` function, as well as the ThreatHunter backend: - - -|Attribute|Type| -|---|---| -|`id`|string| -|`timestamp`|integer| -|`title`|string| -|`description`|string| -|`severity`|integer| -|`iocs_v2`|[[IOCv2 Format](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-threathunter/latest/feed-api/#definitions)]| - -It is strongly recommended to use the provided `IOC_v2()` class from `results.py`. If you decide to use a custom `iocs_v2` class, that class must have a method called `as_dict` that returns `id`, `match_type`, `values`, `field`, and `link` as a dictionary. - - -## Writing a Custom Threat Intelligence Polling Connector - -An example of a custom Threat Intel connector that uses the `ThreatIntel` Python3 module is included in this repository as `stix_taxii.py`. Most use cases will warrant the use of the ThreatHunter `Report` attribute `iocs_v2`, so it is included in `ThreatIntel.push_to_cb()`. - -`ThreatIntel.push_to_cb()` and `AnalysisResult` can be adapted to include other ThreatHunter `Report` attributes like `link, tags, iocs, and visibility`. - - -## Troubleshooting - -### Credential Error -In order to use this code, you must have CBAPI installed and configured. If you receive an authentication error, visit the Developer Network Authentication Page for [instructions on setting up authentication](https://developer.carbonblack.com/reference/carbon-black-cloud/authentication/). See [ReadTheDocs](https://cbapi.readthedocs.io/en/latest/index.html#api-credentials) for instructions on configuring your credentials file. - -### 504 Gateway Timeout Error -The [Carbon Black ThreatHunter Feed Manager API](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-threathunter/latest/feed-api/) is used in this code. When posting to a Feed, there is a 60 second limit before the gateway terminates your connection. The amount of reports you can POST to a Feed is limited by your connection speed. In this case, you will have to split your threat intelligence into smaller collections until the request takes less than 60 seconds, and send each smaller collection to an individual ThreatHunter Feed. diff --git a/examples/DEPRECATED_threathunter/threat_intelligence/Taxii_README.md b/examples/DEPRECATED_threathunter/threat_intelligence/Taxii_README.md deleted file mode 100644 index fff2e434..00000000 --- a/examples/DEPRECATED_threathunter/threat_intelligence/Taxii_README.md +++ /dev/null @@ -1,41 +0,0 @@ -# TAXII Connector -Connector for pulling and converting STIX information from TAXII Service Providers into CB Feeds. - -## Requirements/Installation - -The file `requirements.txt` contains a list of dependencies for this project. 
After cloning this repository, run the following command from the `examples/threathunter/threat_intelligence` directory: - -```python -pip3 install -r ./requirements.txt -``` - -## Introduction -This document describes how to configure the CB ThreatHunter TAXII connector. -This connector allows for the importing of STIX data by querying one or more TAXII services, retrieving that data and then converting it into CB feeds using the CB JSON format for IOCs. - -## Setup - TAXII Configuration File -The TAXII connector uses the configuration file `config.yml`. An example configuration file is available [here.](config.yml) An explanation of each entry in the configuration file is provided in the example. - - -## Running the Connector -The connector can be activated by running the Python3 file `stix_taxii.py`. The connector will attempt to connect to your TAXII service(s), poll the collection(s), retrieve the STIX data, and send it to the ThreatHunter Feed specified in your `config.yml` file. - -```python -python3 stix_taxii.py -``` - -This script supports updating each TAXII configuration's `start_date`, the date for which to start requesting data, via the command line with the argument `site_start_date`. To change the `stat_date` value for each site in your config file, you must supply the site name and desired `start_date` in `%Y-%m-%d %H:%M:%S` format. - -```python -python3 stix_taxii.py --site_start_date my_site_name_1 '2019-11-05 00:00:00' my_site_name_2 '2019-11-05 00:00:00' -``` - -This may be useful if the intention is to keep an up-to-date collection of STIX data in a ThreatHunter Feed. - -## Troubleshooting - -### Credential Error -In order to use this code, you must have CBAPI installed and configured. If you receive an authentication error, visit the Developer Network Authentication Page for [instructions on setting up authentication](https://developer.carbonblack.com/reference/carbon-black-cloud/authentication/). See [ReadTheDocs](https://cbapi.readthedocs.io/en/latest/index.html?highlight=credentials.psc#api-credentials) for instructions on configuring your credentials file. - -### 504 Gateway Timeout Error -The [Carbon Black ThreatHunter Feed Manager API](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-threathunter/latest/feed-api/) is used in this code. When posting to a Feed, there is a 60 second limit before the gateway terminates your connection. The amount of reports you can POST to a Feed is limited by your connection speed. In this case, you will have to split your threat intelligence into smaller collections until the request takes less than 60 seconds, and send each smaller collection to an individual ThreatHunter Feed. 
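For illustration only, here is a minimal sketch of the batching workaround described above; it is not part of the original connector. It assumes the `ThreatIntel` class and `push_to_cb()` from `threatintel.py` in this directory, a `my_results` list of `AnalysisResult` objects built as shown in `README.md`, and hypothetical Feed IDs and batch size that you would tune so each request completes within the 60-second window.

```python
# Sketch: split a large result list into smaller batches and send each batch
# to its own ThreatHunter Feed, as suggested in the troubleshooting note above.
from threatintel import ThreatIntel


def chunked(items, size):
    """Yield successive slices of at most `size` items from a list."""
    for start in range(0, len(items), size):
        yield items[start:start + size]


my_results = []  # populate with AnalysisResult objects as shown in README.md
feed_ids = ["FeedIdOne12345678", "FeedIdTwo12345678"]  # hypothetical: one Feed per batch
batch_size = 500                                       # hypothetical: tune for your connection speed

ti = ThreatIntel()
for feed_id, batch in zip(feed_ids, chunked(my_results, batch_size)):
    # Each request now carries fewer reports, keeping it under the gateway timeout.
    ti.push_to_cb(feed_id=feed_id, results=batch)
```

The helper `get_feed_ids.py` in this directory can be used to list the Feed IDs available to your org key.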
diff --git a/examples/DEPRECATED_threathunter/threat_intelligence/config.yml b/examples/DEPRECATED_threathunter/threat_intelligence/config.yml deleted file mode 100644 index 121b204e..00000000 --- a/examples/DEPRECATED_threathunter/threat_intelligence/config.yml +++ /dev/null @@ -1,78 +0,0 @@ -sites: - my_site_name_1: - # the feed_id of the ThreatHunter Feed you want to send ThreatIntel to - # example: 7wP8BEc2QsS8ciEqaRv7Ad - feed_id: - - # the address of the site (only server ip or dns; don't put https:// or a trailing slash) - # example: limo.anomali.com - site: - - # the path of the site for discovering what services are available - # this is supplied by your taxii provider - # example: /api/v1/taxii/taxii-discovery-service/ - discovery_path: - - # the path of the site for listing what collections are available to you - # this is supplied by your taxii provider - # example: /api/v1/taxii/collection_management/ - collection_management_path: - - # the path of the site for polling a collection - # this is supplied by your taxii provider - # example: /api/v1/taxii/poll/ - poll_path: - - # if you require https for your TAXII service connection, set to true - # defaults to true - use_https: - - # by default, we validate SSL certificates. Change to false to turn off SSL verification - ssl_verify: - - # (optional) if you need SSL certificates for authentication, set the path of the - # certificate and key here. - cert_file: - key_file: - - # (optional) how to score each result. Accepts values [1,10], and defaults to 5 - default_score: - - # (optional) username for authorization with your taxii provider - username: - - # (optional) password for authorization with your taxii provider - password: - - # (optional) specify which collections to convert to feeds (comma-delimited) - # example: Abuse_ch_Ransomware_IPs_F135, DShield_Scanning_IPs_F150 - collections: - - # the start date for which to start requesting data. - # Use %y-%m-%d %H:%M:%S format - # example: 2019-01-01 00:00:00 - start_date: - - # (optional) the minutes to advance for each request. - # If you don't have a lot of data, you could advance your requests - # to every 60 minutes, or 1440 minutes for daily chunks - # defaults to 1440 - size_of_request_in_minutes: - - # (optional) path to a CA SSL certificate - ca_cert: - - # (optional) if you need requests to go through a proxy, specify an http URL here - http_proxy_url: - - # (optional) if you need requests to go through a proxy, specify an https URL here - https_proxy_url: - - # (optional) number of reports to collect from each site. - # Leave blank for no limit - reports_limit: - - # (optional) control the number of failed attempts per-collection before giving up - # trying to get (empty/malformed) STIX data out of a TAXII server. - # defaults to 10 - fail_limit: diff --git a/examples/DEPRECATED_threathunter/threat_intelligence/feed_helper.py b/examples/DEPRECATED_threathunter/threat_intelligence/feed_helper.py deleted file mode 100644 index 1484bce3..00000000 --- a/examples/DEPRECATED_threathunter/threat_intelligence/feed_helper.py +++ /dev/null @@ -1,45 +0,0 @@ -"""Advances the `begin_date` and `end_date` fields while polling the TAXII server to iteratively get per-collection STIX content. - -This is tied to the `start_date` and `size_of_request_in_minutes` configuration options in your `config.yml`. 
-""" - -from datetime import datetime, timedelta, timezone -import logging -log = logging.getLogger(__name__) - - -class FeedHelper(): - def __init__(self, start_date, size_of_request_in_minutes): - self.size_of_request_in_minutes = size_of_request_in_minutes - if isinstance(start_date, datetime): - self.start_date = start_date.replace(tzinfo=timezone.utc) - elif isinstance(start_date, str): - self.start_date = datetime.strptime(start_date, "%Y-%m-%d %H:%M:%S").replace(tzinfo=timezone.utc) - else: - log.error(f"Start_date must be a string or datetime object. Received a start_time config value with unsupported type: {type(start_date)}") - raise ValueError - self.end_date = self.start_date + \ - timedelta(minutes=self.size_of_request_in_minutes) - self.now = datetime.utcnow().replace(tzinfo=timezone.utc) - if self.end_date > self.now: - self.end_date = self.now - self.start = False - self.done = False - - def advance(self): - """Returns True if keep going, False if we already hit the end time and cannot advance.""" - if not self.start: - self.start = True - return True - - if self.done: - return False - - # continues shifting the time window by size_of_request_in_minutes until we hit current time, then stops - self.start_date = self.end_date - self.end_date += timedelta(minutes=self.size_of_request_in_minutes) - if self.end_date > self.now: - self.end_date = self.now - self.done = True - - return True diff --git a/examples/DEPRECATED_threathunter/threat_intelligence/get_feed_ids.py b/examples/DEPRECATED_threathunter/threat_intelligence/get_feed_ids.py deleted file mode 100644 index 488a29c9..00000000 --- a/examples/DEPRECATED_threathunter/threat_intelligence/get_feed_ids.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Lists ThreatHunter Feed IDs available for results dispatch.""" - -from cbapi.psc.threathunter import CbThreatHunterAPI -from cbapi.psc.threathunter.models import Feed -import logging - -log = logging.getLogger(__name__) - - -def get_feed_ids(): - cb = CbThreatHunterAPI() - feeds = cb.select(Feed) - if not feeds: - log.info("No feeds are available for the org key {}".format(cb.credentials.org_key)) - else: - for feed in feeds: - log.info("Feed name: {:<20} \t Feed ID: {:>20}".format(feed.name, feed.id)) - - -if __name__ == '__main__': - get_feed_ids() diff --git a/examples/DEPRECATED_threathunter/threat_intelligence/requirements.txt b/examples/DEPRECATED_threathunter/threat_intelligence/requirements.txt deleted file mode 100644 index eb1beb53..00000000 --- a/examples/DEPRECATED_threathunter/threat_intelligence/requirements.txt +++ /dev/null @@ -1,10 +0,0 @@ -cybox==2.1.0.18 -dataclasses>=0.6 -cabby==0.1.20 -stix==1.2.0.7 -lxml==4.6.5 -urllib3>=1.24.2 -cbapi>=1.5.6 -python_dateutil==2.8.1 -PyYAML==5.4 -schema diff --git a/examples/DEPRECATED_threathunter/threat_intelligence/results.py b/examples/DEPRECATED_threathunter/threat_intelligence/results.py deleted file mode 100644 index e3d8d732..00000000 --- a/examples/DEPRECATED_threathunter/threat_intelligence/results.py +++ /dev/null @@ -1,81 +0,0 @@ -import enum -import logging - - -class IOC_v2(): - """Models an indicator of compromise detected during an analysis. - - Every IOC belongs to an AnalysisResult. - """ - - def __init__(self, analysis, match_type, values, field, link): - self.id = analysis - self.match_type = match_type - self.values = values - self.field = field - self.link = link - - class MatchType(str, enum.Enum): - """ - Represents the valid matching strategies for an IOC. 
- """ - - Equality: str = "equality" - Regex: str = "regex" - Query: str = "query" - - def as_dict(self): - return { - "id": str(self.id), - "match_type": self.match_type, - "values": list(self.values), - "field": self.field, - "link": self.link, - } - - -class AnalysisResult(): - """Models the result of an analysis performed by a connector.""" - - def __init__(self, analysis_name, scan_time, score, title, description): - self.id = str(analysis_name) - self.timestamp = scan_time - self.title = title - self.description = description - self.severity = score - self.iocs = [] - self.iocs_v2 = [] - self.link = None - self.tags = None - self.visibility = None - self.connector_name = "STIX_TAXII" - - def attach_ioc_v2(self, *, match_type=IOC_v2.MatchType.Equality, values, field, link): - self.iocs_v2.append(IOC_v2(analysis=self.id, match_type=match_type, values=values, field=field, link=link)) - - def normalize(self): - """Normalizes this result to make it palatable for the CbTH backend.""" - - if self.severity <= 0 or self.severity > 10: - logging.warning("normalizing OOB score: {}".format(self.severity)) - if self.severity > 10 and self.severity < 100: - #assume it's a percentage - self.severity = round(self.severity/10) - else: - # any severity above 10 becomes 10, or below 1 becomes 1 - # Report severity must be between 1 & 10, else CBAPI throws 400 error - self.severity = max(1, min(self.severity, 10)) - return self - - def as_dict(self): - return {"IOCs_v2": [ioc_v2.as_dict() for ioc_v2 in self.iocs_v2], **super().as_dict()} - - def as_dict_full(self): - return { - "id": self.id, - "timestamp": self.timestamp, - "title": self.title, - "description": self.description, - "severity": self.severity, - "iocs_v2": [iocv2.as_dict() for iocv2 in self.iocs_v2] - } diff --git a/examples/DEPRECATED_threathunter/threat_intelligence/schemas.py b/examples/DEPRECATED_threathunter/threat_intelligence/schemas.py deleted file mode 100644 index da361604..00000000 --- a/examples/DEPRECATED_threathunter/threat_intelligence/schemas.py +++ /dev/null @@ -1,44 +0,0 @@ -from schema import And, Or, Optional, Schema - - -IOCv2Schema = Schema( - { - "id": And(str, len), - "match_type": And(str, lambda type: type in ["query", "equality", "regex"]), - "values": And([str], len), - Optional("field"): str, - Optional("link"): str - } -) - -QueryIOCSchema = Schema( - { - "search_query": And(str, len), - Optional("index_type"): And(str, len) - } -) - -IOCSchema = Schema( - { - Optional("md5"): And([str], len), - Optional("ipv4"): And([str], len), - Optional("ipv6"): And([str], len), - Optional("dns"): And([str], len), - Optional("query"): [QueryIOCSchema] - } -) - -ReportSchema = Schema( - { - "id": And(str, len), - "timestamp": And(int, lambda n: n > 0), - "title": And(str, len), - "description": And(str, len), - "severity": And(int, lambda n: n > 0 and n < 11), - Optional("link"): str, - Optional("tags"): [str], - Optional("iocs_v2"): [IOCv2Schema], - Optional("iocs"): IOCSchema, - Optional("visibility"): str - } -) diff --git a/examples/DEPRECATED_threathunter/threat_intelligence/stix_parse.py b/examples/DEPRECATED_threathunter/threat_intelligence/stix_parse.py deleted file mode 100644 index 8663c549..00000000 --- a/examples/DEPRECATED_threathunter/threat_intelligence/stix_parse.py +++ /dev/null @@ -1,466 +0,0 @@ -"""Parses STIX observables from the XML data returned by the TAXII server. 
- -The following IOC types are extracted from STIX data: - -* MD5 Hashes -* Domain Names -* IP-Addresses -* IP-Address Ranges -""" - -from cybox.objects.domain_name_object import DomainName -from cybox.objects.address_object import Address -from cybox.objects.file_object import File -from cybox.objects.uri_object import URI -from lxml import etree -from io import BytesIO -from stix.core import STIXPackage - -import logging -import string -import socket -import uuid -import time -import datetime -import dateutil -import dateutil.tz -import re - -from cabby.constants import ( - CB_STIX_XML_111, CB_CAP_11, CB_SMIME, - CB_STIX_XML_10, CB_STIX_XML_101, CB_STIX_XML_11, CB_XENC_122002) - -CB_STIX_XML_12 = 'urn:stix.mitre.org:xml:1.2' - -BINDING_CHOICES = [CB_STIX_XML_111, CB_CAP_11, CB_SMIME, CB_STIX_XML_12, - CB_STIX_XML_10, CB_STIX_XML_101, CB_STIX_XML_11, - CB_XENC_122002] - - -logger = logging.getLogger(__name__) - - -domain_allowed_chars = string.printable[:-6] # Used by validate_domain_name function - - -def validate_domain_name(domain_name): - """Validates a domain name to ensure validity and saneness. - - Args: - domain_name: Domain name string to check. - - Returns: - True if checks pass, False otherwise. - """ - - if len(domain_name) > 255: - logger.warn( - "Excessively long domain name {} in IOC list".format(domain_name)) - return False - - if not all([c in domain_allowed_chars for c in domain_name]): - logger.warn("Malformed domain name {} in IOC list".format(domain_name)) - return False - - parts = domain_name.split('.') - if not parts: - logger.warn("Empty domain name found in IOC list") - return False - - for part in parts: - if len(part) < 1 or len(part) > 63: - logger.warn("Invalid label length {} in domain name {} for report %s".format( - part, domain_name)) - return False - - return True - - -def validate_md5sum(md5): - """Validates md5sum. - - Args: - md5sum: md5sum to check. - - Returns: - True if checks pass, False otherwise. - """ - - if 32 != len(md5): - logger.warn("Invalid md5 length for md5 {}".format(md5)) - return False - if not md5.isalnum(): - logger.warn("Malformed md5 {} in IOC list".format(md5)) - return False - for c in "ghijklmnopqrstuvwxyz": - if c in md5 or c.upper() in md5: - logger.warn("Malformed md5 {} in IOC list".format(md5)) - return False - - return True - - -def sanitize_id(id): - """Removes unallowed chars from an ID. - - Ids may only contain a-z, A-Z, 0-9, - and must have one character. - - Args: - id: the ID to be sanitized. - - Returns: - A sanitized ID. - """ - - return id.replace(':', '-') - - -def validate_ip_address(ip_address): - """Validates an IPv4 address.""" - - try: - socket.inet_aton(ip_address) - return True - except socket.error: - return False - - -def cybox_parse_observable(observable, indicator, timestamp, score): - """Parses a cybox observable and returns a list containing a report dictionary. - - cybox is a open standard language encoding info about cyber observables. - - Args: - observable: the cybox obserable to parse. - - Returns: - A report dictionary if the cybox observable has props of type: - - cybox.objects.address_object.Address, - cybox.objects.file_object.File, - cybox.objects.domain_name_object.DomainName, or - cybox.objects.uri_object.URI - - Otherwise it will return an empty list. 
- - """ - reports = [] - - if observable.object_ and observable.object_.properties: - props = observable.object_.properties - logger.debug("{0} has props type: {1}".format(indicator, type(props))) - else: - logger.debug("{} has no props; skipping".format(indicator)) - return reports - - # - # sometimes the description is None - # - description = '' - if observable.description and observable.description.value: - description = str(observable.description.value) - - # - # if description is an empty string, then use the indicator's description - # NOTE: This was added for RecordedFuture - # - - if not description and indicator and indicator.description and indicator.description.value: - description = str(indicator.description.value) - - # - # if description is still empty, use the indicator's title - # - if not description and indicator and indicator.title: - description = str(indicator.title) - - # - # use the first reference as a link - # This was added for RecordedFuture - # - link = '' - if indicator and indicator.producer and indicator.producer.references: - for reference in indicator.producer.references: - link = reference - break - else: - if indicator and indicator.title: - split_title = indicator.title.split() - title_found = True - elif observable and observable.title: - split_title = observable.title.split() - title_found = True - else: - title_found = False - - if title_found: - url_pattern = re.compile("^(http:\/\/www\.|https:\/\/www\.|http:\/\/|https:\/\/)?[a-z0-9]+([\-\.]{1}[a-z0-9]+)*\.[a-z]{2,5}(:[0-9]{1,5})?(\/.*)?$") - for token in split_title: - if url_pattern.match(token): - link = token - break - - - # - # Sometimes the title is None, so generate a random UUID - # - - if observable and observable.title: - title = observable.title - else: - title = str(uuid.uuid4()) - - # ID must be unique. 
Collisions cause 500 error on Carbon Black backend - id = str(uuid.uuid4()) - - if type(props) == DomainName: - # go into domainname function - reports = parse_domain_name_observable(observable, props, id, description, title, timestamp, link, score) - - elif type(props) == Address: - reports = parse_address_observable(observable, props, id, description, title, timestamp, link, score) - - elif type(props) == File: - reports = parse_file_observable(observable, props, id, description, title, timestamp, link, score) - - elif type(props) == URI: - reports = parse_uri_observable(observable, props, id, description, title, timestamp, link, score) - - else: - return reports - - return reports - -def parse_uri_observable(observable, props, id, description, title, timestamp, link, score): - - reports = [] - - if props.value and props.value.value: - - iocs = {'netconn_domain': []} - # - # Sometimes props.value.value is a list - # - - if type(props.value.value) is list: - for domain_name in props.value.value: - if validate_domain_name(domain_name.strip()): - iocs['netconn_domain'].append(domain_name.strip()) - else: - domain_name = props.value.value.strip() - if validate_domain_name(domain_name): - iocs['netconn_domain'].append(domain_name) - - if len(iocs['netconn_domain']) > 0: - reports.append({'iocs_v2': iocs, - 'id': sanitize_id(id), - 'description': description, - 'title': title, - 'timestamp': timestamp, - 'link': link, - 'score': score}) - return reports - - -def parse_domain_name_observable(observable, props, id, description, title, timestamp, link, score): - - reports = [] - if props.value and props.value.value: - iocs = {'netconn_domain': []} - # - # Sometimes props.value.value is a list - # - - if type(props.value.value) is list: - for domain_name in props.value.value: - if validate_domain_name(domain_name.strip()): - iocs['netconn_domain'].append(domain_name.strip()) - else: - domain_name = props.value.value.strip() - if validate_domain_name(domain_name): - iocs['netconn_domain'].append(domain_name) - - if len(iocs['netconn_domain']) > 0: - reports.append({'iocs_v2': iocs, - 'id': sanitize_id(id), - 'description': description, - 'title': title, - 'timestamp': timestamp, - 'link': link, - 'score': score}) - return reports - - -def parse_address_observable(observable, props, id, description, title, timestamp, link, score): - - reports = [] - if props.category == 'ipv4-addr' and props.address_value: - iocs = {'netconn_ipv4': []} - - # - # Sometimes props.address_value.value is a list vs a string - # - if type(props.address_value.value) is list: - for ip in props.address_value.value: - if validate_ip_address(ip.strip()): - iocs['netconn_ipv4'].append(ip.strip()) - else: - ipv4 = props.address_value.value.strip() - if validate_ip_address(ipv4): - iocs['netconn_ipv4'].append(ipv4) - - if len(iocs['netconn_ipv4']) > 0: - reports.append({'iocs_v2': iocs, - 'id': sanitize_id(observable.id_), - 'description': description, - 'title': title, - 'timestamp': timestamp, - 'link': link, - 'score': score}) - - return reports - - -def parse_file_observable(observable, props, id, description, title, timestamp, link, score): - - reports = [] - iocs = {'hash': []} - if props.md5: - if type(props.md5) is list: - for hash in props.md5: - if validate_md5sum(hash.strip()): - iocs['hash'].append(hash.strip()) - else: - if hasattr(props.md5, 'value'): - hash = props.md5.value.strip() - else: - hash = props.md5.strip() - if validate_md5sum(hash): - iocs['hash'].append(hash) - - if len(iocs['hash']) > 0: - 
reports.append({'iocs_v2': iocs, - 'id': sanitize_id(id), - 'description': description, - 'title': title, - 'timestamp': timestamp, - 'link': link, - 'score': score}) - - return reports - - -def get_stix_indicator_score(indicator, default_score): - """Returns a digit representing the indicator score. - - Converts from "high", "medium", or "low" into a digit, if necessary. - """ - - if not indicator.confidence: - return default_score - - - confidence_val_str = indicator.confidence.value.__str__() - if confidence_val_str.isdigit(): - score = int(confidence_val_str) - return score - elif confidence_val_str.lower() == "high": - return 7 # 75 - elif confidence_val_str.lower() == "medium": - return 5 # 50 - elif confidence_val_str.lower() == "low": - return 2 # 25 - else: - return default_score - - -def get_stix_indicator_timestamp(indicator): - timestamp = 0 - if indicator.timestamp: - if indicator.timestamp.tzinfo: - timestamp = int((indicator.timestamp - - datetime.datetime(1970, 1, 1).replace( - tzinfo=dateutil.tz.tzutc())).total_seconds()) - else: - timestamp = int((indicator.timestamp - - datetime.datetime(1970, 1, 1)).total_seconds()) - return timestamp - - -def get_stix_package_timestamp(stix_package): - timestamp = 0 - if not stix_package or not stix_package.timestamp: - return timestamp - try: - timestamp = stix_package.timestamp - timestamp = int(time.mktime(timestamp.timetuple())) - except (TypeError, OverflowError, ValueError) as e: - logger.warning("Problem parsing stix timestamp: {}".format(e)) - return timestamp - - -def parse_stix_indicators(stix_package, default_score): - reports = [] - if not stix_package.indicators: - return reports - - for indicator in stix_package.indicators: - if not indicator or not indicator.observable: - continue - score = get_stix_indicator_score(indicator, default_score) - timestamp = get_stix_indicator_timestamp(indicator) - yield from cybox_parse_observable( - indicator.observable, indicator, timestamp, score) - - -def parse_stix_observables(stix_package, default_score): - reports = [] - if not stix_package.observables: - return reports - - timestamp = get_stix_package_timestamp(stix_package) - for observable in stix_package.observables: - if not observable: - continue - yield from cybox_parse_observable( # single element list - observable, None, timestamp, default_score) - - -def sanitize_stix(stix_xml): - ret_xml = b'' - try: - xml_root = etree.fromstring(stix_xml) - content = xml_root.find( - './/{http://taxii.mitre.org/messages/taxii_xml_binding-1.1}Content') - if content is not None and len(content) == 0 and len(list(content)) == 0: - # Content has no children. 
- # So lets make sure we parse the xml text for content and - # re-add it as valid XML so we can parse - _content = xml_root.find( - "{http://taxii.mitre.org/messages/taxii_xml_binding-1.1}Content_Block/{http://taxii.mitre.org/messages/taxii_xml_binding-1.1}Content") - if _content: - new_stix_package = etree.fromstring(_content.text) - content.append(new_stix_package) - ret_xml = etree.tostring(xml_root) - except etree.ParseError as e: - logger.warning("Problem parsing stix: {}".format(e)) - return ret_xml - - -def parse_stix(stix_xml, default_score): - reports = [] - try: - stix_xml = sanitize_stix(stix_xml) - bio = BytesIO(stix_xml) - stix_package = STIXPackage.from_xml(bio) - if not stix_package: - logger.warning("Could not parse STIX xml") - return reports - if not stix_package.indicators and not stix_package.observables: - logger.info("No indicators or observables found in stix_xml") - return reports - yield from parse_stix_indicators(stix_package, default_score) - yield from parse_stix_observables(stix_package, default_score) - except etree.XMLSyntaxError as e: - logger.warning("Problem parsing stix: {}".format(e)) - return reports diff --git a/examples/DEPRECATED_threathunter/threat_intelligence/stix_taxii.py b/examples/DEPRECATED_threathunter/threat_intelligence/stix_taxii.py deleted file mode 100644 index 95358071..00000000 --- a/examples/DEPRECATED_threathunter/threat_intelligence/stix_taxii.py +++ /dev/null @@ -1,392 +0,0 @@ -"""Connects to TAXII servers via cabby and formats the data received for dispatching to a Carbon Black feed.""" - -import argparse -import logging -import traceback -from threatintel import ThreatIntel -from cabby.exceptions import NoURIProvidedError, ClientException -from requests.exceptions import ConnectionError -from cbapi.errors import ApiError -from cabby import create_client -from dataclasses import dataclass -import yaml -import os -from stix_parse import parse_stix, BINDING_CHOICES -from feed_helper import FeedHelper -from datetime import datetime -from results import AnalysisResult -from cbapi.psc.threathunter.models import Feed -import urllib3 -import copy - -# logging.basicConfig(filename='stix.log', filemode='w', level=logging.DEBUG) -logging.basicConfig(filename='stix.log', filemode='w', format='%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s', - datefmt='%Y-%m-%d:%H:%M:%S', - level=logging.INFO) -handled_exceptions = (NoURIProvidedError, ClientException, ConnectionError) - - -def load_config_from_file(): - """Loads YAML formatted configuration from config.yml in working directory.""" - - logging.debug("loading config from file") - config_filename = os.path.join(os.path.dirname((os.path.abspath(__file__))), "config.yml") - with open(config_filename, "r") as config_file: - config_data = yaml.load(config_file, Loader=yaml.SafeLoader) - config_data_without_none_vals = copy.deepcopy(config_data) - for site_name, site_config_dict in config_data['sites'].items(): - for conf_key, conf_value in site_config_dict.items(): - if conf_value is None: - del config_data_without_none_vals['sites'][site_name][conf_key] - logging.info(f"loaded config data: {config_data_without_none_vals}") - return config_data_without_none_vals - - -@dataclass(eq=True, frozen=True) -class TaxiiSiteConfig: - """Contains information needed to interface with a TAXII server. - - These values are loaded in from config.yml for each entry in the configuration file. - Each TaxiiSiteConnector has its own TaxiiSiteConfig. 
- """ - - feed_id: str = '' - site: str = '' - discovery_path: str = '' - collection_management_path: str = '' - poll_path: str = '' - use_https: bool = True - ssl_verify: bool = True - cert_file: str = None - key_file: str = None - default_score: int = 5 # [1,10] - username: str = None - password: str = None - collections: str = '*' - start_date: str = None - size_of_request_in_minutes: int = 1440 - ca_cert: str = None - http_proxy_url: str = None - https_proxy_url: str = None - reports_limit: int = None - fail_limit: int = 10 # num attempts per collection for polling & parsing - - -class TaxiiSiteConnector(): - """Connects to and pulls data from a TAXII server.""" - - def __init__(self, site_conf): - self.config = TaxiiSiteConfig(**site_conf) - self.client = None - - def create_taxii_client(self): - """Connects to a TAXII server using cabby and configuration entries.""" - - conf = self.config - if not conf.start_date: - logging.error(f"A start_date is required for site {conf.site}. Exiting.") - return - if not conf.ssl_verify: - urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) - try: - client = create_client(conf.site, - use_https=conf.use_https, - discovery_path=conf.discovery_path) - client.set_auth(username=conf.username, - password=conf.password, - verify_ssl=conf.ssl_verify, - ca_cert=conf.ca_cert, - cert_file=conf.cert_file, - key_file=conf.key_file) - - proxy_dict = dict() - if conf.http_proxy_url: - proxy_dict['http'] = conf.http_proxy_url - if conf.https_proxy_url: - proxy_dict['https'] = conf.https_proxy_url - if proxy_dict: - client.set_proxies(proxy_dict) - - self.client = client - - except handled_exceptions as e: - logging.error(f"Error creating client: {e}") - - def create_uri(self, config_path): - """Formats a URI for discovery, collection, or polling of a TAXII server. - - Args: - config_path: A URI path to a TAXII server's discovery, collection, or polling service. Defined in config.yml configuration file. - - Returns: - A full URI to one of a TAXII server's service paths. - """ - - uri = None - if self.config.site and config_path: - if self.config.use_https: - uri = 'https://' - else: - uri = 'http://' - uri = uri + self.config.site + config_path - return uri - - def query_collections(self): - """Returns a list of STIX collections available to the user to poll.""" - - collections = [] - try: - uri = self.create_uri(self.config.collection_management_path) - collections = self.client.get_collections( - uri=uri) # autodetect if uri=None - for collection in collections: - logging.info(f"Collection: {collection.name}, {collection.type}") - except handled_exceptions as e: - logging.warning(f"Problem fetching collections from TAXII server. Check your TAXII Provider URL and username/password (if required to access TAXII server): {e}") - return collections - - def poll_server(self, collection, feed_helper): - """Returns a STIX content block for a specific TAXII collection. - - Args: - collection: Name of a TAXII collection to poll. - feed_helper: FeedHelper object. 
- """ - - content_blocks = [] - uri = self.create_uri(self.config.poll_path) - try: - logging.info(f"Polling Collection: {collection.name}") - content_blocks = self.client.poll( - uri=uri, - collection_name=collection.name, - begin_date=feed_helper.start_date, - end_date=feed_helper.end_date, - content_bindings=BINDING_CHOICES) - except handled_exceptions as e: - logging.warning(f"problem polling taxii server: {e}") - return content_blocks - - def parse_collection_content(self, content_blocks): - """Yields a formatted report dictionary for each STIX content_block. - - Args: - content_block: A chunk of STIX data from the TAXII collection being polled. - """ - - for block in content_blocks: - yield from parse_stix(block.content, self.config.default_score) - - def import_collection(self, collection): - """Polls a single TAXII server collection. - - Starting at the start_date set in config.yml, a FeedHelper object will continue to grab chunks - of data from a collection until the report limit is reached or we reach the current datetime. - - Args: - collection: Name of a TAXII collection to poll. - - Yields: - Formatted report dictionaries from parse_collection_content(content_blocks) - for each content_block pulled from a single TAXII collection. - """ - - num_times_empty_content_blocks = 0 - advance = True - reports_limit = self.config.reports_limit - if not self.config.size_of_request_in_minutes: - size_of_request_in_minutes = 1440 - else: - size_of_request_in_minutes = self.config.size_of_request_in_minutes - feed_helper = FeedHelper(self.config.start_date, - size_of_request_in_minutes) - # config parameters `start_date` and `size_of_request_in_minutes` tell this Feed Helper - # where to start polling in the collection, and then will advance polling in chunks of - # `size_of_request_in_minutes` until we hit the most current `content_block`, - # or reports_limit is reached. - while feed_helper.advance(): - num_reports = 0 - num_times_empty_content_blocks = 0 - content_blocks = self.poll_server(collection, feed_helper) - reports = self.parse_collection_content(content_blocks) - for report in reports: - yield report - num_reports += 1 - if reports_limit is not None and num_reports >= reports_limit: - logging.info(f"Reports limit of {self.config.reports_limit} reached") - advance = False - break - - if not advance: - break - if collection.type == 'DATA_SET': # data is unordered, not a feed - logging.info(f"collection:{collection}; type data_set; breaking") - break - if num_reports == 0: - num_times_empty_content_blocks += 1 - if num_times_empty_content_blocks > self.config.fail_limit: - logging.error('Max fail limit reached; Exiting.') - break - if reports_limit is not None: - reports_limit -= num_reports - - def import_collections(self, available_collections): - """Polls each desired collection specified in config.yml. - - Args: - available_collections: list of collections available to a TAXII server user. - - Yields: - From import_collection(self, collection) for each desired collection. 
- """ - - if not self.config.collections: - desired_collections = '*' - else: - desired_collections = self.config.collections - - desired_collections = [x.strip() - for x in desired_collections.lower().split(',')] - - want_all = True if '*' in desired_collections else False - - for collection in available_collections: - if collection.type != 'DATA_FEED' and collection.type != 'DATA_SET': - logging.debug(f"collection:{collection}; type not feed or data") - continue - if not collection.available: - logging.debug(f"collection:{collection} not available") - continue - if want_all or collection.name.lower() in desired_collections: - yield from self.import_collection(collection) - - def generate_reports(self): - """Returns a list of report dictionaries for each desired collection specified in config.yml.""" - - reports = [] - - self.create_taxii_client() - if not self.client: - logging.error('Unable to create taxii client.') - return reports - - available_collections = self.query_collections() - if not available_collections: - logging.warning('Unable to find any collections.') - return reports - - reports = self.import_collections(available_collections) - if not reports: - logging.warning('Unable to import collections.') - return reports - - return reports - - -class StixTaxii(): - """Allows for interfacing with multiple TAXII servers. - - Instantiates separate TaxiiSiteConnector objects for each site specified in config.yml. - Formats report dictionaries into AnalysisResult objects with formatted IOC_v2 attirbutes. - Sends AnalysisResult objects to ThreatIntel.push_to_cb for dispatching to a feed. - """ - - def __init__(self, site_confs): - self.config = site_confs - self.client = None - - def result(self, **kwargs): - """Returns a new AnalysisResult with the given fields populated.""" - - result = AnalysisResult(**kwargs).normalize() - return result - - def configure_sites(self): - """Creates a TaxiiSiteConnector for each site in config.yml""" - - self.sites = {} - try: - for site_name, site_conf in self.config['sites'].items(): - self.sites[site_name] = TaxiiSiteConnector(site_conf) - logging.info(f"loaded site {site_name}") - except handled_exceptions as e: - - logging.error(f"Error in parsing config file: {e}") - - def format_report(self, reports): - """Converts a dictionary into an AnalysisResult. - - Args: - reports: list of report dictionaries containing an id, title, description, timestamp, score, link, and iocs_v2. - - Yields: - An AnalysisResult for each report dictionary. 
- """ - - for report in reports: - try: - analysis_name = report['id'] - title = report['title'] - description = report['description'] - scan_time = datetime.fromtimestamp(report['timestamp']) - score = report['score'] - link = report['link'] - ioc_dict = report['iocs_v2'] - result = self.result( - analysis_name=analysis_name, - scan_time=scan_time, - score=score, - title=title, - description=description) - for ioc_key, ioc_val in ioc_dict.items(): - result.attach_ioc_v2(values=ioc_val, field=ioc_key, link=link) - except handled_exceptions as e: - logging.warning(f"Problem in report formatting: {e}") - result = self.result( - analysis_name="exception_format_report", error=True) - yield result - - def collect_and_send_reports(self): - """Collects and sends formatted reports to ThreatIntel.push_to_cb for validation and dispatching to a feed.""" - - self.configure_sites() - ti = ThreatIntel() - for site_name, site_conn in self.sites.items(): - logging.debug(f"Verifying Feed {site_conn.config.feed_id} exists") - try: - ti.verify_feed_exists(site_conn.config.feed_id) - except ApiError as e: - logging.error(f"Couldn't find CbTH Feed {site_conn.config.feed_id}. Skipping {site_name}: {e}") - continue - logging.info(f"Talking to {site_name} server") - reports = site_conn.generate_reports() - if not reports: - logging.error(f"No reports generated for {site_name}") - continue - else: - try: - ti.push_to_cb(feed_id=site_conn.config.feed_id, results=self.format_report(reports)) - except Exception as e: - logging.error(traceback.format_exc()) - logging.error(f"Failed to push reports to feed {site_conn.config.feed_id}: {e}") -if __name__ == '__main__': - - parser = argparse.ArgumentParser(description='Modify configuration values via command line.') - parser.add_argument('--site_start_date', metavar='s', nargs='+', - help='the site name and desired start date to begin polling from') - args = parser.parse_args() - - config = load_config_from_file() - - if args.site_start_date: - for index in range(len(args.site_start_date)): - arg = args.site_start_date[index] - if arg in config['sites']: # if we see a name that matches a site Name - try: - new_time = datetime.strptime(args.site_start_date[index+1], "%Y-%m-%d %H:%M:%S") - config['sites'][arg]['start_date'] = new_time - logging.info(f"Updated the start_date for {arg} to {new_time}") - except ValueError as e: - logging.error(f"Failed to update start_date for {arg}: {e}") - stix_taxii = StixTaxii(config) - stix_taxii.collect_and_send_reports() diff --git a/examples/DEPRECATED_threathunter/threat_intelligence/threatintel.py b/examples/DEPRECATED_threathunter/threat_intelligence/threatintel.py deleted file mode 100644 index c00672f9..00000000 --- a/examples/DEPRECATED_threathunter/threat_intelligence/threatintel.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Validates result dictionaries, creates ThreatHunter Reports, validates ThreatHunter Reports, and sends them to a ThreatHunter Feed. - -Also allows for conversion from result dictionaries into ThreatHunter `Report` objects. 
-""" - -import logging -import json -from cbapi.psc.threathunter import CbThreatHunterAPI, Report -from cbapi.errors import ApiError -from cbapi.psc.threathunter.models import Feed -from schemas import ReportSchema -from schema import SchemaError - -log = logging.getLogger(__name__) - - -class ThreatIntel: - def __init__(self): - self.cb = CbThreatHunterAPI(timeout=200) - - def verify_feed_exists(self, feed_id): - """Verify that a Feed exists.""" - try: - feed = self.cb.select(Feed, feed_id) - return feed - except ApiError: - raise ApiError - - def push_to_cb(self, feed_id, results): - feed = self.verify_feed_exists(feed_id) # will raise an ApiError if the feed cannot be found - if not feed: - return - report_list_to_send = [] - reports = [] - malformed_reports = [] - - for result in results: - try: - report_dict = { - "id": str(result.id), - "timestamp": int(result.timestamp.timestamp()), - "title": str(result.title), - "description": str(result.description), - "severity": int(result.severity), - "iocs_v2": [ioc_v2.as_dict() for ioc_v2 in result.iocs_v2] - } - try: - ReportSchema.validate(report_dict) - # create CB Report object - report = Report(self.cb, initial_data=report_dict, feed_id=feed_id) - report_list_to_send.append(report) - reports.append(report_dict) - except SchemaError as e: - log.warning("Report Validation failed. Saving report to file for reference.") - malformed_reports.append(report_dict) - except Exception as e: - log.error(f"Failed to create a report dictionary from result object. {e}") - - log.debug(f"Num Reports: {len(report_list_to_send)}") - try: - with open('reports.json', 'w') as f: - json.dump(reports, f, indent=4) - except Exception as e: - log.error(f"Failed to write reports to file: {e}") - - log.debug("Sending results to Carbon Black Cloud.") - - if report_list_to_send: - try: - feed.append_reports(report_list_to_send) - log.info(f"Appended {len(report_list_to_send)} reports to ThreatHunter Feed {feed_id}") - except Exception as e: - log.debug(f"Failed sending {len(report_list_to_send)} reports: {e}") - - if malformed_reports: - log.warning("Some report(s) failed validation. 
See malformed_reports.json for reports that failed.") - try: - with open('malformed_reports.json', 'w') as f: - json.dump(malformed_reports, f, indent=4) - except Exception as e: - log.error(f"Failed to write malformed_reports to file: {e}") - diff --git a/examples/DEPRECATED_threathunter/watchlist_operations.py b/examples/DEPRECATED_threathunter/watchlist_operations.py deleted file mode 100644 index 096690f1..00000000 --- a/examples/DEPRECATED_threathunter/watchlist_operations.py +++ /dev/null @@ -1,327 +0,0 @@ -#!/usr/bin/env python -# - -import sys -from cbapi.psc.threathunter.models import Watchlist, Report, Feed -from cbapi.example_helpers import eprint, read_iocs, build_cli_parser, get_cb_threathunter_object -from cbapi.errors import ObjectNotFoundError -import logging -import json -import time -import hashlib - -log = logging.getLogger(__name__) - - -def get_watchlist(cb, watchlist_id=None, watchlist_name=None): - if watchlist_id: - return cb.select(Watchlist, watchlist_id) - elif watchlist_name: - feeds = [feed for feed in cb.select(Watchlist) if feed.name == watchlist_name] - - if not feeds: - eprint("No watchlist named {}".format(watchlist_name)) - sys.exit(1) - elif len(feeds) > 1: - eprint("More than one feed named {}, not continuing".format(watchlist_name)) - sys.exit(1) - - return feeds[0] - else: - raise ValueError("expected either watchlist_id or watchlist_name") - - -def get_report(watchlist, report_id=None, report_name=None): - if report_id: - reports = [report for report in watchlist.reports if report.id == report_id] - elif report_name: - reports = [report for report in watchlist.reports if report.title == report_name] - else: - raise ValueError("expected either report_id or report_name") - - if not reports: - eprint("No matching reports found.") - sys.exit(1) - if len(reports) > 1: - eprint("More than one matching report found.") - sys.exit(1) - - return reports[0] - - -def get_report_feed(watchlist, report_id=None, report_name=None): - reports = watchlist.feed.reports - - if report_id: - reports = [report for report in reports if report.id == report_id] - elif report_name: - reports = [report for report in reports if report.title == report_name] - else: - raise ValueError("expected either report_id or report_name") - - if not reports: - eprint("No matching reports found.") - sys.exit(1) - if len(reports) > 1: - eprint("More than one matching report found.") - sys.exit(1) - - return reports[0] - - -def list_watchlists(cb, parser, args): - watchlists = cb.select(Watchlist) - - for watchlist in watchlists: - print(watchlist) - if args.reports: - for report in watchlist.reports: - print(report) - if watchlist.feed: - for report in watchlist.feed.reports: - print(report) - - -def subscribe_watchlist(cb, parser, args): - try: - cb.select(Feed, args.feed_id) - except ObjectNotFoundError: - eprint("Nonexistent or private feed: {}".format(args.feed_id)) - sys.exit(1) - - classifier = { - "key": "feed_id", - "value": args.feed_id, - } - - watchlist_dict = { - "name": args.watchlist_name, - "description": args.description, - "tags_enabled": args.tags, - "alerts_enabled": args.alerts, - "create_timestamp": args.timestamp, - "last_update_timestamp": args.last_update, - "report_ids": [], - "classifier": classifier, - } - - watchlist = cb.create(Watchlist, watchlist_dict) - watchlist.save() - - -def create_watchlist(cb, parser, args): - watchlist_dict = { - "name": args.watchlist_name, - "description": args.description, - "tags_enabled": args.tags, - "alerts_enabled": args.alerts, - 
"create_timestamp": args.timestamp, - "last_update_timestamp": args.last_update, - "report_ids": [], - "classifier": None, - } - - rep_tags = [] - if args.rep_tags: - rep_tags = args.rep_tags.split(",") - - report_dict = { - "timestamp": args.rep_timestamp, - "title": args.rep_title, - "description": args.rep_desc, - "severity": args.rep_severity, - "link": args.rep_link, - "tags": rep_tags, - "iocs_v2": [], # NOTE(ww): The feed server will convert IOCs to v2s for us. - } - - report_id, iocs = read_iocs(cb) - - report_dict["id"] = report_id - report_dict["iocs"] = iocs - - report = cb.create(Report, report_dict) - report.save_watchlist() - - watchlist_dict["report_ids"].append(report.id) - watchlist = cb.create(Watchlist, watchlist_dict) - watchlist.save() - - -def delete_watchlist(cb, parser, args): - watchlist = get_watchlist(cb, watchlist_id=args.watchlist_id, watchlist_name=args.watchlist_name) - - if args.reports: - [report.delete() for report in watchlist.reports] - - watchlist.delete() - - -def alter_report(cb, parser, args): - watchlist = get_watchlist(cb, watchlist_id=args.watchlist_id) - - if watchlist.reports: - report = get_report(watchlist, report_id=args.report_id) - else: - report = get_report_feed(watchlist, report_id=args.report_id) - - if args.severity: - if watchlist.reports: - report.update(severity=args.severity) - else: - report.custom_severity = args.severity - - if args.activate: - report.unignore() - elif args.deactivate: - report.ignore() - - -def alter_ioc(cb, parser, args): - watchlist = get_watchlist(cb, watchlist_id=args.watchlist_id) - report = get_report(watchlist, report_id=args.report_id) - - iocs = [ioc for ioc in report.iocs_ if ioc.id == args.ioc_id] - - if not iocs: - eprint("No IOC with ID {} found.".format(args.ioc_id)) - sys.exit(1) - elif len(iocs) > 1: - eprint("More than one IOC with ID {} found.".format(args.ioc_id)) - sys.exit(1) - - if args.activate: - iocs[0].unignore() - elif args.deactivate: - iocs[0].ignore() - - -def export_watchlist(cb, parser, args): - watchlist = get_watchlist(cb, watchlist_id=args.watchlist_id, watchlist_name=args.watchlist_name) - exported = { - 'watchlist': watchlist._info, - } - - exported['reports'] = [report._info for report in watchlist.reports] - - print(json.dumps(exported)) - - -def import_watchlist(cb, parser, args): - imported = json.loads(sys.stdin.read()) - - # clear any report IDs, since we'll regenerate them - imported["watchlist"]["report_ids"].clear() - - watchlist = cb.create(Watchlist, imported['watchlist']) - watchlist.save() - - # import each report and extract its new ID - report_ids = [] - for rep_dict in imported["reports"]: - - # NOTE(ww): Previous versions of the CbTH Watchlist API weren't - # generating IOC IDs on the server side. If they don't show up - # in our import, generate them manually. 
- for ioc in rep_dict["iocs_v2"]: - if not ioc["id"]: - ioc_id = hashlib.md5() - ioc_id.update(str(time.time()).encode("utf-8")) - [ioc_id.update(value.encode("utf-8")) for value in ioc["values"]] - ioc["id"] = ioc_id.hexdigest() - report = cb.create(Report, rep_dict) - report.save_watchlist() - report_ids.append(report.id) - - # finally, update our new watchlist with the imported reports - if report_ids: - watchlist.update(report_ids=report_ids) - - -def main(): - parser = build_cli_parser() - commands = parser.add_subparsers(help="Feed commands", dest="command_name") - - list_command = commands.add_parser("list", help="List all configured watchlists") - list_command.add_argument("-r", "--reports", action="store_true", help="List reports for each watchlist", - default=False) - - subscribe_command = commands.add_parser("subscribe", help="Create a watchlist with a feed") - subscribe_command.add_argument("-i", "--feed_id", type=str, help="The Feed ID", required=True) - subscribe_command.add_argument("-w", "--watchlist_name", type=str, help="Watchlist name", required=True) - subscribe_command.add_argument("-d", "--description", type=str, help="Watchlist description", required=True) - subscribe_command.add_argument("-t", "--tags", action="store_true", help="Enable tags", default=False) - subscribe_command.add_argument("-a", "--alerts", action="store_true", help="Enable alerts", default=False) - subscribe_command.add_argument("-T", "--timestamp", type=int, help="Creation timestamp", default=int(time.time())) - subscribe_command.add_argument("-U", "--last_update", type=int, help="Last update timestamp", - default=int(time.time())) - - create_command = commands.add_parser("create", help="Create a watchlist with a report") - create_command.add_argument("-w", "--watchlist_name", type=str, help="Watchlist name", required=True) - create_command.add_argument("-d", "--description", type=str, help="Watchlist description", required=True) - create_command.add_argument("-t", "--tags", action="store_true", help="Enable tags", default=False) - create_command.add_argument("-a", "--alerts", action="store_true", help="Enable alerts", default=False) - create_command.add_argument("-T", "--timestamp", type=int, help="Creation timestamp", default=int(time.time())) - create_command.add_argument("-U", "--last_update", type=int, help="Last update timestamp", default=int(time.time())) - # Report metadata arguments. 
- create_command.add_argument("--rep_timestamp", type=int, help="Report timestamp", default=int(time.time())) - create_command.add_argument("--rep_title", type=str, help="Report title", required=True) - create_command.add_argument("--rep_desc", type=str, help="Report description", required=True) - create_command.add_argument("--rep_severity", type=int, help="Report severity", default=1) - create_command.add_argument("--rep_link", type=str, help="Report link") - create_command.add_argument("--rep_tags", type=str, help="Report tags, comma separated") - create_command.add_argument("--rep_visibility", type=str, help="Report visibility") - - delete_command = commands.add_parser("delete", help="Delete a watchlist") - delete_command.add_argument("-R", "--reports", action="store_true", help="Delete all associated reports too", - default=False) - specifier = delete_command.add_mutually_exclusive_group(required=True) - specifier.add_argument("-i", "--watchlist_id", type=str, help="The watchlist ID") - specifier.add_argument("-w", "--watchlist_name", type=str, help="The watchlist name") - - alter_report_command = commands.add_parser("alter-report", help="Change the properties of a watchlist's report") - alter_report_command.add_argument("-i", "--watchlist_id", type=str, help="Watchlist ID", required=True) - alter_report_command.add_argument("-r", "--report_id", type=str, help="Report ID", required=True) - alter_report_command.add_argument("-s", "--severity", type=int, help="The report's severity", required=False) - specifier = alter_report_command.add_mutually_exclusive_group(required=False) - specifier.add_argument("-d", "--deactivate", action="store_true", help="Deactive alerts for this report") - specifier.add_argument("-a", "--activate", action="store_true", help="Activate alerts for this report") - - alter_ioc_command = commands.add_parser("alter-ioc", help="Change the properties of a watchlist's IOC") - alter_ioc_command.add_argument("-i", "--watchlist_id", type=str, help="Watchlist ID", required=True) - alter_ioc_command.add_argument("-r", "--report_id", type=str, help="Report ID", required=True) - alter_ioc_command.add_argument("-I", "--ioc_id", type=str, help="IOC ID", required=True) - specifier = alter_ioc_command.add_mutually_exclusive_group(required=False) - specifier.add_argument("-d", "--deactivate", action="store_true", help="Deactive alerts for this IOC") - specifier.add_argument("-a", "--activate", action="store_true", help="Activate alerts for this IOC") - - export_command = commands.add_parser("export", help="Export a watchlist into an importable format") - specifier = export_command.add_mutually_exclusive_group(required=True) - specifier.add_argument("-i", "--watchlist_id", type=str, help="Watchlist ID") - specifier.add_argument("-w", "--watchlist_name", type=str, help="Watchlist name") - - commands.add_parser("import", help="Import a previously exported watchlist") - - args = parser.parse_args() - cb = get_cb_threathunter_object(args) - - if args.command_name == "list": - return list_watchlists(cb, parser, args) - elif args.command_name == "subscribe": - return subscribe_watchlist(cb, parser, args) - elif args.command_name == "create": - return create_watchlist(cb, parser, args) - elif args.command_name == "delete": - return delete_watchlist(cb, parser, args) - elif args.command_name == "alter-report": - return alter_report(cb, parser, args) - elif args.command_name == "alter-ioc": - return alter_ioc(cb, parser, args) - elif args.command_name == "export": - return 
export_watchlist(cb, parser, args) - elif args.command_name == "import": - return import_watchlist(cb, parser, args) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/setup.py b/setup.py index e8eb37dc..50703cc8 100644 --- a/setup.py +++ b/setup.py @@ -9,11 +9,7 @@ 'cbapi', 'cbapi.protection', 'cbapi.response', - 'cbapi.cache', - 'cbapi.psc', - 'cbapi.psc.defense', - 'cbapi.psc.threathunter', - 'cbapi.psc.livequery' + 'cbapi.cache' ] install_requires = [ @@ -61,5 +57,5 @@ 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules' ], - scripts=['bin/cbapi-response', 'bin/cbapi-protection', 'bin/cbapi-defense', 'bin/cbapi', 'bin/cbapi-psc'] + scripts=['bin/cbapi-response', 'bin/cbapi-protection', 'bin/cbapi'] ) diff --git a/src/cbapi/__init__.py b/src/cbapi/__init__.py index f4acafd8..163499f0 100644 --- a/src/cbapi/__init__.py +++ b/src/cbapi/__init__.py @@ -12,9 +12,4 @@ from cbapi.response.rest_api import CbEnterpriseResponseAPI, CbResponseAPI from cbapi.protection.rest_api import CbEnterpriseProtectionAPI, CbProtectionAPI from cbapi.psc import CbPSCBaseAPI -from cbapi.psc.defense import CbDefenseAPI -from cbapi.psc.threathunter import CbThreatHunterAPI from cbapi.psc.livequery import CbLiveQueryAPI - -# for compatibility with Cb Defense code from cbapi < 1.4.0 -import cbapi.psc.defense as defense diff --git a/src/cbapi/defense.py b/src/cbapi/defense.py deleted file mode 100644 index e9a15cb6..00000000 --- a/src/cbapi/defense.py +++ /dev/null @@ -1,2 +0,0 @@ -# Compatibility with old Defense API code -from cbapi.psc.defense import * # noqa: F401, F403 diff --git a/src/cbapi/example_helpers.py b/src/cbapi/example_helpers.py index a237e35e..e0ee2731 100644 --- a/src/cbapi/example_helpers.py +++ b/src/cbapi/example_helpers.py @@ -16,8 +16,6 @@ from cbapi.protection import CbEnterpriseProtectionAPI from cbapi.psc import CbPSCBaseAPI -from cbapi.psc.defense import CbDefenseAPI -from cbapi.psc.threathunter import CbThreatHunterAPI from cbapi.psc.livequery import CbLiveQueryAPI from cbapi.response import CbEnterpriseResponseAPI @@ -94,34 +92,6 @@ def get_cb_psc_object(args): return cb -def get_cb_defense_object(args): - if args.verbose: - logging.basicConfig() - logging.getLogger("cbapi").setLevel(logging.DEBUG) - logging.getLogger("__main__").setLevel(logging.DEBUG) - - if args.cburl and args.apitoken: - cb = CbDefenseAPI(url=args.cburl, token=args.apitoken, ssl_verify=(not args.no_ssl_verify)) - else: - cb = CbDefenseAPI(profile=args.profile) - - return cb - - -def get_cb_threathunter_object(args): - if args.verbose: - logging.basicConfig() - logging.getLogger("cbapi").setLevel(logging.DEBUG) - logging.getLogger("__main__").setLevel(logging.DEBUG) - - if args.cburl and args.apitoken: - cb = CbThreatHunterAPI(url=args.cburl, token=args.apitoken, ssl_verify=(not args.no_ssl_verify)) - else: - cb = CbThreatHunterAPI(profile=args.profile) - - return cb - - def get_cb_livequery_object(args): if args.verbose: logging.basicConfig() diff --git a/src/cbapi/psc/__init__.py b/src/cbapi/psc/__init__.py index 3f3be5f0..239b1eb6 100644 --- a/src/cbapi/psc/__init__.py +++ b/src/cbapi/psc/__init__.py @@ -3,4 +3,4 @@ from __future__ import absolute_import from .rest_api import CbPSCBaseAPI -from .models import Device, Workflow, BaseAlert, WatchlistAlert, CBAnalyticsAlert, VMwareAlert, WorkflowStatus +from .models import Device diff --git a/src/cbapi/psc/alerts_query.py b/src/cbapi/psc/alerts_query.py deleted file mode 100755 index 6a775c6d..00000000 --- 
a/src/cbapi/psc/alerts_query.py +++ /dev/null @@ -1,704 +0,0 @@ -from cbapi.errors import ApiError -from .base_query import PSCQueryBase, QueryBuilder, QueryBuilderSupportMixin, IterableQueryMixin -from .devices_query import DeviceSearchQuery - - -class BaseAlertSearchQuery(PSCQueryBase, QueryBuilderSupportMixin, IterableQueryMixin): - """ - Represents a query that is used to locate BaseAlert objects. - """ - VALID_CATEGORIES = ["THREAT", "MONITORED", "INFO", "MINOR", "SERIOUS", "CRITICAL"] - VALID_REPUTATIONS = ["KNOWN_MALWARE", "SUSPECT_MALWARE", "PUP", "NOT_LISTED", "ADAPTIVE_WHITE_LIST", - "COMMON_WHITE_LIST", "TRUSTED_WHITE_LIST", "COMPANY_BLACK_LIST"] - VALID_ALERT_TYPES = ["CB_ANALYTICS", "VMWARE", "WATCHLIST"] - VALID_WORKFLOW_VALS = ["OPEN", "DISMISSED"] - VALID_FACET_FIELDS = ["ALERT_TYPE", "CATEGORY", "REPUTATION", "WORKFLOW", "TAG", "POLICY_ID", - "POLICY_NAME", "DEVICE_ID", "DEVICE_NAME", "APPLICATION_HASH", - "APPLICATION_NAME", "STATUS", "RUN_STATE", "POLICY_APPLIED_STATE", - "POLICY_APPLIED", "SENSOR_ACTION"] - - def __init__(self, doc_class, cb): - super().__init__(doc_class, cb) - self._query_builder = QueryBuilder() - self._criteria = {} - self._time_filter = {} - self._sortcriteria = {} - self._bulkupdate_url = "/appservices/v6/orgs/{0}/alerts/workflow/_criteria" - self._count_valid = False - self._total_results = 0 - - def _update_criteria(self, key, newlist): - """ - Updates the criteria being collected for a query. Assumes the specified criteria item is - defined as a list; the list passed in will be set as the value for this criteria item, or - appended to the existing one if there is one. - - :param str key: The key for the criteria item to be set - :param list newlist: List of values to be set for the criteria item - """ - oldlist = self._criteria.get(key, []) - self._criteria[key] = oldlist + newlist - - def set_categories(self, categories): - """ - Restricts the alerts that this query is performed on to the specified categories. - - :param categories list: List of categories to be restricted to. Valid categories are - "THREAT", "MONITORED", "INFO", "MINOR", "SERIOUS", and "CRITICAL." - :return: This instance - """ - if not all((c in BaseAlertSearchQuery.VALID_CATEGORIES) for c in categories): - raise ApiError("One or more invalid category values") - self._update_criteria("category", categories) - return self - - def set_create_time(self, *args, **kwargs): - """ - Restricts the alerts that this query is performed on to the specified - creation time (either specified as a start and end point or as a - range). - - :return: This instance - """ - if kwargs.get("start", None) and kwargs.get("end", None): - if kwargs.get("range", None): - raise ApiError("cannot specify range= in addition to start= and end=") - stime = kwargs["start"] - if not isinstance(stime, str): - stime = stime.isoformat() - etime = kwargs["end"] - if not isinstance(etime, str): - etime = etime.isoformat() - self._time_filter = {"start": stime, "end": etime} - elif kwargs.get("range", None): - if kwargs.get("start", None) or kwargs.get("end", None): - raise ApiError("cannot specify start= or end= in addition to range=") - self._time_filter = {"range": kwargs["range"]} - else: - raise ApiError("must specify either start= and end= or range=") - return self - - def set_device_ids(self, device_ids): - """ - Restricts the alerts that this query is performed on to the specified - device IDs. 
- - :param device_ids list: list of integer device IDs - :return: This instance - """ - if not all(isinstance(device_id, int) for device_id in device_ids): - raise ApiError("One or more invalid device IDs") - self._update_criteria("device_id", device_ids) - return self - - def set_device_names(self, device_names): - """ - Restricts the alerts that this query is performed on to the specified - device names. - - :param device_names list: list of string device names - :return: This instance - """ - if not all(isinstance(n, str) for n in device_names): - raise ApiError("One or more invalid device names") - self._update_criteria("device_name", device_names) - return self - - def set_device_os(self, device_os): - """ - Restricts the alerts that this query is performed on to the specified - device operating systems. - - :param device_os list: List of string operating systems. Valid values are - "WINDOWS", "ANDROID", "MAC", "IOS", "LINUX", and "OTHER." - :return: This instance - """ - if not all((osval in DeviceSearchQuery.VALID_OS) for osval in device_os): - raise ApiError("One or more invalid operating systems") - self._update_criteria("device_os", device_os) - return self - - def set_device_os_versions(self, device_os_versions): - """ - Restricts the alerts that this query is performed on to the specified - device operating system versions. - - :param device_os_versions list: List of string operating system versions. - :return: This instance - """ - if not all(isinstance(n, str) for n in device_os_versions): - raise ApiError("One or more invalid device OS versions") - self._update_criteria("device_os_version", device_os_versions) - return self - - def set_device_username(self, users): - """ - Restricts the alerts that this query is performed on to the specified - user names. - - :param users list: List of string user names. - :return: This instance - """ - if not all(isinstance(u, str) for u in users): - raise ApiError("One or more invalid user names") - self._update_criteria("device_username", users) - return self - - def set_group_results(self, do_group): - """ - Specifies whether or not to group the results of the query. - - :param do_group boolean: True to group the results, False to not do so. - :return: This instance - """ - self._criteria["group_results"] = True if do_group else False - return self - - def set_alert_ids(self, alert_ids): - """ - Restricts the alerts that this query is performed on to the specified - alert IDs. - - :param alert_ids list: List of string alert IDs. - :return: This instance - """ - if not all(isinstance(v, str) for v in alert_ids): - raise ApiError("One or more invalid alert ID values") - self._update_criteria("id", alert_ids) - return self - - def set_legacy_alert_ids(self, alert_ids): - """ - Restricts the alerts that this query is performed on to the specified - legacy alert IDs. - - :param alert_ids list: List of string legacy alert IDs. - :return: This instance - """ - if not all(isinstance(v, str) for v in alert_ids): - raise ApiError("One or more invalid alert ID values") - self._update_criteria("legacy_alert_id", alert_ids) - return self - - def set_minimum_severity(self, severity): - """ - Restricts the alerts that this query is performed on to the specified - minimum severity level. - - :param severity int: The minimum severity level for alerts. 
- :return: This instance - """ - self._criteria["minimum_severity"] = severity - return self - - def set_policy_ids(self, policy_ids): - """ - Restricts the alerts that this query is performed on to the specified - policy IDs. - - :param policy_ids list: list of integer policy IDs - :return: This instance - """ - if not all(isinstance(policy_id, int) for policy_id in policy_ids): - raise ApiError("One or more invalid policy IDs") - self._update_criteria("policy_id", policy_ids) - return self - - def set_policy_names(self, policy_names): - """ - Restricts the alerts that this query is performed on to the specified - policy names. - - :param policy_names list: list of string policy names - :return: This instance - """ - if not all(isinstance(n, str) for n in policy_names): - raise ApiError("One or more invalid policy names") - self._update_criteria("policy_name", policy_names) - return self - - def set_process_names(self, process_names): - """ - Restricts the alerts that this query is performed on to the specified - process names. - - :param process_names list: list of string process names - :return: This instance - """ - if not all(isinstance(n, str) for n in process_names): - raise ApiError("One or more invalid process names") - self._update_criteria("process_name", process_names) - return self - - def set_process_sha256(self, shas): - """ - Restricts the alerts that this query is performed on to the specified - process SHA-256 hash values. - - :param shas list: list of string process SHA-256 hash values - :return: This instance - """ - if not all(isinstance(n, str) for n in shas): - raise ApiError("One or more invalid SHA256 values") - self._update_criteria("process_sha256", shas) - return self - - def set_reputations(self, reps): - """ - Restricts the alerts that this query is performed on to the specified - reputation values. - - :param reps list: List of string reputation values. Valid values are - "KNOWN_MALWARE", "SUSPECT_MALWARE", "PUP", "NOT_LISTED", - "ADAPTIVE_WHITE_LIST", "COMMON_WHITE_LIST", - "TRUSTED_WHITE_LIST", and "COMPANY_BLACK_LIST". - :return: This instance - """ - if not all((r in BaseAlertSearchQuery.VALID_REPUTATIONS) for r in reps): - raise ApiError("One or more invalid reputation values") - self._update_criteria("reputation", reps) - return self - - def set_tags(self, tags): - """ - Restricts the alerts that this query is performed on to the specified - tag values. - - :param tags list: list of string tag values - :return: This instance - """ - if not all(isinstance(tag, str) for tag in tags): - raise ApiError("One or more invalid tags") - self._update_criteria("tag", tags) - return self - - def set_target_priorities(self, priorities): - """ - Restricts the alerts that this query is performed on to the specified - target priority values. - - :param priorities list: List of string target priority values. Valid values are - "LOW", "MEDIUM", "HIGH", and "MISSION_CRITICAL". - :return: This instance - """ - if not all((prio in DeviceSearchQuery.VALID_PRIORITIES) for prio in priorities): - raise ApiError("One or more invalid priority values") - self._update_criteria("target_value", priorities) - return self - - def set_threat_ids(self, threats): - """ - Restricts the alerts that this query is performed on to the specified - threat ID values. 
- - :param threats list: list of string threat ID values - :return: This instance - """ - if not all(isinstance(t, str) for t in threats): - raise ApiError("One or more invalid threat ID values") - self._update_criteria("threat_id", threats) - return self - - def set_types(self, alerttypes): - """ - Restricts the alerts that this query is performed on to the specified - alert type values. - - :param alerttypes list: List of string alert type values. Valid values are - "CB_ANALYTICS", "VMWARE", and "WATCHLIST". - :return: This instance - """ - if not all((t in BaseAlertSearchQuery.VALID_ALERT_TYPES) for t in alerttypes): - raise ApiError("One or more invalid alert type values") - self._update_criteria("type", alerttypes) - return self - - def set_workflows(self, workflow_vals): - """ - Restricts the alerts that this query is performed on to the specified - workflow status values. - - :param workflow_vals list: List of string alert type values. Valid values are - "OPEN" and "DISMISSED". - :return: This instance - """ - if not all((t in BaseAlertSearchQuery.VALID_WORKFLOW_VALS) for t in workflow_vals): - raise ApiError("One or more invalid workflow status values") - self._update_criteria("workflow", workflow_vals) - return self - - def _build_criteria(self): - """ - Builds the criteria object for use in a query. - - :return: The criteria object. - """ - mycrit = self._criteria - if self._time_filter: - mycrit["create_time"] = self._time_filter - return mycrit - - def sort_by(self, key, direction="ASC"): - """Sets the sorting behavior on a query's results. - - Example:: - - >>> cb.select(BaseAlert).sort_by("name") - - :param key: the key in the schema to sort by - :param direction: the sort order, either "ASC" or "DESC" - :rtype: :py:class:`BaseAlertSearchQuery` - """ - if direction not in DeviceSearchQuery.VALID_DIRECTIONS: - raise ApiError("invalid sort direction specified") - self._sortcriteria = {"field": key, "order": direction} - return self - - def _build_request(self, from_row, max_rows, add_sort=True): - """ - Creates the request body for an API call. - - :param int from_row: The row to start the query at. - :param int max_rows: The maximum number of rows to be returned. - :param boolean add_sort: If True(default), the sort criteria will be added as part of the request. - :return: A dict containing the complete request body. - """ - request = {"criteria": self._build_criteria()} - request["query"] = self._query_builder._collapse() - request["rows"] = 100 - if from_row > 0: - request["start"] = from_row - if max_rows >= 0: - request["rows"] = max_rows - if add_sort and self._sortcriteria != {}: - request["sort"] = [self._sortcriteria] - return request - - def _build_url(self, tail_end): - """ - Creates the URL to be used for an API call. - - :param str tail_end: String to be appended to the end of the generated URL. - """ - url = self._doc_class.urlobject.format(self._cb.credentials.org_key) + tail_end - return url - - def _count(self): - """ - Returns the number of results from the run of this query. - - :return: The number of results from the run of this query. 
- """ - if self._count_valid: - return self._total_results - - url = self._build_url("/_search") - request = self._build_request(0, -1) - resp = self._cb.post_object(url, body=request) - result = resp.json() - - self._total_results = result["num_found"] - self._count_valid = True - - return self._total_results - - def _perform_query(self, from_row=0, max_rows=-1): - """ - Performs the query and returns the results of the query in an iterable fashion. - - :param int from_row: The row to start the query at (default 0). - :param int max_rows: The maximum number of rows to be returned (default -1, meaning "all"). - """ - url = self._build_url("/_search") - current = from_row - numrows = 0 - still_querying = True - while still_querying: - request = self._build_request(current, max_rows) - resp = self._cb.post_object(url, body=request) - result = resp.json() - - self._total_results = result["num_found"] - self._count_valid = True - - results = result.get("results", []) - for item in results: - yield self._doc_class(self._cb, item["id"], item) - current += 1 - numrows += 1 - - if max_rows > 0 and numrows == max_rows: - still_querying = False - break - - from_row = current - if current >= self._total_results: - still_querying = False - break - - def facets(self, fieldlist, max_rows=0): - """ - Return information about the facets for this alert by search, using the defined criteria. - - :param fieldlist list: List of facet field names. Valid names are - "ALERT_TYPE", "CATEGORY", "REPUTATION", "WORKFLOW", "TAG", "POLICY_ID", - "POLICY_NAME", "DEVICE_ID", "DEVICE_NAME", "APPLICATION_HASH", - "APPLICATION_NAME", "STATUS", "RUN_STATE", "POLICY_APPLIED_STATE", - "POLICY_APPLIED", and "SENSOR_ACTION". - :param max_rows int: The maximum number of rows to return. 0 means return all rows. - :return: A list of facet information specified as dicts. - """ - if not all((field in BaseAlertSearchQuery.VALID_FACET_FIELDS) for field in fieldlist): - raise ApiError("One or more invalid term field names") - request = self._build_request(0, -1, False) - request["terms"] = {"fields": fieldlist, "rows": max_rows} - url = self._build_url("/_facet") - resp = self._cb.post_object(url, body=request) - result = resp.json() - return result.get("results", []) - - def _update_status(self, status, remediation, comment): - """ - Updates the status of all alerts matching the given query. - - :param str state: The status to put the alerts into, either "OPEN" or "DISMISSED". - :param remediation str: The remediation state to set for all alerts. - :param comment str: The comment to set for all alerts. - :return: The request ID, which may be used to select a WorkflowStatus object. - """ - request = {"state": status, "criteria": self._build_criteria(), "query": self._query_builder._collapse()} - if remediation is not None: - request["remediation_state"] = remediation - if comment is not None: - request["comment"] = comment - resp = self._cb.post_object(self._bulkupdate_url.format(self._cb.credentials.org_key), body=request) - output = resp.json() - return output["request_id"] - - def update(self, remediation=None, comment=None): - """ - Update all alerts matching the given query. The alerts will be left in an OPEN state after this request. - - :param remediation str: The remediation state to set for all alerts. - :param comment str: The comment to set for all alerts. - :return: The request ID, which may be used to select a WorkflowStatus object. 
- """ - return self._update_status("OPEN", remediation, comment) - - def dismiss(self, remediation=None, comment=None): - """ - Dismiss all alerts matching the given query. The alerts will be left in a DISMISSED state after this request. - - :param remediation str: The remediation state to set for all alerts. - :param comment str: The comment to set for all alerts. - :return: The request ID, which may be used to select a WorkflowStatus object. - """ - return self._update_status("DISMISSED", remediation, comment) - - -class WatchlistAlertSearchQuery(BaseAlertSearchQuery): - """ - Represents a query that is used to locate WatchlistAlert objects. - """ - def __init__(self, doc_class, cb): - super().__init__(doc_class, cb) - self._bulkupdate_url = "/appservices/v6/orgs/{0}/alerts/watchlist/workflow/_criteria" - - def set_watchlist_ids(self, ids): - """ - Restricts the alerts that this query is performed on to the specified - watchlist ID values. - - :param ids list: list of string watchlist ID values - :return: This instance - """ - if not all(isinstance(t, str) for t in ids): - raise ApiError("One or more invalid watchlist IDs") - self._update_criteria("watchlist_id", ids) - return self - - def set_watchlist_names(self, names): - """ - Restricts the alerts that this query is performed on to the specified - watchlist name values. - - :param names list: list of string watchlist name values - :return: This instance - """ - if not all(isinstance(name, str) for name in names): - raise ApiError("One or more invalid watchlist names") - self._update_criteria("watchlist_name", names) - return self - - -class CBAnalyticsAlertSearchQuery(BaseAlertSearchQuery): - """ - Represents a query that is used to locate CBAnalyticsAlert objects. - """ - VALID_THREAT_CATEGORIES = ["UNKNOWN", "NON_MALWARE", "NEW_MALWARE", "KNOWN_MALWARE", "RISKY_PROGRAM"] - VALID_LOCATIONS = ["ONSITE", "OFFSITE", "UNKNOWN"] - VALID_KILL_CHAIN_STATUSES = ["RECONNAISSANCE", "WEAPONIZE", "DELIVER_EXPLOIT", "INSTALL_RUN", - "COMMAND_AND_CONTROL", "EXECUTE_GOAL", "BREACH"] - VALID_POLICY_APPLIED = ["APPLIED", "NOT_APPLIED"] - VALID_RUN_STATES = ["DID_NOT_RUN", "RAN", "UNKNOWN"] - VALID_SENSOR_ACTIONS = ["POLICY_NOT_APPLIED", "ALLOW", "ALLOW_AND_LOG", "TERMINATE", "DENY"] - VALID_THREAT_CAUSE_VECTORS = ["EMAIL", "WEB", "GENERIC_SERVER", "GENERIC_CLIENT", "REMOTE_DRIVE", - "REMOVABLE_MEDIA", "UNKNOWN", "APP_STORE", "THIRD_PARTY"] - - def __init__(self, doc_class, cb): - super().__init__(doc_class, cb) - self._bulkupdate_url = "/appservices/v6/orgs/{0}/alerts/cbanalytics/workflow/_criteria" - - def set_blocked_threat_categories(self, categories): - """ - Restricts the alerts that this query is performed on to the specified - threat categories that were blocked. - - :param categories list: List of threat categories to look for. Valid values are "UNKNOWN", - "NON_MALWARE", "NEW_MALWARE", "KNOWN_MALWARE", and "RISKY_PROGRAM". - :return: This instance. - """ - if not all((category in CBAnalyticsAlertSearchQuery.VALID_THREAT_CATEGORIES) - for category in categories): - raise ApiError("One or more invalid threat categories") - self._update_criteria("blocked_threat_category", categories) - return self - - def set_device_locations(self, locations): - """ - Restricts the alerts that this query is performed on to the specified - device locations. - - :param locations list: List of device locations to look for. Valid values are "ONSITE", "OFFSITE", - and "UNKNOWN". - :return: This instance. 
- """ - if not all((location in CBAnalyticsAlertSearchQuery.VALID_LOCATIONS) - for location in locations): - raise ApiError("One or more invalid device locations") - self._update_criteria("device_location", locations) - return self - - def set_kill_chain_statuses(self, statuses): - """ - Restricts the alerts that this query is performed on to the specified - kill chain statuses. - - :param statuses list: List of kill chain statuses to look for. Valid values are "RECONNAISSANCE", - "WEAPONIZE", "DELIVER_EXPLOIT", "INSTALL_RUN","COMMAND_AND_CONTROL", - "EXECUTE_GOAL", and "BREACH". - :return: This instance. - """ - if not all((status in CBAnalyticsAlertSearchQuery.VALID_KILL_CHAIN_STATUSES) - for status in statuses): - raise ApiError("One or more invalid kill chain status values") - self._update_criteria("kill_chain_status", statuses) - return self - - def set_not_blocked_threat_categories(self, categories): - """ - Restricts the alerts that this query is performed on to the specified - threat categories that were NOT blocked. - - :param categories list: List of threat categories to look for. Valid values are "UNKNOWN", - "NON_MALWARE", "NEW_MALWARE", "KNOWN_MALWARE", and "RISKY_PROGRAM". - :return: This instance. - """ - if not all((category in CBAnalyticsAlertSearchQuery.VALID_THREAT_CATEGORIES) - for category in categories): - raise ApiError("One or more invalid threat categories") - self._update_criteria("not_blocked_threat_category", categories) - return self - - def set_policy_applied(self, applied_statuses): - """ - Restricts the alerts that this query is performed on to the specified - status values showing whether policies were applied. - - :param applied_statuses list: List of status values to look for. Valid values are - "APPLIED" and "NOT_APPLIED". - :return: This instance. - """ - if not all((s in CBAnalyticsAlertSearchQuery.VALID_POLICY_APPLIED) - for s in applied_statuses): - raise ApiError("One or more invalid policy-applied values") - self._update_criteria("policy_applied", applied_statuses) - return self - - def set_reason_code(self, reason): - """ - Restricts the alerts that this query is performed on to the specified - reason codes (enum values). - - :param reason list: List of string reason codes to look for. - :return: This instance. - """ - if not all(isinstance(t, str) for t in reason): - raise ApiError("One or more invalid reason code values") - self._update_criteria("reason_code", reason) - return self - - def set_run_states(self, states): - """ - Restricts the alerts that this query is performed on to the specified run states. - - :param states list: List of run states to look for. Valid values are "DID_NOT_RUN", "RAN", - and "UNKNOWN". - :return: This instance. - """ - if not all((s in CBAnalyticsAlertSearchQuery.VALID_RUN_STATES) - for s in states): - raise ApiError("One or more invalid run states") - self._update_criteria("run_state", states) - return self - - def set_sensor_actions(self, actions): - """ - Restricts the alerts that this query is performed on to the specified sensor actions. - - :param actions list: List of sensor actions to look for. Valid values are "POLICY_NOT_APPLIED", - "ALLOW", "ALLOW_AND_LOG", "TERMINATE", and "DENY". - :return: This instance. 
- """ - if not all((action in CBAnalyticsAlertSearchQuery.VALID_SENSOR_ACTIONS) - for action in actions): - raise ApiError("One or more invalid sensor actions") - self._update_criteria("sensor_action", actions) - return self - - def set_threat_cause_vectors(self, vectors): - """ - Restricts the alerts that this query is performed on to the specified threat cause vectors. - - :param vectors list: List of threat cause vectors to look for. Valid values are "EMAIL", "WEB", - "GENERIC_SERVER", "GENERIC_CLIENT", "REMOTE_DRIVE", "REMOVABLE_MEDIA", - "UNKNOWN", "APP_STORE", and "THIRD_PARTY". - :return: This instance. - """ - if not all((vector in CBAnalyticsAlertSearchQuery.VALID_THREAT_CAUSE_VECTORS) - for vector in vectors): - raise ApiError("One or more invalid threat cause vectors") - self._update_criteria("threat_cause_vector", vectors) - return self - - -class VMwareAlertSearchQuery(BaseAlertSearchQuery): - """ - Represents a query that is used to locate VMwareAlert objects. - """ - def __init__(self, doc_class, cb): - super().__init__(doc_class, cb) - self._bulkupdate_url = "/appservices/v6/orgs/{0}/alerts/vmware/workflow/_criteria" - - def set_group_ids(self, groupids): - """ - Restricts the alerts that this query is performed on to the specified - AppDefense-assigned alarm group IDs. - - :param groupids list: List of (integer) AppDefense-assigned alarm group IDs. - :return: This instance. - """ - if not all(isinstance(groupid, int) for groupid in groupids): - raise ApiError("One or more invalid alarm group IDs") - self._update_criteria("group_id", groupids) - return self diff --git a/src/cbapi/psc/cblr.py b/src/cbapi/psc/cblr.py deleted file mode 100644 index 380450f0..00000000 --- a/src/cbapi/psc/cblr.py +++ /dev/null @@ -1,250 +0,0 @@ -import logging -import threading -from cbapi.six.moves.queue import Queue -from collections import defaultdict -from concurrent.futures import _base - -from cbapi.errors import TimeoutError -from cbapi.live_response_api import CbLRManagerBase, CbLRSessionBase, poll_status - - -OS_LIVE_RESPONSE_ENUM = { - "WINDOWS": 1, - "LINUX": 2, - "MAC": 4 -} - -log = logging.getLogger(__name__) - - -class LiveResponseSession(CbLRSessionBase): - def __init__(self, cblr_manager, session_id, sensor_id, session_data=None): - super(LiveResponseSession, self).__init__(cblr_manager, session_id, sensor_id, session_data=session_data) - from cbapi.psc.defense.models import Device - device_info = self._cb.select(Device, self.sensor_id) - self.os_type = OS_LIVE_RESPONSE_ENUM.get(device_info.deviceType, None) - - -class WorkItem(object): - def __init__(self, fn, sensor_id): - from cbapi.psc.defense.models import Device - self.fn = fn - if isinstance(sensor_id, Device): - self.sensor_id = sensor_id.deviceId - else: - self.sensor_id = int(sensor_id) - - self.future = _base.Future() - - -class CompletionNotification(object): - def __init__(self, sensor_id): - self.sensor_id = sensor_id - - -class WorkerStatus(object): - def __init__(self, sensor_id, status="ready", exception=None): - self.sensor_id = sensor_id - self.status = status - self.exception = exception - - -class JobWorker(threading.Thread): - def __init__(self, cb, sensor_id, result_queue): - super(JobWorker, self).__init__() - self.cb = cb - self.sensor_id = sensor_id - self.job_queue = Queue() - self.lr_session = None - self.result_queue = result_queue - - def run(self): - try: - self.lr_session = self.cb.live_response.request_session(self.sensor_id) - self.result_queue.put(WorkerStatus(self.sensor_id, status="ready")) - 
- while True: - work_item = self.job_queue.get(block=True) - if not work_item: - self.job_queue.task_done() - return - - self.run_job(work_item) - self.result_queue.put(CompletionNotification(self.sensor_id)) - self.job_queue.task_done() - except Exception as e: - self.result_queue.put(WorkerStatus(self.sensor_id, status="error", exception=e)) - finally: - if self.lr_session: - self.lr_session.close() - self.result_queue.put(WorkerStatus(self.sensor_id, status="exiting")) - - def run_job(self, work_item): - try: - work_item.future.set_result(work_item.fn(self.lr_session)) - except Exception as e: - work_item.future.set_exception(e) - - -class LiveResponseSessionManager(CbLRManagerBase): - cblr_base = "/integrationServices/v3/cblr" - cblr_session_cls = LiveResponseSession - - def submit_job(self, job, sensor): - if self._job_scheduler is None: - # spawn the scheduler thread - self._job_scheduler = LiveResponseJobScheduler(self._cb) - self._job_scheduler.start() - - work_item = WorkItem(job, sensor) - self._job_scheduler.submit_job(work_item) - return work_item.future - - def _get_or_create_session(self, sensor_id): - session_id = self._create_session(sensor_id) - - try: - res = poll_status(self._cb, "{cblr_base}/session/{0}".format(session_id, cblr_base=self.cblr_base), - desired_status="ACTIVE", delay=1, timeout=360) - except Exception: - # "close" the session, otherwise it will stay in a pending state - self._close_session(session_id) - - # the Cb server will return a 404 if we don't establish a session in time, so convert this to a "timeout" - raise TimeoutError(uri="{cblr_base}/session/{0}".format(session_id, cblr_base=self.cblr_base), - message="Could not establish session with sensor {0}".format(sensor_id), - error_code=404) - else: - return session_id, res - - def _close_session(self, session_id): - try: - self._cb.put_object("{cblr_base}/session".format(session_id, cblr_base=self.cblr_base), - {"session_id": session_id, "status": "CLOSE"}) - except Exception: - pass - - def _create_session(self, sensor_id): - response = self._cb.post_object("{cblr_base}/session/{0}".format(sensor_id, cblr_base=self.cblr_base), - {"sensor_id": sensor_id}).json() - session_id = response["id"] - return session_id - - -class LiveResponseJobScheduler(threading.Thread): - daemon = True - - def __init__(self, cb, max_workers=10): - super(LiveResponseJobScheduler, self).__init__() - self._cb = cb - self._job_workers = {} - self._idle_workers = set() - self._unscheduled_jobs = defaultdict(list) - self._max_workers = max_workers - self.schedule_queue = Queue() - - def run(self): - log.debug("Starting Live Response Job Scheduler") - - while True: - log.debug("Waiting for item on Scheduler Queue") - item = self.schedule_queue.get(block=True) - log.debug("Got item: {0}".format(item)) - if isinstance(item, WorkItem): - # new WorkItem available - self._unscheduled_jobs[item.sensor_id].append(item) - elif isinstance(item, CompletionNotification): - # job completed - self._idle_workers.add(item.sensor_id) - elif isinstance(item, WorkerStatus): - if item.status == "error": - log.error("Error encountered by JobWorker[{0}]: {1}".format(item.sensor_id, - item.exception)) - elif item.status == "exiting": - log.debug("JobWorker[{0}] has exited, waiting...".format(item.sensor_id)) - self._job_workers[item.sensor_id].join() - log.debug("JobWorker[{0}] deleted".format(item.sensor_id)) - del self._job_workers[item.sensor_id] - try: - self._idle_workers.remove(item.sensor_id) - except KeyError: - pass - elif item.status == 
"ready": - log.debug("JobWorker[{0}] now ready to accept jobs, session established".format(item.sensor_id)) - self._idle_workers.add(item.sensor_id) - else: - log.debug("Unknown status from JobWorker[{0}]: {1}".format(item.sensor_id, item.status)) - else: - log.debug("Received unknown item on the scheduler Queue, exiting") - # exiting the scheduler if we get None - # TODO: wait for all worker threads to exit - return - - self._schedule_jobs() - - def _schedule_jobs(self): - log.debug("Entering scheduler") - - # First, see if there are new jobs to schedule on idle workers. - self._schedule_existing_workers() - - # If we have jobs scheduled to run on sensors with no current associated worker, let's spawn new ones. - if set(self._unscheduled_jobs.keys()) - self._idle_workers: - self._cleanup_idle_workers() - self._spawn_new_workers() - self._schedule_existing_workers() - - def _cleanup_idle_workers(self, max=None): - if not max: - max = self._max_workers - - for sensor in list(self._idle_workers)[:max]: - log.debug("asking worker for sensor id {0} to exit".format(sensor)) - self._job_workers[sensor].job_queue.put(None) - - def _schedule_existing_workers(self): - log.debug("There are idle workers for sensor ids {0}".format(self._idle_workers)) - - intersection = self._idle_workers.intersection(set(self._unscheduled_jobs.keys())) - - log.debug("{0} jobs ready to execute in existing execution slots".format(len(intersection))) - - for sensor in intersection: - item = self._unscheduled_jobs[sensor].pop(0) - self._job_workers[sensor].job_queue.put(item) - self._idle_workers.remove(item.sensor_id) - - self._cleanup_unscheduled_jobs() - - def _cleanup_unscheduled_jobs(self): - marked_for_deletion = [] - for k in self._unscheduled_jobs.keys(): - if len(self._unscheduled_jobs[k]) == 0: - marked_for_deletion.append(k) - - for k in marked_for_deletion: - del self._unscheduled_jobs[k] - - def submit_job(self, work_item): - self.schedule_queue.put(work_item) - - def _spawn_new_workers(self): - from cbapi.psc.defense.models import Device - if len(self._job_workers) >= self._max_workers: - return - - schedule_max = self._max_workers - len(self._job_workers) - ''' - sensors = [s for s in self._cb.select(Device) if s.deviceId in self._unscheduled_jobs - and s.deviceId not in self._job_workers - and "AVAILABLE" in s.sensorStates] - ''' - log.debug("spawning new workers to handle unscheduled jobs: {0}".format(self._unscheduled_jobs)) - sensors = [s for s in self._cb.select(Device) if s.deviceId in self._unscheduled_jobs - and s.deviceId not in self._job_workers] - sensors_to_schedule = sensors[:schedule_max] - log.debug("Spawning new workers to handle these sensors: {0}".format(sensors_to_schedule)) - for sensor in sensors_to_schedule: - log.debug("Spawning new JobWorker for sensor id {0}".format(sensor.deviceId)) - self._job_workers[sensor.deviceId] = JobWorker(self._cb, sensor.deviceId, self.schedule_queue) - self._job_workers[sensor.deviceId].start() diff --git a/src/cbapi/psc/defense/__init__.py b/src/cbapi/psc/defense/__init__.py deleted file mode 100644 index bc37a1d5..00000000 --- a/src/cbapi/psc/defense/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Exported public API for the Cb Defense API - -from __future__ import absolute_import - -from .rest_api import CbDefenseAPI -from cbapi.psc.defense.models import Device, Event, Policy diff --git a/src/cbapi/psc/defense/models.py b/src/cbapi/psc/defense/models.py deleted file mode 100644 index 0b02d1b9..00000000 --- a/src/cbapi/psc/defense/models.py +++ /dev/null 
@@ -1,164 +0,0 @@ -from cbapi.models import MutableBaseModel, CreatableModelMixin, NewBaseModel - -from copy import deepcopy -import logging -import json - -from cbapi.errors import ServerError - -log = logging.getLogger(__name__) - - -class DefenseMutableModel(MutableBaseModel): - _change_object_http_method = "PATCH" - _change_object_key_name = None - - def __init__(self, cb, model_unique_id=None, initial_data=None, force_init=False, full_doc=False): - super(DefenseMutableModel, self).__init__(cb, model_unique_id=model_unique_id, initial_data=initial_data, - force_init=force_init, full_doc=full_doc) - if not self._change_object_key_name: - self._change_object_key_name = self.primary_key - - def _parse(self, obj): - if type(obj) == dict and self.info_key in obj: - return obj[self.info_key] - - def _update_object(self): - if self._change_object_http_method != "PATCH": - return self._update_entire_object() - else: - return self._patch_object() - - def _update_entire_object(self): - if self.__class__.primary_key in self._dirty_attributes.keys() or self._model_unique_id is None: - new_object_info = deepcopy(self._info) - try: - if not self._new_object_needs_primary_key: - del(new_object_info[self.__class__.primary_key]) - except Exception: - pass - log.debug("Creating a new {0:s} object".format(self.__class__.__name__)) - ret = self._cb.api_json_request(self.__class__._new_object_http_method, self.urlobject, - data={self.info_key: new_object_info}) - else: - log.debug("Updating {0:s} with unique ID {1:s}".format(self.__class__.__name__, str(self._model_unique_id))) - ret = self._cb.api_json_request(self.__class__._change_object_http_method, - self._build_api_request_uri(), data={self.info_key: self._info}) - - return self._refresh_if_needed(ret) - - def _patch_object(self): - if self.__class__.primary_key in self._dirty_attributes.keys() or self._model_unique_id is None: - log.debug("Creating a new {0:s} object".format(self.__class__.__name__)) - ret = self._cb.api_json_request(self.__class__._new_object_http_method, self.urlobject, - data=self._info) - else: - updates = {} - for k in self._dirty_attributes.keys(): - updates[k] = self._info[k] - log.debug("Updating {0:s} with unique ID {1:s}".format(self.__class__.__name__, str(self._model_unique_id))) - ret = self._cb.api_json_request(self.__class__._change_object_http_method, - self._build_api_request_uri(), data=updates) - - return self._refresh_if_needed(ret) - - def _refresh_if_needed(self, request_ret): - refresh_required = True - - if request_ret.status_code not in range(200, 300): - try: - message = json.loads(request_ret.text)[0] - except Exception: - message = request_ret.text - - raise ServerError(request_ret.status_code, message, - result="Did not update {} record.".format(self.__class__.__name__)) - else: - try: - message = request_ret.json() - log.debug("Received response: %s" % message) - if not isinstance(message, dict): - raise ServerError(request_ret.status_code, message, - result="Unknown error updating {0:s} record.".format(self.__class__.__name__)) - else: - if message.get("success", False): - if isinstance(message.get(self.info_key, None), dict): - self._info = message.get(self.info_key) - self._full_init = True - refresh_required = False - else: - if self._change_object_key_name in message.keys(): - # if all we got back was an ID, try refreshing to get the entire record. 
- log.debug("Only received an ID back from the server, forcing a refresh") - self._info[self.primary_key] = message[self._change_object_key_name] - refresh_required = True - else: - # "success" is False - raise ServerError(request_ret.status_code, message.get("message", ""), - result="Did not update {0:s} record.".format(self.__class__.__name__)) - except Exception: - pass - - self._dirty_attributes = {} - if refresh_required: - self.refresh() - return self._model_unique_id - - -class Device(DefenseMutableModel): - urlobject = "/integrationServices/v3/device" - primary_key = "deviceId" - info_key = "deviceInfo" - swagger_meta_file = "psc/defense/models/deviceInfo.yaml" - - def __init__(self, cb, model_unique_id, initial_data=None): - super(Device, self).__init__(cb, model_unique_id, initial_data) - - def lr_session(self): - """ - Retrieve a Live Response session object for this Device. - - :return: Live Response session object - :rtype: :py:class:`cbapi.defense.cblr.LiveResponseSession` - :raises ApiError: if there is an error establishing a Live Response session for this Device - - """ - return self._cb._request_lr_session(self._model_unique_id) - - -class Event(NewBaseModel): - urlobject = "/integrationServices/v3/event" - primary_key = "eventId" - info_key = "eventInfo" - - def _parse(self, obj): - if type(obj) == dict and self.info_key in obj: - return obj[self.info_key] - - def __init__(self, cb, model_unique_id, initial_data=None): - super(Event, self).__init__(cb, model_unique_id, initial_data) - - -class Policy(DefenseMutableModel, CreatableModelMixin): - urlobject = "/integrationServices/v3/policy" - info_key = "policyInfo" - swagger_meta_file = "psc/defense/models/policyInfo.yaml" - _change_object_http_method = "PUT" - _change_object_key_name = "policyId" - - @property - def rules(self): - return dict([(r.get("id"), r) for r in self.policy.get("rules", [])]) - - def add_rule(self, new_rule): - self._cb.post_object("{0}/rule".format(self._build_api_request_uri()), {"ruleInfo": new_rule}) - self.refresh() - - def delete_rule(self, rule_id): - self._cb.delete_object("{0}/rule/{1}".format(self._build_api_request_uri(), rule_id)) - self.refresh() - - def replace_rule(self, rule_id, new_rule): - self._cb.put_object("{0}/rule/{1}".format(self._build_api_request_uri(), rule_id), - {"ruleInfo": new_rule}) - self.refresh() diff --git a/src/cbapi/psc/defense/models/deviceInfo.yaml b/src/cbapi/psc/defense/models/deviceInfo.yaml deleted file mode 100644 index 7d7106a7..00000000 --- a/src/cbapi/psc/defense/models/deviceInfo.yaml +++ /dev/null @@ -1,221 +0,0 @@ -type: object -properties: - osVersion: - type: string - activationCode: - type: string - organizationId: - type: integer - format: int64 - deviceId: - type: integer - format: int64 - deviceSessionId: - type: integer - format: int64 - deviceOwnerId: - type: integer - format: int64 - deviceGuid: - type: string - format: uuid - email: - type: string - format: email - assignedToId: - type: integer - format: int64 - assignedToName: - type: string - deviceType: - type: string - x-nullable: true - enum: - - "MAC" - - "WINDOWS" - firstName: - type: string - lastName: - type: string - middleName: - type: string - createTime: - type: integer - format: epoch-ms-date-time - policyId: - type: integer - format: int64 - policyName: - type: string - quarantined: - type: boolean - targetPriorityType: - type: string - x-nullable: true - enum: - - "HIGH" - - "LOW" - - "MEDIUM" - - "MISSION_CRITICAL" - lastVirusActivityTime: - type: integer - format: 
epoch-ms-date-time - firstVirusActivityTime: - type: integer - format: epoch-ms-date-time - activationCodeExpiryTime: - type: integer - format: epoch-ms-date-time - organizationName: - type: string - sensorVersion: - type: string - registeredTime: - type: integer - format: epoch-ms-date-time - lastContact: - type: integer - format: epoch-ms-date-time - lastReportedTime: - type: integer - format: epoch-ms-date-time - windowsPlatform: - type: string - x-nullable: true - enum: - - "CLIENT_X64" - - "CLIENT_X86" - - "SERVER_X6" - - "SERVER_X86" - vdiBaseDevice: - type: integer - format: int64 - avStatus: - type: array - items: - type: string - x-nullable: true - enum: - - "AV_ACTIVE" - - "AV_BYPASS" - - "AV_DEREGISTERED" - - "AV_NOT_REGISTERED" - - "AV_REGISTERED" - - "FULLY_DISABLED" - - "FULLY_ENABLED" - - "INSTALLED" - - "INSTALLED_SERVER" - - "NOT_INSTALLED" - - "ONACCESS_SCAN_DISABLED" - - "ONDEMAND_SCAN_DISABLED" - - "ONDEMOND_SCAN_DISABLED" - - "PRODUCT_UPDATE_DISABLED" - - "SIGNATURE_UPDATE_DISABLED" - - "UNINSTALLED" - - "UNINSTALLED_SERVER" - deregisteredTime: - type: integer - format: epoch-ms-date-time - sensorStates: - type: array - items: - type: string - x-nullable: true - enum: - - "ACTIVE" - - "CSR_ACTION" - - "DB_CORRUPTION_DETECTED" - - "DRIVER_INIT_ERROR" - - "LOOP_DETECTED" - - "PANICS_DETECTED" - - "REMGR_INIT_ERROR" - - "REPUX_ACTION" - - "SENSOR_MAINTENANCE" - - "SENSOR_RESET_IN_PROGRESS" - - "SENSOR_SHUTDOWN" - - "SENSOR_UNREGISTERED" - - "SENSOR_UPGRADE_IN_PROGRESS" - - "UNSUPPORTED_OS" - - "WATCHDOG" - messages: - type: array - items: - type: object - properties: - message: - type: string - time: - type: integer - format: epoch-ms-date-time - rootedBySensor: - type: boolean - rootedBySensorTime: - type: integer - format: epoch-ms-date-time - lastInternalIpAddress: - type: string - lastExternalIpAddress: - type: string - lastLocation: - type: string - x-nullable: true - enum: - - "OFFSITE" - - "ONSITE" - - "UNKNOWN" - avUpdateServers: - type: array - items: - type: string - passiveMode: - type: boolean - lastResetTime: - type: integer - format: epoch-ms-date-time - lastShutdownTime: - type: integer - format: epoch-ms-date-time - scanStatus: - type: string - scanLastActionTime: - type: integer - format: epoch-ms-date-time - scanLastCompleteTime: - type: integer - format: epoch-ms-date-time - linuxKernelVersion: - type: string - avEngine: - type: string - avLastScanTime: - type: integer - format: epoch-ms-date-time - rootedByAnalytics: - type: boolean - rootedByAnalyticsTime: - type: integer - format: epoch-ms-date-time - testId: - type: integer - avMaster: - type: boolean - uninstalledTime: - type: integer - format: epoch-ms-date-time - name: - type: string - status: - type: string - x-nullable: true - enum: - - "ACTIVE" - - "ALL" - - "BYPASS" - - "BYPASS_ON" - - "DEREGISTERED" - - "ERROR" - - "INACTIVE" - - "PENDING" - - "QUARANTINE" - - "REGISTERED" - - "UNINSTALLED" diff --git a/src/cbapi/psc/defense/models/policyInfo.yaml b/src/cbapi/psc/defense/models/policyInfo.yaml deleted file mode 100644 index b13dc207..00000000 --- a/src/cbapi/psc/defense/models/policyInfo.yaml +++ /dev/null @@ -1,25 +0,0 @@ -type: object -required: -- description -- name -- policy -- priorityLevel -- version -properties: - description: - type: string - id: - type: integer - latestRevision: - type: integer - name: - type: string - policy: - type: object - priorityLevel: - type: string - systemPolicy: - type: boolean - version: - type: integer - diff --git a/src/cbapi/psc/defense/rest_api.py 
b/src/cbapi/psc/defense/rest_api.py deleted file mode 100644 index 3e719473..00000000 --- a/src/cbapi/psc/defense/rest_api.py +++ /dev/null @@ -1,194 +0,0 @@ -from cbapi.utils import convert_query_params -from cbapi.query import PaginatedQuery - -from cbapi.psc.rest_api import CbPSCBaseAPI -import logging -import time - -log = logging.getLogger(__name__) - - -def convert_to_kv_pairs(q): - k, v = q.split(':', 1) - return k, v - - -class CbDefenseAPI(CbPSCBaseAPI): - """THIS SDK IS DEPRECATED FOR CARBON BLACK CLOUD - - Please see - `Carbon Black Cloud Python SDK on the Developer Network `_ - for details on the replacement Carbon Black Cloud Python SDK. - - The main entry point into the Carbon Black Cloud Endpoint Standard Defense API. - - :param str profile: (optional) Use the credentials in the named profile when connecting to the Carbon Black server. - Uses the profile named 'default' when not specified. - - Usage:: - - >>> from cbapi import CbDefenseAPI - >>> cb = CbDefenseAPI(profile="production") - """ - - def __init__(self, *args, **kwargs): - super(CbDefenseAPI, self).__init__(*args, **kwargs) - - def _perform_query(self, cls, query_string=None): - return Query(cls, self, query_string) - - def notification_listener(self, interval=60): - """Generator to continually poll the Cloud Endpoint Standard server for notifications (alerts). Note that - this can only be used with a 'SIEM' key generated in the Carbon Black Cloud console. - """ - while True: - for notification in self.get_notifications(): - yield notification - time.sleep(interval) - - def get_notifications(self): - """DEPRECATED: Retrieve queued notifications (alerts) from the Cloud Endpoint Standard server. Note that this can only be - used with a 'SIEM' key generated in the Carbon Black Cloud console. - - :returns: list of dictionary objects representing the notifications, or an empty list if none available. - """ - res = self.get_object("/integrationServices/v3/notification") - return res.get("notifications", []) - - def get_auditlogs(self): - """DEPRECATED: Retrieve queued audit logs from the Carbon Black Cloud Endpoint Standard server. - Note that this can only be used with a 'API' key generated in the CBC console. - :returns: list of dictionary objects representing the audit logs, or an empty list if none available. - """ - res = self.get_object("/integrationServices/v3/auditlogs") - return res.get("notifications", []) - - -class Query(PaginatedQuery): - """Represents a prepared query to the Cloud Endpoint Standard server. - - This object is returned as part of a :py:meth:`CbDefenseAPI.select` - operation on models requested from the Cloud Endpoint Standard server. You should not have to create - this class yourself. - - The query is not executed on the server until it's accessed, either as an iterator (where it will generate values - on demand as they're requested) or as a list (where it will retrieve the entire result set and save to a list). - You can also call the Python built-in ``len()`` on this object to retrieve the total number of items matching - the query. - - Examples:: - - >>> from cbapi.psc.defense import CbDefenseAPI - >>> cb = CbDefenseAPI() - - Notes: - - The slicing operator only supports start and end parameters, but not step. ``[1:-1]`` is legal, but - ``[1:2:-1]`` is not. - - You can chain where clauses together to create AND queries; only objects that match all ``where`` clauses - will be returned. 
- """ - - def __init__(self, doc_class, cb, query=None): - super(Query, self).__init__(doc_class, cb, None) - if query: - self._query = [query] - else: - self._query = [] - - self._sort_by = None - self._group_by = None - self._batch_size = 100 - - def _clone(self): - nq = self.__class__(self._doc_class, self._cb) - nq._query = self._query[::] - nq._sort_by = self._sort_by - nq._group_by = self._group_by - nq._batch_size = self._batch_size - return nq - - def where(self, q): - """Add a filter to this query. - - :param str q: Query string - :return: Query object - :rtype: :py:class:`Query` - """ - nq = self._clone() - nq._query.append(q) - return nq - - def and_(self, q): - """Add a filter to this query. Equivalent to calling :py:meth:`where` on this object. - - :param str q: Query string - :return: Query object - :rtype: :py:class:`Query` - """ - return self.where(q) - - def prepare_query(self, args): - if self._query: - for qe in self._query: - k, v = convert_to_kv_pairs(qe) - args[k] = v - - return args - - def _count(self): - args = {'limit': 0} - args = self.prepare_query(args) - - query_args = convert_query_params(args) - self._total_results = int(self._cb.get_object(self._doc_class.urlobject, query_parameters=query_args) - .get("totalResults", 0)) - self._count_valid = True - return self._total_results - - def _search(self, start=0, rows=0): - # iterate over total result set, 1000 at a time - args = {} - if start != 0: - args['start'] = start - args['rows'] = self._batch_size - - current = start - numrows = 0 - - args = self.prepare_query(args) - still_querying = True - - while still_querying: - query_args = convert_query_params(args) - result = self._cb.get_object(self._doc_class.urlobject, query_parameters=query_args) - - self._total_results = result.get("totalResults", 0) - self._count_valid = True - - results = result.get('results', []) - - if results is None: - log.debug("Results are None") - if current >= 100000: - log.info("Max result size exceeded. Truncated to 100k.") - break - - for item in results: - yield item - current += 1 - numrows += 1 - if rows and numrows == rows: - still_querying = False - break - - args['start'] = current + 1 # as of 6/2017, the indexing on the Cb Defense backend is still 1-based - - if current >= self._total_results: - break - - if not results: - log.debug("server reported total_results overestimated the number of results for this query by {0}" - .format(self._total_results - current)) - log.debug("resetting total_results for this query to {0}".format(current)) - self._total_results = current - break diff --git a/src/cbapi/psc/devices_query.py b/src/cbapi/psc/devices_query.py index 6ddc431e..5a5e185d 100755 --- a/src/cbapi/psc/devices_query.py +++ b/src/cbapi/psc/devices_query.py @@ -64,7 +64,7 @@ def set_device_ids(self, device_ids): Restricts the devices that this query is performed on to the specified device IDs. 
- :param ad_group_ids: list of ints + :param device_ids: list of ints :return: This instance """ if not all(isinstance(device_id, int) for device_id in device_ids): diff --git a/src/cbapi/psc/models.py b/src/cbapi/psc/models.py index 50805d4b..321dac06 100755 --- a/src/cbapi/psc/models.py +++ b/src/cbapi/psc/models.py @@ -1,8 +1,6 @@ from cbapi.models import MutableBaseModel, UnrefreshableModel from cbapi.errors import ServerError from cbapi.psc.devices_query import DeviceSearchQuery -from cbapi.psc.alerts_query import BaseAlertSearchQuery, WatchlistAlertSearchQuery, \ - CBAnalyticsAlertSearchQuery, VMwareAlertSearchQuery from copy import deepcopy import logging @@ -130,17 +128,6 @@ def _refresh(self): self._last_refresh_time = time.time() return True - def lr_session(self): - """ - Retrieve a Live Response session object for this Device. - - :return: Live Response session object - :rtype: :py:class:`cbapi.defense.cblr.LiveResponseSession` - :raises ApiError: if there is an error establishing a Live Response session for this Device - - """ - return self._cb._request_lr_session(self._model_unique_id) - def background_scan(self, flag): """ Set the background scan option for this device. @@ -192,180 +179,3 @@ def update_sensor_version(self, sensor_version): :param dict sensor_version: New version properties for the sensor. """ return self._cb.device_update_sensor_version([self._model_unique_id], sensor_version) - - -class Workflow(UnrefreshableModel): - swagger_meta_file = "psc/models/workflow.yaml" - - def __init__(self, cb, initial_data=None): - super(Workflow, self).__init__(cb, model_unique_id=None, initial_data=initial_data) - - -class BaseAlert(PSCMutableModel): - urlobject = "/appservices/v6/orgs/{0}/alerts" - urlobject_single = "/appservices/v6/orgs/{0}/alerts/{1}" - primary_key = "id" - swagger_meta_file = "psc/models/base_alert.yaml" - - def __init__(self, cb, model_unique_id, initial_data=None): - super(BaseAlert, self).__init__(cb, model_unique_id, initial_data) - self._workflow = Workflow(cb, initial_data.get("workflow", None) if initial_data else None) - if model_unique_id is not None and initial_data is None: - self._refresh() - - @classmethod - def _query_implementation(cls, cb): - return BaseAlertSearchQuery(cls, cb) - - def _refresh(self): - url = self.urlobject_single.format(self._cb.credentials.org_key, self._model_unique_id) - resp = self._cb.get_object(url) - self._info = resp - self._workflow = Workflow(self._cb, resp.get("workflow", None)) - self._last_refresh_time = time.time() - return True - - @property - def workflow_(self): - return self._workflow - - def _update_workflow_status(self, state, remediation, comment): - """ - Update the workflow status of this alert. - - :param str state: The state to set for this alert, either "OPEN" or "DISMISSED". - :param remediation str: The remediation status to set for the alert. - :param comment str: The comment to set for the alert. - """ - request = {"state": state} - if remediation: - request["remediation_state"] = remediation - if comment: - request["comment"] = comment - url = self.urlobject_single.format(self._cb.credentials.org_key, - self._model_unique_id) + "/workflow" - resp = self._cb.post_object(url, request) - self._workflow = Workflow(self._cb, resp.json()) - self._last_refresh_time = time.time() - - def dismiss(self, remediation=None, comment=None): - """ - Dismiss this alert. - - :param remediation str: The remediation status to set for the alert. - :param comment str: The comment to set for the alert. 
- """ - self._update_workflow_status("DISMISSED", remediation, comment) - - def update(self, remediation=None, comment=None): - """ - Update this alert. - - :param remediation str: The remediation status to set for the alert. - :param comment str: The comment to set for the alert. - """ - self._update_workflow_status("OPEN", remediation, comment) - - def _update_threat_workflow_status(self, state, remediation, comment): - """ - Update the workflow status of all alerts with the same threat ID, past or future. - - :param str state: The state to set for this alert, either "OPEN" or "DISMISSED". - :param remediation str: The remediation status to set for the alert. - :param comment str: The comment to set for the alert. - """ - request = {"state": state} - if remediation: - request["remediation_state"] = remediation - if comment: - request["comment"] = comment - url = "/appservices/v6/orgs/{0}/threat/{1}/workflow".format(self._cb.credentials.org_key, - self.threat_id) - resp = self._cb.post_object(url, request) - return Workflow(self._cb, resp.json()) - - def dismiss_threat(self, remediation=None, comment=None): - """ - Dismiss alerts for this threat. - - :param remediation str: The remediation status to set for the alert. - :param comment str: The comment to set for the alert. - """ - return self._update_threat_workflow_status("DISMISSED", remediation, comment) - - def update_threat(self, remediation=None, comment=None): - """ - Update alerts for this threat. - - :param remediation str: The remediation status to set for the alert. - :param comment str: The comment to set for the alert. - """ - return self._update_threat_workflow_status("OPEN", remediation, comment) - - -class WatchlistAlert(BaseAlert): - urlobject = "/appservices/v6/orgs/{0}/alerts/watchlist" - - @classmethod - def _query_implementation(cls, cb): - return WatchlistAlertSearchQuery(cls, cb) - - -class CBAnalyticsAlert(BaseAlert): - urlobject = "/appservices/v6/orgs/{0}/alerts/cbanalytics" - - @classmethod - def _query_implementation(cls, cb): - return CBAnalyticsAlertSearchQuery(cls, cb) - - -class VMwareAlert(BaseAlert): - urlobject = "/appservices/v6/orgs/{0}/alerts/vmware" - - @classmethod - def _query_implementation(cls, cb): - return VMwareAlertSearchQuery(cls, cb) - - -class WorkflowStatus(PSCMutableModel): - urlobject_single = "/appservices/v6/orgs/{0}/workflow/status/{1}" - primary_key = "id" - swagger_meta_file = "psc/models/workflow_status.yaml" - - def __init__(self, cb, model_unique_id, initial_data=None): - super(WorkflowStatus, self).__init__(cb, model_unique_id, initial_data) - self._request_id = model_unique_id - self._workflow = None - if model_unique_id is not None: - self._refresh() - - def _refresh(self): - url = self.urlobject_single.format(self._cb.credentials.org_key, self._request_id) - resp = self._cb.get_object(url) - self._info = resp - self._workflow = Workflow(self._cb, resp.get("workflow", None)) - self._last_refresh_time = time.time() - return True - - @property - def id_(self): - return self._request_id - - @property - def workflow_(self): - return self._workflow - - @property - def queued(self): - self._refresh() - return self._info.get("status", "") == "QUEUED" - - @property - def in_progress(self): - self._refresh() - return self._info.get("status", "") == "IN_PROGRESS" - - @property - def finished(self): - self._refresh() - return self._info.get("status", "") == "FINISHED" diff --git a/src/cbapi/psc/models/base_alert.yaml b/src/cbapi/psc/models/base_alert.yaml deleted file mode 100755 index 
ffc0b4e0..00000000 --- a/src/cbapi/psc/models/base_alert.yaml +++ /dev/null @@ -1,139 +0,0 @@ -type: object -properties: - category: - type: string - description: Alert category - Monitored vs Threat - enum: - - THREAT - - MONITORED - - INFO - - MINOR - - SERIOUS - - CRITICAL - create_time: - type: string - format: date-time - description: Time the alert was created - device_id: - type: integer - format: int64 - description: ID of the device - device_name: - type: string - description: Device name - device_os: - type: string - description: Device OS - enum: - - WINDOWS - - ANDROID - - MAC - - IOS - - LINUX - - OTHER - device_os_version: - type: string - example: Windows 10 x64 - description: Device OS Version - device_username: - type: string - description: Logged on user during the alert. This is filled on a best-effort - approach. If the user is not available it may be populated with the device - owner - first_event_time: - type: string - format: date-time - description: Time of the first event in an alert - group_details: - description: Group details for when alert grouping is on - type: object - properties: - count: - type: integer - format: int64 - description: Number of times the event has occurred - total_devices: - type: integer - format: int64 - description: The number of devices that have seen this alert - id: - type: string - description: Unique ID for this alert - last_event_time: - type: string - format: date-time - description: Time of the last event in an alert - last_update_time: - type: string - format: date-time - description: Time the alert was last updated - legacy_alert_id: - type: string - description: Unique short ID for this alert. This is deprecated and only available - on alerts stored in the old schema. - notes_present: - type: boolean - description: Are notes present for this threatId - org_key: - type: string - example: ABCD1234 - description: Unique identifier for the organization to which the alert belongs - policy_id: - type: integer - format: int64 - description: ID of the policy the device was in at the time of the alert - policy_name: - type: string - description: Name of the policy the device was in at the time of the alert - severity: - type: integer - format: int32 - description: Threat ranking - tags: - type: array - description: Tags for the alert - items: - type: string - target_value: - type: string - description: Device priority as assigned via the policy - enum: - - LOW - - MEDIUM - - HIGH - - MISSION_CRITICAL - threat_id: - type: string - description: ID of the threat to which this alert belongs. Threats are comprised - of a combination of factors that can be repeated across devices. - type: - type: string - description: Type of the alert - enum: - - CB_ANALYTICS - - VMWARE - - WATCHLIST - workflow: - description: User-updatable status of the alert - type: object - properties: - changed_by: - type: string - description: Username of the user who changed the workflow - comment: - type: string - description: Comment when updating the workflow - last_update_time: - type: string - format: date-time - description: When the workflow was last updated - remediation: - type: string - description: Alert remediation code. 
Indicates the result of the investigation - into the alert - state: - type: string - description: State of the workflow - enum: - - OPEN - - DISMISSED diff --git a/src/cbapi/psc/models/workflow.yaml b/src/cbapi/psc/models/workflow.yaml deleted file mode 100755 index 8807a69f..00000000 --- a/src/cbapi/psc/models/workflow.yaml +++ /dev/null @@ -1,23 +0,0 @@ -type: object -description: Tracking system for alerts as they are triaged and resolved -properties: - changed_by: - type: string - description: Username of the user who changed the workflow - comment: - type: string - description: Comment when updating the workflow - last_update_time: - type: string - format: date-time - description: When the workflow was last updated - remediation: - type: string - description: Alert remediation code. Indicates the result of the investigation - into the alert - state: - type: string - description: State of the workflow - enum: - - OPEN - - DISMISSED diff --git a/src/cbapi/psc/models/workflow_status.yaml b/src/cbapi/psc/models/workflow_status.yaml deleted file mode 100755 index 202e8cb5..00000000 --- a/src/cbapi/psc/models/workflow_status.yaml +++ /dev/null @@ -1,56 +0,0 @@ -type: object -description: Dismiss status response for async calls -properties: - errors: - type: array - description: Errors for dismiss alerts or threats, if no errors it won't be - included in response - items: - type: string - failed_ids: - type: array - description: Failed ids - items: - type: string - id: - type: string - description: Time based id for async job, it's not unique across the orgs - num_hits: - type: integer - format: int64 - description: Total number of alerts to be operated on - num_success: - type: integer - format: int64 - description: Successfully operated number of alerts - status: - type: string - description: Status for the async progress - enum: - - QUEUED - - IN_PROGRESS - - FINISHED - workflow: - description: Requested workflow change - type: object - properties: - changed_by: - type: string - description: Username of the user who changed the workflow - comment: - type: string - description: Comment when updating the workflow - last_update_time: - type: string - format: date-time - description: When the workflow was last updated - remediation: - type: string - description: Alert remediation code. 
Indicates the result of the investigation - into the alert - state: - type: string - description: State of the workflow - enum: - - OPEN - - DISMISSED diff --git a/src/cbapi/psc/rest_api.py b/src/cbapi/psc/rest_api.py index aa12f794..ad7a68da 100755 --- a/src/cbapi/psc/rest_api.py +++ b/src/cbapi/psc/rest_api.py @@ -1,6 +1,5 @@ from cbapi.connection import BaseAPI from cbapi.errors import ApiError, ServerError -from .cblr import LiveResponseSessionManager import logging log = logging.getLogger(__name__) @@ -27,17 +26,6 @@ def _perform_query(self, cls, **kwargs): else: raise ApiError("All PSC models should provide _query_implementation") - # ---- LiveOps - - @property - def live_response(self): - if self._lr_scheduler is None: - self._lr_scheduler = LiveResponseSessionManager(self) - return self._lr_scheduler - - def _request_lr_session(self, sensor_id): - return self.live_response.request_session(sensor_id) - # ---- Device API def _raw_device_action(self, request): diff --git a/src/cbapi/psc/threathunter/__init__.py b/src/cbapi/psc/threathunter/__init__.py deleted file mode 100644 index d1b0e338..00000000 --- a/src/cbapi/psc/threathunter/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# Exported public API for the Cb ThreatHunter API - -from __future__ import absolute_import - -from .rest_api import CbThreatHunterAPI -from cbapi.psc.threathunter.models import ( - Process, Event, Tree, Feed, Report, IOC, IOC_V2, Watchlist, Binary, Downloads -) -from cbapi.psc.threathunter.query import QueryBuilder diff --git a/src/cbapi/psc/threathunter/models.py b/src/cbapi/psc/threathunter/models.py deleted file mode 100644 index 1ac3f31d..00000000 --- a/src/cbapi/psc/threathunter/models.py +++ /dev/null @@ -1,1117 +0,0 @@ -from __future__ import absolute_import -from cbapi.errors import ApiError, InvalidObjectError -from cbapi.models import CreatableModelMixin, MutableBaseModel, UnrefreshableModel -import logging -from cbapi.psc.threathunter.query import Query, AsyncProcessQuery, TreeQuery, FeedQuery, ReportQuery, WatchlistQuery -import validators -import time - -log = logging.getLogger(__name__) - - -class FeedModel(UnrefreshableModel, CreatableModelMixin, MutableBaseModel): - """A common base class for models used by the Feed and Watchlist APIs. - """ - pass - - -class Process(UnrefreshableModel): - """Represents a process retrieved by one of the CbTH endpoints. - """ - default_sort = 'last_update desc' - primary_key = "process_guid" - validation_url = "/api/investigate/v1/orgs/{}/processes/search_validation" - - class Summary(UnrefreshableModel): - """Represents a summary of organization-specific information for - a process. - """ - default_sort = "last_update desc" - primary_key = "process_guid" - urlobject_single = "/api/investigate/v1/orgs/{}/processes/summary" - - def __init__(self, cb, model_unique_id): - url = self.urlobject_single.format(cb.credentials.org_key) - summary = cb.get_object(url, query_parameters={"process_guid": model_unique_id}) - - while summary["incomplete_results"]: - log.debug("summary incomplete, requesting again") - summary = self._cb.get_object( - url, query_parameters={"process_guid": self.process_guid} - ) - - super(Process.Summary, self).__init__(cb, model_unique_id=model_unique_id, - initial_data=summary, force_init=False, - full_doc=True) - - @classmethod - def _query_implementation(cls, cb): - # This will emulate a synchronous process query, for now. 
- return AsyncProcessQuery(cls, cb) - - def __init__(self, cb, model_unique_id=None, initial_data=None, force_init=False, full_doc=True): - super(Process, self).__init__(cb, model_unique_id=model_unique_id, initial_data=initial_data, - force_init=force_init, full_doc=full_doc) - - @property - def summary(self): - """Returns organization-specific information about this process. - """ - return self._cb.select(Process.Summary, self.process_guid) - - def events(self, **kwargs): - """Returns a query for events associated with this process's process GUID. - - :param kwargs: Arguments to filter the event query with. - :return: Returns a Query object with the appropriate search parameters for events - :rtype: :py:class:`cbapi.psc.threathunter.query.Query` - - Example:: - - >>> [print(event) for event in process.events()] - >>> [print(event) for event in process.events(event_type="modload")] - """ - query = self._cb.select(Event).where(process_guid=self.process_guid) - - if kwargs: - query = query.and_(**kwargs) - - return query - - def tree(self): - """Returns a :py:class:`Tree` of children (and possibly siblings) - associated with this process. - - :return: Returns a :py:class:`Tree` object - :rtype: :py:class:`Tree` - - Example: - - >>> tree = process.tree() - """ - data = self._cb.select(Tree).where(process_guid=self.process_guid).all() - return Tree(self._cb, initial_data=data) - - @property - def parents(self): - """Returns a query for parent processes associated with this process. - - :return: Returns a Query object with the appropriate search parameters for parent processes, - or None if the process has no recorded parent - :rtype: :py:class:`cbapi.psc.threathunter.query.AsyncProcessQuery` or None - """ - if "parent_guid" in self._info: - return self._cb.select(Process).where(process_guid=self.parent_guid) - else: - return [] - - @property - def children(self): - """Returns a list of child processes for this process. - - :return: Returns a list of process objects - :rtype: list of :py:class:`Process` - """ - if isinstance(self.summary.children, list): - return [ - Process(self._cb, initial_data=child) - for child in self.summary.children - ] - else: - return [] - - @property - def siblings(self): - """Returns a list of sibling processes for this process. - - :return: Returns a list of process objects - :rtype: list of :py:class:`Process` - """ - return [ - Process(self._cb, initial_data=sibling) - for sibling in self.summary.siblings - ] - - @property - def process_md5(self): - """Returns a string representation of the MD5 hash for this process. - - :return: A string representation of the process's MD5. - :rtype: str - """ - # NOTE: We have to check _info instead of poking the attribute directly - # to avoid the missing attrbute login in NewBaseModel. - if "process_hash" in self._info: - return next((hsh for hsh in self.process_hash if len(hsh) == 32), None) - else: - return None - - @property - def process_sha256(self): - """Returns a string representation of the SHA256 hash for this process. - - :return: A string representation of the process's SHA256. - :rtype: str - """ - if "process_hash" in self._info: - return next((hsh for hsh in self.process_hash if len(hsh) == 64), None) - else: - return None - - @property - def process_pids(self): - """Returns a list of PIDs associated with this process. - - :return: A list of PIDs - :rtype: list of ints - """ - # NOTE(ww): This exists because the API returns the list as "process_pid", - # which is misleading. 
We just give a slightly clearer name. - return self.process_pid - - -class Event(UnrefreshableModel): - """Events can be queried for via ``CbThreatHunterAPI.select`` - or though an already selected process with ``Process.events()``. - """ - urlobject = '/api/investigate/v2/orgs/{}/events/{}/_search' - validation_url = '/api/investigate/v1/orgs/{}/events/search_validation' - default_sort = 'last_update desc' - primary_key = "process_guid" - - @classmethod - def _query_implementation(cls, cb): - return Query(cls, cb) - - def __init__(self, cb, model_unique_id=None, initial_data=None, force_init=False, full_doc=True): - super(Event, self).__init__(cb, model_unique_id=model_unique_id, initial_data=initial_data, - force_init=force_init, full_doc=full_doc) - - -class Tree(UnrefreshableModel): - """The preferred interface for interacting with Tree models - is ``Process.tree()``. - """ - urlobject = '/threathunter/search/v1/orgs/{}/processes/tree' - primary_key = 'process_guid' - - @classmethod - def _query_implementation(cls, cb): - return TreeQuery(cls, cb) - - def __init__(self, cb, model_unique_id=None, initial_data=None, force_init=False, full_doc=True): - super(Tree, self).__init__( - cb, model_unique_id=model_unique_id, initial_data=initial_data, - force_init=force_init, full_doc=full_doc - ) - - @property - def children(self): - """Returns all of the children of the process that this tree is centered around. - - :return: A list of :py:class:`Process` instances - :rtype: list of :py:class:`Process` - """ - return [Process(self._cb, initial_data=child) for child in self.nodes["children"]] - - -class Feed(FeedModel): - """Represents a ThreatHunter feed's metadata. - """ - urlobject = "/threathunter/feedmgr/v2/orgs/{}/feeds" - urlobject_single = "/threathunter/feedmgr/v2/orgs/{}/feeds/{}" - primary_key = "id" - swagger_meta_file = "psc/threathunter/models/feed.yaml" - - @classmethod - def _query_implementation(cls, cb): - return FeedQuery(cls, cb) - - def __init__(self, cb, model_unique_id=None, initial_data=None): - item = {} - reports = [] - - if initial_data: - # NOTE(ww): Some endpoints give us the full Feed, others give us just the FeedInfo. - if "feedinfo" in initial_data: - item = initial_data["feedinfo"] - reports = initial_data.get("reports", []) - else: - item = initial_data - elif model_unique_id: - url = self.urlobject_single.format( - cb.credentials.org_key, model_unique_id - ) - resp = cb.get_object(url) - item = resp.get("feedinfo", {}) - reports = resp.get("reports", []) - - feed_id = item.get("id") - - super(Feed, self).__init__(cb, model_unique_id=feed_id, initial_data=item, - force_init=False, full_doc=True) - - self._reports = [Report(cb, initial_data=report, feed_id=feed_id) for report in reports] - - def save(self, public=False): - """Saves this feed on the Enterprise EDR server. - - :param public: Whether to make the feed publicly available - :return: The saved feed - :rtype: :py:class:`Feed` - """ - self.validate() - - body = { - 'feedinfo': self._info, - 'reports': [report._info for report in self._reports], - } - - url = "/threathunter/feedmgr/v2/orgs/{}/feeds".format( - self._cb.credentials.org_key - ) - if public: - url = url + "/public" - - new_info = self._cb.post_object(url, body).json() - self._info.update(new_info) - return self - - def validate(self): - """Validates this feed's state. 
- - :raise InvalidObjectError: if the feed's state is invalid - """ - super(Feed, self).validate() - - if self.access not in ["public", "private"]: - raise InvalidObjectError("access should be public or private") - - if not validators.url(self.provider_url): - raise InvalidObjectError("provider_url should be a valid URL") - - for report in self._reports: - report.validate() - - def delete(self): - """Deletes this feed from the Enterprise EDR server. - - :raise InvalidObjectError: if `id` is missing - """ - if not self.id: - raise InvalidObjectError("missing feed ID") - - url = "/threathunter/feedmgr/v2/orgs/{}/feeds/{}".format( - self._cb.credentials.org_key, - self.id - ) - self._cb.delete_object(url) - - def update(self, **kwargs): - """Update this feed's metadata with the given arguments. - - >>> feed.update(access="private") - - :param kwargs: The fields to update - :type kwargs: dict(str, str) - :raise InvalidObjectError: if `id` is missing or :py:meth:`validate` fails - :raise ApiError: if an invalid field is specified - """ - if not self.id: - raise InvalidObjectError("missing feed ID") - - for key, value in kwargs.items(): - if key in self._info: - self._info[key] = value - - self.validate() - - url = "/threathunter/feedmgr/v2/orgs/{}/feeds/{}/feedinfo".format( - self._cb.credentials.org_key, - self.id, - ) - new_info = self._cb.put_object(url, self._info).json() - self._info.update(new_info) - - return self - - @property - def reports(self): - """Returns a list of :py:class:`Report` associated with this feed. - - :return: a list of reports - :rtype: list(:py:class:`Report`) - """ - return self._cb.select(Report).where(feed_id=self.id) - - def replace_reports(self, reports): - """Replace this feed's reports with the given reports. - - :param reports: the reports to replace with - :type reports: list(:py:class:`Report`) - :raise InvalidObjectError: if `id` is missing - """ - if not self.id: - raise InvalidObjectError("missing feed ID") - - rep_dicts = [report._info for report in reports] - body = {"reports": rep_dicts} - - url = "/threathunter/feedmgr/v2/orgs/{}/feeds/{}/reports".format( - self._cb.credentials.org_key, - self.id - ) - self._cb.post_object(url, body) - - def append_reports(self, reports): - """Append the given reports to this feed's current reports. - - :param reports: the reports to append - :type reports: list(:py:class:`Report`) - :raise InvalidObjectError: if `id` is missing - """ - if not self.id: - raise InvalidObjectError("missing feed ID") - - rep_dicts = [report._info for report in reports] - rep_dicts += [report._info for report in self.reports] - body = {"reports": rep_dicts} - - url = "/threathunter/feedmgr/v2/orgs/{}/feeds/{}/reports".format( - self._cb.credentials.org_key, - self.id - ) - self._cb.post_object(url, body) - - -class Report(FeedModel): - """Represents reports retrieved from a ThreatHunter feed. 
- """ - urlobject = "/threathunter/feedmgr/v2/orgs/{}/feeds/{}/reports" - primary_key = "id" - swagger_meta_file = "psc/threathunter/models/report.yaml" - - @classmethod - def _query_implementation(cls, cb): - return ReportQuery(cls, cb) - - def __init__(self, cb, model_unique_id=None, initial_data=None, - feed_id=None, from_watchlist=False): - - super(Report, self).__init__(cb, model_unique_id=initial_data.get("id"), - initial_data=initial_data, - force_init=False, full_doc=True) - - # NOTE(ww): Warn instead of failing since we allow Watchlist reports - # to be created via create(), but we don't actually know that the user - # intends to use them with a watchlist until they call save(). - if not feed_id and not from_watchlist: - log.warning("Report created without feed ID or not from watchlist") - - self._feed_id = feed_id - self._from_watchlist = from_watchlist - - if self.iocs: - self._iocs = IOC(cb, initial_data=self.iocs, report_id=self.id) - if self.iocs_v2: - self._iocs_v2 = [IOC_V2(cb, initial_data=ioc, report_id=self.id) for ioc in self.iocs_v2] - - def save_watchlist(self): - """Saves this report *as a watchlist report*. - - .. NOTE:: - This method **cannot** be used to save a feed report. To - save feed reports, create them with `cb.create` and use - :py:meth:`Feed.replace`. - - :raise InvalidObjectError: if :py:meth:`validate` fails - """ - self.validate() - - # NOTE(ww): Once saved, this object corresponds to a watchlist report. - # As such, we need to tell the model to route calls like update() - # and delete() to the correct (watchlist) endpoints. - self._from_watchlist = True - - url = "/threathunter/watchlistmgr/v3/orgs/{}/reports".format( - self._cb.credentials.org_key - ) - new_info = self._cb.post_object(url, self._info).json() - self._info.update(new_info) - return self - - def validate(self): - """Validates this report's state. - - :raise InvalidObjectError: if the report's state is invalid - """ - super(Report, self).validate() - - if self.link and not validators.url(self.link): - raise InvalidObjectError("link should be a valid URL") - - if self.iocs_v2: - [ioc.validate() for ioc in self._iocs_v2] - - def update(self, **kwargs): - """Update this report with the given arguments. - - .. NOTE:: - The report's timestamp is always updated, regardless of whether - passed explicitly. - - >>> report.update(title="My new report title") - - :param kwargs: The fields to update - :type kwargs: dict(str, str) - :return: The updated report - :rtype: :py:class:`Report` - :raises InvalidObjectError: if `id` is missing, or `feed_id` is missing - and this report is a feed report, or :py:meth:`validate` fails - """ - - if not self.id: - raise InvalidObjectError("missing Report ID") - - if self._from_watchlist: - url = "/threathunter/watchlistmgr/v3/orgs/{}/reports/{}".format( - self._cb.credentials.org_key, - self.id - ) - else: - if not self._feed_id: - raise InvalidObjectError("missing Feed ID") - url = "/threathunter/feedmgr/v2/orgs/{}/feeds/{}/reports/{}".format( - self._cb.credentials.org_key, - self._feed_id, - self.id - ) - - for key, value in kwargs.items(): - if key in self._info: - self._info[key] = value - - # NOTE(ww): Updating reports on the watchlist API appears to require - # updated timestamps. - self.timestamp = int(time.time()) - self.validate() - - new_info = self._cb.put_object(url, self._info).json() - self._info.update(new_info) - return self - - def delete(self): - """Deletes this report from the Enterprise EDR server. 
- - >>> report.delete() - - :raises InvalidObjectError: if `id` is missing, or `feed_id` is missing - and this report is a feed report - """ - if not self.id: - raise InvalidObjectError("missing Report ID") - - if self._from_watchlist: - url = "/threathunter/watchlistmgr/v3/orgs/{}/reports/{}".format( - self._cb.credentials.org_key, - self.id - ) - else: - if not self._feed_id: - raise InvalidObjectError("missing Feed ID") - url = "/threathunter/feedmgr/v2/orgs/{}/feeds/{}/reports/{}".format( - self._cb.credentials.org_key, - self._feed_id, - self.id - ) - - self._cb.delete_object(url) - - @property - def ignored(self): - """Returns the ignore status for this report. - - Only watchlist reports have an ignore status. - - >>> if report.ignored: - ... report.unignore() - - :return: whether or not this report is ignored - :rtype: bool - :raises InvalidObjectError: if `id` is missing or this report is not from a watchlist - """ - if not self.id: - raise InvalidObjectError("missing Report ID") - if not self._from_watchlist: - raise InvalidObjectError("ignore status only applies to watchlist reports") - - url = "/threathunter/watchlistmgr/v3/orgs/{}/reports/{}/ignore".format( - self._cb.credentials.org_key, - self.id - ) - resp = self._cb.get_object(url) - return resp["ignored"] - - def ignore(self): - """Sets the ignore status on this report. - - Only watchlist reports have an ignore status. - - :raises InvalidObjectError: if `id` is missing or this report is not from a watchlist - """ - if not self.id: - raise InvalidObjectError("missing Report ID") - - if not self._from_watchlist: - raise InvalidObjectError("ignoring only applies to watchlist reports") - - url = "/threathunter/watchlistmgr/v3/orgs/{}/reports/{}/ignore".format( - self._cb.credentials.org_key, - self.id - ) - self._cb.put_object(url, None) - - def unignore(self): - """Removes the ignore status on this report. - - Only watchlist reports have an ignore status. - - :raises InvalidObjectError: if `id` is missing or this report is not from a watchlist - """ - if not self.id: - raise InvalidObjectError("missing Report ID") - - if not self._from_watchlist: - raise InvalidObjectError("ignoring only applies to watchlist reports") - - url = "/threathunter/watchlistmgr/v3/orgs/{}/reports/{}/ignore".format( - self._cb.credentials.org_key, - self.id - ) - self._cb.delete_object(url) - - @property - def custom_severity(self): - """Returns the custom severity for this report. 
- - :return: The custom severity for this report, if it exists - :rtype: :py:class:`ReportSeverity` - :raise InvalidObjectError: if `id` is missing or this report is from a watchlist - """ - if not self.id: - raise InvalidObjectError("missing report ID") - if self._from_watchlist: - raise InvalidObjectError("watchlist reports don't have custom severities") - - url = "/threathunter/watchlistmgr/v3/orgs/{}/reports/{}/severity".format( - self._cb.credentials.org_key, - self.id - ) - resp = self._cb.get_object(url) - return ReportSeverity(self._cb, initial_data=resp) - - @custom_severity.setter - def custom_severity(self, sev_level): - """Sets or removed the custom severity for this report - - :param int sev_level: the new severity, or None to remove the custom severity - :return: The new custom severity, or None if removed - :rtype: :py:class:`ReportSeverity` or None - :raise InvalidObjectError: if `id` is missing or this report is from a watchlist - """ - if not self.id: - raise InvalidObjectError("missing report ID") - if self._from_watchlist: - raise InvalidObjectError("watchlist reports don't have custom severities") - - url = "/threathunter/watchlistmgr/v3/orgs/{}/reports/{}/severity".format( - self._cb.credentials.org_key, - self.id - ) - - if sev_level is None: - self._cb.delete_object(url) - return - - args = { - "report_id": self.id, - "severity": sev_level, - } - - resp = self._cb.put_object(url, args).json() - return ReportSeverity(self._cb, initial_data=resp) - - @property - def iocs_(self): - """Returns a list of :py:class:`IOC_V2` associated with this report. - - >>> for ioc in report.iocs_: - ... print(ioc.values) - - :return: a list of IOCs - :rtype: list(:py:class:`IOC_V2`) - """ - if not self.iocs_v2: - return [] - - # NOTE(ww): This name is underscored because something in the model - # hierarchy is messing up method resolution -- self.iocs and self.iocs_v2 - # are resolving to the attributes rather than the attribute-ified - # methods. - return self._iocs_v2 - - -class IOC(FeedModel): - """Represents a collection of categorized IOCs. - """ - swagger_meta_file = "psc/threathunter/models/iocs.yaml" - - def __init__(self, cb, model_unique_id=None, initial_data=None, report_id=None): - """Creates a new IOC instance. - - :raise ApiError: if `initial_data` is `None` - """ - if not initial_data: - raise ApiError("IOC can only be initialized from initial_data") - - super(IOC, self).__init__(cb, model_unique_id=model_unique_id, initial_data=initial_data, - force_init=False, full_doc=True) - - self._report_id = report_id - - def validate(self): - """Validates this IOC structure's state. - - :raise InvalidObjectError: if the IOC structure's state is invalid - """ - super(IOC, self).validate() - - for md5 in self.md5: - if not validators(md5): - raise InvalidObjectError("invalid MD5 checksum: {}".format(md5)) - for ipv4 in self.ipv4: - if not validators(ipv4): - raise InvalidObjectError("invalid IPv4 address: {}".format(ipv4)) - for ipv6 in self.ipv6: - if not validators(ipv6): - raise InvalidObjectError("invalid IPv6 address: {}".format(ipv6)) - for dns in self.dns: - if not validators(dns): - raise InvalidObjectError("invalid domain: {}".format(dns)) - for query in self.query: - if not self._cb.validate(query["search_query"]): - raise InvalidObjectError("invalid search query: {}".format(query["search_query"])) - - -class IOC_V2(FeedModel): - """Represents a collection of IOCs of a particular type, plus matching criteria and metadata. 
- """ - primary_key = "id" - swagger_meta_file = "psc/threathunter/models/ioc_v2.yaml" - - def __init__(self, cb, model_unique_id=None, initial_data=None, report_id=None): - """Creates a new IOC_V2 instance. - - :raise ApiError: if `initial_data` is `None` - """ - if not initial_data: - raise ApiError("IOC_V2 can only be initialized from initial_data") - - super(IOC_V2, self).__init__(cb, model_unique_id=initial_data.get(self.primary_key), - initial_data=initial_data, force_init=False, - full_doc=True) - - self._report_id = report_id - - def validate(self): - """Validates this IOC_V2's state. - - :raise InvalidObjectError: if the IOC_V2's state is invalid - """ - super(IOC_V2, self).validate() - - if self.link and not validators.url(self.link): - raise InvalidObjectError("link should be a valid URL") - - @property - def ignored(self): - """Returns whether or not this IOC is ignored - - >>> if ioc.ignored: - ... ioc.unignore() - - :return: the ignore status - :rtype: bool - :raise InvalidObjectError: if this IOC is missing an `id` or is not a watchlist IOC - """ - if not self.id: - raise InvalidObjectError("missing IOC ID") - if not self._report_id: - raise InvalidObjectError("ignore status only applies to watchlist IOCs") - - url = "/threathunter/watchlistmgr/v3/orgs/{}/reports/{}/iocs/{}/ignore".format( - self._cb.credentials.org_key, - self._report_id, - self.id - ) - resp = self._cb.get_object(url) - return resp["ignored"] - - def ignore(self): - """Sets the ignore status on this IOC. - - Only watchlist IOCs have an ignore status. - - :raises InvalidObjectError: if `id` is missing or this IOC is not from a watchlist - """ - if not self.id: - raise InvalidObjectError("missing Report ID") - if not self._report_id: - raise InvalidObjectError("ignoring only applies to watchlist IOCs") - - url = "/threathunter/watchlistmgr/v3/orgs/{}/reports/{}/iocs/{}/ignore".format( - self._cb.credentials.org_key, - self._report_id, - self.id - ) - self._cb.put_object(url, None) - - def unignore(self): - """Removes the ignore status on this IOC. - - Only watchlist IOCs have an ignore status. - - :raises InvalidObjectError: if `id` is missing or this IOC is not from a watchlist - """ - if not self.id: - raise InvalidObjectError("missing Report ID") - if not self._report_id: - raise InvalidObjectError("ignoring only applies to watchlist IOCs") - - url = "/threathunter/watchlistmgr/v3/orgs/{}/reports/{}/iocs/{}/ignore".format( - self._cb.credentials.org_key, - self._report_id, - self.id - ) - self._cb.delete_object(url) - - -class Watchlist(FeedModel): - """Represents a ThreatHunter watchlist. - """ - # NOTE(ww): Not documented. - urlobject = "/threathunter/watchlistmgr/v2/watchlist" - urlobject_single = "/threathunter/watchlistmgr/v2/watchlist/{}" - swagger_meta_file = "psc/threathunter/models/watchlist.yaml" - - @classmethod - def _query_implementation(cls, cb): - return WatchlistQuery(cls, cb) - - def __init__(self, cb, model_unique_id=None, initial_data=None): - item = {} - - if initial_data: - item = initial_data - elif model_unique_id: - item = cb.get_object(self.urlobject_single.format(model_unique_id)) - - feed_id = item.get("id") - - super(Watchlist, self).__init__(cb, model_unique_id=feed_id, initial_data=item, - force_init=False, full_doc=True) - - def save(self): - """Saves this watchlist on the Enterprise EDR server. 
- - :return: The saved watchlist - :rtype: :py:class:`Watchlist` - :raise InvalidObjectError: if :py:meth:`validate` fails - """ - self.validate() - - url = "/threathunter/watchlistmgr/v3/orgs/{}/watchlists".format( - self._cb.credentials.org_key - ) - new_info = self._cb.post_object(url, self._info).json() - self._info.update(new_info) - return self - - def validate(self): - """Validates this watchlist's state. - - :raise InvalidObjectError: if the watchlist's state is invalid - """ - super(Watchlist, self).validate() - - def update(self, **kwargs): - """Updates this watchlist with the given arguments. - - >>> watchlist.update(name="New Name") - - :param kwargs: The fields to update - :type kwargs: dict(str, str) - :raise InvalidObjectError: if `id` is missing or :py:meth:`validate` fails - :raise ApiError: if `report_ids` is given *and* is empty - """ - if not self.id: - raise InvalidObjectError("missing Watchlist ID") - - # NOTE(ww): Special case, according to the docs. - if "report_ids" in kwargs and not kwargs["report_ids"]: - raise ApiError("can't update a watchlist to have an empty report list") - - for key, value in kwargs.items(): - if key in self._info: - self._info[key] = value - - self.validate() - - url = "/threathunter/watchlistmgr/v3/orgs/{}/watchlists/{}".format( - self._cb.credentials.org_key, - self.id - ) - new_info = self._cb.put_object(url, self._info).json() - self._info.update(new_info) - - @property - def classifier_(self): - """Returns the classifier key and value, if any, for this watchlist. - - :rtype: tuple(str, str) or None - """ - classifier_dict = self._info.get("classifier") - - if not classifier_dict: - return None - - return (classifier_dict["key"], classifier_dict["value"]) - - def delete(self): - """Deletes this watchlist from the Enterprise EDR server. - - :raise InvalidObjectError: if `id` is missing - """ - if not self.id: - raise InvalidObjectError("missing Watchlist ID") - - url = "/threathunter/watchlistmgr/v3/orgs/{}/watchlists/{}".format( - self._cb.credentials.org_key, - self.id - ) - self._cb.delete_object(url) - - def enable_alerts(self): - """Enable alerts for this watchlist. Alerts are not retroactive. - - :raise InvalidObjectError: if `id` is missing - """ - if not self.id: - raise InvalidObjectError("missing Watchlist ID") - - url = "/threathunter/watchlistmgr/v3/orgs/{}/watchlists/{}/alert".format( - self._cb.credentials.org_key, - self.id - ) - self._cb.put_object(url, None) - - def disable_alerts(self): - """Disable alerts for this watchlist. - - :raise InvalidObjectError: if `id` is missing - """ - if not self.id: - raise InvalidObjectError("missing Watchlist ID") - - url = "/threathunter/watchlistmgr/v3/orgs/{}/watchlists/{}/alert".format( - self._cb.credentials.org_key, - self.id - ) - self._cb.delete_object(url) - - def enable_tags(self): - """Enable tagging for this watchlist. - - :raise InvalidObjectError: if `id` is missing - """ - if not self.id: - raise InvalidObjectError("missing Watchlist ID") - - url = "/threathunter/watchlistmgr/v3/orgs/{}/watchlists/{}/tag".format( - self._cb.credentials.org_key, - self.id - ) - self._cb.put_object(url, None) - - def disable_tags(self): - """Disable tagging for this watchlist. 
- - :raise InvalidObjectError: if `id` is missing - """ - if not self.id: - raise InvalidObjectError("missing Watchlist ID") - - url = "/threathunter/watchlistmgr/v3/orgs/{}/watchlists/{}/tag".format( - self._cb.credentials.org_key, - self.id - ) - self._cb.delete_object(url) - - @property - def feed(self): - """Returns the feed linked to this watchlist, if there is one. - - :return: the feed linked to this watchlist, if any - :rtype: :py:class:`Feed` or None - """ - if not self.classifier: - return None - if self.classifier["key"] != "feed_id": - log.warning("Unexpected classifier type: {}".format(self.classifier["key"])) - return None - - return self._cb.select(Feed, self.classifier["value"]) - - @property - def reports(self): - """Returns a list of :py:class:`Report` instances associated with this watchlist. - - .. NOTE:: - If this watchlist is a classifier (i.e. feed-linked) watchlist, - `reports` will be empty. To get the reports associated with the linked - feed, use :py:attr:`feed` like: - - >>> for report in watchlist.feed.reports: - ... print(report.title) - - :return: A list of reports - :rtype: list(:py:class:`Report`) - """ - if not self.report_ids: - return [] - - url = "/threathunter/watchlistmgr/v3/orgs/{}/reports/{}" - reports_ = [] - for rep_id in self.report_ids: - path = url.format(self._cb.credentials.org_key, rep_id) - resp = self._cb.get_object(path) - reports_.append(Report(self._cb, initial_data=resp, from_watchlist=True)) - - return reports_ - - -class ReportSeverity(FeedModel): - """Represents severity information for a watchlist report. - """ - primary_key = "report_id" - swagger_meta_file = "psc/threathunter/models/report_severity.yaml" - - def __init__(self, cb, initial_data=None): - if not initial_data: - raise ApiError("ReportSeverity can only be initialized from initial_data") - - super(ReportSeverity, self).__init__(cb, model_unique_id=initial_data.get(self.primary_key), - initial_data=initial_data, force_init=False, - full_doc=True) - - -class Binary(UnrefreshableModel): - """Represents a retrievable binary. - """ - primary_key = "sha256" - swagger_meta_file = "psc/threathunter/models/binary.yaml" - urlobject_single = "/ubs/v1/orgs/{}/sha256/{}/metadata" - - class Summary(UnrefreshableModel): - """Represents a summary of organization-specific information - for a retrievable binary. - """ - primary_key = "sha256" - urlobject_single = "/ubs/v1/orgs/{}/sha256/{}/summary/device" - - def __init__(self, cb, model_unique_id): - if not validators.sha256(model_unique_id): - raise ApiError("model_unique_id must be a valid SHA256") - - url = self.urlobject_single.format(cb.credentials.org_key, model_unique_id) - item = cb.get_object(url) - - super(Binary.Summary, self).__init__(cb, model_unique_id=model_unique_id, - initial_data=item, force_init=False, - full_doc=True) - - def __init__(self, cb, model_unique_id): - if not validators.sha256(model_unique_id): - raise ApiError("model_unique_id must be a valid SHA256") - - url = self.urlobject_single.format(cb.credentials.org_key, model_unique_id) - item = cb.get_object(url) - - super(Binary, self).__init__(cb, model_unique_id=model_unique_id, - initial_data=item, force_init=False, - full_doc=True) - - @property - def summary(self): - """Returns organization-specific information about this binary. - """ - return self._cb.select(Binary.Summary, self.sha256) - - @property - def download_url(self, expiration_seconds=3600): - """Returns a URL that can be used to download the file - for this binary. 
Returns None if no download can be found. - - :param expiration_seconds: How long the download should be valid for - :raise InvalidObjectError: if URL retrieval should be retried - :return: A pre-signed AWS download URL - :rtype: str - """ - downloads = self._cb.select(Downloads, [self.sha256], - expiration_seconds=expiration_seconds) - - if self.sha256 in downloads.not_found: - return None - elif self.sha256 in downloads.error: - raise InvalidObjectError("{} should be retried".format(self.sha256)) - else: - return next((item.url - for item in downloads.found - if self.sha256 == item.sha256), None) - - -class Downloads(UnrefreshableModel): - """Represents download information for a list of process hashes. - """ - urlobject = "/ubs/v1/orgs/{}/file/_download" - - class FoundItem(UnrefreshableModel): - """Represents the download URL and process hash for a successfully - located binary. - """ - primary_key = "sha256" - - def __init__(self, cb, item): - super(Downloads.FoundItem, self).__init__(cb, model_unique_id=item["sha256"], - initial_data=item, force_init=False, - full_doc=True) - - def __init__(self, cb, shas, expiration_seconds=3600): - body = { - "sha256": shas, - "expiration_seconds": expiration_seconds, - } - - url = self.urlobject.format(cb.credentials.org_key) - item = cb.post_object(url, body).json() - - super(Downloads, self).__init__(cb, model_unique_id=None, - initial_data=item, force_init=False, - full_doc=True) - - @property - def found(self): - """Returns a list of :py:class:`Downloads.FoundItem`, one - for each binary found in the binary store. - """ - return [Downloads.FoundItem(self._cb, item) for item in self._info["found"]] diff --git a/src/cbapi/psc/threathunter/models/binary.yaml b/src/cbapi/psc/threathunter/models/binary.yaml deleted file mode 100644 index c19d7b14..00000000 --- a/src/cbapi/psc/threathunter/models/binary.yaml +++ /dev/null @@ -1,79 +0,0 @@ -type: object -required: - - sha256 - - md5 - - file_available - - available_file_size - - file_size - - os_type - - architecture -properties: - sha256: - type: string - description: The SHA-256 hash of the file - md5: - type: string - description: The MD5 hash of the file - file_available: - type: boolean - description: If true, the file is available for download - available_file_size: - type: integer - format: int64 # NOTE(ww): docs say long integer - description: The size of the file available for download - file_size: - type: integer - format: int64 - description: The size of the actual file (represented by the hash) - os_type: - type: string - description: The OS that this file is designed for - architecture: - type: array - items: - type: string - description: The set of architectures that this file was compiled for - lang_id: - type: integer - format: int32 # NOTE(ww): Swagger doesn't have a (u)int16 - description: The Language ID value for the Windows VERSIONINFO resource - charset_id: - type: integer - format: int32 - description: The Character set ID value for the Windows VERSIONINFO resource - internal_name: - type: string - description: The internal name from FileVersionInformation - product_name: - type: string - description: The product name from FileVersionInformation - company_name: - type: string - description: The company name from FileVersionInformation - trademark: - type: string - description: The trademark from FileVersionInformation - file_description: - type: string - description: The file description from FileVersionInformation - file_version: - type: string - description: The file version 
from FileVersionInformation - comments: - type: string - description: Comments from FileVersionInformation - original_filename: - type: string - description: The original filename from FileVersionInformation - product_description: - type: string - description: The product description from FileVersionInformation - product_version: - type: string - description: The product version from FileVersionInformation - private_build: - type: string - description: The private build from FileVersionInformation - special_build: - type: string - description: The special build from FileVersionInformation diff --git a/src/cbapi/psc/threathunter/models/feed.yaml b/src/cbapi/psc/threathunter/models/feed.yaml deleted file mode 100644 index 27c0e4f4..00000000 --- a/src/cbapi/psc/threathunter/models/feed.yaml +++ /dev/null @@ -1,33 +0,0 @@ -type: object -required: - - name - - owner - - provider_url - - summary - - category - - access -properties: - name: - type: string - description: A human-friendly name for this feed - owner: - type: string - description: The feed owner's connector ID - provider_url: - type: string - description: A URL supplied by the feed's provider - summary: - type: string - description: A human-friendly summary for the feed - category: - type: string - description: The feed's category - source_label: - type: string - description: The feed's source label - access: - type: string - description: The feed's access (public or private) - id: - type: string - description: The feed's unique ID diff --git a/src/cbapi/psc/threathunter/models/ioc_v2.yaml b/src/cbapi/psc/threathunter/models/ioc_v2.yaml deleted file mode 100644 index a65f50cb..00000000 --- a/src/cbapi/psc/threathunter/models/ioc_v2.yaml +++ /dev/null @@ -1,23 +0,0 @@ -type: object -required: - - id - - match_type - - values -properties: - id: - type: string - description: The IOC_V2's unique ID - match_type: - type: string - description: How IOCs in this IOC_V2 are matched - values: - type: array - items: - type: string - description: A list of IOCs - field: - type: string - description: The kind of IOCs contained in this IOC_V2 - link: - type: string - description: A URL for some reference for this IOC_V2 diff --git a/src/cbapi/psc/threathunter/models/iocs.yaml b/src/cbapi/psc/threathunter/models/iocs.yaml deleted file mode 100644 index 274cb7a8..00000000 --- a/src/cbapi/psc/threathunter/models/iocs.yaml +++ /dev/null @@ -1,32 +0,0 @@ -type: object -properties: - md5: - type: array - items: - type: string - description: A list of MD5 checksums - ipv4: - type: array - items: - type: string - description: A list of IPv4 addresses - ipv6: - type: array - items: - type: string - description: A list of IPv6 addresses - dns: - type: array - items: - type: string - description: A list of domain names - query: - type: array - items: - type: object # QueryIOC - properties: - index_type: - type: string - search_query: - type: string - description: A list of dicts, each containing an IOC query diff --git a/src/cbapi/psc/threathunter/models/report.yaml b/src/cbapi/psc/threathunter/models/report.yaml deleted file mode 100644 index 6ef3bad1..00000000 --- a/src/cbapi/psc/threathunter/models/report.yaml +++ /dev/null @@ -1,45 +0,0 @@ -type: object -required: - - id - - timestamp - - title - - description - - severity -properties: - id: - type: string - description: The report's unique ID - timestamp: - type: integer - format: int32 - description: When this report was created - title: - type: string - description: A human-friendly title for this 
report - description: - type: string - description: A human-friendly description for this report - severity: - type: integer - format: int32 - description: The severity of the IOCs within this report - link: - type: string - description: A URL for some reference for this report - tags: - type: array - items: - type: string - description: A list of tags for this report - iocs: - type: object - # NOTE(ww): Explicitly not documented, since we do almost everything - # through IOC_V2 - iocs_v2: - type: array - items: - type: object - description: A list of IOC_V2 dicts associated with this report - visibility: - type: string - description: The visibility of this report diff --git a/src/cbapi/psc/threathunter/models/report_severity.yaml b/src/cbapi/psc/threathunter/models/report_severity.yaml deleted file mode 100644 index da53e10e..00000000 --- a/src/cbapi/psc/threathunter/models/report_severity.yaml +++ /dev/null @@ -1,12 +0,0 @@ -type: object -required: - - report_id - - severity -properties: - report_id: - type: string - description: The unique ID for the corresponding report - severity: - type: integer - format: int32 - description: The severity level diff --git a/src/cbapi/psc/threathunter/models/watchlist.yaml b/src/cbapi/psc/threathunter/models/watchlist.yaml deleted file mode 100644 index 34990997..00000000 --- a/src/cbapi/psc/threathunter/models/watchlist.yaml +++ /dev/null @@ -1,43 +0,0 @@ -type: object -required: - - name - - description - - create_timestamp - - last_update_timestamp -properties: - name: - type: string - description: A human-friendly name for the watchlist - description: - type: string - description: A short description of the watchlist - id: - type: string - description: The watchlist's unique id - tags_enabled: - type: boolean - description: Whether tags are currently enabled - alerts_enabled: - type: boolean - description: Whether alerts are currently enabled - create_timestamp: - type: integer - format: int32 - description: When this watchlist was created - last_update_timestamp: - type: integer - format: int32 - description: Report IDs associated with this watchlist - report_ids: - type: array - items: - type: string - description: Report IDs associated with this watchlist - classifier: - type: object # ClassifierKeyValue - properties: - key: - type: string - value: - type: string - description: A key, value pair specifying an associated feed diff --git a/src/cbapi/psc/threathunter/query.py b/src/cbapi/psc/threathunter/query.py deleted file mode 100644 index ae2b1516..00000000 --- a/src/cbapi/psc/threathunter/query.py +++ /dev/null @@ -1,654 +0,0 @@ -from cbapi.query import PaginatedQuery, BaseQuery, SimpleQuery -from cbapi.errors import ApiError, TimeoutError -import time -from solrq import Q -from six import string_types -import logging -import functools - - -log = logging.getLogger(__name__) - - -class QueryBuilder(object): - """ - Provides a flexible interface for building prepared queries for the Carbon Black - Enterprise EDR backend. - - This object can be instantiated directly, or can be managed implicitly - through the :py:meth:`CbThreatHunterAPI.select` API. 
- - Examples:: - - >>> from cbapi.psc.threathunter import QueryBuilder - >>> # build a query with chaining - >>> query = QueryBuilder().where(process_name="malicious.exe").and_(device_name="suspect") - >>> # start with an initial query, and chain another condition to it - >>> query = QueryBuilder(device_os="WINDOWS").or_(process_username="root") - - """ - def __init__(self, **kwargs): - if kwargs: - self._query = Q(**kwargs) - else: - self._query = None - self._raw_query = None - self._process_guid = None - - def _guard_query_params(func): - """Decorates the query construction methods of *QueryBuilder*, preventing - them from being called with parameters that would result in an intetnally - inconsistent query. - """ - @functools.wraps(func) - def wrap_guard_query_change(self, q, **kwargs): - if self._raw_query is not None and (kwargs or isinstance(q, Q)): - raise ApiError("Cannot modify a raw query with structured parameters") - if self._query is not None and isinstance(q, string_types): - raise ApiError("Cannot modify a structured query with a raw parameter") - return func(self, q, **kwargs) - return wrap_guard_query_change - - @_guard_query_params - def where(self, q, **kwargs): - """Adds a conjunctive filter to a query. - - :param q: string or `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: QueryBuilder object - :rtype: :py:class:`QueryBuilder` - """ - if isinstance(q, string_types): - if self._raw_query is None: - self._raw_query = [] - self._raw_query.append(q) - elif isinstance(q, Q) or kwargs: - if self._query is not None: - raise ApiError("Use .and_() or .or_() for an extant solrq.Q object") - if kwargs: - self._process_guid = self._process_guid or kwargs.get("process_guid") - q = Q(**kwargs) - self._query = q - else: - raise ApiError(".where() only accepts strings or solrq.Q objects") - - return self - - @_guard_query_params - def and_(self, q, **kwargs): - """Adds a conjunctive filter to a query. - - :param q: string or `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: QueryBuilder object - :rtype: :py:class:`QueryBuilder` - """ - if isinstance(q, string_types): - self.where(q) - elif isinstance(q, Q) or kwargs: - if kwargs: - self._process_guid = self._process_guid or kwargs.get("process_guid") - q = Q(**kwargs) - if self._query is None: - self._query = q - else: - self._query = self._query & q - else: - raise ApiError(".and_() only accepts strings or solrq.Q objects") - - return self - - @_guard_query_params - def or_(self, q, **kwargs): - """Adds a disjunctive filter to a query. - - :param q: `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: QueryBuilder object - :rtype: :py:class:`QueryBuilder` - """ - if kwargs: - self._process_guid = self._process_guid or kwargs.get("process_guid") - q = Q(**kwargs) - - if isinstance(q, Q): - if self._query is None: - self._query = q - else: - self._query = self._query | q - else: - raise ApiError(".or_() only accepts solrq.Q objects") - - return self - - @_guard_query_params - def not_(self, q, **kwargs): - """Adds a negative filter to a query. 
- - :param q: `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: QueryBuilder object - :rtype: :py:class:`QueryBuilder` - """ - if kwargs: - q = ~ Q(**kwargs) - - if isinstance(q, Q): - if self._query is None: - self._query = q - else: - self._query = self._query & q - else: - raise ApiError(".not_() only accepts solrq.Q objects") - - def _collapse(self): - """The query can be represented by either an array of strings - (_raw_query) which is concatenated and passed directly to Solr, or - a solrq.Q object (_query) which is then converted into a string to - pass to Solr. This function will perform the appropriate conversions to - end up with the 'q' string sent into the POST request to the - PSC-R query endpoint.""" - if self._raw_query is not None: - return " ".join(self._raw_query) - elif self._query is not None: - return str(self._query) - else: - return "*:*" # return everything - - -class Query(PaginatedQuery): - """Represents a prepared query to the Carbon Black Enterprise EDR backend. - - This object is returned as part of a :py:meth:`CbThreatHunterPI.select` - operation on models requested from the Enterprise EDR backend. You should not have to create this class yourself. - - The query is not executed on the server until it's accessed, either as an iterator (where it will generate values - on demand as they're requested) or as a list (where it will retrieve the entire result set and save to a list). - You can also call the Python built-in ``len()`` on this object to retrieve the total number of items matching - the query. - - Examples:: - - >>> from cbapi.psc.threathunter import CbThreatHunterAPI,Process - >>> cb = CbThreatHunterAPI() - >>> query = cb.select(Process) - >>> query = query.where(process_name="notepad.exe") - >>> # alternatively: - >>> query = query.where("process_name:notepad.exe") - - Notes: - - The slicing operator only supports start and end parameters, but not step. ``[1:-1]`` is legal, but - ``[1:2:-1]`` is not. - - You can chain where clauses together to create AND queries; only objects that match all ``where`` clauses - will be returned. - """ - - def __init__(self, doc_class, cb): - super(Query, self).__init__(doc_class, cb, None) - - self._query_builder = QueryBuilder() - self._sort_by = None - self._group_by = None - self._batch_size = 100 - self._default_args = {} - - def where(self, q=None, **kwargs): - """Add a filter to this query. - - :param q: Query string, :py:class:`QueryBuilder`, or `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: Query object - :rtype: :py:class:`Query` - """ - if not q and not kwargs: - raise ApiError(".where() expects a string, a QueryBuilder, a solrq.Q, or kwargs") - - if isinstance(q, QueryBuilder): - self._query_builder = q - else: - self._query_builder.where(q, **kwargs) - return self - - def and_(self, q=None, **kwargs): - """Add a conjunctive filter to this query. - - :param q: Query string or `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: Query object - :rtype: :py:class:`Query` - """ - if not q and not kwargs: - raise ApiError(".and_() expects a string, a solrq.Q, or kwargs") - - self._query_builder.and_(q, **kwargs) - return self - - def or_(self, q=None, **kwargs): - """Add a disjunctive filter to this query. 
- - :param q: `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: Query object - :rtype: :py:class:`Query` - """ - if not q and not kwargs: - raise ApiError(".or_() expects a solrq.Q or kwargs") - - self._query_builder.or_(q, **kwargs) - return self - - def not_(self, q=None, **kwargs): - """Adds a negated filter to this query. - - :param q: `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: Query object - :rtype: :py:class:`Query` - """ - - if not q and not kwargs: - raise ApiError(".not_() expects a solrq.Q, or kwargs") - - self._query_builder.not_(q, **kwargs) - return self - - def _get_query_parameters(self): - args = self._default_args.copy() - args['query'] = self._query_builder._collapse() - if self._query_builder._process_guid is not None: - args["process_guid"] = self._query_builder._process_guid - args["fields"] = [ - "*", - "parent_hash", - "parent_name", - "process_cmdline", - "backend_timestamp", - "device_external_ip", - "device_group", - "device_internal_ip", - "device_os", - "device_policy", - "process_effective_reputation", - "process_reputation", - "process_start_time", - "ttp" - ] - - return args - - def _count(self): - args = self._get_query_parameters() - - log.debug("args: {}".format(str(args))) - - result = self._cb.post_object( - self._doc_class.urlobject.format( - self._cb.credentials.org_key, - args["process_guid"] - ), body=args - ).json() - - self._total_results = int(result.get('num_available', 0)) - self._count_valid = True - - return self._total_results - - def _validate(self, args): - if not self._doc_class.validation_url: - return - - url = self._doc_class.validation_url.format(self._cb.credentials.org_key) - - if args.get('query', False): - args['q'] = args['query'] - - # v2 search sort key does not work with v1 validation - args.pop('sort', None) - - validated = self._cb.get_object(url, query_parameters=args) - - if not validated.get("valid"): - raise ApiError("Invalid query: {}: {}".format(args, validated["invalid_message"])) - - def _search(self, start=0, rows=0): - # iterate over total result set, 100 at a time - args = self._get_query_parameters() - self._validate(args) - - if start != 0: - args['start'] = start - args['rows'] = self._batch_size - - # args = {"search_params": args} - - current = start - numrows = 0 - - still_querying = True - - while still_querying: - url = self._doc_class.urlobject.format( - self._cb.credentials.org_key, - args["process_guid"] - ) - resp = self._cb.post_object(url, body=args) - result = resp.json() - - self._total_results = result.get("num_available", 0) - self._total_segments = result.get("total_segments", 0) - self._processed_segments = result.get("processed_segments", 0) - self._count_valid = True - - results = result.get('results', []) - - for item in results: - yield item - current += 1 - numrows += 1 - if rows and numrows == rows: - still_querying = False - break - - args['start'] = current + 1 # as of 6/2017, the indexing on the Cb Defense backend is still 1-based - - if current >= self._total_results: - break - if not results: - log.debug("server reported total_results overestimated the number of results for this query by {0}" - .format(self._total_results - current)) - log.debug("resetting total_results for this query to {0}".format(current)) - self._total_results = current - break - - -class AsyncProcessQuery(Query): - """Represents the query logic for an asychronous Process query. 
- - This class specializes :py:class:`Query` to handle the particulars of - process querying. - """ - def __init__(self, doc_class, cb): - super(AsyncProcessQuery, self).__init__(doc_class, cb) - self._query_token = None - self._timeout = 0 - self._timed_out = False - self._sort = [] - - def sort_by(self, key, direction="ASC"): - """Sets the sorting behavior on a query's results. - - Example:: - - >>> cb.select(Process).where(process_name="cmd.exe").sort_by("device_timestamp") - - :param key: the key in the schema to sort by - :param direction: the sort order, either "ASC" or "DESC" - :rtype: :py:class:`AsyncProcessQuery` - """ - found = False - - for sort_item in self._sort: - if sort_item['field'] == key: - sort_item['order'] = direction - found = True - - if not found: - self._sort.append({'field': key, 'order': direction}) - - self._default_args['sort'] = self._sort - - return self - - def timeout(self, msecs): - """Sets the timeout on a process query. - - Example:: - - >>> cb.select(Process).where(process_name="foo.exe").timeout(5000) - - :param: msecs: the timeout duration, in milliseconds - :return: AsyncProcessQuery object - :rtype: :py:class:`AsyncProcessQuery` - """ - self._timeout = msecs - return self - - def _submit(self): - if self._query_token: - raise ApiError("Query already submitted: token {0}".format(self._query_token)) - - args = self._get_query_parameters() - args['rows'] = 10000 - self._validate(args) - - url = "/api/investigate/v2/orgs/{}/processes/search_jobs".format(self._cb.credentials.org_key) - query_start = self._cb.post_object(url, body=args) - - self._query_token = query_start.json().get("job_id") - - self._timed_out = False - self._submit_time = time.time() * 1000 - - def _still_querying(self): - if not self._query_token: - self._submit() - - status_url = "/api/investigate/v1/orgs/{}/processes/search_jobs/{}".format( - self._cb.credentials.org_key, - self._query_token, - ) - result = self._cb.get_object(status_url) - - searchers_contacted = result.get("contacted", 0) - searchers_completed = result.get("completed", 0) - log.debug("contacted = {}, completed = {}".format(searchers_contacted, searchers_completed)) - if searchers_contacted == 0: - return True - if searchers_completed < searchers_contacted: - if self._timeout != 0 and (time.time() * 1000) - self._submit_time > self._timeout: - self._timed_out = True - return False - return True - - return False - - def _count(self): - if self._count_valid: - return self._total_results - - while self._still_querying(): - time.sleep(.5) - - if self._timed_out: - raise TimeoutError(message="user-specified timeout exceeded while waiting for results") - - result_url = "/api/investigate/v2/orgs/{}/processes/search_jobs/{}/results".format( - self._cb.credentials.org_key, - self._query_token, - ) - result = self._cb.get_object(result_url) - - self._total_results = result.get('num_available', 0) - self._count_valid = True - - return self._total_results - - def _search(self, start=0, rows=0): - if not self._query_token: - self._submit() - - while self._still_querying(): - time.sleep(.5) - - if self._timed_out: - raise TimeoutError(message="user-specified timeout exceeded while waiting for results") - - log.debug("Pulling results, timed_out={}".format(self._timed_out)) - - current = start - rows_fetched = 0 - still_fetching = True - result_url_template = "/api/investigate/v2/orgs/{}/processes/search_jobs/{}/results".format( - self._cb.credentials.org_key, - self._query_token - ) - query_parameters = {} - while 
still_fetching: - result_url = '{}?start={}&rows={}'.format( - result_url_template, - current, - 10 # Batch gets to reduce API calls - ) - - result = self._cb.get_object(result_url, query_parameters=query_parameters) - - self._total_results = result.get('num_available', 0) - self._count_valid = True - - results = result.get('results', []) - - for item in results: - yield item - current += 1 - rows_fetched += 1 - - if rows and rows_fetched >= rows: - still_fetching = False - break - - if current >= self._total_results: - still_fetching = False - - log.debug("current: {}, total_results: {}".format(current, self._total_results)) - - -class TreeQuery(BaseQuery): - """ Represents the logic for a Tree query. - """ - def __init__(self, doc_class, cb): - super(TreeQuery, self).__init__() - self._doc_class = doc_class - self._cb = cb - self._args = {} - - def where(self, **kwargs): - """Adds a conjunctive filter to this *TreeQuery*. - - Example:: - - >>> cb.select(Tree).where(process_guid="...") - - :param: kwargs: Arguments to invoke the *TreeQuery* with. - :return: this *TreeQuery* - :rtype: :py:class:`TreeQuery` - """ - self._args = dict(self._args, **kwargs) - return self - - def and_(self, **kwargs): - """Adds a conjunctive filter to this *TreeQuery*. - - :param: kwargs: Arguments to invoke the *TreeQuery* with. - :return: this *TreeQuery* - :rtype: :py:class:`TreeQuery` - """ - self.where(**kwargs) - return self - - def or_(self, **kwargs): - """Unsupported. Will raise if called. - - :raise: :py:class:`ApiError` - """ - raise ApiError(".or_() cannot be called on Tree queries") - - def _perform_query(self): - if "process_guid" not in self._args: - raise ApiError("required parameter process_guid missing") - - log.debug("Fetching process tree") - - url = self._doc_class.urlobject.format(self._cb.credentials.org_key) - results = self._cb.get_object(url, query_parameters=self._args) - - while results["incomplete_results"]: - result = self._cb.get_object(url, query_parameters=self._args) - results["nodes"]["children"].extend(result["nodes"]["children"]) - results["incomplete_results"] = result["incomplete_results"] - - return results - - -class FeedQuery(SimpleQuery): - """Represents the logic for a :py:class:`Feed` query. - - >>> cb.select(Feed) - >>> cb.select(Feed, id) - >>> cb.select(Feed).where(include_public=True) - """ - def __init__(self, doc_class, cb): - super(FeedQuery, self).__init__(doc_class, cb) - self._args = {} - - def where(self, **kwargs): - self._args = dict(self._args, **kwargs) - return self - - @property - def results(self): - log.debug("Fetching all feeds") - url = self._doc_class.urlobject.format(self._cb.credentials.org_key) - resp = self._cb.get_object(url, query_parameters=self._args) - results = resp.get("results", []) - return [self._doc_class(self._cb, initial_data=item) for item in results] - - -class ReportQuery(SimpleQuery): - """Represents the logic for a :py:class:`Report` query. - - >>> cb.select(Report).where(feed_id=id) - - .. NOTE:: - Only feed reports can be queried. Watchlist reports - should be interacted with via :py:meth:`Watchlist.reports`. 
- """ - def __init__(self, doc_class, cb): - super(ReportQuery, self).__init__(doc_class, cb) - self._args = {} - - def where(self, **kwargs): - self._args = dict(self._args, **kwargs) - return self - - @property - def results(self): - if "feed_id" not in self._args: - raise ApiError("required parameter feed_id missing") - - feed_id = self._args["feed_id"] - - log.debug("Fetching all reports") - url = self._doc_class.urlobject.format( - self._cb.credentials.org_key, - feed_id, - ) - resp = self._cb.get_object(url) - results = resp.get("results", []) - return [self._doc_class(self._cb, initial_data=item, feed_id=feed_id) for item in results] - - -class WatchlistQuery(SimpleQuery): - """Represents the logic for a :py:class:`Watchlist` query. - - >>> cb.select(Watchlist) - """ - def __init__(self, doc_class, cb): - super(WatchlistQuery, self).__init__(doc_class, cb) - - @property - def results(self): - log.debug("Fetching all watchlists") - - resp = self._cb.get_object(self._doc_class.urlobject) - results = resp.get("results", []) - return [self._doc_class(self._cb, initial_data=item) for item in results] diff --git a/src/cbapi/psc/threathunter/rest_api.py b/src/cbapi/psc/threathunter/rest_api.py deleted file mode 100644 index b163d6ba..00000000 --- a/src/cbapi/psc/threathunter/rest_api.py +++ /dev/null @@ -1,115 +0,0 @@ -from cbapi.psc.threathunter.query import Query -from cbapi.psc.rest_api import CbPSCBaseAPI -from cbapi.psc.threathunter.models import ReportSeverity -from cbapi.errors import CredentialError -import logging - -log = logging.getLogger(__name__) - - -class CbThreatHunterAPI(CbPSCBaseAPI): - """The main entry point into the Carbon Black Cloud Enterprise EDR API. - - :param str profile: (optional) Use the credentials in the named profile when connecting to the Carbon Black server. - Uses the profile named 'default' when not specified. - - Usage:: - - >>> from cbapi.psc.threathunter import CbThreatHunterAPI - >>> cb = CbThreatHunterAPI(profile="production") - """ - def __init__(self, *args, **kwargs): - super(CbThreatHunterAPI, self).__init__(*args, **kwargs) - - if not self.credentials.get("org_key", None): - raise CredentialError("No organization key specified") - - def _perform_query(self, cls, **kwargs): - if hasattr(cls, "_query_implementation"): - return cls._query_implementation(self) - else: - return Query(cls, self, **kwargs) - - def create(self, cls, data=None): - """Creates a new model. - - >>> feed = cb.create(Feed, feed_data) - - :param cls: The model being created - :param data: The data to pre-populate the model with - :type data: dict(str, object) - :return: an instance of `cls` - """ - return cls(self, initial_data=data) - - def validate_query(self, query): - """Validates the given IOC query. - - >>> cb.validate_query("process_name:chrome.exe") # True - - :param str query: the query to validate - :return: whether or not the query is valid - :rtype: bool - """ - args = {"q": query} - url = "/threathunter/search/v1/orgs/{}/processes/search_validation".format( - self.credentials.org_key - ) - resp = self.get_object(url, query_parameters=args) - - return resp.get("valid", False) - - def convert_query(self, query): - """Converts a legacy Carbon Black EDR query to an Enterprise EDR query. 
- - :param str query: the query to convert - :return: the converted query - :rtype: str - """ - args = {"query": query} - resp = self.post_object("/threathunter/feedmgr/v2/query/translate", args).json() - - return resp.get("query") - - @property - def custom_severities(self): - """Returns a list of active :py:class:`ReportSeverity` instances - - :rtype: list[:py:class:`ReportSeverity`] - """ - # TODO(ww): There's probably a better place to put this. - url = "/threathunter/watchlistmgr/v3/orgs/{}/reports/severity".format( - self.credentials.org_key - ) - resp = self.get_object(url) - items = resp.get("results", []) - return [self.create(ReportSeverity, item) for item in items] - - def queries(self): - """Retrieves a list of queries, active or complete, known by - the Enterprise EDR server. - - :return: a list of query ids - :rtype: list(str) - """ - url = "/threathunter/search/v1/orgs/{}/processes/search_jobs".format( - self.credentials.org_key - ) - ids = self.get_object(url) - return ids.get("query_ids", []) - - def limits(self): - """Returns a dictionary containing API limiting information. - - Example: - - >>> cb.limits() - {u'status_code': 200, u'time_bounds': {u'upper': 1545335070095, u'lower': 1542779216139}} - - :return: a dict of limiting information - :rtype: dict(str, str) - """ - url = "/threathunter/search/v1/orgs/{}/processes/limits".format( - self.credentials.org_key - ) - return self.get_object(url) diff --git a/test/cbapi/psc/test_alertsv6_api.py b/test/cbapi/psc/test_alertsv6_api.py deleted file mode 100755 index c787bba5..00000000 --- a/test/cbapi/psc/test_alertsv6_api.py +++ /dev/null @@ -1,535 +0,0 @@ -import pytest -from cbapi.errors import ApiError -from cbapi.psc.models import BaseAlert, CBAnalyticsAlert, VMwareAlert, WatchlistAlert, WorkflowStatus -from cbapi.psc.rest_api import CbPSCBaseAPI -from test.cbtest import StubResponse, patch_cbapi - - -def test_query_basealert_with_all_bells_and_whistles(monkeypatch): - _was_called = False - - def _run_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/_search" - assert body == {"query": "Blort", - "criteria": {"category": ["SERIOUS", "CRITICAL"], "device_id": [6023], "device_name": ["HAL"], - "device_os": ["LINUX"], "device_os_version": ["0.1.2"], - "device_username": ["JRN"], "group_results": True, "id": ["S0L0"], - "legacy_alert_id": ["S0L0_1"], "minimum_severity": 6, "policy_id": [8675309], - "policy_name": ["Strict"], "process_name": ["IEXPLORE.EXE"], - "process_sha256": ["0123456789ABCDEF0123456789ABCDEF"], - "reputation": ["SUSPECT_MALWARE"], "tag": ["Frood"], "target_value": ["HIGH"], - "threat_id": ["B0RG"], "type": ["WATCHLIST"], "workflow": ["OPEN"]}, - "sort": [{"field": "name", "order": "DESC"}]} - _was_called = True - return StubResponse({"results": [{"id": "S0L0", "org_key": "Z100", "threat_id": "B0RG", - "workflow": {"state": "OPEN"}}], "num_found": 1}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_query) - query = api.select(BaseAlert).where("Blort").set_categories(["SERIOUS", "CRITICAL"]).set_device_ids([6023]) \ - .set_device_names(["HAL"]).set_device_os(["LINUX"]).set_device_os_versions(["0.1.2"]) \ - .set_device_username(["JRN"]).set_group_results(True).set_alert_ids(["S0L0"]) \ - .set_legacy_alert_ids(["S0L0_1"]).set_minimum_severity(6).set_policy_ids([8675309]) \ - .set_policy_names(["Strict"]).set_process_names(["IEXPLORE.EXE"]) \ - 
.set_process_sha256(["0123456789ABCDEF0123456789ABCDEF"]).set_reputations(["SUSPECT_MALWARE"]) \ - .set_tags(["Frood"]).set_target_priorities(["HIGH"]).set_threat_ids(["B0RG"]).set_types(["WATCHLIST"]) \ - .set_workflows(["OPEN"]).sort_by("name", "DESC") - a = query.one() - assert _was_called - assert a.id == "S0L0" - assert a.org_key == "Z100" - assert a.threat_id == "B0RG" - assert a.workflow_.state == "OPEN" - - -def test_query_basealert_with_create_time_as_start_end(monkeypatch): - _was_called = False - - def _run_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/_search" - assert body == {"query": "Blort", - "criteria": {"create_time": {"start": "2019-09-30T12:34:56", "end": "2019-10-01T12:00:12"}}} - _was_called = True - return StubResponse({"results": [{"id": "S0L0", "org_key": "Z100", "threat_id": "B0RG", - "workflow": {"state": "OPEN"}}], "num_found": 1}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_query) - query = api.select(BaseAlert).where("Blort").set_create_time(start="2019-09-30T12:34:56", - end="2019-10-01T12:00:12") - a = query.one() - assert _was_called - assert a.id == "S0L0" - assert a.org_key == "Z100" - assert a.threat_id == "B0RG" - assert a.workflow_.state == "OPEN" - - -def test_query_basealert_with_create_time_as_range(monkeypatch): - _was_called = False - - def _run_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/_search" - assert body == {"query": "Blort", "criteria": {"create_time": {"range": "-3w"}}} - _was_called = True - return StubResponse({"results": [{"id": "S0L0", "org_key": "Z100", "threat_id": "B0RG", - "workflow": {"state": "OPEN"}}], "num_found": 1}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_query) - query = api.select(BaseAlert).where("Blort").set_create_time(range="-3w") - a = query.one() - assert _was_called - assert a.id == "S0L0" - assert a.org_key == "Z100" - assert a.threat_id == "B0RG" - assert a.workflow_.state == "OPEN" - - -def test_query_basealert_facets(monkeypatch): - _was_called = False - - def _run_facet_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/_facet" - assert body["query"] == "Blort" - t = body["criteria"] - assert t["workflow"] == ["OPEN"] - t = body["terms"] - assert t["rows"] == 0 - assert t["fields"] == ["REPUTATION", "STATUS"] - _was_called = True - return StubResponse({"results": [{"field": {}, - "values": [{"id": "reputation", "name": "reputationX", "total": 4}]}, - {"field": {}, - "values": [{"id": "status", "name": "statusX", "total": 9}]}]}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_facet_query) - query = api.select(BaseAlert).where("Blort").set_workflows(["OPEN"]) - f = query.facets(["REPUTATION", "STATUS"]) - assert _was_called - assert f == [{"field": {}, "values": [{"id": "reputation", "name": "reputationX", "total": 4}]}, - {"field": {}, "values": [{"id": "status", "name": "statusX", "total": 9}]}] - - -def test_query_basealert_invalid_create_time_combinations(): - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - with pytest.raises(ApiError): - api.select(BaseAlert).set_create_time() - with 
pytest.raises(ApiError): - api.select(BaseAlert).set_create_time(start="2019-09-30T12:34:56", - end="2019-10-01T12:00:12", range="-3w") - with pytest.raises(ApiError): - api.select(BaseAlert).set_create_time(start="2019-09-30T12:34:56", range="-3w") - with pytest.raises(ApiError): - api.select(BaseAlert).set_create_time(end="2019-10-01T12:00:12", range="-3w") - - -def test_query_basealert_invalid_criteria_values(): - tests = [ - {"method": "set_categories", "arg": ["DOUBLE_DARE"]}, - {"method": "set_device_ids", "arg": ["Bogus"]}, - {"method": "set_device_names", "arg": [42]}, - {"method": "set_device_os", "arg": ["TI994A"]}, - {"method": "set_device_os_versions", "arg": [8808]}, - {"method": "set_device_username", "arg": [-1]}, - {"method": "set_alert_ids", "arg": [9001]}, - {"method": "set_legacy_alert_ids", "arg": [9001]}, - {"method": "set_policy_ids", "arg": ["Bogus"]}, - {"method": "set_policy_names", "arg": [323]}, - {"method": "set_process_names", "arg": [7071]}, - {"method": "set_process_sha256", "arg": [123456789]}, - {"method": "set_reputations", "arg": ["MICROSOFT_FUDWARE"]}, - {"method": "set_tags", "arg": [-1]}, - {"method": "set_target_priorities", "arg": ["DOGWASH"]}, - {"method": "set_threat_ids", "arg": [4096]}, - {"method": "set_types", "arg": ["ERBOSOFT"]}, - {"method": "set_workflows", "arg": ["IN_LIMBO"]}, - ] - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - query = api.select(BaseAlert) - for t in tests: - meth = getattr(query, t["method"], None) - with pytest.raises(ApiError): - meth(t["arg"]) - - -def test_query_cbanalyticsalert_with_all_bells_and_whistles(monkeypatch): - _was_called = False - - def _run_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/cbanalytics/_search" - assert body == {"query": "Blort", - "criteria": {"category": ["SERIOUS", "CRITICAL"], "device_id": [6023], "device_name": ["HAL"], - "device_os": ["LINUX"], "device_os_version": ["0.1.2"], - "device_username": ["JRN"], "group_results": True, "id": ["S0L0"], - "legacy_alert_id": ["S0L0_1"], "minimum_severity": 6, "policy_id": [8675309], - "policy_name": ["Strict"], "process_name": ["IEXPLORE.EXE"], - "process_sha256": ["0123456789ABCDEF0123456789ABCDEF"], - "reputation": ["SUSPECT_MALWARE"], "tag": ["Frood"], "target_value": ["HIGH"], - "threat_id": ["B0RG"], "type": ["WATCHLIST"], "workflow": ["OPEN"], - "blocked_threat_category": ["RISKY_PROGRAM"], "device_location": ["ONSITE"], - "kill_chain_status": ["EXECUTE_GOAL"], - "not_blocked_threat_category": ["NEW_MALWARE"], "policy_applied": ["APPLIED"], - "reason_code": ["ATTACK_VECTOR"], "run_state": ["RAN"], "sensor_action": ["DENY"], - "threat_cause_vector": ["WEB"]}, "sort": [{"field": "name", "order": "DESC"}]} - _was_called = True - return StubResponse({"results": [{"id": "S0L0", "org_key": "Z100", "threat_id": "B0RG", - "workflow": {"state": "OPEN"}}], "num_found": 1}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_query) - query = api.select(CBAnalyticsAlert).where("Blort").set_categories(["SERIOUS", "CRITICAL"]) \ - .set_device_ids([6023]).set_device_names(["HAL"]).set_device_os(["LINUX"]).set_device_os_versions(["0.1.2"]) \ - .set_device_username(["JRN"]).set_group_results(True).set_alert_ids(["S0L0"]).set_legacy_alert_ids(["S0L0_1"]) \ - .set_minimum_severity(6).set_policy_ids([8675309]).set_policy_names(["Strict"]) \ - 
.set_process_names(["IEXPLORE.EXE"]).set_process_sha256(["0123456789ABCDEF0123456789ABCDEF"]) \ - .set_reputations(["SUSPECT_MALWARE"]).set_tags(["Frood"]).set_target_priorities(["HIGH"]) \ - .set_threat_ids(["B0RG"]).set_types(["WATCHLIST"]).set_workflows(["OPEN"]) \ - .set_blocked_threat_categories(["RISKY_PROGRAM"]).set_device_locations(["ONSITE"]) \ - .set_kill_chain_statuses(["EXECUTE_GOAL"]).set_not_blocked_threat_categories(["NEW_MALWARE"]) \ - .set_policy_applied(["APPLIED"]).set_reason_code(["ATTACK_VECTOR"]).set_run_states(["RAN"]) \ - .set_sensor_actions(["DENY"]).set_threat_cause_vectors(["WEB"]).sort_by("name", "DESC") - a = query.one() - assert _was_called - assert a.id == "S0L0" - assert a.org_key == "Z100" - assert a.threat_id == "B0RG" - assert a.workflow_.state == "OPEN" - - -def test_query_cbanalyticsalert_facets(monkeypatch): - _was_called = False - - def _run_facet_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/cbanalytics/_facet" - assert body == {"query": "Blort", "criteria": {"workflow": ["OPEN"]}, - "terms": {"rows": 0, "fields": ["REPUTATION", "STATUS"]}} - _was_called = True - return StubResponse({"results": [{"field": {}, - "values": [{"id": "reputation", "name": "reputationX", "total": 4}]}, - {"field": {}, - "values": [{"id": "status", "name": "statusX", "total": 9}]}]}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_facet_query) - query = api.select(CBAnalyticsAlert).where("Blort").set_workflows(["OPEN"]) - f = query.facets(["REPUTATION", "STATUS"]) - assert _was_called - assert f == [{"field": {}, "values": [{"id": "reputation", "name": "reputationX", "total": 4}]}, - {"field": {}, "values": [{"id": "status", "name": "statusX", "total": 9}]}] - - -def test_query_cbanalyticsalert_invalid_criteria_values(): - tests = [ - {"method": "set_blocked_threat_categories", "arg": ["MINOR"]}, - {"method": "set_device_locations", "arg": ["NARNIA"]}, - {"method": "set_kill_chain_statuses", "arg": ["SPAWN_COPIES"]}, - {"method": "set_not_blocked_threat_categories", "arg": ["MINOR"]}, - {"method": "set_policy_applied", "arg": ["MAYBE"]}, - {"method": "set_reason_code", "arg": [55]}, - {"method": "set_run_states", "arg": ["MIGHT_HAVE"]}, - {"method": "set_sensor_actions", "arg": ["FLIP_A_COIN"]}, - {"method": "set_threat_cause_vectors", "arg": ["NETWORK"]} - ] - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - query = api.select(CBAnalyticsAlert) - for t in tests: - meth = getattr(query, t["method"], None) - with pytest.raises(ApiError): - meth(t["arg"]) - - -def test_query_vmwarealert_with_all_bells_and_whistles(monkeypatch): - _was_called = False - - def _run_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/vmware/_search" - assert body == {"query": "Blort", - "criteria": {"category": ["SERIOUS", "CRITICAL"], "device_id": [6023], "device_name": ["HAL"], - "device_os": ["LINUX"], "device_os_version": ["0.1.2"], - "device_username": ["JRN"], "group_results": True, "id": ["S0L0"], - "legacy_alert_id": ["S0L0_1"], "minimum_severity": 6, "policy_id": [8675309], - "policy_name": ["Strict"], "process_name": ["IEXPLORE.EXE"], - "process_sha256": ["0123456789ABCDEF0123456789ABCDEF"], - "reputation": ["SUSPECT_MALWARE"], "tag": ["Frood"], "target_value": ["HIGH"], - "threat_id": ["B0RG"], "type": ["WATCHLIST"], "workflow": ["OPEN"], 
- "group_id": [14]}, "sort": [{"field": "name", "order": "DESC"}]} - _was_called = True - return StubResponse({"results": [{"id": "S0L0", "org_key": "Z100", "threat_id": "B0RG", - "workflow": {"state": "OPEN"}}], "num_found": 1}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_query) - query = api.select(VMwareAlert).where("Blort").set_categories(["SERIOUS", "CRITICAL"]).set_device_ids([6023]) \ - .set_device_names(["HAL"]).set_device_os(["LINUX"]).set_device_os_versions(["0.1.2"]) \ - .set_device_username(["JRN"]).set_group_results(True).set_alert_ids(["S0L0"]) \ - .set_legacy_alert_ids(["S0L0_1"]).set_minimum_severity(6).set_policy_ids([8675309]) \ - .set_policy_names(["Strict"]).set_process_names(["IEXPLORE.EXE"]) \ - .set_process_sha256(["0123456789ABCDEF0123456789ABCDEF"]).set_reputations(["SUSPECT_MALWARE"]) \ - .set_tags(["Frood"]).set_target_priorities(["HIGH"]).set_threat_ids(["B0RG"]).set_types(["WATCHLIST"]) \ - .set_workflows(["OPEN"]).set_group_ids([14]).sort_by("name", "DESC") - a = query.one() - assert _was_called - assert a.id == "S0L0" - assert a.org_key == "Z100" - assert a.threat_id == "B0RG" - assert a.workflow_.state == "OPEN" - - -def test_query_vmwarealert_facets(monkeypatch): - _was_called = False - - def _run_facet_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/vmware/_facet" - assert body == {"query": "Blort", "criteria": {"workflow": ["OPEN"]}, - "terms": {"rows": 0, "fields": ["REPUTATION", "STATUS"]}} - _was_called = True - return StubResponse({"results": [{"field": {}, - "values": [{"id": "reputation", "name": "reputationX", "total": 4}]}, - {"field": {}, - "values": [{"id": "status", "name": "statusX", "total": 9}]}]}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_facet_query) - query = api.select(VMwareAlert).where("Blort").set_workflows(["OPEN"]) - f = query.facets(["REPUTATION", "STATUS"]) - assert _was_called - assert f == [{"field": {}, "values": [{"id": "reputation", "name": "reputationX", "total": 4}]}, - {"field": {}, "values": [{"id": "status", "name": "statusX", "total": 9}]}] - - -def test_query_vmwarealert_invalid_group_ids(): - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", - org_key="Z100", ssl_verify=True) - with pytest.raises(ApiError): - api.select(VMwareAlert).set_group_ids(["Bogus"]) - - -def test_query_watchlistalert_with_all_bells_and_whistles(monkeypatch): - _was_called = False - - def _run_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/watchlist/_search" - assert body == {"query": "Blort", - "criteria": {"category": ["SERIOUS", "CRITICAL"], "device_id": [6023], "device_name": ["HAL"], - "device_os": ["LINUX"], "device_os_version": ["0.1.2"], - "device_username": ["JRN"], "group_results": True, "id": ["S0L0"], - "legacy_alert_id": ["S0L0_1"], "minimum_severity": 6, "policy_id": [8675309], - "policy_name": ["Strict"], "process_name": ["IEXPLORE.EXE"], - "process_sha256": ["0123456789ABCDEF0123456789ABCDEF"], - "reputation": ["SUSPECT_MALWARE"], "tag": ["Frood"], "target_value": ["HIGH"], - "threat_id": ["B0RG"], "type": ["WATCHLIST"], "workflow": ["OPEN"], - "watchlist_id": ["100"], "watchlist_name": ["Gandalf"]}, - "sort": [{"field": "name", "order": "DESC"}]} - _was_called = True - return StubResponse({"results": [{"id": 
"S0L0", "org_key": "Z100", "threat_id": "B0RG", - "workflow": {"state": "OPEN"}}], "num_found": 1}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_query) - query = api.select(WatchlistAlert).where("Blort").set_categories(["SERIOUS", "CRITICAL"]).set_device_ids([6023]) \ - .set_device_names(["HAL"]).set_device_os(["LINUX"]).set_device_os_versions(["0.1.2"]) \ - .set_device_username(["JRN"]).set_group_results(True).set_alert_ids(["S0L0"]) \ - .set_legacy_alert_ids(["S0L0_1"]).set_minimum_severity(6).set_policy_ids([8675309]) \ - .set_policy_names(["Strict"]).set_process_names(["IEXPLORE.EXE"]) \ - .set_process_sha256(["0123456789ABCDEF0123456789ABCDEF"]).set_reputations(["SUSPECT_MALWARE"]) \ - .set_tags(["Frood"]).set_target_priorities(["HIGH"]).set_threat_ids(["B0RG"]).set_types(["WATCHLIST"]) \ - .set_workflows(["OPEN"]).set_watchlist_ids(["100"]).set_watchlist_names(["Gandalf"]).sort_by("name", "DESC") - a = query.one() - assert _was_called - assert a.id == "S0L0" - assert a.org_key == "Z100" - assert a.threat_id == "B0RG" - assert a.workflow_.state == "OPEN" - - -def test_query_watchlistalert_facets(monkeypatch): - _was_called = False - - def _run_facet_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/watchlist/_facet" - assert body == {"query": "Blort", "criteria": {"workflow": ["OPEN"]}, - "terms": {"rows": 0, "fields": ["REPUTATION", "STATUS"]}} - _was_called = True - return StubResponse({"results": [{"field": {}, - "values": [{"id": "reputation", "name": "reputationX", "total": 4}]}, - {"field": {}, - "values": [{"id": "status", "name": "statusX", "total": 9}]}]}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_facet_query) - query = api.select(WatchlistAlert).where("Blort").set_workflows(["OPEN"]) - f = query.facets(["REPUTATION", "STATUS"]) - assert _was_called - assert f == [{"field": {}, "values": [{"id": "reputation", "name": "reputationX", "total": 4}]}, - {"field": {}, "values": [{"id": "status", "name": "statusX", "total": 9}]}] - - -def test_query_watchlistalert_invalid_criteria_values(): - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", - org_key="Z100", ssl_verify=True) - with pytest.raises(ApiError): - api.select(WatchlistAlert).set_watchlist_ids([888]) - with pytest.raises(ApiError): - api.select(WatchlistAlert).set_watchlist_names([69]) - - -def test_alerts_bulk_dismiss(monkeypatch): - _was_called = False - - def _do_dismiss(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/workflow/_criteria" - assert body == {"query": "Blort", "state": "DISMISSED", "remediation_state": "Fixed", "comment": "Yessir", - "criteria": {"device_name": ["HAL9000"]}} - _was_called = True - return StubResponse({"request_id": "497ABX"}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_do_dismiss) - q = api.select(BaseAlert).where("Blort").set_device_names(["HAL9000"]) - reqid = q.dismiss("Fixed", "Yessir") - assert _was_called - assert reqid == "497ABX" - - -def test_alerts_bulk_undismiss(monkeypatch): - _was_called = False - - def _do_update(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/workflow/_criteria" - assert body == {"query": "Blort", "state": 
"OPEN", "remediation_state": "Fixed", "comment": "NoSir", - "criteria": {"device_name": ["HAL9000"]}} - _was_called = True - return StubResponse({"request_id": "497ABX"}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_do_update) - q = api.select(BaseAlert).where("Blort").set_device_names(["HAL9000"]) - reqid = q.update("Fixed", "NoSir") - assert _was_called - assert reqid == "497ABX" - - -def test_alerts_bulk_dismiss_watchlist(monkeypatch): - _was_called = False - - def _do_dismiss(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/watchlist/workflow/_criteria" - assert body == {"query": "Blort", "state": "DISMISSED", "remediation_state": "Fixed", "comment": "Yessir", - "criteria": {"device_name": ["HAL9000"]}} - _was_called = True - return StubResponse({"request_id": "497ABX"}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_do_dismiss) - q = api.select(WatchlistAlert).where("Blort").set_device_names(["HAL9000"]) - reqid = q.dismiss("Fixed", "Yessir") - assert _was_called - assert reqid == "497ABX" - - -def test_alerts_bulk_dismiss_cbanalytics(monkeypatch): - _was_called = False - - def _do_dismiss(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/cbanalytics/workflow/_criteria" - assert body == {"query": "Blort", "state": "DISMISSED", "remediation_state": "Fixed", "comment": "Yessir", - "criteria": {"device_name": ["HAL9000"]}} - _was_called = True - return StubResponse({"request_id": "497ABX"}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_do_dismiss) - q = api.select(CBAnalyticsAlert).where("Blort").set_device_names(["HAL9000"]) - reqid = q.dismiss("Fixed", "Yessir") - assert _was_called - assert reqid == "497ABX" - - -def test_alerts_bulk_dismiss_vmware(monkeypatch): - _was_called = False - - def _do_dismiss(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/vmware/workflow/_criteria" - assert body == {"query": "Blort", "state": "DISMISSED", "remediation_state": "Fixed", "comment": "Yessir", - "criteria": {"device_name": ["HAL9000"]}} - _was_called = True - return StubResponse({"request_id": "497ABX"}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_do_dismiss) - q = api.select(VMwareAlert).where("Blort").set_device_names(["HAL9000"]) - reqid = q.dismiss("Fixed", "Yessir") - assert _was_called - assert reqid == "497ABX" - - -def test_alerts_bulk_dismiss_threat(monkeypatch): - _was_called = False - - def _do_dismiss(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/threat/workflow/_criteria" - assert body == {"threat_id": ["B0RG", "F3R3NG1"], "state": "DISMISSED", "remediation_state": "Fixed", - "comment": "Yessir"} - _was_called = True - return StubResponse({"request_id": "497ABX"}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_do_dismiss) - reqid = api.bulk_threat_dismiss(["B0RG", "F3R3NG1"], "Fixed", "Yessir") - assert _was_called - assert reqid == "497ABX" - - -def test_alerts_bulk_undismiss_threat(monkeypatch): - _was_called = False - - def _do_update(url, 
body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/threat/workflow/_criteria" - assert body == {"threat_id": ["B0RG", "F3R3NG1"], "state": "OPEN", "remediation_state": "Fixed", - "comment": "NoSir"} - _was_called = True - return StubResponse({"request_id": "497ABX"}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_do_update) - reqid = api.bulk_threat_update(["B0RG", "F3R3NG1"], "Fixed", "NoSir") - assert _was_called - assert reqid == "497ABX" - - -def test_load_workflow(monkeypatch): - _was_called = False - - def _get_workflow(url, parms=None, default=None): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/workflow/status/497ABX" - _was_called = True - return {"errors": [], "failed_ids": [], "id": "497ABX", "num_hits": 0, "num_success": 0, "status": "QUEUED", - "workflow": {"state": "DISMISSED", "remediation": "Fixed", "comment": "Yessir", - "changed_by": "Robocop", "last_update_time": "2019-10-31T16:03:13.951Z"}} - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", - org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, GET=_get_workflow) - workflow = api.select(WorkflowStatus, "497ABX") - assert _was_called - assert workflow.id_ == "497ABX" diff --git a/test/cbapi/psc/test_models.py b/test/cbapi/psc/test_models.py index 4b86dede..c9158fa7 100755 --- a/test/cbapi/psc/test_models.py +++ b/test/cbapi/psc/test_models.py @@ -1,5 +1,5 @@ import pytest -from cbapi.psc.models import Device, BaseAlert, WorkflowStatus +from cbapi.psc.models import Device from cbapi.psc.rest_api import CbPSCBaseAPI from test.cbtest import StubResponse, patch_cbapi @@ -177,137 +177,3 @@ def _update_sensor_version(url, body, **kwargs): dev = Device(api, 6023, {"id": 6023}) dev.update_sensor_version({"RHEL": "2.3.4.5"}) assert _was_called - - -def test_BaseAlert_dismiss(monkeypatch): - _was_called = False - - def _do_dismiss(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/ESD14U2C/workflow" - assert body == {"state": "DISMISSED", "remediation_state": "Fixed", "comment": "Yessir"} - _was_called = True - return StubResponse({"state": "DISMISSED", "remediation": "Fixed", "comment": "Yessir", - "changed_by": "Robocop", "last_update_time": "2019-10-31T16:03:13.951Z"}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_do_dismiss) - alert = BaseAlert(api, "ESD14U2C", {"id": "ESD14U2C", "workflow": {"state": "OPEN"}}) - alert.dismiss("Fixed", "Yessir") - assert _was_called - assert alert.workflow_.changed_by == "Robocop" - assert alert.workflow_.state == "DISMISSED" - assert alert.workflow_.remediation == "Fixed" - assert alert.workflow_.comment == "Yessir" - assert alert.workflow_.last_update_time == "2019-10-31T16:03:13.951Z" - - -def test_BaseAlert_undismiss(monkeypatch): - _was_called = False - - def _do_update(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/alerts/ESD14U2C/workflow" - assert body == {"state": "OPEN", "remediation_state": "Fixed", "comment": "NoSir"} - _was_called = True - return StubResponse({"state": "OPEN", "remediation": "Fixed", "comment": "NoSir", - "changed_by": "Robocop", "last_update_time": "2019-10-31T16:03:13.951Z"}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, 
POST=_do_update) - alert = BaseAlert(api, "ESD14U2C", {"id": "ESD14U2C", "workflow": {"state": "DISMISS"}}) - alert.update("Fixed", "NoSir") - assert _was_called - assert alert.workflow_.changed_by == "Robocop" - assert alert.workflow_.state == "OPEN" - assert alert.workflow_.remediation == "Fixed" - assert alert.workflow_.comment == "NoSir" - assert alert.workflow_.last_update_time == "2019-10-31T16:03:13.951Z" - - -def test_BaseAlert_dismiss_threat(monkeypatch): - _was_called = False - - def _do_dismiss(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/threat/B0RG/workflow" - assert body == {"state": "DISMISSED", "remediation_state": "Fixed", "comment": "Yessir"} - _was_called = True - return StubResponse({"state": "DISMISSED", "remediation": "Fixed", "comment": "Yessir", - "changed_by": "Robocop", "last_update_time": "2019-10-31T16:03:13.951Z"}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_do_dismiss) - alert = BaseAlert(api, "ESD14U2C", {"id": "ESD14U2C", "threat_id": "B0RG", "workflow": {"state": "OPEN"}}) - wf = alert.dismiss_threat("Fixed", "Yessir") - assert _was_called - assert wf.changed_by == "Robocop" - assert wf.state == "DISMISSED" - assert wf.remediation == "Fixed" - assert wf.comment == "Yessir" - assert wf.last_update_time == "2019-10-31T16:03:13.951Z" - - -def test_BaseAlert_undismiss_threat(monkeypatch): - _was_called = False - - def _do_update(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/threat/B0RG/workflow" - assert body == {"state": "OPEN", "remediation_state": "Fixed", "comment": "NoSir"} - _was_called = True - return StubResponse({"state": "OPEN", "remediation": "Fixed", "comment": "NoSir", - "changed_by": "Robocop", "last_update_time": "2019-10-31T16:03:13.951Z"}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_do_update) - alert = BaseAlert(api, "ESD14U2C", {"id": "ESD14U2C", "threat_id": "B0RG", "workflow": {"state": "OPEN"}}) - wf = alert.update_threat("Fixed", "NoSir") - assert _was_called - assert wf.changed_by == "Robocop" - assert wf.state == "OPEN" - assert wf.remediation == "Fixed" - assert wf.comment == "NoSir" - assert wf.last_update_time == "2019-10-31T16:03:13.951Z" - - -def test_WorkflowStatus(monkeypatch): - _times_called = 0 - - def _get_workflow(url, parms=None, default=None): - nonlocal _times_called - assert url == "/appservices/v6/orgs/Z100/workflow/status/W00K13" - if _times_called >= 0 and _times_called <= 3: - _stat = "QUEUED" - elif _times_called >= 4 and _times_called <= 6: - _stat = "IN_PROGRESS" - elif _times_called >= 7 and _times_called <= 9: - _stat = "FINISHED" - else: - pytest.fail("_get_workflow called too many times") - _times_called = _times_called + 1 - return {"errors": [], "failed_ids": [], "id": "W00K13", "num_hits": 0, "num_success": 0, "status": _stat, - "workflow": {"state": "DISMISSED", "remediation": "Fixed", "comment": "Yessir", - "changed_by": "Robocop", "last_update_time": "2019-10-31T16:03:13.951Z"}} - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, GET=_get_workflow) - wfstat = WorkflowStatus(api, "W00K13") - assert wfstat.workflow_.changed_by == "Robocop" - assert wfstat.workflow_.state == "DISMISSED" - assert wfstat.workflow_.remediation == "Fixed" - assert 
wfstat.workflow_.comment == "Yessir" - assert wfstat.workflow_.last_update_time == "2019-10-31T16:03:13.951Z" - assert _times_called == 1 - assert wfstat.queued - assert not wfstat.in_progress - assert not wfstat.finished - assert _times_called == 4 - assert not wfstat.queued - assert wfstat.in_progress - assert not wfstat.finished - assert _times_called == 7 - assert not wfstat.queued - assert not wfstat.in_progress - assert wfstat.finished - assert _times_called == 10 diff --git a/tests/test_defense_policy.py b/tests/test_defense_policy.py deleted file mode 100644 index 48653764..00000000 --- a/tests/test_defense_policy.py +++ /dev/null @@ -1,53 +0,0 @@ -import sys -import time - -import glob -import json -import os -import unittest - -from cbapi.psc.defense import * - - -sys.path.append(os.path.dirname(__file__)) -import requests_cache - -@unittest.skip("temporarily disabled") -def test_policy(rulefiles): - requests_cache.uninstall_cache() - defense_api = CbDefenseAPI(profile="test") - - - default_policies = [policy for policy in defense_api.select(Policy) if policy.name == "default"] - new_policy = defense_api.create(Policy) - new_policy.policy = default_policies[0].policy - new_policy.name = "cbapi-python-test-%d" % time.time() - new_policy.priorityLevel = "LOW" - new_policy.description = "Test policy" - new_policy.version = 2 - new_policy.save() - - for t in rulefiles: - try: - test_rule(new_policy, t) - print("Added rule %s" % t) - except Exception as e: - print("Exception adding rule %s: %s" % (t, e)) - - new_policy.delete() - - -@unittest.skip("temporarily disabled") -def test_rule(new_policy, fn): - new_rule = json.load(open(fn, "r")) - new_policy.add_rule(new_rule) - - -if __name__ == '__main__': - rulefiles = glob.glob(os.path.join(os.path.dirname(__file__), "data", "defense", "policy_rules", "*.json")) - print(rulefiles) - - test_policy(rulefiles) - - - unittest.main() From 223815cc77e280593f34ed49d209b46b9b48a601 Mon Sep 17 00:00:00 2001 From: Emanuela Mitreva Date: Thu, 25 Jul 2024 15:59:34 +0300 Subject: [PATCH 2/4] more cleanup --- examples/livequery/manage_run.py | 149 ------- examples/livequery/run_device_summary.py | 88 ---- examples/livequery/run_facets.py | 106 ----- examples/livequery/run_search.py | 73 ---- src/cbapi/__init__.py | 2 - src/cbapi/example_helpers.py | 31 -- src/cbapi/psc/__init__.py | 6 - src/cbapi/psc/base_query.py | 271 ------------- src/cbapi/psc/devices_query.py | 361 ----------------- src/cbapi/psc/livequery/__init__.py | 7 - src/cbapi/psc/livequery/models.py | 294 -------------- .../psc/livequery/models/device_summary.yaml | 52 --- src/cbapi/psc/livequery/models/facet.yaml | 22 - src/cbapi/psc/livequery/models/result.yaml | 41 -- src/cbapi/psc/livequery/models/run.yaml | 86 ---- src/cbapi/psc/livequery/query.py | 375 ----------------- src/cbapi/psc/livequery/rest_api.py | 36 -- src/cbapi/psc/models.py | 181 --------- src/cbapi/psc/models/device.yaml | 310 -------------- src/cbapi/psc/rest_api.py | 191 --------- test/cbapi/__init__.py | 0 test/cbapi/psc/__init__.py | 0 test/cbapi/psc/livequery/__init__.py | 0 test/cbapi/psc/livequery/test_models.py | 234 ----------- test/cbapi/psc/livequery/test_rest_api.py | 161 -------- test/cbapi/psc/test_devicev6_api.py | 383 ------------------ test/cbapi/psc/test_models.py | 179 -------- 27 files changed, 3639 deletions(-) delete mode 100644 examples/livequery/manage_run.py delete mode 100755 examples/livequery/run_device_summary.py delete mode 100755 examples/livequery/run_facets.py delete mode 100644 
examples/livequery/run_search.py delete mode 100644 src/cbapi/psc/__init__.py delete mode 100755 src/cbapi/psc/base_query.py delete mode 100755 src/cbapi/psc/devices_query.py delete mode 100644 src/cbapi/psc/livequery/__init__.py delete mode 100644 src/cbapi/psc/livequery/models.py delete mode 100755 src/cbapi/psc/livequery/models/device_summary.yaml delete mode 100755 src/cbapi/psc/livequery/models/facet.yaml delete mode 100644 src/cbapi/psc/livequery/models/result.yaml delete mode 100644 src/cbapi/psc/livequery/models/run.yaml delete mode 100644 src/cbapi/psc/livequery/query.py delete mode 100644 src/cbapi/psc/livequery/rest_api.py delete mode 100755 src/cbapi/psc/models.py delete mode 100755 src/cbapi/psc/models/device.yaml delete mode 100755 src/cbapi/psc/rest_api.py delete mode 100755 test/cbapi/__init__.py delete mode 100755 test/cbapi/psc/__init__.py delete mode 100755 test/cbapi/psc/livequery/__init__.py delete mode 100755 test/cbapi/psc/livequery/test_models.py delete mode 100755 test/cbapi/psc/livequery/test_rest_api.py delete mode 100755 test/cbapi/psc/test_devicev6_api.py delete mode 100755 test/cbapi/psc/test_models.py diff --git a/examples/livequery/manage_run.py b/examples/livequery/manage_run.py deleted file mode 100644 index 46e67552..00000000 --- a/examples/livequery/manage_run.py +++ /dev/null @@ -1,149 +0,0 @@ -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_livequery_object -from cbapi.psc.livequery.models import Run - - -def create_run(cb, args): - query = cb.query(args.sql) - - if args.device_ids: - query.device_ids(args.device_ids) - if args.device_types: - query.device_types(args.device_types) - if args.policy_ids: - query.policy_ids(args.policy_ids) - if args.notify: - query.notify_on_finish() - if args.name: - query.name(args.name) - - run = query.submit() - print(run) - - -def run_status(cb, args): - run = cb.select(Run, args.id) - print(run) - - -def run_stop(cb, args): - run = cb.select(Run, args.id) - if run.stop(): - print("Run {} has been stopped.".format(run.id)) - print(run) - else: - print("Unable to stop run {}".format(run.id)) - - -def run_delete(cb, args): - run = cb.select(Run, args.id) - if run.delete(): - print("Run {} has been deleted.".format(run.id)) - else: - print("Unable to delete run {}".format(run.id)) - - -def run_history(cb, args): - results = cb.query_history(args.query) - if args.sort_by: - dir = "DESC" if args.descending_results else "ASC" - results.sort_by(args.sort_by, direction=dir) - for result in results: - print(result) - - -def main(): - parser = build_cli_parser("Create and manage LiveQuery runs") - commands = parser.add_subparsers(help="Commands", dest="command_name") - - create_command = commands.add_parser("create", help="Create a new LiveQuery run") - create_command.add_argument( - "-s", "--sql", type=str, required=True, help="The query to run" - ) - create_command.add_argument( - "-n", - "--notify", - action="store_true", - help="Notify by email when the run finishes", - ) - create_command.add_argument( - "-N", "--name", type=str, required=False, help="The name of the run" - ) - create_command.add_argument( - "--device_ids", - nargs="+", - type=int, - required=False, - help="Device IDs to filter on", - ) - create_command.add_argument( - "--device_types", - nargs="+", - type=str, - required=False, - help="Device types to filter on", - ) - create_command.add_argument( - "--policy_ids", - nargs="+", - type=str, - required=False, - help="Policy IDs to filter on", - ) - - status_command = 
commands.add_parser( - "status", help="Retrieve information about a run" - ) - status_command.add_argument( - "-i", "--id", type=str, required=True, help="The run ID" - ) - - stop_command = commands.add_parser( - "stop", help="Stops/cancels a current run" - ) - stop_command.add_argument( - "-i", "--id", type=str, required=True, help="The run ID" - ) - - delete_command = commands.add_parser( - "delete", help="Permanently delete a run" - ) - delete_command.add_argument( - "-i", "--id", type=str, required=True, help="The run ID" - ) - - history_command = commands.add_parser( - "history", help="List history of all runs" - ) - history_command.add_argument( - "-q", "--query", type=str, required=False, help="Query string to use" - ) - history_command.add_argument( - "-S", "--sort_by", type=str, help="sort by this field", required=False - ) - history_command.add_argument( - "-D", - "--descending_results", - help="return results in descending order", - action="store_true", - required=False - ) - - args = parser.parse_args() - cb = get_cb_livequery_object(args) - - if args.command_name == "create": - return create_run(cb, args) - elif args.command_name == "status": - return run_status(cb, args) - elif args.command_name == "stop": - return run_stop(cb, args) - elif args.command_name == "delete": - return run_delete(cb, args) - elif args.command_name == "history": - return run_history(cb, args) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/livequery/run_device_summary.py b/examples/livequery/run_device_summary.py deleted file mode 100755 index 33ee8931..00000000 --- a/examples/livequery/run_device_summary.py +++ /dev/null @@ -1,88 +0,0 @@ -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_livequery_object -from cbapi.psc.livequery.models import Result - - -def main(): - parser = build_cli_parser("Search the device summaries of a LiveQuery run") - parser.add_argument("-i", "--id", type=str, required=True, help="Run ID") - parser.add_argument("-q", "--query", type=str, required=False, help="Search query") - - parser.add_argument( - "--device_ids", - nargs="+", - type=int, - required=False, - help="Device IDs to filter on", - ) - parser.add_argument( - "--device_names", - nargs="+", - type=int, - required=False, - help="Device names to filter on", - ) - parser.add_argument( - "--policy_ids", - nargs="+", - type=int, - required=False, - help="Policy IDs to filter on", - ) - parser.add_argument( - "--policy_names", - nargs="+", - type=int, - required=False, - help="Policy names to filter on", - ) - parser.add_argument( - "--statuses", - nargs="+", - type=str, - required=False, - help="Statuses to filter on", - ) - parser.add_argument( - "-S", "--sort_by", type=str, help="sort by this field", required=False - ) - parser.add_argument( - "-D", - "--descending_results", - help="return results in descending order", - action="store_true", - ) - - args = parser.parse_args() - cb = get_cb_livequery_object(args) - - results = cb.select(Result).run_id(args.id) - result = results.first() - if result is None: - print("ERROR: No results.") - return 1 - - summaries = result.query_device_summaries() - if args.query: - summaries.where(args.query) - if args.device_ids: - summaries.criteria(device_id=args.device_ids) - if args.device_names: - summaries.criteria(device_name=args.device_names) - if args.policy_ids: - summaries.criteria(policy_id=args.policy_ids) - if args.policy_names: - summaries.criteria(policy_name=args.policy_names) - if args.statuses: - 
summaries.criteria(status=args.statuses) - if args.sort_by: - dir = "DESC" if args.descending_results else "ASC" - summaries.sort_by(args.sort_by, direction=dir) - - for summary in summaries: - print(summary) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/livequery/run_facets.py b/examples/livequery/run_facets.py deleted file mode 100755 index 14bfc6c4..00000000 --- a/examples/livequery/run_facets.py +++ /dev/null @@ -1,106 +0,0 @@ -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_livequery_object -from cbapi.psc.livequery.models import Result - - -def main(): - parser = build_cli_parser("Search the facets of a LiveQuery run") - parser.add_argument("-i", "--id", type=str, required=True, help="Run ID") - parser.add_argument( - "--result", - action="store_true", - help="Run facet query on results" - ) - parser.add_argument( - "--device_summary", - action="store_true", - help="Run facet query on device summaries" - ) - parser.add_argument( - "-f", - "--fields", - nargs="+", - type=str, - required=False, - help="Fields to be displayed in results", - ) - - parser.add_argument("-q", "--query", type=str, required=False, help="Search query") - parser.add_argument( - "--device_ids", - nargs="+", - type=int, - required=False, - help="Device IDs to filter on", - ) - parser.add_argument( - "--device_names", - nargs="+", - type=int, - required=False, - help="Device names to filter on", - ) - parser.add_argument( - "--policy_ids", - nargs="+", - type=int, - required=False, - help="Policy IDs to filter on", - ) - parser.add_argument( - "--policy_names", - nargs="+", - type=int, - required=False, - help="Policy names to filter on", - ) - parser.add_argument( - "--statuses", - nargs="+", - type=str, - required=False, - help="Statuses to filter on", - ) - - args = parser.parse_args() - if not (args.result or args.device_summary): - print("ERROR: One of --result or --device_summary must be specified") - return 1 - if args.result and args.device_summary: - print("ERROR: --result and --device_summary cannot both be specified") - return 1 - - cb = get_cb_livequery_object(args) - - results = cb.select(Result).run_id(args.id) - result = results.first() - if result is None: - print("ERROR: No results.") - return 1 - - if args.result: - facets = result.query_result_facets() - elif args.device_summary: - facets = result.query_device_summary_facets() - if args.fields: - facets.facet_field(args.fields) - if args.query: - facets.where(args.query) - if args.device_ids: - facets.criteria(device_id=args.device_ids) - if args.device_names: - facets.criteria(device_name=args.device_names) - if args.policy_ids: - facets.criteria(policy_id=args.policy_ids) - if args.policy_names: - facets.criteria(policy_name=args.policy_names) - if args.statuses: - facets.criteria(status=args.statuses) - - for facet in facets: - print(facet) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/examples/livequery/run_search.py b/examples/livequery/run_search.py deleted file mode 100644 index 66b2e6a7..00000000 --- a/examples/livequery/run_search.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys - -from cbapi.example_helpers import build_cli_parser, get_cb_livequery_object -from cbapi.psc.livequery.models import Result - - -def main(): - parser = build_cli_parser("Search the results of a LiveQuery run") - parser.add_argument("-i", "--id", type=str, required=True, help="Run ID") - parser.add_argument("-q", "--query", type=str, required=False, help="Search query") - parser.add_argument( - 
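The deleted run_facets.py drove the facet endpoints through the Result model. A short sketch of that flow, assuming a completed run; the run ID, facet fields, and criteria values are placeholders:

    from cbapi.psc.livequery import CbLiveQueryAPI
    from cbapi.psc.livequery.models import Result

    cb = CbLiveQueryAPI(profile="default")                    # placeholder profile
    result = cb.select(Result).run_id("ABCD1234XYZ").first()  # placeholder run ID

    if result is not None:
        facets = result.query_result_facets()
        facets.facet_field(["device.policy_name", "device.os"])
        facets.criteria(device_id=[12345])                    # optional filter, placeholder ID
        for facet in facets:
            print(facet.field, facet.values_)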
"-F", "--fields_only", action="store_true", help="Show only fields" - ) - parser.add_argument( - "--device_ids", - nargs="+", - type=int, - required=False, - help="Device IDs to filter on", - ) - parser.add_argument( - "--device_types", - nargs="+", - type=str, - required=False, - help="Device types to filter on", - ) - parser.add_argument( - "--statuses", - nargs="+", - type=str, - required=False, - help="Statuses to filter on", - ) - parser.add_argument( - "-S", "--sort_by", type=str, help="sort by this field", required=False - ) - parser.add_argument( - "-D", - "--descending_results", - help="return results in descending order", - action="store_true", - ) - - args = parser.parse_args() - cb = get_cb_livequery_object(args) - - results = cb.select(Result).run_id(args.id) - if args.query: - results = results.where(args.query) - - if args.device_ids: - results.criteria(device_id=args.device_ids) - if args.device_types: - results.criteria(device_type=args.device_types) - if args.statuses: - results.criteria(status=args.statuses) - - if args.sort_by: - direction = "ASC" - if args.descending_results: - direction = "DESC" - results.sort_by(args.sort_by, direction=direction) - - for result in results: - if args.fields_only: - print(result.fields_) - else: - print(result) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/src/cbapi/__init__.py b/src/cbapi/__init__.py index 163499f0..d90ed52d 100644 --- a/src/cbapi/__init__.py +++ b/src/cbapi/__init__.py @@ -11,5 +11,3 @@ # New API as of cbapi 0.9.0 from cbapi.response.rest_api import CbEnterpriseResponseAPI, CbResponseAPI from cbapi.protection.rest_api import CbEnterpriseProtectionAPI, CbProtectionAPI -from cbapi.psc import CbPSCBaseAPI -from cbapi.psc.livequery import CbLiveQueryAPI diff --git a/src/cbapi/example_helpers.py b/src/cbapi/example_helpers.py index e0ee2731..798b94cf 100644 --- a/src/cbapi/example_helpers.py +++ b/src/cbapi/example_helpers.py @@ -15,8 +15,6 @@ import hashlib from cbapi.protection import CbEnterpriseProtectionAPI -from cbapi.psc import CbPSCBaseAPI -from cbapi.psc.livequery import CbLiveQueryAPI from cbapi.response import CbEnterpriseResponseAPI log = logging.getLogger(__name__) @@ -78,35 +76,6 @@ def get_cb_protection_object(args): return cb -def get_cb_psc_object(args): - if args.verbose: - logging.basicConfig() - logging.getLogger("cbapi").setLevel(logging.DEBUG) - logging.getLogger("__main__").setLevel(logging.DEBUG) - - if args.cburl and args.apitoken: - cb = CbPSCBaseAPI(url=args.cburl, token=args.apitoken, ssl_verify=(not args.no_ssl_verify)) - else: - cb = CbPSCBaseAPI(profile=args.profile) - - return cb - - -def get_cb_livequery_object(args): - if args.verbose: - logging.basicConfig() - logging.getLogger("cbapi").setLevel(logging.DEBUG) - logging.getLogger("__main__").setLevel(logging.DEBUG) - - if args.cburl and args.apitoken and args.orgkey: - cb = CbLiveQueryAPI(url=args.cburl, token=args.apitoken, org_key=args.orgkey, - ssl_verify=(not args.no_ssl_verify)) - else: - cb = CbLiveQueryAPI(profile=args.profile) - - return cb - - def get_object_by_name_or_id(cb, cls, name_field="name", id=None, name=None, force_init=True): clsname = cls.__name__ try: diff --git a/src/cbapi/psc/__init__.py b/src/cbapi/psc/__init__.py deleted file mode 100644 index 239b1eb6..00000000 --- a/src/cbapi/psc/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Exported public API for the Cb PSC API - -from __future__ import absolute_import - -from .rest_api import CbPSCBaseAPI -from .models import Device diff --git 
a/src/cbapi/psc/base_query.py b/src/cbapi/psc/base_query.py deleted file mode 100755 index 7812b3d8..00000000 --- a/src/cbapi/psc/base_query.py +++ /dev/null @@ -1,271 +0,0 @@ -from cbapi.errors import ApiError, MoreThanOneResultError -import functools -from six import string_types -from solrq import Q - - -class QueryBuilder(object): - """ - Provides a flexible interface for building prepared queries for the CB - PSC backend. - - This object can be instantiated directly, or can be managed implicitly - through the :py:meth:`select` API. - """ - - def __init__(self, **kwargs): - if kwargs: - self._query = Q(**kwargs) - else: - self._query = None - self._raw_query = None - - def _guard_query_params(func): - """Decorates the query construction methods of *QueryBuilder*, preventing - them from being called with parameters that would result in an internally - inconsistent query. - """ - - @functools.wraps(func) - def wrap_guard_query_change(self, q, **kwargs): - if self._raw_query is not None and (kwargs or isinstance(q, Q)): - raise ApiError("Cannot modify a raw query with structured parameters") - if self._query is not None and isinstance(q, string_types): - raise ApiError("Cannot modify a structured query with a raw parameter") - return func(self, q, **kwargs) - - return wrap_guard_query_change - - @_guard_query_params - def where(self, q, **kwargs): - """Adds a conjunctive filter to a query. - - :param q: string or `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: QueryBuilder object - :rtype: :py:class:`QueryBuilder` - """ - if isinstance(q, string_types): - if self._raw_query is None: - self._raw_query = [] - self._raw_query.append(q) - elif isinstance(q, Q) or kwargs: - if self._query is not None: - raise ApiError("Use .and_() or .or_() for an extant solrq.Q object") - if kwargs: - q = Q(**kwargs) - self._query = q - else: - raise ApiError(".where() only accepts strings or solrq.Q objects") - - return self - - @_guard_query_params - def and_(self, q, **kwargs): - """Adds a conjunctive filter to a query. - - :param q: string or `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: QueryBuilder object - :rtype: :py:class:`QueryBuilder` - """ - if isinstance(q, string_types): - self.where(q) - elif isinstance(q, Q) or kwargs: - if kwargs: - q = Q(**kwargs) - if self._query is None: - self._query = q - else: - self._query = self._query & q - else: - raise ApiError(".and_() only accepts strings or solrq.Q objects") - - return self - - @_guard_query_params - def or_(self, q, **kwargs): - """Adds a disjunctive filter to a query. - - :param q: `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: QueryBuilder object - :rtype: :py:class:`QueryBuilder` - """ - if kwargs: - q = Q(**kwargs) - - if isinstance(q, Q): - if self._query is None: - self._query = q - else: - self._query = self._query | q - else: - raise ApiError(".or_() only accepts solrq.Q objects") - - return self - - @_guard_query_params - def not_(self, q, **kwargs): - """Adds a negative filter to a query. 
- - :param q: `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: QueryBuilder object - :rtype: :py:class:`QueryBuilder` - """ - if kwargs: - q = ~Q(**kwargs) - - if isinstance(q, Q): - if self._query is None: - self._query = q - else: - self._query = self._query & q - else: - raise ApiError(".not_() only accepts solrq.Q objects") - - def _collapse(self): - """The query can be represented by either an array of strings - (_raw_query) which is concatenated and passed directly to Solr, or - a solrq.Q object (_query) which is then converted into a string to - pass to Solr. This function will perform the appropriate conversions to - end up with the 'q' string sent into the POST request to the - PSC-R query endpoint.""" - if self._raw_query is not None: - return " ".join(self._raw_query) - elif self._query is not None: - return str(self._query) - else: - return None # return everything - - -class PSCQueryBase: - """ - Represents the base of all LiveQuery query classes. - """ - - def __init__(self, doc_class, cb): - self._doc_class = doc_class - self._cb = cb - self._count_valid = False - - -class QueryBuilderSupportMixin: - """ - A mixin that supplies wrapper methods to access the _query_builder. - """ - def where(self, q=None, **kwargs): - """Add a filter to this query. - - :param q: Query string, :py:class:`QueryBuilder`, or `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: Query object - :rtype: :py:class:`Query` - """ - - if not q: - return self - if isinstance(q, QueryBuilder): - self._query_builder = q - else: - self._query_builder.where(q, **kwargs) - return self - - def and_(self, q=None, **kwargs): - """Add a conjunctive filter to this query. - - :param q: Query string or `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: Query object - :rtype: :py:class:`Query` - """ - if not q and not kwargs: - raise ApiError(".and_() expects a string, a solrq.Q, or kwargs") - - self._query_builder.and_(q, **kwargs) - return self - - def or_(self, q=None, **kwargs): - """Add a disjunctive filter to this query. - - :param q: `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: Query object - :rtype: :py:class:`Query` - """ - if not q and not kwargs: - raise ApiError(".or_() expects a solrq.Q or kwargs") - - self._query_builder.or_(q, **kwargs) - return self - - def not_(self, q=None, **kwargs): - """Adds a negated filter to this query. - - :param q: `solrq.Q` object - :param kwargs: Arguments to construct a `solrq.Q` with - :return: Query object - :rtype: :py:class:`Query` - """ - - if not q and not kwargs: - raise ApiError(".not_() expects a solrq.Q, or kwargs") - - self._query_builder.not_(q, **kwargs) - return self - - -class IterableQueryMixin: - """ - A mix-in to provide iterability to a query. - """ - def all(self): - """ - Returns all the items of a query as a list. - - :return: List of query items - """ - return self._perform_query() - - def first(self): - """ - Returns the first item that would be returned as the result of a query. - - :return: First query item - """ - allres = list(self) - res = allres[:1] - if not len(res): - return None - return res[0] - - def one(self): - """ - Returns the only item that would be returned by a query. 
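QueryBuilder kept two mutually exclusive representations, raw query strings and structured solrq.Q terms, and the _guard_query_params decorator rejected attempts to mix them. A small sketch of both modes (field names and values are illustrative):

    from solrq import Q
    from cbapi.psc.base_query import QueryBuilder

    # Structured mode: kwargs and solrq.Q terms are AND-ed together
    qb = QueryBuilder(os="WINDOWS").and_(Q(status="ACTIVE"))
    qb.not_(name="test*")        # adds a negated term; note not_() does not return self here
    print(qb._collapse())        # the string placed in the request's "query" field

    # Raw mode: plain strings are collected and joined verbatim
    raw = QueryBuilder().where("os:WINDOWS").where("status:ACTIVE")
    print(raw._collapse())

    # Mixing modes raises ApiError, e.g. a raw .where() followed by .and_(Q(...))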
- - :return: Sole query return item - :raises MoreThanOneResultError: If the query returns zero items, or more than one item - """ - allres = list(self) - res = allres[:2] - if len(res) == 0: - raise MoreThanOneResultError( - message="0 results for query {0:s}".format(self._query) - ) - if len(res) > 1: - raise MoreThanOneResultError( - message="{0:d} results found for query {1:s}".format( - len(self), self._query - ) - ) - return res[0] - - def __len__(self): - return self._count() - - def __getitem__(self, item): - return None - - def __iter__(self): - return self._perform_query() diff --git a/src/cbapi/psc/devices_query.py b/src/cbapi/psc/devices_query.py deleted file mode 100755 index 5a5e185d..00000000 --- a/src/cbapi/psc/devices_query.py +++ /dev/null @@ -1,361 +0,0 @@ -from cbapi.errors import ApiError -from .base_query import PSCQueryBase, QueryBuilder, QueryBuilderSupportMixin, IterableQueryMixin - - -class DeviceSearchQuery(PSCQueryBase, QueryBuilderSupportMixin, IterableQueryMixin): - """ - Represents a query that is used to locate Device objects. - """ - VALID_OS = ["WINDOWS", "ANDROID", "MAC", "IOS", "LINUX", "OTHER"] - VALID_STATUSES = ["PENDING", "REGISTERED", "UNINSTALLED", "DEREGISTERED", - "ACTIVE", "INACTIVE", "ERROR", "ALL", "BYPASS_ON", - "BYPASS", "QUARANTINE", "SENSOR_OUTOFDATE", - "DELETED", "LIVE"] - VALID_PRIORITIES = ["LOW", "MEDIUM", "HIGH", "MISSION_CRITICAL"] - VALID_DIRECTIONS = ["ASC", "DESC"] - - def __init__(self, doc_class, cb): - super().__init__(doc_class, cb) - self._query_builder = QueryBuilder() - self._criteria = {} - self._time_filter = {} - self._exclusions = {} - self._sortcriteria = {} - - def _update_criteria(self, key, newlist): - """ - Updates the criteria being collected for a query. Assumes the specified criteria item is - defined as a list; the list passed in will be set as the value for this criteria item, or - appended to the existing one if there is one. - - :param str key: The key for the criteria item to be set - :param list newlist: List of values to be set for the criteria item - """ - oldlist = self._criteria.get(key, []) - self._criteria[key] = oldlist + newlist - - def _update_exclusions(self, key, newlist): - """ - Updates the exclusion criteria being collected for a query. Assumes the specified criteria item is - defined as a list; the list passed in will be set as the value for this criteria item, or - appended to the existing one if there is one. - - :param str key: The key for the criteria item to be set - :param list newlist: List of values to be set for the criteria item - """ - oldlist = self._exclusions.get(key, []) - self._exclusions[key] = oldlist + newlist - - def set_ad_group_ids(self, ad_group_ids): - """ - Restricts the devices that this query is performed on to the specified - AD group IDs. - - :param ad_group_ids: list of ints - :return: This instance - """ - if not all(isinstance(ad_group_id, int) for ad_group_id in ad_group_ids): - raise ApiError("One or more invalid AD group IDs") - self._update_criteria("ad_group_id", ad_group_ids) - return self - - def set_device_ids(self, device_ids): - """ - Restricts the devices that this query is performed on to the specified - device IDs. 
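IterableQueryMixin gave every search class the same retrieval helpers. A sketch of their contracts, using the device search as the concrete example (profile name is a placeholder):

    from cbapi.psc import CbPSCBaseAPI, Device

    cb = CbPSCBaseAPI(profile="default")               # placeholder profile
    devices = cb.select(Device).set_status(["ACTIVE"])

    everything = list(devices.all())   # all() returns the _perform_query() iterable
    first = devices.first()            # first match, or None when nothing matches
    count = len(devices)               # server-side count via _count()
    # one() returns the sole match; in this implementation it raises
    # MoreThanOneResultError for zero matches as well as for multiple matches.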
- - :param device_ids: list of ints - :return: This instance - """ - if not all(isinstance(device_id, int) for device_id in device_ids): - raise ApiError("One or more invalid device IDs") - self._update_criteria("id", device_ids) - return self - - def set_last_contact_time(self, *args, **kwargs): - """ - Restricts the devices that this query is performed on to the specified - last contact time (either specified as a start and end point or as a - range). - - :return: This instance - """ - if kwargs.get("start", None) and kwargs.get("end", None): - if kwargs.get("range", None): - raise ApiError("cannot specify range= in addition to start= and end=") - stime = kwargs["start"] - if not isinstance(stime, str): - stime = stime.isoformat() - etime = kwargs["end"] - if not isinstance(etime, str): - etime = etime.isoformat() - self._time_filter = {"start": stime, "end": etime} - elif kwargs.get("range", None): - if kwargs.get("start", None) or kwargs.get("end", None): - raise ApiError("cannot specify start= or end= in addition to range=") - self._time_filter = {"range": kwargs["range"]} - else: - raise ApiError("must specify either start= and end= or range=") - return self - - def set_os(self, operating_systems): - """ - Restricts the devices that this query is performed on to the specified - operating systems. - - :param operating_systems: list of operating systems - :return: This instance - """ - if not all((osval in DeviceSearchQuery.VALID_OS) for osval in operating_systems): - raise ApiError("One or more invalid operating systems") - self._update_criteria("os", operating_systems) - return self - - def set_policy_ids(self, policy_ids): - """ - Restricts the devices that this query is performed on to the specified - policy IDs. - - :param policy_ids: list of ints - :return: This instance - """ - if not all(isinstance(policy_id, int) for policy_id in policy_ids): - raise ApiError("One or more invalid policy IDs") - self._update_criteria("policy_id", policy_ids) - return self - - def set_status(self, statuses): - """ - Restricts the devices that this query is performed on to the specified - status values. - - :param statuses: list of strings - :return: This instance - """ - if not all((stat in DeviceSearchQuery.VALID_STATUSES) for stat in statuses): - raise ApiError("One or more invalid status values") - self._update_criteria("status", statuses) - return self - - def set_target_priorities(self, target_priorities): - """ - Restricts the devices that this query is performed on to the specified - target priority values. - - :param target_priorities: list of strings - :return: This instance - """ - if not all((prio in DeviceSearchQuery.VALID_PRIORITIES) for prio in target_priorities): - raise ApiError("One or more invalid target priority values") - self._update_criteria("target_priority", target_priorities) - return self - - def set_exclude_sensor_versions(self, sensor_versions): - """ - Restricts the devices that this query is performed on to exclude specified - sensor versions. - - :param sensor_versions: List of sensor versions to exclude - :return: This instance - """ - if not all(isinstance(v, str) for v in sensor_versions): - raise ApiError("One or more invalid sensor versions") - self._update_exclusions("sensor_version", sensor_versions) - return self - - def sort_by(self, key, direction="ASC"): - """Sets the sorting behavior on a query's results. 
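DeviceSearchQuery validated each criteria setter against the VALID_* lists above before adding it to the request body. A sketch of a chained device search (profile, IDs, time range, and sort field are placeholders):

    from cbapi.psc import CbPSCBaseAPI, Device

    cb = CbPSCBaseAPI(profile="default")                  # placeholder profile

    query = (
        cb.select(Device)
        .set_os(["WINDOWS", "LINUX"])
        .set_status(["ACTIVE"])
        .set_policy_ids([6525])                           # placeholder policy ID
        .set_last_contact_time(range="-3d")               # range string format is an assumption
        .sort_by("last_contact_time", direction="DESC")   # sort field is a guess
    )

    for device in query:
        print(device.id, device.last_contact_time, device.last_external_ip_address)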
- - Example:: - - >>> cb.select(Device).sort_by("name") - - :param key: the key in the schema to sort by - :param direction: the sort order, either "ASC" or "DESC" - :rtype: :py:class:`DeviceSearchQuery` - """ - if direction not in DeviceSearchQuery.VALID_DIRECTIONS: - raise ApiError("invalid sort direction specified") - self._sortcriteria = {"field": key, "order": direction} - return self - - def _build_request(self, from_row, max_rows): - """ - Creates the request body for an API call. - - :param int from_row: The row to start the query at. - :param int max_rows: The maximum number of rows to be returned. - :return: A dict containing the complete request body. - """ - mycrit = self._criteria - if self._time_filter: - mycrit["last_contact_time"] = self._time_filter - request = {"criteria": mycrit, "exclusions": self._exclusions} - request["query"] = self._query_builder._collapse() - if from_row > 0: - request["start"] = from_row - if max_rows >= 0: - request["rows"] = max_rows - if self._sortcriteria != {}: - request["sort"] = [self._sortcriteria] - return request - - def _build_url(self, tail_end): - """ - Creates the URL to be used for an API call. - - :param str tail_end: String to be appended to the end of the generated URL. - """ - url = self._doc_class.urlobject.format(self._cb.credentials.org_key) + tail_end - return url - - def _count(self): - """ - Returns the number of results from the run of this query. - - :return: The number of results from the run of this query. - """ - if self._count_valid: - return self._total_results - - url = self._build_url("/_search") - request = self._build_request(0, -1) - resp = self._cb.post_object(url, body=request) - result = resp.json() - - self._total_results = result["num_found"] - self._count_valid = True - - return self._total_results - - def _perform_query(self, from_row=0, max_rows=-1): - """ - Performs the query and returns the results of the query in an iterable fashion. - - :param int from_row: The row to start the query at (default 0). - :param int max_rows: The maximum number of rows to be returned (default -1, meaning "all"). - """ - url = self._build_url("/_search") - current = from_row - numrows = 0 - still_querying = True - while still_querying: - request = self._build_request(current, max_rows) - resp = self._cb.post_object(url, body=request) - result = resp.json() - - self._total_results = result["num_found"] - self._count_valid = True - - results = result.get("results", []) - for item in results: - yield self._doc_class(self._cb, item["id"], item) - current += 1 - numrows += 1 - - if max_rows > 0 and numrows == max_rows: - still_querying = False - break - - from_row = current - if current >= self._total_results: - still_querying = False - break - - def download(self): - """ - Uses the query parameters that have been set to download all - device listings in CSV format. - - Example:: - - >>> cb.select(Device).set_status(["ALL"]).download() - - :return: The CSV raw data as returned from the server. 
- """ - tmp = self._criteria.get("status", []) - if not tmp: - raise ApiError("at least one status must be specified to download") - query_params = {"status": ",".join(tmp)} - tmp = self._criteria.get("ad_group_id", []) - if tmp: - query_params["ad_group_id"] = ",".join([str(t) for t in tmp]) - tmp = self._criteria.get("policy_id", []) - if tmp: - query_params["policy_id"] = ",".join([str(t) for t in tmp]) - tmp = self._criteria.get("target_priority", []) - if tmp: - query_params["target_priority"] = ",".join(tmp) - tmp = self._query_builder._collapse() - if tmp: - query_params["query_string"] = tmp - if self._sortcriteria: - query_params["sort_field"] = self._sortcriteria["field"] - query_params["sort_order"] = self._sortcriteria["order"] - url = self._build_url("/_search/download") - # AGRB 10/3/2019 - Header is TEMPORARY until bug is fixed in API. Remove when fix deployed. - return self._cb.get_raw_data(url, query_params, headers={"Content-Type": "application/json"}) - - def _bulk_device_action(self, action_type, options=None): - """ - Perform a bulk action on all devices matching the current search criteria. - - :param str action_type: The action type to be performed. - :param dict options: Options for the bulk device action. Default None. - """ - request = {"action_type": action_type, "search": self._build_request(0, -1)} - if options: - request["options"] = options - return self._cb._raw_device_action(request) - - def background_scan(self, scan): - """ - Set the background scan option for the specified devices. - - :param boolean scan: True to turn background scan on, False to turn it off. - """ - return self._bulk_device_action("BACKGROUND_SCAN", self._cb._action_toggle(scan)) - - def bypass(self, enable): - """ - Set the bypass option for the specified devices. - - :param boolean enable: True to enable bypass, False to disable it. - """ - return self._bulk_device_action("BYPASS", self._cb._action_toggle(enable)) - - def delete_sensor(self): - """ - Delete the specified sensor devices. - """ - return self._bulk_device_action("DELETE_SENSOR") - - def uninstall_sensor(self): - """ - Uninstall the specified sensor devices. - """ - return self._bulk_device_action("UNINSTALL_SENSOR") - - def quarantine(self, enable): - """ - Set the quarantine option for the specified devices. - - :param boolean enable: True to enable quarantine, False to disable it. - """ - return self._bulk_device_action("QUARANTINE", self._cb._action_toggle(enable)) - - def update_policy(self, policy_id): - """ - Set the current policy for the specified devices. - - :param int policy_id: ID of the policy to set for the devices. - """ - return self._bulk_device_action("UPDATE_POLICY", {"policy_id": policy_id}) - - def update_sensor_version(self, sensor_version): - """ - Update the sensor version for the specified devices. - - :param dict sensor_version: New version properties for the sensor. 
- """ - return self._bulk_device_action("UPDATE_SENSOR_VERSION", {"sensor_version": sensor_version}) diff --git a/src/cbapi/psc/livequery/__init__.py b/src/cbapi/psc/livequery/__init__.py deleted file mode 100644 index d48b5b1d..00000000 --- a/src/cbapi/psc/livequery/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Exported public API for the Cb LiveQuery API - -from __future__ import absolute_import - -from .rest_api import CbLiveQueryAPI -from cbapi.psc.livequery.models import Run, RunHistory, Result, DeviceSummary -from cbapi.psc.livequery.models import ResultFacet, DeviceSummaryFacet diff --git a/src/cbapi/psc/livequery/models.py b/src/cbapi/psc/livequery/models.py deleted file mode 100644 index 38514034..00000000 --- a/src/cbapi/psc/livequery/models.py +++ /dev/null @@ -1,294 +0,0 @@ -from __future__ import absolute_import -from cbapi.models import UnrefreshableModel, NewBaseModel -from cbapi.errors import ApiError, ServerError -from .query import RunQuery, RunHistoryQuery, ResultQuery, FacetQuery -import logging -import time - -log = logging.getLogger(__name__) - - -class Run(NewBaseModel): - """ - Represents a LiveQuery run. - - Example:: - - >>> run = cb.select(Run, run_id) - >>> print(run.name, run.sql, run.create_time) - >>> print(run.status, run.match_count) - >>> run.refresh() - """ - primary_key = "id" - swagger_meta_file = "psc/livequery/models/run.yaml" - urlobject = "/livequery/v1/orgs/{}/runs" - urlobject_single = "/livequery/v1/orgs/{}/runs/{}" - _is_deleted = False - - def __init__(self, cb, model_unique_id=None, initial_data=None): - if initial_data is not None: - item = initial_data - elif model_unique_id is not None: - url = self.urlobject_single.format(cb.credentials.org_key, model_unique_id) - item = cb.get_object(url) - - model_unique_id = item.get("id") - - super(Run, self).__init__( - cb, - model_unique_id=model_unique_id, - initial_data=item, - force_init=False, - full_doc=True, - ) - - @classmethod - def _query_implementation(cls, cb): - return RunQuery(cls, cb) - - def _refresh(self): - if self._is_deleted: - raise ApiError("cannot refresh a deleted query") - url = self.urlobject_single.format(self._cb.credentials.org_key, self.id) - resp = self._cb.get_object(url) - self._info = resp - self._last_refresh_time = time.time() - return True - - def stop(self): - if self._is_deleted: - raise ApiError("cannot stop a deleted query") - url = self.urlobject_single.format(self._cb.credentials.org_key, self.id) + "/status" - result = self._cb.put_object(url, {'status': 'CANCELLED'}) - if (result.status_code == 200): - try: - self._info = result.json() - self._last_refresh_time = time.time() - return True - except Exception: - raise ServerError(result.status_code, "Cannot parse response as JSON: {0:s}".format(result.content)) - return False - - def delete(self): - if self._is_deleted: - return True # already deleted - url = self.urlobject_single.format(self._cb.credentials.org_key, self.id) - result = self._cb.delete_object(url) - if result.status_code == 200: - self._is_deleted = True - return True - return False - - -class RunHistory(Run): - """ - Represents a historical LiveQuery ``Run``. 
- """ - urlobject_history = "/livequery/v1/orgs/{}/runs/_search" - - def __init__(self, cb, initial_data=None): - item = initial_data - model_unique_id = item.get("id") - super(Run, self).__init__(cb, - model_unique_id, initial_data=item, - force_init=False, full_doc=True) - - @classmethod - def _query_implementation(cls, cb): - return RunHistoryQuery(cls, cb) - - -class Result(UnrefreshableModel): - """ - Represents a single result from a LiveQuery ``Run``. - """ - primary_key = "id" - swagger_meta_file = "psc/livequery/models/result.yaml" - urlobject = "/livequery/v1/orgs/{}/runs/{}/results/_search" - - class Device(UnrefreshableModel): - """ - Represents device information for a result. - """ - primary_key = "id" - - def __init__(self, cb, initial_data): - super(Result.Device, self).__init__( - cb, - model_unique_id=initial_data["id"], - initial_data=initial_data, - force_init=False, - full_doc=True, - ) - - class Fields(UnrefreshableModel): - """ - Represents the fields of a result. - """ - def __init__(self, cb, initial_data): - super(Result.Fields, self).__init__( - cb, - model_unique_id=None, - initial_data=initial_data, - force_init=False, - full_doc=True, - ) - - class Metrics(UnrefreshableModel): - """ - Represents the metrics for a result. - """ - def __init__(self, cb, initial_data): - super(Result.Metrics, self).__init__( - cb, - model_unique_id=None, - initial_data=initial_data, - force_init=False, - full_doc=True, - ) - - @classmethod - def _query_implementation(cls, cb): - return ResultQuery(cls, cb) - - def __init__(self, cb, initial_data): - super(Result, self).__init__( - cb, - model_unique_id=initial_data["id"], - initial_data=initial_data, - force_init=False, - full_doc=True, - ) - self._run_id = initial_data["id"] - self._device = Result.Device(cb, initial_data=initial_data["device"]) - self._fields = Result.Fields(cb, initial_data=initial_data["fields"]) - if "metrics" in initial_data: - self._metrics = Result.Metrics(cb, initial_data=initial_data["metrics"]) - else: - self._metrics = Result.Metrics(cb, initial_data=None) - - @property - def device_(self): - """ - Returns the reified ``Result.Device`` for this result. - """ - return self._device - - @property - def fields_(self): - """ - Returns the reified ``Result.Fields`` for this result. - """ - return self._fields - - @property - def metrics_(self): - """ - Returns the reified ``Result.Metrics`` for this result. - """ - return self._metrics - - def query_device_summaries(self): - return self._cb.select(DeviceSummary).run_id(self._run_id) - - def query_result_facets(self): - return self._cb.select(ResultFacet).run_id(self._run_id) - - def query_device_summary_facets(self): - return self._cb.select(DeviceSummaryFacet).run_id(self._run_id) - - -class DeviceSummary(UnrefreshableModel): - """ - Represents the summary of results from a single device during a single LiveQuery ``Run``. - """ - primary_key = "id" - swagger_meta_file = "psc/livequery/models/device_summary.yaml" - urlobject = "/livequery/v1/orgs/{}/runs/{}/results/device_summaries/_search" - - class Metrics(UnrefreshableModel): - """ - Represents the metrics for a result. 
- """ - def __init__(self, cb, initial_data): - super(DeviceSummary.Metrics, self).__init__( - cb, - model_unique_id=None, - initial_data=initial_data, - force_init=False, - full_doc=True, - ) - - @classmethod - def _query_implementation(cls, cb): - return ResultQuery(cls, cb) - - def __init__(self, cb, initial_data): - super(DeviceSummary, self).__init__( - cb, - model_unique_id=initial_data["id"], - initial_data=initial_data, - force_init=False, - full_doc=True, - ) - self._metrics = DeviceSummary.Metrics(cb, initial_data=initial_data["metrics"]) - - @property - def metrics_(self): - """ - Returns the reified ``DeviceSummary.Metrics`` for this result. - """ - return self._metrics - - -class ResultFacet(UnrefreshableModel): - """ - Represents the summary of results for a single field in a LiveQuery ``Run``. - """ - primary_key = "field" - swagger_meta_file = "psc/livequery/models/facet.yaml" - urlobject = "/livequery/v1/orgs/{}/runs/{}/results/_facet" - - class Values(UnrefreshableModel): - """ - Represents the values associated with a field. - """ - def __init__(self, cb, initial_data): - super(ResultFacet.Values, self).__init__( - cb, - model_unique_id=None, - initial_data=initial_data, - force_init=False, - full_doc=True, - ) - - @classmethod - def _query_implementation(cls, cb): - return FacetQuery(cls, cb) - - def __init__(self, cb, initial_data): - super(ResultFacet, self).__init__( - cb, - model_unique_id=None, - initial_data=initial_data, - force_init=False, - full_doc=True - ) - self._values = ResultFacet.Values(cb, initial_data=initial_data["values"]) - - @property - def values_(self): - """ - Returns the reified ``ResultFacet.Values`` for this result. - """ - return self._values - - -class DeviceSummaryFacet(ResultFacet): - """ - Represents the summary of results for a single device summary in a LiveQuery ``Run``. 
- """ - urlobject = "/livequery/v1/orgs/{}/runs/{}/results/device_summaries/_facet" - - def __init__(self, cb, initial_data): - super(DeviceSummaryFacet, self).__init__(cb, initial_data) diff --git a/src/cbapi/psc/livequery/models/device_summary.yaml b/src/cbapi/psc/livequery/models/device_summary.yaml deleted file mode 100755 index a4b32c88..00000000 --- a/src/cbapi/psc/livequery/models/device_summary.yaml +++ /dev/null @@ -1,52 +0,0 @@ -type: object -required: [] # TODO(ww): Find out which result fields are required -properties: - id: - type: string - description: The result's unique ID - total_results: - type: integer - format: int32 # NOTE(ww): This is a guess - description: Number of results returned for this particular device - device_id: - type: integer - format: int32 # NOTE(ww): This is a guess - description: The device's unique ID - device_name: - type: string - description: The device's name - time_received: - type: string - description: The time at which this result was received # NOTE(ww): This is a guess - format: date-time - status: - type: string - description: The result's status - device_message: - type: string - description: Placeholder # TODO(ww): Needs description - os: - type: string - description: The device's operating system - policy_id: - type: integer - format: int32 # NOTE(ww): This is a guess - description: The device's policy ID - policy_name: - type: string - description: The device's policy name - metrics: - type: array - description: Metrics associated with the device - items: - type: object - description: Individual metrics entries - properties: - key: - type: string - description: The name of the particular metric - value: - type: number - format: float - description: The value of the particular metric - \ No newline at end of file diff --git a/src/cbapi/psc/livequery/models/facet.yaml b/src/cbapi/psc/livequery/models/facet.yaml deleted file mode 100755 index b20151b3..00000000 --- a/src/cbapi/psc/livequery/models/facet.yaml +++ /dev/null @@ -1,22 +0,0 @@ -type: object -required: [] # TODO(ww): Find out which result fields are required -properties: - field: - type: string - description: The name of the field being summarized - values: - type: array - items: - type: object - properties: - total: - type: integer - format: int32 # NOTE(ww): This is a guess - description: The total number of times this value appears in the query output - id: - type: string - description: The ID of the value being enumerated - name: - type: string - description: The name of the value being enumerated - \ No newline at end of file diff --git a/src/cbapi/psc/livequery/models/result.yaml b/src/cbapi/psc/livequery/models/result.yaml deleted file mode 100644 index ff1d3bc8..00000000 --- a/src/cbapi/psc/livequery/models/result.yaml +++ /dev/null @@ -1,41 +0,0 @@ -type: object -required: [] # TODO(ww): Find out which result fields are required -properties: - id: - type: string - description: The result's unique ID - device: - type: object - description: The device associated with the result - properties: - id: - type: integer - format: int32 # NOTE(ww): This is a guess - description: The device's unique ID - name: - type: string - description: The device's name - policy_id: - type: integer - format: int32 # NOTE(ww): This is a guess - description: The device's policy ID - policy_name: - type: string - description: The device's policy name - status: - type: string - description: The result's status - time_received: - type: string - description: The time at which this result was 
received # NOTE(ww): This is a guess - device_message: - type: string - description: Placeholder # TODO(ww): Needs description - fields: - type: object - description: The fields returned by the backing osquery query - metrics: # TODO(ww): Document each field in metrics? - type: object - description: Metrics associated with the result's host - - diff --git a/src/cbapi/psc/livequery/models/run.yaml b/src/cbapi/psc/livequery/models/run.yaml deleted file mode 100644 index 089407be..00000000 --- a/src/cbapi/psc/livequery/models/run.yaml +++ /dev/null @@ -1,86 +0,0 @@ -type: object -required: # TODO(ww): Find out which run fields are required - - id -properties: - template_id: - type: string # NOTE(ww): This is a guess - description: Placeholder # TODO(ww): Needs description - org_key: - type: string - description: The organization key for this run - name: - type: string - description: The name of the LiveQuery run - id: - type: string - description: The run's unique ID - sql: - type: string - description: The LiveQuery query - created_by: - type: string - description: Placeholder # TODO(ww): Needs description - create_time: - type: string - description: When this run was created - status_update_time: - type: string - description: When the status of this run was last updated - timeout_time: - type: string - description: Placeholder # TODO(ww): Needs description - cancellation_time: - type: string - description: Placeholder # TODO(ww): Needs description - cancelled_by: - type: string # NOTE(ww): This is a guess - description: Placeholder # TODO(ww): Needs description - archive_time: - type: string - description: Placeholder # TODO(ww): Needs description - archived_by: - type: string # NOTE(ww): This is a guess - description: Placeholder # TODO(ww): Needs description - notify_on_finish: - type: boolean - description: Whether or not to send an email on query completion - active_org_devices: - type: integer - format: int32 # NOTE(ww): This is a guess - description: The number of devices active in the organization - status: - type: string - description: The run status - device_filter: - type: object - description: Any device filter rules associated with the run - schedule: - type: string # NOTE(ww): This is a guess - description: Placeholder # TODO(ww): Needs description - last_result_time: - type: string - description: When the most recent result for this run was reported - total_results: - type: integer - format: int32 - description: Placeholder # TODO(ww): Needs description - match_count: - type: integer - format: int32 # NOTE(ww): This is a guess - description: Placeholder # TODO(ww): Needs description - no_match_count: - type: integer - format: int32 # NOTE(ww): This is a guess - description: Placeholder # TODO(ww): Needs description - error_count: - type: integer - format: int32 # NOTE(ww): This is a guess - description: Placeholder # TODO(ww): Needs description - not_supported_count: - type: integer - format: int32 # NOTE(ww): This is a guess - description: Placeholder # TODO(ww): Needs description - cancelled_count: - type: integer - format: int32 # NOTE(ww): This is a guess - description: Placeholder # TODO(ww): Needs description diff --git a/src/cbapi/psc/livequery/query.py b/src/cbapi/psc/livequery/query.py deleted file mode 100644 index f4a5a05e..00000000 --- a/src/cbapi/psc/livequery/query.py +++ /dev/null @@ -1,375 +0,0 @@ -from cbapi.errors import ApiError -from cbapi.psc.base_query import QueryBuilder, PSCQueryBase -from cbapi.psc.base_query import QueryBuilderSupportMixin, 
IterableQueryMixin -import logging -from six import string_types - -log = logging.getLogger(__name__) - - -class RunQuery(PSCQueryBase): - """ - Represents a query that either creates or retrieves the - status of a LiveQuery run. - """ - - def __init__(self, doc_class, cb): - super().__init__(doc_class, cb) - self._query_token = None - self._query_body = {"device_filter": {}} - self._device_filter = self._query_body["device_filter"] - - def device_ids(self, device_ids): - """ - Restricts the devices that this LiveQuery run is performed on - to the given IDs. - - :param device_ids: list of ints - :return: This instance - """ - if not all(isinstance(device_id, int) for device_id in device_ids): - raise ApiError("One or more invalid device IDs") - self._device_filter["device_ids"] = device_ids - return self - - def device_types(self, device_types): - """ - Restricts the devices that this LiveQuery run is performed on - to the given device types. - - :param device_types: list of strs - :return: This instance - """ - if not all(isinstance(device_type, str) for device_type in device_types): - raise ApiError("One or more invalid device types") - self._device_filter["device_types"] = device_types - return self - - def policy_ids(self, policy_ids): - """ - Restricts this LiveQuery run to the given policy IDs. - - :param policy_ids: list of ints - :return: This instance - """ - if not all(isinstance(policy_id, int) for policy_id in policy_ids): - raise ApiError("One or more invalid policy IDs") - self._device_filter["policy_ids"] = policy_ids - return self - - def where(self, sql): - """ - Sets this LiveQuery run's underlying SQL. - - :param sql: The SQL to execute - :return: This instance - """ - self._query_body["sql"] = sql - return self - - def name(self, name): - """ - Sets this LiveQuery run's name. If no name is explicitly set, - the run is named after its SQL. - - :param name: The run name - :return: This instance - """ - self._query_body["name"] = name - return self - - def notify_on_finish(self): - """ - Sets the notify-on-finish flag on this LiveQuery run. - - :return: This instance - """ - self._query_body["notify_on_finish"] = True - return self - - def submit(self): - """ - Submits this LiveQuery run. - - :return: A new ``Run`` instance containing the run's status - """ - if self._query_token is not None: - raise ApiError( - "Query already submitted: token {0}".format(self._query_token) - ) - - if "sql" not in self._query_body: - raise ApiError("Missing LiveQuery SQL") - - url = self._doc_class.urlobject.format(self._cb.credentials.org_key) - resp = self._cb.post_object(url, body=self._query_body) - - return self._doc_class(self._cb, initial_data=resp.json()) - - -class RunHistoryQuery(PSCQueryBase, QueryBuilderSupportMixin, IterableQueryMixin): - """ - Represents a query that retrieves historic LiveQuery runs. - """ - def __init__(self, doc_class, cb): - super().__init__(doc_class, cb) - self._query_builder = QueryBuilder() - self._sort = {} - - def sort_by(self, key, direction="ASC"): - """Sets the sorting behavior on a query's results. 
- - Example:: - - >>> cb.select(Result).run_id(my_run).where(username="foobar").sort_by("uid") - - :param key: the key in the schema to sort by - :param direction: the sort order, either "ASC" or "DESC" - :rtype: :py:class:`ResultQuery` - """ - self._sort.update({"field": key, "order": direction}) - return self - - def _build_request(self, start, rows): - request = {"start": start} - - if self._query_builder: - request["query"] = self._query_builder._collapse() - if rows != 0: - request["rows"] = rows - if self._sort: - request["sort"] = [self._sort] - - return request - - def _count(self): - if self._count_valid: - return self._total_results - - url = self._doc_class.urlobject_history.format( - self._cb.credentials.org_key - ) - request = self._build_request(start=0, rows=0) - resp = self._cb.post_object(url, body=request) - result = resp.json() - - self._total_results = result["num_found"] - self._count_valid = True - - return self._total_results - - def _perform_query(self, start=0, rows=0): - url = self._doc_class.urlobject_history.format( - self._cb.credentials.org_key - ) - current = start - numrows = 0 - still_querying = True - while still_querying: - request = self._build_request(start, rows) - resp = self._cb.post_object(url, body=request) - result = resp.json() - - self._total_results = result["num_found"] - self._count_valid = True - - results = result.get("results", []) - for item in results: - yield self._doc_class(self._cb, item) - current += 1 - numrows += 1 - - if rows and numrows == rows: - still_querying = False - break - - start = current - if current >= self._total_results: - still_querying = False - break - - -class ResultQuery(PSCQueryBase, QueryBuilderSupportMixin, IterableQueryMixin): - """ - Represents a query that retrieves results from a LiveQuery run. - """ - def __init__(self, doc_class, cb): - super().__init__(doc_class, cb) - self._query_builder = QueryBuilder() - self._criteria = {} - self._sort = {} - self._batch_size = 100 - self._run_id = None - - def criteria(self, **kwargs): - """Sets the filter criteria on a query's results. - - Example:: - - >>> cb.select(Result).run_id(my_run).criteria(device_id=[123, 456]) - - """ - self._criteria.update(kwargs) - return self - - def sort_by(self, key, direction="ASC"): - """Sets the sorting behavior on a query's results. - - Example:: - - >>> cb.select(Result).run_id(my_run).where(username="foobar").sort_by("uid") - - :param key: the key in the schema to sort by - :param direction: the sort order, either "ASC" or "DESC" - :rtype: :py:class:`ResultQuery` - """ - self._sort.update({"field": key, "order": direction}) - return self - - def run_id(self, run_id): - """Sets the run ID to query results for. 
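RunHistoryQuery paged through prior runs with the same start/rows/sort request shape as the result queries. A sketch of listing run history via the removed query_history() entry point (profile, query string, and sort field are placeholders):

    from cbapi.psc.livequery import CbLiveQueryAPI

    cb = CbLiveQueryAPI(profile="default")            # placeholder profile

    history = cb.query_history("logged_in_users")     # placeholder search string; None lists all runs
    history.sort_by("create_time", direction="DESC")  # sort field is a guess
    for run in history:
        print(run.id, run.name, run.status, run.create_time)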
- - Example:: - - >>> cb.select(Result).run_id(my_run) - """ - self._run_id = run_id - return self - - def _build_request(self, start, rows): - request = {"start": start, "query": self._query_builder._collapse()} - - if rows != 0: - request["rows"] = rows - if self._criteria: - request["criteria"] = self._criteria - if self._sort: - request["sort"] = [self._sort] - - return request - - def _count(self): - if self._count_valid: - return self._total_results - - if self._run_id is None: - raise ApiError("Can't retrieve count without a run ID") - - url = self._doc_class.urlobject.format( - self._cb.credentials.org_key, self._run_id - ) - request = self._build_request(start=0, rows=0) - resp = self._cb.post_object(url, body=request) - result = resp.json() - - self._total_results = result["num_found"] - self._count_valid = True - - return self._total_results - - def _perform_query(self, start=0, rows=0): - if self._run_id is None: - raise ApiError("Can't retrieve results without a run ID") - - url = self._doc_class.urlobject.format( - self._cb.credentials.org_key, self._run_id - ) - current = start - numrows = 0 - still_querying = True - while still_querying: - request = self._build_request(start, rows) - resp = self._cb.post_object(url, body=request) - result = resp.json() - - self._total_results = result["num_found"] - self._count_valid = True - - results = result.get("results", []) - for item in results: - yield self._doc_class(self._cb, item) - current += 1 - numrows += 1 - - if rows and numrows == rows: - still_querying = False - break - - start = current - if current >= self._total_results: - still_querying = False - break - - -class FacetQuery(PSCQueryBase, QueryBuilderSupportMixin, IterableQueryMixin): - """ - Represents a query that receives facet information from a LiveQuery run. - """ - def __init__(self, doc_class, cb): - super().__init__(doc_class, cb) - self._query_builder = QueryBuilder() - self._facet_fields = [] - self._criteria = {} - self._run_id = None - - def facet_field(self, field): - """Sets the facet fields to be received by this query. - - Example:: - - >>> cb.select(ResultFacet).run_id(my_run).facet_field(["device.policy_name", "device.os"]) - - :param field: Field(s) to be received, either single string or list of strings - :return: Query object - :rtype: :py:class:`Query` - """ - if isinstance(field, string_types): - self._facet_fields.append(field) - else: - for name in field: - self._facet_fields.append(name) - return self - - def criteria(self, **kwargs): - """Sets the filter criteria on a query's results. - - Example:: - - >>> cb.select(ResultFacet).run_id(my_run).criteria(device_id=[123, 456]) - - """ - self._criteria.update(kwargs) - return self - - def run_id(self, run_id): - """Sets the run ID to query results for. 
- - Example:: - - >>> cb.select(ResultFacet).run_id(my_run) - """ - self._run_id = run_id - return self - - def _build_request(self, rows): - terms = {"fields": self._facet_fields} - if rows != 0: - terms["rows"] = rows - request = {"query": self._query_builder._collapse(), "terms": terms} - if self._criteria: - request["criteria"] = self._criteria - return request - - def _perform_query(self, rows=0): - if self._run_id is None: - raise ApiError("Can't retrieve results without a run ID") - - url = self._doc_class.urlobject.format( - self._cb.credentials.org_key, self._run_id - ) - request = self._build_request(rows) - resp = self._cb.post_object(url, body=request) - result = resp.json() - results = result.get("terms", []) - for item in results: - yield self._doc_class(self._cb, item) diff --git a/src/cbapi/psc/livequery/rest_api.py b/src/cbapi/psc/livequery/rest_api.py deleted file mode 100644 index 824677f0..00000000 --- a/src/cbapi/psc/livequery/rest_api.py +++ /dev/null @@ -1,36 +0,0 @@ -from cbapi.psc.livequery.models import Run, RunHistory -from cbapi.psc.rest_api import CbPSCBaseAPI -from cbapi.errors import CredentialError, ApiError -import logging - -log = logging.getLogger(__name__) - - -class CbLiveQueryAPI(CbPSCBaseAPI): - """The main entry point into the Carbon Black Cloud LiveQuery API. - - :param str profile: (optional) Use the credentials in the named profile when connecting to the Carbon Black server. - Uses the profile named 'default' when not specified. - - Usage:: - - >>> from cbapi.psc.livequery import CbLiveQueryAPI - >>> cb = CbLiveQueryAPI(profile="production") - """ - def __init__(self, *args, **kwargs): - super(CbLiveQueryAPI, self).__init__(*args, **kwargs) - - if not self.credentials.get("org_key", None): - raise CredentialError("No organization key specified") - - def _perform_query(self, cls, **kwargs): - if hasattr(cls, "_query_implementation"): - return cls._query_implementation(self) - else: - raise ApiError("All LiveQuery models should provide _query_implementation") - - def query(self, sql): - return self.select(Run).where(sql=sql) - - def query_history(self, query=None): - return self.select(RunHistory).where(query) diff --git a/src/cbapi/psc/models.py b/src/cbapi/psc/models.py deleted file mode 100755 index 321dac06..00000000 --- a/src/cbapi/psc/models.py +++ /dev/null @@ -1,181 +0,0 @@ -from cbapi.models import MutableBaseModel, UnrefreshableModel -from cbapi.errors import ServerError -from cbapi.psc.devices_query import DeviceSearchQuery - -from copy import deepcopy -import logging -import json -import time - -log = logging.getLogger(__name__) - - -class PSCMutableModel(MutableBaseModel): - _change_object_http_method = "PATCH" - _change_object_key_name = None - - def __init__(self, cb, model_unique_id=None, initial_data=None, force_init=False, full_doc=False): - super(PSCMutableModel, self).__init__(cb, model_unique_id=model_unique_id, initial_data=initial_data, - force_init=force_init, full_doc=full_doc) - if not self._change_object_key_name: - self._change_object_key_name = self.primary_key - - def _parse(self, obj): - if type(obj) == dict and self.info_key in obj: - return obj[self.info_key] - - def _update_object(self): - if self._change_object_http_method != "PATCH": - return self._update_entire_object() - else: - return self._patch_object() - - def _update_entire_object(self): - if self.__class__.primary_key in self._dirty_attributes.keys() or self._model_unique_id is None: - new_object_info = deepcopy(self._info) - try: - if not 
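CbLiveQueryAPI extended CbPSCBaseAPI and refused to start without an org_key in the credentials. A sketch of the two construction styles the deleted get_cb_livequery_object() helper supported; the URL, token, and org key values are placeholders:

    from cbapi.psc.livequery import CbLiveQueryAPI

    # Profile-based: url, token, and org_key come from the named credential profile
    cb = CbLiveQueryAPI(profile="default")                  # placeholder profile

    # Explicit: mirrors the deleted example_helpers code path
    cb = CbLiveQueryAPI(url="https://defense.example.com",  # placeholder URL
                        token="SECRET/ID",                  # placeholder API token
                        org_key="EXAMPLE",                  # placeholder org key
                        ssl_verify=True)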
self._new_object_needs_primary_key: - del(new_object_info[self.__class__.primary_key]) - except Exception: - pass - log.debug("Creating a new {0:s} object".format(self.__class__.__name__)) - ret = self._cb.api_json_request(self.__class__._new_object_http_method, self.urlobject, - data={self.info_key: new_object_info}) - else: - log.debug("Updating {0:s} with unique ID {1:s}".format(self.__class__.__name__, str(self._model_unique_id))) - ret = self._cb.api_json_request(self.__class__._change_object_http_method, - self._build_api_request_uri(), data={self.info_key: self._info}) - - return self._refresh_if_needed(ret) - - def _patch_object(self): - if self.__class__.primary_key in self._dirty_attributes.keys() or self._model_unique_id is None: - log.debug("Creating a new {0:s} object".format(self.__class__.__name__)) - ret = self._cb.api_json_request(self.__class__._new_object_http_method, self.urlobject, - data=self._info) - else: - updates = {} - for k in self._dirty_attributes.keys(): - updates[k] = self._info[k] - log.debug("Updating {0:s} with unique ID {1:s}".format(self.__class__.__name__, str(self._model_unique_id))) - ret = self._cb.api_json_request(self.__class__._change_object_http_method, - self._build_api_request_uri(), data=updates) - - return self._refresh_if_needed(ret) - - def _refresh_if_needed(self, request_ret): - refresh_required = True - - if request_ret.status_code not in range(200, 300): - try: - message = json.loads(request_ret.text)[0] - except Exception: - message = request_ret.text - - raise ServerError(request_ret.status_code, message, - result="Did not update {} record.".format(self.__class__.__name__)) - else: - try: - message = request_ret.json() - log.debug("Received response: %s" % message) - if not isinstance(message, dict): - raise ServerError(request_ret.status_code, message, - result="Unknown error updating {0:s} record.".format(self.__class__.__name__)) - else: - if message.get("success", False): - if isinstance(message.get(self.info_key, None), dict): - self._info = message.get(self.info_key) - self._full_init = True - refresh_required = False - else: - if self._change_object_key_name in message.keys(): - # if all we got back was an ID, try refreshing to get the entire record. 
- log.debug("Only received an ID back from the server, forcing a refresh") - self._info[self.primary_key] = message[self._change_object_key_name] - refresh_required = True - else: - # "success" is False - raise ServerError(request_ret.status_code, message.get("message", ""), - result="Did not update {0:s} record.".format(self.__class__.__name__)) - except Exception: - pass - - self._dirty_attributes = {} - if refresh_required: - self.refresh() - return self._model_unique_id - - -class Device(PSCMutableModel): - urlobject = "/appservices/v6/orgs/{0}/devices" - urlobject_single = "/appservices/v6/orgs/{0}/devices/{1}" - primary_key = "id" - swagger_meta_file = "psc/models/device.yaml" - - def __init__(self, cb, model_unique_id, initial_data=None): - super(Device, self).__init__(cb, model_unique_id, initial_data) - if model_unique_id is not None and initial_data is None: - self._refresh() - - @classmethod - def _query_implementation(cls, cb): - return DeviceSearchQuery(cls, cb) - - def _refresh(self): - url = self.urlobject_single.format(self._cb.credentials.org_key, self._model_unique_id) - resp = self._cb.get_object(url) - self._info = resp - self._last_refresh_time = time.time() - return True - - def background_scan(self, flag): - """ - Set the background scan option for this device. - - :param boolean flag: True to turn background scan on, False to turn it off. - """ - return self._cb.device_background_scan([self._model_unique_id], flag) - - def bypass(self, flag): - """ - Set the bypass option for this device. - - :param boolean flag: True to enable bypass, False to disable it. - """ - return self._cb.device_bypass([self._model_unique_id], flag) - - def delete_sensor(self): - """ - Delete this sensor device. - """ - return self._cb.device_delete_sensor([self._model_unique_id]) - - def uninstall_sensor(self): - """ - Uninstall this sensor device. - """ - return self._cb.device_uninstall_sensor([self._model_unique_id]) - - def quarantine(self, flag): - """ - Set the quarantine option for this device. - - :param boolean flag: True to enable quarantine, False to disable it. - """ - return self._cb.device_quarantine([self._model_unique_id], flag) - - def update_policy(self, policy_id): - """ - Set the current policy for this device. - - :param int policy_id: ID of the policy to set for the devices. - """ - return self._cb.device_update_policy([self._model_unique_id], policy_id) - - def update_sensor_version(self, sensor_version): - """ - Update the sensor version for this device. - - :param dict sensor_version: New version properties for the sensor. 
- """ - return self._cb.device_update_sensor_version([self._model_unique_id], sensor_version) diff --git a/src/cbapi/psc/models/device.yaml b/src/cbapi/psc/models/device.yaml deleted file mode 100755 index f03c3a3d..00000000 --- a/src/cbapi/psc/models/device.yaml +++ /dev/null @@ -1,310 +0,0 @@ -type: object -properties: - activation_code: - type: string - description: Device activation code - activation_code_expiry_time: - type: string - description: When the expiration code expires and cannot be used to register a device - ad_group_id: - type: integer - format: int64 - description: Device's AD group - av_ave_version: - type: string - description: AVE version (part of AV Version) - av_engine: - type: string - example: '4.3.0.203-ave.8.3.42.106:avpack.8.4.2.36:vdf.8.12.142.100' - description: Current AV version - av_last_scan_time: - type: string - description: Last AV scan time - av_master: - type: boolean - description: Whether the device is an AV Master (?) - av_pack_version: - type: string - description: Pack version (part of AV Version) - av_product_version: - type: string - description: AV Product version (part of AV Version) - av_status: - type: array - description: AV Statuses - items: - type: string - enum: - - AV_NOT_REGISTERED - - AV_REGISTERED - - AV_DEREGISTERED - - AV_ACTIVE - - AV_BYPASS - - NOT_INSTALLED - - INSTALLED - - UNINSTALLED - - INSTALLED_SERVER - - UNINSTALLED_SERVER - - FULLY_ENABLED - - FULLY_DISABLED - - SIGNATURE_UPDATE_DISABLED - - ONACCESS_SCAN_DISABLED - - ONDEMOND_SCAN_DISABLED - - ONDEMAND_SCAN_DISABLED - - PRODUCT_UPDATE_DISABLED - av_update_servers: - type: array - description: Device's AV servers - items: - type: string - av_vdf_version: - type: string - description: VDF version (part of AV Version) - current_sensor_policy_name: - type: string - description: Current MSM policy name - deregistered_time: - type: string - format: date-time - description: When the device was deregistered with the PSC backend - device_id: - type: integer - format: int64 - description: ID of the device - device_meta_data_item_list: - type: array - description: MSM Device metadata - items: - type: object - properties: - key_name: - type: string - key_value: - type: string - position: - type: integer - format: int32 - device_owner_id: - type: integer - format: int64 - description: ID of the user who owns the device - email: - type: string - description: Email of the user who owns the device - encoded_activation_code: - type: string - description: Encoded device activation code - first_name: - type: string - description: First name of the user who owns the device - id: - type: integer - format: int64 - description: ID of the device - last_contact_time: - type: string - format: date-time - description: Time the device last checked into the PSC backend - last_device_policy_changed_time: - type: string - format: date-time - description: Last time the device's policy was changed - last_device_policy_requested_time: - type: string - format: date-time - description: Last time the device requested policy updates - last_external_ip_address: - type: string - description: Device's external IP - last_internal_ip_address: - type: string - description: Device's internal IP - last_location: - type: string - description: Location of the device (on-/off-premises) - enum: - - UNKNOWN - - ONSITE - - OFFSITE - last_name: - type: string - description: Last name of the user who owns the device - last_policy_updated_time: - type: string - format: date-time - description: Last time the device was MSM 
processed - last_reported_time: - type: string - format: date-time - description: Time when device last reported an event to PSC backend - last_reset_time: - type: string - format: date-time - description: When the sensor was last reset - last_shutdown_time: - type: string - format: date-time - description: When the device last shut down - linux_kernel_version: - type: string - description: Linux kernel version - login_user_name: - type: string - description: Last acive logged in username - mac_address: - type: string - description: Device's hardware MAC address - middle_name: - type: string - description: Middle name of the user who owns the device - name: - type: string - description: Device Hostname - organization_id: - type: integer - format: int64 - example: 1000 - description: Org ID to which the device belongs - organization_name: - type: string - description: Name of the org that owns this device - os: - type: string - example: WINDOWS - description: Device type - enum: - - WINDOWS - - ANDROID - - MAC - - IOS - - LINUX - - OTHER - os_version: - type: string - example: 'Windows 7 x86 SP: 1' - description: Version of the OS - passive_mode: - type: boolean - description: Whether the device is in passive mode (bypass?) - policy_id: - type: integer - format: int64 - description: ID of the policy this device is using - policy_name: - type: string - description: Name of the policy this device is using - policy_override: - type: boolean - description: Manually assigned policy (overrides mass sensor management) - quarantined: - type: boolean - description: Whether the device is quarantined - registered_time: - type: string - format: date-time - description: When the device was registered with the PSC backend - scan_last_action_time: - type: string - format: date-time - description: When the background scan was last active - scan_last_complete_time: - type: string - format: date-time - description: When the background scan was last completed - scan_status: - type: string - description: Background scan status - enum: - - NEVER_RUN - - STOPPED - - IN_PROGRESS - - COMPLETED - sensor_out_of_date: - type: boolean - description: Whether the device is out of date - sensor_states: - type: array - description: Active sensor states - items: - type: string - enum: - - ACTIVE - - PANICS_DETECTED - - LOOP_DETECTED - - DB_CORRUPTION_DETECTED - - CSR_ACTION - - REPUX_ACTION - - DRIVER_INIT_ERROR - - REMGR_INIT_ERROR - - UNSUPPORTED_OS - - SENSOR_UPGRADE_IN_PROGRESS - - SENSOR_UNREGISTERED - - WATCHDOG - - SENSOR_RESET_IN_PROGRESS - - DRIVER_INIT_REBOOT_REQUIRED - - SENSOR_SHUTDOWN - - SENSOR_MAINTENANCE - - DEBUG_MODE_ENABLED - - AUTO_UPDATE_DISABLED - - SELF_PROTECT_DISABLED - - VDI_MODE_ENABLED - - POC_MODE_ENABLED - - SECURITY_CENTER_OPTLN_DISABLED - - LIVE_RESPONSE_RUNNING - - LIVE_RESPONSE_NOT_RUNNING - - LIVE_RESPONSE_KILLED - - LIVE_RESPONSE_NOT_KILLED - - LIVE_RESPONSE_ENABLED - - LIVE_RESPONSE_DISABLED - sensor_version: - type: string - example: 3.4.0.0 - description: Version of the PSC sensor - status: - type: string - description: Device status - enum: - - PENDING - - REGISTERED - - UNINSTALLED - - DEREGISTERED - - ACTIVE - - INACTIVE - - ERROR - - ALL - - BYPASS_ON - - BYPASS - - QUARANTINE - - SENSOR_OUTOFDATE - - DELETED - - LIVE - target_priority_type: - type: string - example: MISSION_CRITICAL - description: Priority of the device - enum: - - LOW - - MEDIUM - - HIGH - - MISSION_CRITICAL - uninstall_code: - type: string - description: Code to enter to uninstall this device - vdi_base_device: 
- type: integer - format: int64 - description: VDI Base device - virtual_machine: - type: boolean - description: Whether this device is a Virtual Machine (VMware AppDefense integration - virtualization_provider: - type: string - description: VM Virtualization Provider - windows_platform: - type: string - description: 'Type of windows platform (client/server, x86/x64)' - enum: - - CLIENT_X86 - - CLIENT_X64 - - SERVER_X86 - - SERVER_X64 diff --git a/src/cbapi/psc/rest_api.py b/src/cbapi/psc/rest_api.py deleted file mode 100755 index ad7a68da..00000000 --- a/src/cbapi/psc/rest_api.py +++ /dev/null @@ -1,191 +0,0 @@ -from cbapi.connection import BaseAPI -from cbapi.errors import ApiError, ServerError -import logging - -log = logging.getLogger(__name__) - - -class CbPSCBaseAPI(BaseAPI): - """The main entry point into the Cb PSC API. - - :param str profile: (optional) Use the credentials in the named profile when connecting to the Carbon Black server. - Uses the profile named 'default' when not specified. - - Usage:: - - >>> from cbapi import CbPSCBaseAPI - >>> cb = CbPSCBaseAPI(profile="production") - """ - def __init__(self, *args, **kwargs): - super(CbPSCBaseAPI, self).__init__(product_name="psc", *args, **kwargs) - self._lr_scheduler = None - - def _perform_query(self, cls, **kwargs): - if hasattr(cls, "_query_implementation"): - return cls._query_implementation(self) - else: - raise ApiError("All PSC models should provide _query_implementation") - - # ---- Device API - - def _raw_device_action(self, request): - """ - Invokes the API method for a device action. - - :param dict request: The request body to be passed as JSON to the API method. - :return: The parsed JSON output from the request. - :raises ServerError: If the API method returns an HTTP error code. - """ - url = "/appservices/v6/orgs/{0}/device_actions".format(self.credentials.org_key) - resp = self.post_object(url, body=request) - if resp.status_code == 200: - return resp.json() - elif resp.status_code == 204: - return None - else: - raise ServerError(error_code=resp.status_code, message="Device action error: {0}".format(resp.content)) - - def _device_action(self, device_ids, action_type, options=None): - """ - Executes a device action on multiple device IDs. - - :param list device_ids: The list of device IDs to execute the action on. - :param str action_type: The action type to be performed. - :param dict options: Options for the bulk device action. Default None. - """ - request = {"action_type": action_type, "device_id": device_ids} - if options: - request["options"] = options - return self._raw_device_action(request) - - def _action_toggle(self, flag): - """ - Converts a boolean flag value into a "toggle" option. - - :param boolean flag: The value to be converted. - :return: A dict containing the appropriate "toggle" element. - """ - if flag: - return {"toggle": "ON"} - else: - return {"toggle": "OFF"} - - def device_background_scan(self, device_ids, scan): - """ - Set the background scan option for the specified devices. - - :param list device_ids: List of IDs of devices to be set. - :param boolean scan: True to turn background scan on, False to turn it off. - """ - return self._device_action(device_ids, "BACKGROUND_SCAN", self._action_toggle(scan)) - - def device_bypass(self, device_ids, enable): - """ - Set the bypass option for the specified devices. - - :param list device_ids: List of IDs of devices to be set. - :param boolean enable: True to enable bypass, False to disable it. 
- """ - return self._device_action(device_ids, "BYPASS", self._action_toggle(enable)) - - def device_delete_sensor(self, device_ids): - """ - Delete the specified sensor devices. - - :param list device_ids: List of IDs of devices to be deleted. - """ - return self._device_action(device_ids, "DELETE_SENSOR") - - def device_uninstall_sensor(self, device_ids): - """ - Uninstall the specified sensor devices. - - :param list device_ids: List of IDs of devices to be uninstalled. - """ - return self._device_action(device_ids, "UNINSTALL_SENSOR") - - def device_quarantine(self, device_ids, enable): - """ - Set the quarantine option for the specified devices. - - :param list device_ids: List of IDs of devices to be set. - :param boolean enable: True to enable quarantine, False to disable it. - """ - return self._device_action(device_ids, "QUARANTINE", self._action_toggle(enable)) - - def device_update_policy(self, device_ids, policy_id): - """ - Set the current policy for the specified devices. - - :param list device_ids: List of IDs of devices to be changed. - :param int policy_id: ID of the policy to set for the devices. - """ - return self._device_action(device_ids, "UPDATE_POLICY", {"policy_id": policy_id}) - - def device_update_sensor_version(self, device_ids, sensor_version): - """ - Update the sensor version for the specified devices. - - :param list device_ids: List of IDs of devices to be changed. - :param dict sensor_version: New version properties for the sensor. - """ - return self._device_action(device_ids, "UPDATE_SENSOR_VERSION", {"sensor_version": sensor_version}) - - # ---- Alerts API - - def alert_search_suggestions(self, query): - """ - Returns suggestions for keys and field values that can be used in a search. - - :param query str: A search query to use. - :return: A list of search suggestions expressed as dict objects. - """ - query_params = {"suggest.q": query} - url = "/appservices/v6/orgs/{0}/alerts/search_suggestions".format(self.credentials.org_key) - output = self.get_object(url, query_params) - return output["suggestions"] - - def _bulk_threat_update_status(self, threat_ids, status, remediation, comment): - """ - Update the status of alerts associated with multiple threat IDs, past and future. - - :param list threat_ids: List of string threat IDs. - :param str status: The status to set for all alerts, either "OPEN" or "DISMISSED". - :param str remediation: The remediation state to set for all alerts. - :param str comment: The comment to set for all alerts. - """ - if not all(isinstance(t, str) for t in threat_ids): - raise ApiError("One or more invalid threat ID values") - request = {"state": status, "threat_id": threat_ids} - if remediation is not None: - request["remediation_state"] = remediation - if comment is not None: - request["comment"] = comment - url = "/appservices/v6/orgs/{0}/threat/workflow/_criteria".format(self.credentials.org_key) - resp = self.post_object(url, body=request) - output = resp.json() - return output["request_id"] - - def bulk_threat_update(self, threat_ids, remediation=None, comment=None): - """ - Update the alert status of alerts associated with multiple threat IDs. - The alerts will be left in an OPEN state after this request. - - :param threat_ids list: List of string threat IDs. - :param remediation str: The remediation state to set for all alerts. - :param comment str: The comment to set for all alerts. - :return: The request ID, which may be used to select a WorkflowStatus object. 
- """ - return self._bulk_threat_update_status(threat_ids, "OPEN", remediation, comment) - - def bulk_threat_dismiss(self, threat_ids, remediation=None, comment=None): - """ - Dismiss the alerts associated with multiple threat IDs. - The alerts will be left in a DISMISSED state after this request. - - :param threat_ids list: List of string threat IDs. - :param remediation str: The remediation state to set for all alerts. - :param comment str: The comment to set for all alerts. - :return: The request ID, which may be used to select a WorkflowStatus object. - """ - return self._bulk_threat_update_status(threat_ids, "DISMISSED", remediation, comment) diff --git a/test/cbapi/__init__.py b/test/cbapi/__init__.py deleted file mode 100755 index e69de29b..00000000 diff --git a/test/cbapi/psc/__init__.py b/test/cbapi/psc/__init__.py deleted file mode 100755 index e69de29b..00000000 diff --git a/test/cbapi/psc/livequery/__init__.py b/test/cbapi/psc/livequery/__init__.py deleted file mode 100755 index e69de29b..00000000 diff --git a/test/cbapi/psc/livequery/test_models.py b/test/cbapi/psc/livequery/test_models.py deleted file mode 100755 index cc227c2e..00000000 --- a/test/cbapi/psc/livequery/test_models.py +++ /dev/null @@ -1,234 +0,0 @@ -import pytest -from cbapi.psc.livequery.rest_api import CbLiveQueryAPI -from cbapi.psc.livequery.models import Run, Result -from cbapi.psc.livequery.query import ResultQuery, FacetQuery -from cbapi.errors import ApiError -from test.cbtest import StubResponse, patch_cbapi - - -def test_run_refresh(monkeypatch): - _was_called = False - - def _get_run(url, parms=None, default=None): - nonlocal _was_called - assert url == "/livequery/v1/orgs/Z100/runs/abcdefg" - _was_called = True - return {"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "COMPLETE"} - - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, GET=_get_run) - run = Run(api, "abcdefg", {"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "ACTIVE"}) - rc = run.refresh() - assert _was_called - assert rc - assert run.org_key == "Z100" - assert run.name == "FoobieBletch" - assert run.id == "abcdefg" - assert run.status == "COMPLETE" - - -def test_run_stop(monkeypatch): - _was_called = False - - def _execute_stop(url, body, **kwargs): - nonlocal _was_called - assert url == "/livequery/v1/orgs/Z100/runs/abcdefg/status" - assert body == {"status": "CANCELLED"} - _was_called = True - return StubResponse({"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "CANCELLED"}) - - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, PUT=_execute_stop) - run = Run(api, "abcdefg", {"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "ACTIVE"}) - rc = run.stop() - assert _was_called - assert rc - assert run.org_key == "Z100" - assert run.name == "FoobieBletch" - assert run.id == "abcdefg" - assert run.status == "CANCELLED" - - -def test_run_stop_failed(monkeypatch): - _was_called = False - - def _execute_stop(url, body, **kwargs): - nonlocal _was_called - assert url == "/livequery/v1/orgs/Z100/runs/abcdefg/status" - assert body == {"status": "CANCELLED"} - _was_called = True - return StubResponse({"error_message": "The query is not presently running."}, 409) - - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, 
PUT=_execute_stop) - run = Run(api, "abcdefg", {"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "CANCELLED"}) - rc = run.stop() - assert _was_called - assert not rc - - -def test_run_delete(monkeypatch): - _was_called = False - - def _execute_delete(url): - nonlocal _was_called - assert url == "/livequery/v1/orgs/Z100/runs/abcdefg" - if _was_called: - pytest.fail("_execute_delete should not be called twice!") - _was_called = True - return StubResponse(None) - - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, DELETE=_execute_delete) - run = Run(api, "abcdefg", {"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "ACTIVE"}) - rc = run.delete() - assert _was_called - assert rc - assert run._is_deleted - # Now ensure that certain operations that don't make sense on a deleted object raise ApiError - with pytest.raises(ApiError): - run.refresh() - with pytest.raises(ApiError): - run.stop() - # And make sure that deleting a deleted object returns True immediately - rc = run.delete() - assert rc - - -def test_run_delete_failed(monkeypatch): - _was_called = False - - def _execute_delete(url): - nonlocal _was_called - assert url == "/livequery/v1/orgs/Z100/runs/abcdefg" - _was_called = True - return StubResponse(None, 403) - - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, DELETE=_execute_delete) - run = Run(api, "abcdefg", {"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "ACTIVE"}) - rc = run.delete() - assert _was_called - assert not rc - assert not run._is_deleted - - -def test_result_device_summaries(monkeypatch): - _was_called = False - - def _run_summaries(url, body, **kwargs): - nonlocal _was_called - assert url == "/livequery/v1/orgs/Z100/runs/abcdefg/results/device_summaries/_search" - assert body == {"query": "foo", "criteria": {"device_name": ["AxCx", "A7X"]}, - "sort": [{"field": "device_name", "order": "ASC"}], "start": 0} - _was_called = True - return StubResponse({"org_key": "Z100", "num_found": 2, - "results": [{"id": "ghijklm", "total_results": 2, "device_id": 314159, - "metrics": [{"key": "aaa", "value": 0.0}, {"key": "bbb", "value": 0.0}]}, - {"id": "mnopqrs", "total_results": 3, "device_id": 271828, - "metrics": [{"key": "aaa", "value": 0.0}, {"key": "bbb", "value": 0.0}]}]}) - - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_summaries) - result = Result(api, {"id": "abcdefg", "device": {"id": "abcdefg"}, "fields": {}, "metrics": {}}) - query = result.query_device_summaries().where("foo").criteria(device_name=["AxCx", "A7X"]).sort_by("device_name") - assert isinstance(query, ResultQuery) - count = 0 - for item in query.all(): - if item.id == "ghijklm": - assert item.total_results == 2 - assert item.device_id == 314159 - elif item.id == "mnopqrs": - assert item.total_results == 3 - assert item.device_id == 271828 - else: - pytest.fail("Invalid object with ID %s seen" % item.id) - count = count + 1 - assert _was_called - assert count == 2 - - -def test_result_query_result_facets(monkeypatch): - _was_called = False - - def _run_facets(url, body, **kwargs): - nonlocal _was_called - assert url == "/livequery/v1/orgs/Z100/runs/abcdefg/results/_facet" - assert body == {"query": "xyzzy", "criteria": {"device_name": ["AxCx", "A7X"]}, - "terms": {"fields": ["alpha", 
"bravo", "charlie"]}} - _was_called = True - return StubResponse({"terms": [{"field": "alpha", "values": [{"total": 1, "id": "alpha1", "name": "alpha1"}, - {"total": 2, "id": "alpha2", "name": "alpha2"}]}, - {"field": "bravo", "values": [{"total": 1, "id": "bravo1", "name": "bravo1"}, - {"total": 2, "id": "bravo2", "name": "bravo2"}]}, - {"field": "charlie", "values": [{"total": 1, "id": "charlie1", - "name": "charlie1"}, - {"total": 2, "id": "charlie2", - "name": "charlie2"}]}]}) - - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_facets) - result = Result(api, {"id": "abcdefg", "device": {"id": "abcdefg"}, "fields": {}, "metrics": {}}) - query = result.query_result_facets().where("xyzzy").facet_field("alpha").facet_field(["bravo", "charlie"]) \ - .criteria(device_name=["AxCx", "A7X"]) - assert isinstance(query, FacetQuery) - count = 0 - for item in query.all(): - vals = item.values - if item.field == "alpha": - assert vals[0]["id"] == "alpha1" - assert vals[1]["id"] == "alpha2" - elif item.field == "bravo": - assert vals[0]["id"] == "bravo1" - assert vals[1]["id"] == "bravo2" - elif item.field == "charlie": - assert vals[0]["id"] == "charlie1" - assert vals[1]["id"] == "charlie2" - else: - pytest.fail("Unknown field name %s seen" % item.field) - count = count + 1 - assert _was_called - assert count == 3 - - -def test_result_query_device_summary_facets(monkeypatch): - _was_called = False - - def _run_facets(url, body, **kwargs): - nonlocal _was_called - assert url == "/livequery/v1/orgs/Z100/runs/abcdefg/results/device_summaries/_facet" - assert body == {"query": "xyzzy", "criteria": {"device_name": ["AxCx", "A7X"]}, - "terms": {"fields": ["alpha", "bravo", "charlie"]}} - _was_called = True - return StubResponse({"terms": [{"field": "alpha", "values": [{"total": 1, "id": "alpha1", "name": "alpha1"}, - {"total": 2, "id": "alpha2", "name": "alpha2"}]}, - {"field": "bravo", "values": [{"total": 1, "id": "bravo1", "name": "bravo1"}, - {"total": 2, "id": "bravo2", "name": "bravo2"}]}, - {"field": "charlie", "values": [{"total": 1, "id": "charlie1", - "name": "charlie1"}, - {"total": 2, "id": "charlie2", - "name": "charlie2"}]}]}) - - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_facets) - result = Result(api, {"id": "abcdefg", "device": {"id": "abcdefg"}, "fields": {}, "metrics": {}}) - query = result.query_device_summary_facets().where("xyzzy").facet_field("alpha") \ - .facet_field(["bravo", "charlie"]).criteria(device_name=["AxCx", "A7X"]) - assert isinstance(query, FacetQuery) - count = 0 - for item in query.all(): - vals = item.values - if item.field == "alpha": - assert vals[0]["id"] == "alpha1" - assert vals[1]["id"] == "alpha2" - elif item.field == "bravo": - assert vals[0]["id"] == "bravo1" - assert vals[1]["id"] == "bravo2" - elif item.field == "charlie": - assert vals[0]["id"] == "charlie1" - assert vals[1]["id"] == "charlie2" - else: - pytest.fail("Unknown field name %s seen" % item.field) - count = count + 1 - assert _was_called - assert count == 3 diff --git a/test/cbapi/psc/livequery/test_rest_api.py b/test/cbapi/psc/livequery/test_rest_api.py deleted file mode 100755 index b9d38a17..00000000 --- a/test/cbapi/psc/livequery/test_rest_api.py +++ /dev/null @@ -1,161 +0,0 @@ -import pytest -from cbapi.psc.livequery.rest_api import CbLiveQueryAPI -from cbapi.psc.livequery.models import Run -from 
cbapi.psc.livequery.query import RunQuery, RunHistoryQuery -from cbapi.errors import ApiError, CredentialError -from test.cbtest import StubResponse, patch_cbapi - - -def test_no_org_key(): - with pytest.raises(CredentialError): - CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", ssl_verify=True) # note: no org_key - - -def test_simple_get(monkeypatch): - _was_called = False - - def _get_run(url, parms=None, default=None): - nonlocal _was_called - assert url == "/livequery/v1/orgs/Z100/runs/abcdefg" - _was_called = True - return {"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg"} - - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, GET=_get_run) - run = api.select(Run, "abcdefg") - assert _was_called - assert run.org_key == "Z100" - assert run.name == "FoobieBletch" - assert run.id == "abcdefg" - - -def test_query(monkeypatch): - _was_called = False - - def _run_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/livequery/v1/orgs/Z100/runs" - assert body == {"sql": "select * from whatever;", "device_filter": {}} - _was_called = True - return StubResponse({"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg"}) - - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_query) - query = api.query("select * from whatever;") - assert isinstance(query, RunQuery) - run = query.submit() - assert _was_called - assert run.org_key == "Z100" - assert run.name == "FoobieBletch" - assert run.id == "abcdefg" - - -def test_query_with_everything(monkeypatch): - _was_called = False - - def _run_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/livequery/v1/orgs/Z100/runs" - assert body == {"sql": "select * from whatever;", "name": "AmyWasHere", "notify_on_finish": True, - "device_filter": {"device_ids": [1, 2, 3], "device_types": ["Alpha", "Bravo", "Charlie"], - "policy_ids": [16, 27, 38]}} - _was_called = True - return StubResponse({"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg"}) - - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_query) - query = api.query("select * from whatever;").device_ids([1, 2, 3]).device_types(["Alpha", "Bravo", "Charlie"]) \ - .policy_ids([16, 27, 38]).name("AmyWasHere").notify_on_finish() - assert isinstance(query, RunQuery) - run = query.submit() - assert _was_called - assert run.org_key == "Z100" - assert run.name == "FoobieBletch" - assert run.id == "abcdefg" - - -def test_query_device_ids_broken(): - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - query = api.query("select * from whatever;") - with pytest.raises(ApiError): - query = query.device_ids(["Bogus"]) - - -def test_query_device_types_broken(): - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - query = api.query("select * from whatever;") - with pytest.raises(ApiError): - query = query.device_types([420]) - - -def test_query_policy_ids_broken(): - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - query = api.query("select * from whatever;") - with pytest.raises(ApiError): - query = query.policy_ids(["Bogus"]) - - -def test_query_history(monkeypatch): - _was_called = False - - def _run_query(url, body, **kwargs): - nonlocal 
_was_called - assert url == "/livequery/v1/orgs/Z100/runs/_search" - assert body == {"query": "xyzzy", "start": 0} - _was_called = True - return StubResponse({"org_key": "Z100", "num_found": 3, - "results": [{"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg"}, - {"org_key": "Z100", "name": "Aoxomoxoa", "id": "cdefghi"}, - {"org_key": "Z100", "name": "Read_Me", "id": "efghijk"}]}) - - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_query) - query = api.query_history("xyzzy") - assert isinstance(query, RunHistoryQuery) - count = 0 - for item in query.all(): - assert item.org_key == "Z100" - if item.id == "abcdefg": - assert item.name == "FoobieBletch" - elif item.id == "cdefghi": - assert item.name == "Aoxomoxoa" - elif item.id == "efghijk": - assert item.name == "Read_Me" - else: - pytest.fail("Unknown item ID: %s" % item.id) - count = count + 1 - assert _was_called - assert count == 3 - - -def test_query_history_with_everything(monkeypatch): - _was_called = False - - def _run_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/livequery/v1/orgs/Z100/runs/_search" - assert body == {"query": "xyzzy", "sort": [{"field": "id", "order": "ASC"}], "start": 0} - _was_called = True - return StubResponse({"org_key": "Z100", "num_found": 3, - "results": [{"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg"}, - {"org_key": "Z100", "name": "Aoxomoxoa", "id": "cdefghi"}, - {"org_key": "Z100", "name": "Read_Me", "id": "efghijk"}]}) - - api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_query) - query = api.query_history("xyzzy").sort_by("id") - assert isinstance(query, RunHistoryQuery) - count = 0 - for item in query.all(): - assert item.org_key == "Z100" - if item.id == "abcdefg": - assert item.name == "FoobieBletch" - elif item.id == "cdefghi": - assert item.name == "Aoxomoxoa" - elif item.id == "efghijk": - assert item.name == "Read_Me" - else: - pytest.fail("Unknown item ID: %s" % item.id) - count = count + 1 - assert _was_called - assert count == 3 diff --git a/test/cbapi/psc/test_devicev6_api.py b/test/cbapi/psc/test_devicev6_api.py deleted file mode 100755 index a73570e7..00000000 --- a/test/cbapi/psc/test_devicev6_api.py +++ /dev/null @@ -1,383 +0,0 @@ -import pytest -from cbapi.errors import ApiError -from cbapi.psc.models import Device -from cbapi.psc.rest_api import CbPSCBaseAPI -from test.cbtest import StubResponse, patch_cbapi - - -def test_get_device(monkeypatch): - _was_called = False - - def _get_device(url): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/devices/6023" - _was_called = True - return {"device_id": 6023, "organization_name": "thistestworks"} - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, GET=_get_device) - rc = api.select(Device, 6023) - assert _was_called - assert isinstance(rc, Device) - assert rc.device_id == 6023 - assert rc.organization_name == "thistestworks" - - -def test_device_background_scan(monkeypatch): - _was_called = False - - def _call_background_scan(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "BACKGROUND_SCAN", "device_id": [6023], "options": {"toggle": "ON"}} - _was_called = True - return StubResponse(None, 204) - - api = 
CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_call_background_scan) - api.device_background_scan([6023], True) - assert _was_called - - -def test_device_bypass(monkeypatch): - _was_called = False - - def _call_bypass(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "BYPASS", "device_id": [6023], "options": {"toggle": "OFF"}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_call_bypass) - api.device_bypass([6023], False) - assert _was_called - - -def test_device_delete_sensor(monkeypatch): - _was_called = False - - def _call_delete_sensor(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "DELETE_SENSOR", "device_id": [6023]} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_call_delete_sensor) - api.device_delete_sensor([6023]) - assert _was_called - - -def test_device_uninstall_sensor(monkeypatch): - _was_called = False - - def _call_uninstall_sensor(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "UNINSTALL_SENSOR", "device_id": [6023]} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_call_uninstall_sensor) - api.device_uninstall_sensor([6023]) - assert _was_called - - -def test_device_quarantine(monkeypatch): - _was_called = False - - def _call_quarantine(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "QUARANTINE", "device_id": [6023], "options": {"toggle": "ON"}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_call_quarantine) - api.device_quarantine([6023], True) - assert _was_called - - -def test_device_update_policy(monkeypatch): - _was_called = False - - def _call_update_policy(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "UPDATE_POLICY", "device_id": [6023], "options": {"policy_id": 8675309}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_call_update_policy) - api.device_update_policy([6023], 8675309) - assert _was_called - - -def test_device_update_sensor_version(monkeypatch): - _was_called = False - - def _call_update_sensor_version(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "UPDATE_SENSOR_VERSION", "device_id": [6023], - "options": {"sensor_version": {"RHEL": "2.3.4.5"}}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, 
api, POST=_call_update_sensor_version) - api.device_update_sensor_version([6023], {"RHEL": "2.3.4.5"}) - assert _was_called - - -def test_query_device_with_all_bells_and_whistles(monkeypatch): - _was_called = False - - def _run_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/devices/_search" - assert body == {"query": "foobar", - "criteria": {"ad_group_id": [14, 25], "os": ["LINUX"], "policy_id": [8675309], - "status": ["ALL"], "target_priority": ["HIGH"]}, - "exclusions": {"sensor_version": ["0.1"]}, - "sort": [{"field": "name", "order": "DESC"}]} - _was_called = True - return StubResponse({"results": [{"id": 6023, "organization_name": "thistestworks"}], - "num_found": 1}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_query) - query = api.select(Device).where("foobar").set_ad_group_ids([14, 25]).set_os(["LINUX"]) \ - .set_policy_ids([8675309]).set_status(["ALL"]).set_target_priorities(["HIGH"]) \ - .set_exclude_sensor_versions(["0.1"]).sort_by("name", "DESC") - d = query.one() - assert _was_called - assert d.id == 6023 - assert d.organization_name == "thistestworks" - - -def test_query_device_with_last_contact_time_as_start_end(monkeypatch): - _was_called = False - - def _run_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/devices/_search" - assert body == {"query": "foobar", - "criteria": {"last_contact_time": {"start": "2019-09-30T12:34:56", - "end": "2019-10-01T12:00:12"}}, "exclusions": {}} - _was_called = True - return StubResponse({"results": [{"id": 6023, "organization_name": "thistestworks"}], - "num_found": 1}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_query) - query = api.select(Device).where("foobar") \ - .set_last_contact_time(start="2019-09-30T12:34:56", end="2019-10-01T12:00:12") - d = query.one() - assert _was_called - assert d.id == 6023 - assert d.organization_name == "thistestworks" - - -def test_query_device_with_last_contact_time_as_range(monkeypatch): - _was_called = False - - def _run_query(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/devices/_search" - assert body == {"query": "foobar", "criteria": {"last_contact_time": {"range": "-3w"}}, "exclusions": {}} - _was_called = True - return StubResponse({"results": [{"id": 6023, "organization_name": "thistestworks"}], - "num_found": 1}) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_run_query) - query = api.select(Device).where("foobar").set_last_contact_time(range="-3w") - d = query.one() - assert _was_called - assert d.id == 6023 - assert d.organization_name == "thistestworks" - - -def test_query_device_invalid_last_contact_time_combinations(): - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - with pytest.raises(ApiError): - api.select(Device).set_last_contact_time() - with pytest.raises(ApiError): - api.select(Device).set_last_contact_time(start="2019-09-30T12:34:56", end="2019-10-01T12:00:12", - range="-3w") - with pytest.raises(ApiError): - api.select(Device).set_last_contact_time(start="2019-09-30T12:34:56", range="-3w") - with pytest.raises(ApiError): - api.select(Device).set_last_contact_time(end="2019-10-01T12:00:12", range="-3w") - - -def 
test_query_device_invalid_criteria_values(): - tests = [ - {"method": "set_ad_group_ids", "arg": ["Bogus"]}, - {"method": "set_policy_ids", "arg": ["Bogus"]}, - {"method": "set_os", "arg": ["COMMODORE_64"]}, - {"method": "set_status", "arg": ["Bogus"]}, - {"method": "set_target_priorities", "arg": ["Bogus"]}, - {"method": "set_exclude_sensor_versions", "arg": [12703]} - ] - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - query = api.select(Device) - for t in tests: - meth = getattr(query, t["method"], None) - with pytest.raises(ApiError): - meth(t["arg"]) - - -def test_query_device_invalid_sort_direction(): - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - with pytest.raises(ApiError): - api.select(Device).sort_by("policy_name", "BOGUS") - - -def test_query_device_download(monkeypatch): - _was_called = False - - def _run_download(url, query_params, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/devices/_search/download" - assert query_params == {"status": "ALL", "ad_group_id": "14,25", "policy_id": "8675309", - "target_priority": "HIGH", "query_string": "foobar", "sort_field": "name", - "sort_order": "DESC"} - _was_called = True - return "123456789,123456789,123456789" - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, RAW_GET=_run_download) - rc = api.select(Device).where("foobar").set_ad_group_ids([14, 25]).set_policy_ids([8675309]) \ - .set_status(["ALL"]).set_target_priorities(["HIGH"]).sort_by("name", "DESC").download() - assert _was_called - assert rc == "123456789,123456789,123456789" - - -def test_query_device_do_background_scan(monkeypatch): - _was_called = False - - def _background_scan(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "BACKGROUND_SCAN", - "search": {"query": "foobar", "criteria": {}, "exclusions": {}}, "options": {"toggle": "ON"}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_background_scan) - api.select(Device).where("foobar").background_scan(True) - assert _was_called - - -def test_query_device_do_bypass(monkeypatch): - _was_called = False - - def _bypass(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "BYPASS", - "search": {"query": "foobar", "criteria": {}, "exclusions": {}}, "options": {"toggle": "OFF"}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_bypass) - api.select(Device).where("foobar").bypass(False) - assert _was_called - - -def test_query_device_do_delete_sensor(monkeypatch): - _was_called = False - - def _delete_sensor(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "DELETE_SENSOR", - "search": {"query": "foobar", "criteria": {}, "exclusions": {}}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_delete_sensor) - 
api.select(Device).where("foobar").delete_sensor() - assert _was_called - - -def test_query_device_do_uninstall_sensor(monkeypatch): - _was_called = False - - def _uninstall_sensor(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "UNINSTALL_SENSOR", - "search": {"query": "foobar", "criteria": {}, "exclusions": {}}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_uninstall_sensor) - api.select(Device).where("foobar").uninstall_sensor() - assert _was_called - - -def test_query_device_do_quarantine(monkeypatch): - _was_called = False - - def _quarantine(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "QUARANTINE", - "search": {"query": "foobar", "criteria": {}, "exclusions": {}}, "options": {"toggle": "ON"}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_quarantine) - api.select(Device).where("foobar").quarantine(True) - assert _was_called - - -def test_query_device_do_update_policy(monkeypatch): - _was_called = False - - def _update_policy(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "UPDATE_POLICY", - "search": {"query": "foobar", "criteria": {}, "exclusions": {}}, - "options": {"policy_id": 8675309}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_update_policy) - api.select(Device).where("foobar").update_policy(8675309) - assert _was_called - - -def test_query_device_do_update_sensor_version(monkeypatch): - _was_called = False - - def _update_sensor_version(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "UPDATE_SENSOR_VERSION", - "search": {"query": "foobar", "criteria": {}, "exclusions": {}}, - "options": {"sensor_version": {"RHEL": "2.3.4.5"}}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", - org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, POST=_update_sensor_version) - api.select(Device).where("foobar").update_sensor_version({"RHEL": "2.3.4.5"}) - assert _was_called diff --git a/test/cbapi/psc/test_models.py b/test/cbapi/psc/test_models.py deleted file mode 100755 index c9158fa7..00000000 --- a/test/cbapi/psc/test_models.py +++ /dev/null @@ -1,179 +0,0 @@ -import pytest -from cbapi.psc.models import Device -from cbapi.psc.rest_api import CbPSCBaseAPI -from test.cbtest import StubResponse, patch_cbapi - - -class StubScheduler: - def __init__(self, expected_id): - self.expected_id = expected_id - self.was_called = False - - def request_session(self, sensor_id): - assert sensor_id == self.expected_id - self.was_called = True - return {"itworks": True} - - -def test_Device_lr_session(monkeypatch): - - def _get_session(url, parms=None, default=None): - assert url == "/appservices/v6/orgs/Z100/devices/6023" - return {"id": 6023} - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", 
ssl_verify=True) - sked = StubScheduler(6023) - api._lr_scheduler = sked - patch_cbapi(monkeypatch, api, GET=_get_session) - dev = Device(api, 6023, {"id": 6023}) - sess = dev.lr_session() - assert sess["itworks"] - assert sked.was_called - - -def test_Device_background_scan(monkeypatch): - _was_called = False - - def _get_device(url, parms=None, default=None): - assert url == "/appservices/v6/orgs/Z100/devices/6023" - return {"id": 6023} - - def _background_scan(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "BACKGROUND_SCAN", "device_id": [6023], "options": {"toggle": "ON"}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, GET=_get_device, POST=_background_scan) - dev = Device(api, 6023, {"id": 6023}) - dev.background_scan(True) - assert _was_called - - -def test_Device_bypass(monkeypatch): - _was_called = False - - def _get_device(url, parms=None, default=None): - assert url == "/appservices/v6/orgs/Z100/devices/6023" - return {"id": 6023} - - def _bypass(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "BYPASS", "device_id": [6023], "options": {"toggle": "OFF"}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, GET=_get_device, POST=_bypass) - dev = Device(api, 6023, {"id": 6023}) - dev.bypass(False) - assert _was_called - - -def test_Device_delete_sensor(monkeypatch): - _was_called = False - - def _get_device(url, parms=None, default=None): - assert url == "/appservices/v6/orgs/Z100/devices/6023" - return {"id": 6023} - - def _delete_sensor(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "DELETE_SENSOR", "device_id": [6023]} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, GET=_get_device, POST=_delete_sensor) - dev = Device(api, 6023, {"id": 6023}) - dev.delete_sensor() - assert _was_called - - -def test_Device_uninstall_sensor(monkeypatch): - _was_called = False - - def _get_device(url, parms=None, default=None): - assert url == "/appservices/v6/orgs/Z100/devices/6023" - return {"id": 6023} - - def _uninstall_sensor(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "UNINSTALL_SENSOR", "device_id": [6023]} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, GET=_get_device, POST=_uninstall_sensor) - dev = Device(api, 6023, {"id": 6023}) - dev.uninstall_sensor() - assert _was_called - - -def test_Device_quarantine(monkeypatch): - _was_called = False - - def _get_device(url, parms=None, default=None): - assert url == "/appservices/v6/orgs/Z100/devices/6023" - return {"id": 6023} - - def _quarantine(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "QUARANTINE", "device_id": [6023], "options": {"toggle": "ON"}} - 
_was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, GET=_get_device, POST=_quarantine) - dev = Device(api, 6023, {"id": 6023}) - dev.quarantine(True) - assert _was_called - - -def test_Device_update_policy(monkeypatch): - _was_called = False - - def _get_device(url, parms=None, default=None): - assert url == "/appservices/v6/orgs/Z100/devices/6023" - return {"id": 6023} - - def _update_policy(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "UPDATE_POLICY", "device_id": [6023], "options": {"policy_id": 8675309}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, GET=_get_device, POST=_update_policy) - dev = Device(api, 6023, {"id": 6023}) - dev.update_policy(8675309) - assert _was_called - - -def test_Device_update_sensor_version(monkeypatch): - _was_called = False - - def _get_device(url, parms=None, default=None): - assert url == "/appservices/v6/orgs/Z100/devices/6023" - return {"id": 6023} - - def _update_sensor_version(url, body, **kwargs): - nonlocal _was_called - assert url == "/appservices/v6/orgs/Z100/device_actions" - assert body == {"action_type": "UPDATE_SENSOR_VERSION", "device_id": [6023], - "options": {"sensor_version": {"RHEL": "2.3.4.5"}}} - _was_called = True - return StubResponse(None, 204) - - api = CbPSCBaseAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True) - patch_cbapi(monkeypatch, api, GET=_get_device, POST=_update_sensor_version) - dev = Device(api, 6023, {"id": 6023}) - dev.update_sensor_version({"RHEL": "2.3.4.5"}) - assert _was_called From 707dcf1bf57cd6127a224525723a3fd52814965e Mon Sep 17 00:00:00 2001 From: Emanuela Mitreva Date: Thu, 25 Jul 2024 16:02:21 +0300 Subject: [PATCH 3/4] one more --- src/cbapi/auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cbapi/auth.py b/src/cbapi/auth.py index 60cce5a8..bfc6c163 100644 --- a/src/cbapi/auth.py +++ b/src/cbapi/auth.py @@ -78,7 +78,7 @@ def get_credentials(self, profile=None): class FileCredentialStore(object): def __init__(self, product_name, **kwargs): - if product_name not in ("response", "protection", "psc"): + if product_name not in ("response", "protection"): raise CredentialError("Product name {0:s} not valid".format(product_name)) self.credential_search_path = [ From 9dea00137db30096d8ca154264b982901aecf946 Mon Sep 17 00:00:00 2001 From: Emanuela Mitreva Date: Fri, 26 Jul 2024 10:21:21 +0300 Subject: [PATCH 4/4] Bump version --- README.md | 2 +- docs/changelog.rst | 7 +++++++ docs/conf.py | 2 +- setup.py | 2 +- src/cbapi/__init__.py | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 26625acc..a736bf24 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Python bindings for Carbon Black REST API -**Latest Version: 1.7.10** +**Latest Version: 2.0.0** _**Notice**:_ * The Carbon Black Cloud portion of CBAPI has moved to https://github.com/carbonblack/carbon-black-cloud-sdk-python. Any future development and bug fixes for Carbon Black Cloud APIs will be made there. 
Carbon Black EDR and App Control will remain supported at CBAPI diff --git a/docs/changelog.rst b/docs/changelog.rst index 37ba3b0c..ac13cca6 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -12,6 +12,13 @@ Updates `Carbon Black Cloud Python SDK on the Developer Network `_ for details. +CbAPI 2.0.0 - Release July 29, 2024 +------------------------------------ + +Breaking Changes + * Removing psc functionalities. + + CbAPI 1.7.10 - Release Feb 1, 2023 ------------------------------------ diff --git a/docs/conf.py b/docs/conf.py index 52251114..33cecda7 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -61,7 +61,7 @@ # The short X.Y version. version = u'1.7' # The full version, including alpha/beta/rc tags. -release = u'1.7.10' +release = u'2.0.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/setup.py b/setup.py index 50703cc8..ef4e12d5 100644 --- a/setup.py +++ b/setup.py @@ -37,7 +37,7 @@ install_requires.extend(['packaging']) setup( name='cbapi', - version='1.7.10', + version='2.0.0', url='https://github.com/carbonblack/cbapi-python', license='MIT', author='Carbon Black', diff --git a/src/cbapi/__init__.py b/src/cbapi/__init__.py index d90ed52d..aea6d6ff 100644 --- a/src/cbapi/__init__.py +++ b/src/cbapi/__init__.py @@ -6,7 +6,7 @@ __author__ = 'Carbon Black Developer Network' __license__ = 'MIT' __copyright__ = 'Copyright 2018-2022 VMware Carbon Black' -__version__ = '1.7.10' +__version__ = '2.0.0' # New API as of cbapi 0.9.0 from cbapi.response.rest_api import CbEnterpriseResponseAPI, CbResponseAPI