From 1c78512f15541bb62bfb1e7c9989b4bab2bb091f Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Sat, 22 Nov 2025 07:23:14 +0000 Subject: [PATCH 01/48] feat: Add list watchlist function --- src/secops/chronicle/__init__.py | 5 ++ src/secops/chronicle/client.py | 22 ++++++ src/secops/chronicle/utils/__init__.py | 0 src/secops/chronicle/utils/request_utils.py | 80 +++++++++++++++++++++ src/secops/chronicle/watchlist.py | 47 ++++++++++++ 5 files changed, 154 insertions(+) create mode 100644 src/secops/chronicle/utils/__init__.py create mode 100644 src/secops/chronicle/utils/request_utils.py create mode 100644 src/secops/chronicle/watchlist.py diff --git a/src/secops/chronicle/__init__.py b/src/secops/chronicle/__init__.py index 403640df..b4acb8fe 100644 --- a/src/secops/chronicle/__init__.py +++ b/src/secops/chronicle/__init__.py @@ -163,6 +163,9 @@ find_udm_field_values, ) from secops.chronicle.validate import validate_query +from secops.chronicle.watchlist import ( + list_watchlists, +) __all__ = [ # Client @@ -304,4 +307,6 @@ "update_data_table", "update_data_table_rows", "replace_data_table_rows", + # Watchlist + "list_watchlists", ] diff --git a/src/secops/chronicle/client.py b/src/secops/chronicle/client.py index 580c64c4..b9e94094 100644 --- a/src/secops/chronicle/client.py +++ b/src/secops/chronicle/client.py @@ -258,6 +258,9 @@ find_udm_field_values as _find_udm_field_values, ) from secops.chronicle.validate import validate_query as _validate_query +from secops.chronicle.watchlist import ( + list_watchlists as _list_watchlists, +) from secops.exceptions import SecOpsError @@ -554,6 +557,25 @@ def validate_query(self, query: str) -> Dict[str, Any]: """ return _validate_query(self, query) + def list_watchlists( + self, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Dict[str, Any]: + """Get a list of all watchlists. + + Args: + page_size: Maximum number of watchlists to return per page + page_token: Token for the next page of results, if available + + Returns: + Dictionary with list of watchlists + + Raises: + APIError: If the API request fails + """ + return _list_watchlists(self, page_size, page_token) + def get_stats( self, query: str, diff --git a/src/secops/chronicle/utils/__init__.py b/src/secops/chronicle/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/secops/chronicle/utils/request_utils.py b/src/secops/chronicle/utils/request_utils.py new file mode 100644 index 00000000..773b6c05 --- /dev/null +++ b/src/secops/chronicle/utils/request_utils.py @@ -0,0 +1,80 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
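+#
+# Usage sketch for the pagination helper below, assuming `client` is an
+# already-configured ChronicleClient:
+#
+#     from secops.chronicle.utils.request_utils import paginated_request
+#
+#     # Without page_size, the helper follows nextPageToken until the
+#     # collection is exhausted and returns every item.
+#     all_items = paginated_request(
+#         client,
+#         base_url=client.base_v1_url,
+#         path="watchlists",
+#         items_key="watchlists",
+#     )
+#
+#     # With page_size, a single request is made and only that page of
+#     # items is returned.
+#     one_page = paginated_request(
+#         client,
+#         base_url=client.base_v1_url,
+#         path="watchlists",
+#         items_key="watchlists",
+#         page_size=50,
+#     )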
+#
+"""Helper functions for Chronicle."""
+
+from typing import Dict, Any, Optional
+from secops.exceptions import APIError
+
+def paginated_request(
+    client,
+    base_url: str,
+    path: str,
+    items_key: str,
+    *,
+    page_size: Optional[int] = None,
+    page_token: Optional[str] = None,
+    extra_params: Optional[Dict[str, Any]] = None,
+) -> Dict[str, list[Any]]:
+    """
+    Helper to get items from endpoints that use pagination.
+
+    Args:
+        client: ChronicleClient instance
+        base_url: The base URL to use, for example:
+            - v1alpha (ChronicleClient.base_url)
+            - v1 (ChronicleClient.base_v1_url)
+        path: URL path after {base_url}/{instance_id}/
+        items_key: JSON key holding the array of items (e.g., 'curatedRules')
+        page_size: Maximum number of items to return per page.
+        page_token: Token for the next page of results, if available.
+        extra_params: extra query params to include on every request
+
+    Returns:
+        List of items from the paginated collection.
+
+    Raises:
+        APIError: If the HTTP request fails.
+    """
+    url = f"{base_url}/{client.instance_id}/{path}"
+    results = []
+    next_token = page_token
+
+    while True:
+        # Build params each loop to prevent stale keys being
+        # included in the next request
+        params = {"pageSize": 1000 if not page_size else page_size}
+        if next_token:
+            params["pageToken"] = next_token
+        if extra_params:
+            # Copy to avoid mutating the caller's dict
+            params.update(dict(extra_params))
+
+        response = client.session.get(url, params=params)
+        if response.status_code != 200:
+            raise APIError(f"Failed to list {items_key}: {response.text}")
+
+        data = response.json()
+        results.extend(data.get(items_key, []))
+
+        # If caller provided page_size, return only this page
+        if page_size is not None:
+            break
+
+        # Otherwise, auto-paginate
+        next_token = data.get("nextPageToken")
+        if not next_token:
+            break
+
+    return {items_key: results}
\ No newline at end of file
diff --git a/src/secops/chronicle/watchlist.py b/src/secops/chronicle/watchlist.py
new file mode 100644
index 00000000..71dd0538
--- /dev/null
+++ b/src/secops/chronicle/watchlist.py
@@ -0,0 +1,47 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
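+#
+# Usage sketch, assuming `client` is an already-configured ChronicleClient
+# (the same function is also exposed as ChronicleClient.list_watchlists):
+#
+#     from secops.chronicle.watchlist import list_watchlists
+#
+#     # Fetches every page and returns {"watchlists": [...]}.
+#     response = list_watchlists(client)
+#     for watchlist in response.get("watchlists", []):
+#         print(watchlist.get("displayName"))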
+# +"""Watchlist functionality for Chronicle.""" + +from typing import Dict, Any, List, Optional + +from secops.exceptions import APIError, SecOpsError +from secops.chronicle.utils.request_utils import paginated_request + +def list_watchlists( + client, + page_size: Optional[str] = None, + page_token: Optional[str] = None, +) -> Dict[str, Any]: + """ Get a list of all watchlists + + Args: + client: ChronicleClient instance + page_size: Number of results to return per page + page_token: Token for the page to retrieve + + Returns: + List of watchlists + + Raises: + APIError: If the API request fails + """ + return paginated_request( + client, + base_url=client.base_v1_url, + path="watchlists", + items_key="watchlists", + page_size=page_size, + page_token=page_token, + ) From a50b9dd1f70d24d63ce70aeb7f0009329a18f75a Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Sat, 22 Nov 2025 07:23:19 +0000 Subject: [PATCH 02/48] feat: Add list watchlist function --- api_module_mapping.md | 735 +++++++++++++++++++++--------------------- 1 file changed, 368 insertions(+), 367 deletions(-) diff --git a/api_module_mapping.md b/api_module_mapping.md index 900d3894..8a252ab3 100644 --- a/api_module_mapping.md +++ b/api_module_mapping.md @@ -1,90 +1,91 @@ # SecOps API Endpoint and SDK Wrapper Module Mapping -Following shows mapping between SecOps [REST Resource](https://cloud.google.com/chronicle/docs/reference/rest) and SDK wrapper module and its respective CLI command (if available). +Following shows mapping between SecOps [REST Resource](https://cloud.google.com/chronicle/docs/reference/rest) and SDK +wrapper module and its respective CLI command (if available). **Note:** All the REST resources mentioned have suffix `projects.locations.instances`. -|REST Resource |Version|secops-wrapper module |CLI Command | -|------------------------------------------------------------------------------|-------|------------------------------------------------------------|---------------------------------------| -|dataAccessLabels.create |v1 | | | -|dataAccessLabels.delete |v1 | | | -|dataAccessLabels.get |v1 | | | -|dataAccessLabels.list |v1 | | | -|dataAccessLabels.patch |v1 | | | -|dataAccessScopes.create |v1 | | | -|dataAccessScopes.delete |v1 | | | -|dataAccessScopes.get |v1 | | | -|dataAccessScopes.list |v1 | | | -|dataAccessScopes.patch |v1 | | | -|get |v1 | | | -|operations.cancel |v1 | | | -|operations.delete |v1 | | | -|operations.get |v1 | | | -|operations.list |v1 | | | -|referenceLists.create |v1 |chronicle.reference_list.create_reference_list |secops reference-list create | -|referenceLists.get |v1 |chronicle.reference_list.get_reference_list |secops reference-list get | -|referenceLists.list |v1 |chronicle.reference_list.list_reference_lists |secops reference-list list | -|referenceLists.patch |v1 |chronicle.reference_list.update_reference_list |secops reference-list update | -|rules.create |v1 |chronicle.rule.create_rule |secops rule create | -|rules.delete |v1 |chronicle.rule.delete_rule |secops rule delete | -|rules.deployments.list |v1 | | | -|rules.get |v1 |chronicle.rule.get_rule |secops rule get | -|rules.getDeployment |v1 | | | -|rules.list |v1 |chronicle.rule.list_rules |secops rule list | -|rules.listRevisions |v1 | | | -|rules.patch |v1 |chronicle.rule.update_rule |secops rule update | -|rules.retrohunts.create |v1 |chronicle.rule_retrohunt.create_retrohunt | | -|rules.retrohunts.get |v1 |chronicle.rule_retrohunt.get_retrohunt | | -|rules.retrohunts.list |v1 | | | -|rules.updateDeployment |v1 
|chronicle.rule.enable_rule |secops rule enable | -|watchlists.create |v1 | | | -|watchlists.delete |v1 | | | -|watchlists.get |v1 | | | -|watchlists.list |v1 | | | -|watchlists.patch |v1 | | | -|dataAccessLabels.create |v1beta | | | -|dataAccessLabels.delete |v1beta | | | -|dataAccessLabels.get |v1beta | | | -|dataAccessLabels.list |v1beta | | | -|dataAccessLabels.patch |v1beta | | | -|dataAccessScopes.create |v1beta | | | -|dataAccessScopes.delete |v1beta | | | -|dataAccessScopes.get |v1beta | | | -|dataAccessScopes.list |v1beta | | | -|dataAccessScopes.patch |v1beta | | | -|get |v1beta | | | -|operations.cancel |v1beta | | | -|operations.delete |v1beta | | | -|operations.get |v1beta | | | -|operations.list |v1beta | | | -|referenceLists.create |v1beta | | | -|referenceLists.get |v1beta | | | -|referenceLists.list |v1beta | | | -|referenceLists.patch |v1beta | | | -|rules.create |v1beta | | | -|rules.delete |v1beta | | | -|rules.deployments.list |v1beta | | | -|rules.get |v1beta | | | -|rules.getDeployment |v1beta | | | -|rules.list |v1beta | | | -|rules.listRevisions |v1beta | | | -|rules.patch |v1beta | | | -|rules.retrohunts.create |v1beta | | | -|rules.retrohunts.get |v1beta | | | -|rules.retrohunts.list |v1beta | | | -|rules.updateDeployment |v1beta | | | -|watchlists.create |v1beta | | | -|watchlists.delete |v1beta | | | -|watchlists.get |v1beta | | | -|watchlists.list |v1beta | | | -|watchlists.patch |v1beta | | | -|analytics.entities.analyticValues.list |v1alpha| | | -|analytics.list |v1alpha| | | -|batchValidateWatchlistEntities |v1alpha| | | -|bigQueryAccess.provide |v1alpha| | | -|bigQueryExport.provision |v1alpha| | | -|cases.countPriorities |v1alpha| | | -|curatedRuleSetCategories.curatedRuleSets.curatedRuleSetDeployments.batchUpdate | v1alpha | chronicle.rule_set.batch_update_curated_rule_set_deployments | | +| REST Resource | Version | secops-wrapper module | CLI Command | +|--------------------------------------------------------------------------------|---------|-------------------------------------------------------------------------------------------------------------------|------------------------------------------------| +| dataAccessLabels.create | v1 | | | +| dataAccessLabels.delete | v1 | | | +| dataAccessLabels.get | v1 | | | +| dataAccessLabels.list | v1 | | | +| dataAccessLabels.patch | v1 | | | +| dataAccessScopes.create | v1 | | | +| dataAccessScopes.delete | v1 | | | +| dataAccessScopes.get | v1 | | | +| dataAccessScopes.list | v1 | | | +| dataAccessScopes.patch | v1 | | | +| get | v1 | | | +| operations.cancel | v1 | | | +| operations.delete | v1 | | | +| operations.get | v1 | | | +| operations.list | v1 | | | +| referenceLists.create | v1 | chronicle.reference_list.create_reference_list | secops reference-list create | +| referenceLists.get | v1 | chronicle.reference_list.get_reference_list | secops reference-list get | +| referenceLists.list | v1 | chronicle.reference_list.list_reference_lists | secops reference-list list | +| referenceLists.patch | v1 | chronicle.reference_list.update_reference_list | secops reference-list update | +| rules.create | v1 | chronicle.rule.create_rule | secops rule create | +| rules.delete | v1 | chronicle.rule.delete_rule | secops rule delete | +| rules.deployments.list | v1 | | | +| rules.get | v1 | chronicle.rule.get_rule | secops rule get | +| rules.getDeployment | v1 | | | +| rules.list | v1 | chronicle.rule.list_rules | secops rule list | +| rules.listRevisions | v1 | | | +| rules.patch | v1 | 
chronicle.rule.update_rule | secops rule update | +| rules.retrohunts.create | v1 | chronicle.rule_retrohunt.create_retrohunt | | +| rules.retrohunts.get | v1 | chronicle.rule_retrohunt.get_retrohunt | | +| rules.retrohunts.list | v1 | | | +| rules.updateDeployment | v1 | chronicle.rule.enable_rule | secops rule enable | +| watchlists.create | v1 | | | +| watchlists.delete | v1 | | | +| watchlists.get | v1 | | | +| watchlists.list | v1 | chronicle.watchlist.list_watchlists | | +| watchlists.patch | v1 | | | +| dataAccessLabels.create | v1beta | | | +| dataAccessLabels.delete | v1beta | | | +| dataAccessLabels.get | v1beta | | | +| dataAccessLabels.list | v1beta | | | +| dataAccessLabels.patch | v1beta | | | +| dataAccessScopes.create | v1beta | | | +| dataAccessScopes.delete | v1beta | | | +| dataAccessScopes.get | v1beta | | | +| dataAccessScopes.list | v1beta | | | +| dataAccessScopes.patch | v1beta | | | +| get | v1beta | | | +| operations.cancel | v1beta | | | +| operations.delete | v1beta | | | +| operations.get | v1beta | | | +| operations.list | v1beta | | | +| referenceLists.create | v1beta | | | +| referenceLists.get | v1beta | | | +| referenceLists.list | v1beta | | | +| referenceLists.patch | v1beta | | | +| rules.create | v1beta | | | +| rules.delete | v1beta | | | +| rules.deployments.list | v1beta | | | +| rules.get | v1beta | | | +| rules.getDeployment | v1beta | | | +| rules.list | v1beta | | | +| rules.listRevisions | v1beta | | | +| rules.patch | v1beta | | | +| rules.retrohunts.create | v1beta | | | +| rules.retrohunts.get | v1beta | | | +| rules.retrohunts.list | v1beta | | | +| rules.updateDeployment | v1beta | | | +| watchlists.create | v1beta | | | +| watchlists.delete | v1beta | | | +| watchlists.get | v1beta | | | +| watchlists.list | v1beta | | | +| watchlists.patch | v1beta | | | +| analytics.entities.analyticValues.list | v1alpha | | | +| analytics.list | v1alpha | | | +| batchValidateWatchlistEntities | v1alpha | | | +| bigQueryAccess.provide | v1alpha | | | +| bigQueryExport.provision | v1alpha | | | +| cases.countPriorities | v1alpha | | | +| curatedRuleSetCategories.curatedRuleSets.curatedRuleSetDeployments.batchUpdate | v1alpha | chronicle.rule_set.batch_update_curated_rule_set_deployments | | | curatedRuleSetCategories.curatedRuleSets.curatedRuleSetDeployments.patch | v1alpha | chronicle.rule_set.update_curated_rule_set_deployment | secops curated-rule rule-set-deployment update | | curatedRuleSetCategories.curatedRuleSets.curatedRuleSetDeployments.list | v1alpha | chronicle.rule_set.list_curated_rule_set_deployments | secops curated-rule rule-set-deployment list | | curatedRuleSetCategories.curatedRuleSets.curatedRuleSetDeployments.get | v1alpha | chronicle.rule_set.get_curated_rule_set_deployment
chronicle.rule_set.get_curated_rule_set_deployment_by_name | secops curated-rule rule-set-deployment get | @@ -94,288 +95,288 @@ Following shows mapping between SecOps [REST Resource](https://cloud.google.com/ | curatedRuleSetCategories.list | v1alpha | chronicle.rule_set.list_curated_rule_set_categories | secops curated-rule rule-set-category list | | curatedRules.get | v1alpha | chronicle.rule_set.get_curated_rule
chronicle.rule_set.get_curated_rule_by_name | secops curated-rule rule get | | curatedRules.list | v1alpha | chronicle.rule_set.list_curated_rules | secops curated-rule rule list | -| dashboardCharts.batchGet |v1alpha| | | -|dashboardCharts.get |v1alpha|chronicle.dashboard.get_chart |secops dashboard get-chart | -|dashboardQueries.execute |v1alpha|chronicle.dashboard_query.execute_query |secops dashboard-query execute | -|dashboardQueries.get |v1alpha|chronicle.dashboard_query.get_execute_query |secops dashboard-query get | -|dashboards.copy |v1alpha| | | -|dashboards.create |v1alpha| | | -|dashboards.delete |v1alpha| | | -|dashboards.get |v1alpha| | | -|dashboards.list |v1alpha| | | -|dataAccessLabels.create |v1alpha| | | -|dataAccessLabels.delete |v1alpha| | | -|dataAccessLabels.get |v1alpha| | | -|dataAccessLabels.list |v1alpha| | | -|dataAccessLabels.patch |v1alpha| | | -|dataAccessScopes.create |v1alpha| | | -|dataAccessScopes.delete |v1alpha| | | -|dataAccessScopes.get |v1alpha| | | -|dataAccessScopes.list |v1alpha| | | -|dataAccessScopes.patch |v1alpha| | | -|dataExports.cancel |v1alpha|chronicle.data_export.cancel_data_export |secops export cancel | -|dataExports.create |v1alpha|chronicle.data_export.create_data_export |secops export create | -|dataExports.fetchavailablelogtypes |v1alpha|chronicle.data_export.fetch_available_log_types |secops export log-types | -|dataExports.get |v1alpha|chronicle.data_export.get_data_export |secops export status | -|dataExports.list |v1alpha|chronicle.data_export.list_data_export |secops export list | -|dataExports.patch |v1alpha|chronicle.data_export.update_data_export |secops export update | -|dataTableOperationErrors.get |v1alpha| | | -|dataTables.create |v1alpha|chronicle.data_table.create_data_table |secops data-table create | -|dataTables.dataTableRows.bulkCreate |v1alpha|chronicle.data_table.create_data_table_rows |secops data-table add-rows | -|dataTables.dataTableRows.bulkCreateAsync |v1alpha| | | -|dataTables.dataTableRows.bulkGet |v1alpha| | | -|dataTables.dataTableRows.bulkReplace |v1alpha|chronicle.data_table.replace_data_table_rows |secops data-table replace-rows | -|dataTables.dataTableRows.bulkReplaceAsync |v1alpha| | | -|dataTables.dataTableRows.bulkUpdate |v1alpha|chronicle.data_table.update_data_table_rows |secops data-table update-rows | -|dataTables.dataTableRows.bulkUpdateAsync |v1alpha| | | -|dataTables.dataTableRows.create |v1alpha| | | -|dataTables.dataTableRows.delete |v1alpha|chronicle.data_table.delete_data_table_rows |secops data-table delete-rows | -|dataTables.dataTableRows.get |v1alpha| | | -|dataTables.dataTableRows.list |v1alpha|chronicle.data_table.list_data_table_rows |secops data-table list-rows | -|dataTables.dataTableRows.patch |v1alpha| | | -|dataTables.delete |v1alpha|chronicle.data_table.delete_data_table |secops data-table delete | -|dataTables.get |v1alpha|chronicle.data_table.get_data_table |secops data-table get | -|dataTables.list |v1alpha|chronicle.data_table.list_data_tables |secops data-table list | -|dataTables.patch |v1alpha| | | -|dataTables.upload |v1alpha| | | -|dataTaps.create |v1alpha| | | -|dataTaps.delete |v1alpha| | | -|dataTaps.get |v1alpha| | | -|dataTaps.list |v1alpha| | | -|dataTaps.patch |v1alpha| | | -|delete |v1alpha| | | -|enrichmentControls.create |v1alpha| | | -|enrichmentControls.delete |v1alpha| | | -|enrichmentControls.get |v1alpha| | | -|enrichmentControls.list |v1alpha| | | -|entities.get |v1alpha| | | -|entities.import |v1alpha|chronicle.log_ingest.import_entities |secops 
entity import | -|entities.modifyEntityRiskScore |v1alpha| | | -|entities.queryEntityRiskScoreModifications |v1alpha| | | -|entityRiskScores.query |v1alpha| | | -|errorNotificationConfigs.create |v1alpha| | | -|errorNotificationConfigs.delete |v1alpha| | | -|errorNotificationConfigs.get |v1alpha| | | -|errorNotificationConfigs.list |v1alpha| | | -|errorNotificationConfigs.patch |v1alpha| | | -|events.batchGet |v1alpha| | | -|events.get |v1alpha| | | -|events.import |v1alpha|chronicle.log_ingest.ingest_udm |secops log ingest-udm | -|extractSyslog |v1alpha| | | -|federationGroups.create |v1alpha| | | -|federationGroups.delete |v1alpha| | | -|federationGroups.get |v1alpha| | | -|federationGroups.list |v1alpha| | | -|federationGroups.patch |v1alpha| | | -|feedPacks.get |v1alpha| | | -|feedPacks.list |v1alpha| | | -|feedServiceAccounts.fetchServiceAccountForCustomer |v1alpha| | | -|feedSourceTypeSchemas.list |v1alpha| | | -|feedSourceTypeSchemas.logTypeSchemas.list |v1alpha| | | -|feeds.create |v1alpha|chronicle.feeds.create_feed |secops feed create | -|feeds.delete |v1alpha|chronicle.feeds.delete_feed |secops feed delete | -|feeds.disable |v1alpha|chronicle.feeds.disable_feed |secops feed disable | -|feeds.enable |v1alpha|chronicle.feeds.enable_feed |secops feed enable | -|feeds.generateSecret |v1alpha|chronicle.feeds.generate_secret |secops feed secret | -|feeds.get |v1alpha|chronicle.feeds.get_feed |secops feed get | -|feeds.importPushLogs |v1alpha| | | -|feeds.list |v1alpha|chronicle.feeds.list_feeds |secops feed list | -|feeds.patch |v1alpha|chronicle.feeds.update_feed |secops feed update | -|feeds.scheduleTransfer |v1alpha| | | -|fetchFederationAccess |v1alpha| | | -|findEntity |v1alpha| | | -|findEntityAlerts |v1alpha| | | -|findRelatedEntities |v1alpha| | | -|findUdmFieldValues |v1alpha| | | -|findingsGraph.exploreNode |v1alpha| | | -|findingsGraph.initializeGraph |v1alpha| | | -|findingsRefinements.computeFindingsRefinementActivity |v1alpha|chronicle.rule_exclusion.compute_rule_exclusion_activity |secops rule-exclusion compute-activity | -|findingsRefinements.create |v1alpha|chronicle.rule_exclusion.create_rule_exclusion |secops rule-exclusion create | -|findingsRefinements.get |v1alpha|chronicle.rule_exclusion.get_rule_exclusion |secops rule-exclusion get | -|findingsRefinements.getDeployment |v1alpha|chronicle.rule_exclusion.get_rule_exclusion_deployment |secops rule-exclusion get-deployment | -|findingsRefinements.list |v1alpha|chronicle.rule_exclusion.list_rule_exclusions |secops rule-exclusion list | -|findingsRefinements.patch |v1alpha|chronicle.rule_exclusion.patch_rule_exclusion |secops rule-exclusion update | -|findingsRefinements.updateDeployment |v1alpha|chronicle.rule_exclusion.update_rule_exclusion_deployment |secops rule-exclusion update-deployment| -|forwarders.collectors.create |v1alpha| | | -|forwarders.collectors.delete |v1alpha| | | -|forwarders.collectors.get |v1alpha| | | -|forwarders.collectors.list |v1alpha| | | -|forwarders.collectors.patch |v1alpha| | | -|forwarders.create |v1alpha|chronicle.log_ingest.create_forwarder |secops forwarder create | -|forwarders.delete |v1alpha|chronicle.log_ingest.delete_forwarder |secops forwarder delete | -|forwarders.generateForwarderFiles |v1alpha| | | -|forwarders.get |v1alpha|chronicle.log_ingest.get_forwarder |secops forwarder get | -|forwarders.importStatsEvents |v1alpha| | | -|forwarders.list |v1alpha|chronicle.log_ingest.list_forwarder |secops forwarder list | -|forwarders.patch 
|v1alpha|chronicle.log_ingest.update_forwarder |secops forwarder update | -|generateCollectionAgentAuth |v1alpha| | | -|generateSoarAuthJwt |v1alpha| | | -|generateUdmKeyValueMappings |v1alpha| | | -|generateWorkspaceConnectionToken |v1alpha| | | -|get |v1alpha| | | -|getBigQueryExport |v1alpha| | | -|getMultitenantDirectory |v1alpha| | | -|getRiskConfig |v1alpha| | | -|ingestionLogLabels.get |v1alpha| | | -|ingestionLogLabels.list |v1alpha| | | -|ingestionLogNamespaces.get |v1alpha| | | -|ingestionLogNamespaces.list |v1alpha| | | -|iocs.batchGet |v1alpha| | | -|iocs.findFirstAndLastSeen |v1alpha| | | -|iocs.get |v1alpha| | | -|iocs.getIocState |v1alpha| | | -|iocs.searchCuratedDetectionsForIoc |v1alpha| | | -|iocs.updateIocState |v1alpha| | | -|legacy.legacyBatchGetCases |v1alpha|chronicle.case.get_cases_from_list |secops case | -|legacy.legacyBatchGetCollections |v1alpha| | | -|legacy.legacyCreateOrUpdateCase |v1alpha| | | -|legacy.legacyCreateSoarAlert |v1alpha| | | -|legacy.legacyFetchAlertsView |v1alpha|chronicle.alert.get_alerts |secops alert | -|legacy.legacyFetchUdmSearchCsv |v1alpha|chronicle.udm_search.fetch_udm_search_csv |secops search --csv | -|legacy.legacyFetchUdmSearchView |v1alpha|chronicle.udm_search.fetch_udm_search_view |secops udm-search-view | -|legacy.legacyFindAssetEvents |v1alpha| | | -|legacy.legacyFindRawLogs |v1alpha| | | -|legacy.legacyFindUdmEvents |v1alpha| | | -|legacy.legacyGetAlert |v1alpha|chronicle.rule_alert.get_alert | | -|legacy.legacyGetCuratedRulesTrends |v1alpha| | | -|legacy.legacyGetDetection |v1alpha| | | -|legacy.legacyGetEventForDetection |v1alpha| | | -|legacy.legacyGetRuleCounts |v1alpha| | | -|legacy.legacyGetRulesTrends |v1alpha| | | -|legacy.legacyListCases |v1alpha|chronicle.case.get_cases |secops case --ids | -|legacy.legacyRunTestRule |v1alpha|chronicle.rule.run_rule_test |secops rule validate | -|legacy.legacySearchArtifactEvents |v1alpha| | | -|legacy.legacySearchArtifactIoCDetails |v1alpha| | | -|legacy.legacySearchAssetEvents |v1alpha| | | -|legacy.legacySearchCuratedDetections |v1alpha| | | -|legacy.legacySearchCustomerStats |v1alpha| | | -|legacy.legacySearchDetections |v1alpha|chronicle.rule_detection.list_detections | | -|legacy.legacySearchDomainsRecentlyRegistered |v1alpha| | | -|legacy.legacySearchDomainsTimingStats |v1alpha| | | -|legacy.legacySearchEnterpriseWideAlerts |v1alpha| | | -|legacy.legacySearchEnterpriseWideIoCs |v1alpha|chronicle.ioc.list_iocs |secops iocs | -|legacy.legacySearchFindings |v1alpha| | | -|legacy.legacySearchIngestionStats |v1alpha| | | -|legacy.legacySearchIoCInsights |v1alpha| | | -|legacy.legacySearchRawLogs |v1alpha| | | -|legacy.legacySearchRuleDetectionCountBuckets |v1alpha| | | -|legacy.legacySearchRuleDetectionEvents |v1alpha| | | -|legacy.legacySearchRuleResults |v1alpha| | | -|legacy.legacySearchRulesAlerts |v1alpha|chronicle.rule_alert.search_rule_alerts | | -|legacy.legacySearchUserEvents |v1alpha| | | -|legacy.legacyStreamDetectionAlerts |v1alpha| | | -|legacy.legacyTestRuleStreaming |v1alpha| | | -|legacy.legacyUpdateAlert |v1alpha|chronicle.rule_alert.update_alert | | -|listAllFindingsRefinementDeployments |v1alpha| | | -|logTypes.create |v1alpha| | | -|logTypes.generateEventTypesSuggestions |v1alpha| | | -|logTypes.get |v1alpha| | | -|logTypes.getLogTypeSetting |v1alpha| | | -|logTypes.legacySubmitParserExtension |v1alpha| | | -|logTypes.list |v1alpha| | | -|logTypes.logs.export |v1alpha| | | -|logTypes.logs.get |v1alpha| | | -|logTypes.logs.import 
|v1alpha|chronicle.log_ingest.ingest_log |secops log ingest | -|logTypes.logs.list |v1alpha| | | -|logTypes.parserExtensions.activate |v1alpha|chronicle.parser_extension.activate_parser_extension |secops parser-extension activate | -|logTypes.parserExtensions.create |v1alpha|chronicle.parser_extension.create_parser_extension |secops parser-extension create | -|logTypes.parserExtensions.delete |v1alpha|chronicle.parser_extension.delete_parser_extension |secops parser-extension delete | -|logTypes.parserExtensions.extensionValidationReports.get |v1alpha| | | -|logTypes.parserExtensions.extensionValidationReports.list |v1alpha| | | -|logTypes.parserExtensions.extensionValidationReports.validationErrors.list |v1alpha| | | -|logTypes.parserExtensions.get |v1alpha|chronicle.parser_extension.get_parser_extension |secops parser-extension get | -|logTypes.parserExtensions.list |v1alpha|chronicle.parser_extension.list_parser_extensions |secops parser-extension list | -|logTypes.parserExtensions.validationReports.get |v1alpha| | | -|logTypes.parserExtensions.validationReports.parsingErrors.list |v1alpha| | | -|logTypes.parsers.activate |v1alpha|chronicle.parser.activate_parser |secops parser activate | -|logTypes.parsers.activateReleaseCandidateParser |v1alpha|chronicle.parser.activate_release_candidate |secops parser activate-rc | -|logTypes.parsers.copy |v1alpha|chronicle.parser.copy_parser |secops parser copy | -|logTypes.parsers.create |v1alpha|chronicle.parser.create_parser |secops parser create | -|logTypes.parsers.deactivate |v1alpha|chronicle.parser.deactivate_parser |secops parser deactivate | -|logTypes.parsers.delete |v1alpha|chronicle.parser.delete_parser |secops parser delete | -|logTypes.parsers.get |v1alpha|chronicle.parser.get_parser |secops parser get | -|logTypes.parsers.list |v1alpha|chronicle.parser.list_parsers |secops parser list | -|logTypes.parsers.validationReports.get |v1alpha| | | -|logTypes.parsers.validationReports.parsingErrors.list |v1alpha| | | -|logTypes.patch |v1alpha| | | -|logTypes.runParser |v1alpha|chronicle.parser.run_parser |secops parser run | -|logTypes.updateLogTypeSetting |v1alpha| | | -|logs.classify |v1alpha| | | -| nativeDashboards.addChart | v1alpha |chronicle.dashboard.add_chart |secops dashboard add-chart | -| nativeDashboards.create | v1alpha |chronicle.dashboard.create_dashboard |secops dashboard create | -| nativeDashboards.delete | v1alpha |chronicle.dashboard.delete_dashboard |secops dashboard delete | -| nativeDashboards.duplicate | v1alpha |chronicle.dashboard.duplicate_dashboard |secops dashboard duplicate | -| nativeDashboards.duplicateChart | v1alpha | | | -| nativeDashboards.editChart | v1alpha |chronicle.dashboard.edit_chart |secops dashboard edit-chart | -| nativeDashboards.export | v1alpha |chronicle.dashboard.export_dashboard |secops dashboard export | -| nativeDashboards.get | v1alpha |chronicle.dashboard.get_dashboard |secops dashboard get | -| nativeDashboards.import | v1alpha |chronicle.dashboard.import_dashboard |secops dashboard import | -| nativeDashboards.list | v1alpha |chronicle.dashboard.list_dashboards |secops dashboard list | -| nativeDashboards.patch | v1alpha |chronicle.dashboard.update_dashboard |secops dashboard update | -| nativeDashboards.removeChart | v1alpha |chronicle.dashboard.remove_chart |secops dashboard remove-chart | -|operations.cancel |v1alpha| | | -|operations.delete |v1alpha| | | -|operations.get |v1alpha| | | -|operations.list |v1alpha| | | -|operations.streamSearch |v1alpha| | | 
-|queryProductSourceStats |v1alpha| | | -|referenceLists.create |v1alpha| | | -|referenceLists.get |v1alpha| | | -|referenceLists.list |v1alpha| | | -|referenceLists.patch |v1alpha| | | -|report |v1alpha| | | -|ruleExecutionErrors.list |v1alpha|chronicle.rule_detection.list_errors | | -|rules.create |v1alpha| | | -|rules.delete |v1alpha| | | -|rules.deployments.list |v1alpha| | | -|rules.get |v1alpha| | | -|rules.getDeployment |v1alpha| | | -|rules.list |v1alpha| | | -|rules.listRevisions |v1alpha| | | -|rules.patch |v1alpha| | | -|rules.retrohunts.create |v1alpha| | | -|rules.retrohunts.get |v1alpha| | | -|rules.retrohunts.list |v1alpha| | | -|rules.updateDeployment |v1alpha| | | -|searchEntities |v1alpha| | | -|searchRawLogs |v1alpha| | | -|summarizeEntitiesFromQuery |v1alpha|chronicle.entity.summarize_entity |secops entity | -|summarizeEntity |v1alpha|chronicle.entity.summarize_entity | | -|testFindingsRefinement |v1alpha| | | -|translateUdmQuery |v1alpha|chronicle.nl_search.translate_nl_to_udm | | -|translateYlRule |v1alpha| | | -|udmSearch |v1alpha|chronicle.search.search_udm |secops search | -|undelete |v1alpha| | | -|updateBigQueryExport |v1alpha| | | -|updateRiskConfig |v1alpha| | | -|users.clearConversationHistory |v1alpha| | | -|users.conversations.create |v1alpha|chronicle.gemini.create_conversation | | -|users.conversations.delete |v1alpha| | | -|users.conversations.get |v1alpha| | | -|users.conversations.list |v1alpha| | | -|users.conversations.messages.create |v1alpha|chronicle.gemini.query_gemini |secops gemini | -|users.conversations.messages.delete |v1alpha| | | -|users.conversations.messages.get |v1alpha| | | -|users.conversations.messages.list |v1alpha| | | -|users.conversations.messages.patch |v1alpha| | | -|users.conversations.patch |v1alpha| | | -|users.getPreferenceSet |v1alpha|chronicle.gemini.opt_in_to_gemini |secops gemini --opt-in | -|users.searchQueries.create |v1alpha| | | -|users.searchQueries.delete |v1alpha| | | -|users.searchQueries.get |v1alpha| | | -|users.searchQueries.list |v1alpha| | | -|users.searchQueries.patch |v1alpha| | | -|users.updatePreferenceSet |v1alpha| | | -|validateQuery |v1alpha|chronicle.validate.validate_query | | -|verifyReferenceList |v1alpha| | | -|verifyRuleText |v1alpha|chronicle.rule_validation.validate_rule |secops rule validate | -|watchlists.create |v1alpha| | | -|watchlists.delete |v1alpha| | | -|watchlists.entities.add |v1alpha| | | -|watchlists.entities.batchAdd |v1alpha| | | -|watchlists.entities.batchRemove |v1alpha| | | -|watchlists.entities.remove |v1alpha| | | -|watchlists.get |v1alpha| | | -|watchlists.list |v1alpha| | | -|watchlists.listEntities |v1alpha| | | -|watchlists.patch |v1alpha| | | +| dashboardCharts.batchGet | v1alpha | | | +| dashboardCharts.get | v1alpha | chronicle.dashboard.get_chart | secops dashboard get-chart | +| dashboardQueries.execute | v1alpha | chronicle.dashboard_query.execute_query | secops dashboard-query execute | +| dashboardQueries.get | v1alpha | chronicle.dashboard_query.get_execute_query | secops dashboard-query get | +| dashboards.copy | v1alpha | | | +| dashboards.create | v1alpha | | | +| dashboards.delete | v1alpha | | | +| dashboards.get | v1alpha | | | +| dashboards.list | v1alpha | | | +| dataAccessLabels.create | v1alpha | | | +| dataAccessLabels.delete | v1alpha | | | +| dataAccessLabels.get | v1alpha | | | +| dataAccessLabels.list | v1alpha | | | +| dataAccessLabels.patch | v1alpha | | | +| dataAccessScopes.create | v1alpha | | | +| dataAccessScopes.delete | v1alpha | | | +| 
dataAccessScopes.get | v1alpha | | | +| dataAccessScopes.list | v1alpha | | | +| dataAccessScopes.patch | v1alpha | | | +| dataExports.cancel | v1alpha | chronicle.data_export.cancel_data_export | secops export cancel | +| dataExports.create | v1alpha | chronicle.data_export.create_data_export | secops export create | +| dataExports.fetchavailablelogtypes | v1alpha | chronicle.data_export.fetch_available_log_types | secops export log-types | +| dataExports.get | v1alpha | chronicle.data_export.get_data_export | secops export status | +| dataExports.list | v1alpha | chronicle.data_export.list_data_export | secops export list | +| dataExports.patch | v1alpha | chronicle.data_export.update_data_export | secops export update | +| dataTableOperationErrors.get | v1alpha | | | +| dataTables.create | v1alpha | chronicle.data_table.create_data_table | secops data-table create | +| dataTables.dataTableRows.bulkCreate | v1alpha | chronicle.data_table.create_data_table_rows | secops data-table add-rows | +| dataTables.dataTableRows.bulkCreateAsync | v1alpha | | | +| dataTables.dataTableRows.bulkGet | v1alpha | | | +| dataTables.dataTableRows.bulkReplace | v1alpha | chronicle.data_table.replace_data_table_rows | secops data-table replace-rows | +| dataTables.dataTableRows.bulkReplaceAsync | v1alpha | | | +| dataTables.dataTableRows.bulkUpdate | v1alpha | chronicle.data_table.update_data_table_rows | secops data-table update-rows | +| dataTables.dataTableRows.bulkUpdateAsync | v1alpha | | | +| dataTables.dataTableRows.create | v1alpha | | | +| dataTables.dataTableRows.delete | v1alpha | chronicle.data_table.delete_data_table_rows | secops data-table delete-rows | +| dataTables.dataTableRows.get | v1alpha | | | +| dataTables.dataTableRows.list | v1alpha | chronicle.data_table.list_data_table_rows | secops data-table list-rows | +| dataTables.dataTableRows.patch | v1alpha | | | +| dataTables.delete | v1alpha | chronicle.data_table.delete_data_table | secops data-table delete | +| dataTables.get | v1alpha | chronicle.data_table.get_data_table | secops data-table get | +| dataTables.list | v1alpha | chronicle.data_table.list_data_tables | secops data-table list | +| dataTables.patch | v1alpha | | | +| dataTables.upload | v1alpha | | | +| dataTaps.create | v1alpha | | | +| dataTaps.delete | v1alpha | | | +| dataTaps.get | v1alpha | | | +| dataTaps.list | v1alpha | | | +| dataTaps.patch | v1alpha | | | +| delete | v1alpha | | | +| enrichmentControls.create | v1alpha | | | +| enrichmentControls.delete | v1alpha | | | +| enrichmentControls.get | v1alpha | | | +| enrichmentControls.list | v1alpha | | | +| entities.get | v1alpha | | | +| entities.import | v1alpha | chronicle.log_ingest.import_entities | secops entity import | +| entities.modifyEntityRiskScore | v1alpha | | | +| entities.queryEntityRiskScoreModifications | v1alpha | | | +| entityRiskScores.query | v1alpha | | | +| errorNotificationConfigs.create | v1alpha | | | +| errorNotificationConfigs.delete | v1alpha | | | +| errorNotificationConfigs.get | v1alpha | | | +| errorNotificationConfigs.list | v1alpha | | | +| errorNotificationConfigs.patch | v1alpha | | | +| events.batchGet | v1alpha | | | +| events.get | v1alpha | | | +| events.import | v1alpha | chronicle.log_ingest.ingest_udm | secops log ingest-udm | +| extractSyslog | v1alpha | | | +| federationGroups.create | v1alpha | | | +| federationGroups.delete | v1alpha | | | +| federationGroups.get | v1alpha | | | +| federationGroups.list | v1alpha | | | +| federationGroups.patch | v1alpha | | | +| 
feedPacks.get | v1alpha | | | +| feedPacks.list | v1alpha | | | +| feedServiceAccounts.fetchServiceAccountForCustomer | v1alpha | | | +| feedSourceTypeSchemas.list | v1alpha | | | +| feedSourceTypeSchemas.logTypeSchemas.list | v1alpha | | | +| feeds.create | v1alpha | chronicle.feeds.create_feed | secops feed create | +| feeds.delete | v1alpha | chronicle.feeds.delete_feed | secops feed delete | +| feeds.disable | v1alpha | chronicle.feeds.disable_feed | secops feed disable | +| feeds.enable | v1alpha | chronicle.feeds.enable_feed | secops feed enable | +| feeds.generateSecret | v1alpha | chronicle.feeds.generate_secret | secops feed secret | +| feeds.get | v1alpha | chronicle.feeds.get_feed | secops feed get | +| feeds.importPushLogs | v1alpha | | | +| feeds.list | v1alpha | chronicle.feeds.list_feeds | secops feed list | +| feeds.patch | v1alpha | chronicle.feeds.update_feed | secops feed update | +| feeds.scheduleTransfer | v1alpha | | | +| fetchFederationAccess | v1alpha | | | +| findEntity | v1alpha | | | +| findEntityAlerts | v1alpha | | | +| findRelatedEntities | v1alpha | | | +| findUdmFieldValues | v1alpha | | | +| findingsGraph.exploreNode | v1alpha | | | +| findingsGraph.initializeGraph | v1alpha | | | +| findingsRefinements.computeFindingsRefinementActivity | v1alpha | chronicle.rule_exclusion.compute_rule_exclusion_activity | secops rule-exclusion compute-activity | +| findingsRefinements.create | v1alpha | chronicle.rule_exclusion.create_rule_exclusion | secops rule-exclusion create | +| findingsRefinements.get | v1alpha | chronicle.rule_exclusion.get_rule_exclusion | secops rule-exclusion get | +| findingsRefinements.getDeployment | v1alpha | chronicle.rule_exclusion.get_rule_exclusion_deployment | secops rule-exclusion get-deployment | +| findingsRefinements.list | v1alpha | chronicle.rule_exclusion.list_rule_exclusions | secops rule-exclusion list | +| findingsRefinements.patch | v1alpha | chronicle.rule_exclusion.patch_rule_exclusion | secops rule-exclusion update | +| findingsRefinements.updateDeployment | v1alpha | chronicle.rule_exclusion.update_rule_exclusion_deployment | secops rule-exclusion update-deployment | +| forwarders.collectors.create | v1alpha | | | +| forwarders.collectors.delete | v1alpha | | | +| forwarders.collectors.get | v1alpha | | | +| forwarders.collectors.list | v1alpha | | | +| forwarders.collectors.patch | v1alpha | | | +| forwarders.create | v1alpha | chronicle.log_ingest.create_forwarder | secops forwarder create | +| forwarders.delete | v1alpha | chronicle.log_ingest.delete_forwarder | secops forwarder delete | +| forwarders.generateForwarderFiles | v1alpha | | | +| forwarders.get | v1alpha | chronicle.log_ingest.get_forwarder | secops forwarder get | +| forwarders.importStatsEvents | v1alpha | | | +| forwarders.list | v1alpha | chronicle.log_ingest.list_forwarder | secops forwarder list | +| forwarders.patch | v1alpha | chronicle.log_ingest.update_forwarder | secops forwarder update | +| generateCollectionAgentAuth | v1alpha | | | +| generateSoarAuthJwt | v1alpha | | | +| generateUdmKeyValueMappings | v1alpha | | | +| generateWorkspaceConnectionToken | v1alpha | | | +| get | v1alpha | | | +| getBigQueryExport | v1alpha | | | +| getMultitenantDirectory | v1alpha | | | +| getRiskConfig | v1alpha | | | +| ingestionLogLabels.get | v1alpha | | | +| ingestionLogLabels.list | v1alpha | | | +| ingestionLogNamespaces.get | v1alpha | | | +| ingestionLogNamespaces.list | v1alpha | | | +| iocs.batchGet | v1alpha | | | +| iocs.findFirstAndLastSeen | 
v1alpha | | | +| iocs.get | v1alpha | | | +| iocs.getIocState | v1alpha | | | +| iocs.searchCuratedDetectionsForIoc | v1alpha | | | +| iocs.updateIocState | v1alpha | | | +| legacy.legacyBatchGetCases | v1alpha | chronicle.case.get_cases_from_list | secops case | +| legacy.legacyBatchGetCollections | v1alpha | | | +| legacy.legacyCreateOrUpdateCase | v1alpha | | | +| legacy.legacyCreateSoarAlert | v1alpha | | | +| legacy.legacyFetchAlertsView | v1alpha | chronicle.alert.get_alerts | secops alert | +| legacy.legacyFetchUdmSearchCsv | v1alpha | chronicle.udm_search.fetch_udm_search_csv | secops search --csv | +| legacy.legacyFetchUdmSearchView | v1alpha | chronicle.udm_search.fetch_udm_search_view | secops udm-search-view | +| legacy.legacyFindAssetEvents | v1alpha | | | +| legacy.legacyFindRawLogs | v1alpha | | | +| legacy.legacyFindUdmEvents | v1alpha | | | +| legacy.legacyGetAlert | v1alpha | chronicle.rule_alert.get_alert | | +| legacy.legacyGetCuratedRulesTrends | v1alpha | | | +| legacy.legacyGetDetection | v1alpha | | | +| legacy.legacyGetEventForDetection | v1alpha | | | +| legacy.legacyGetRuleCounts | v1alpha | | | +| legacy.legacyGetRulesTrends | v1alpha | | | +| legacy.legacyListCases | v1alpha | chronicle.case.get_cases | secops case --ids | +| legacy.legacyRunTestRule | v1alpha | chronicle.rule.run_rule_test | secops rule validate | +| legacy.legacySearchArtifactEvents | v1alpha | | | +| legacy.legacySearchArtifactIoCDetails | v1alpha | | | +| legacy.legacySearchAssetEvents | v1alpha | | | +| legacy.legacySearchCuratedDetections | v1alpha | | | +| legacy.legacySearchCustomerStats | v1alpha | | | +| legacy.legacySearchDetections | v1alpha | chronicle.rule_detection.list_detections | | +| legacy.legacySearchDomainsRecentlyRegistered | v1alpha | | | +| legacy.legacySearchDomainsTimingStats | v1alpha | | | +| legacy.legacySearchEnterpriseWideAlerts | v1alpha | | | +| legacy.legacySearchEnterpriseWideIoCs | v1alpha | chronicle.ioc.list_iocs | secops iocs | +| legacy.legacySearchFindings | v1alpha | | | +| legacy.legacySearchIngestionStats | v1alpha | | | +| legacy.legacySearchIoCInsights | v1alpha | | | +| legacy.legacySearchRawLogs | v1alpha | | | +| legacy.legacySearchRuleDetectionCountBuckets | v1alpha | | | +| legacy.legacySearchRuleDetectionEvents | v1alpha | | | +| legacy.legacySearchRuleResults | v1alpha | | | +| legacy.legacySearchRulesAlerts | v1alpha | chronicle.rule_alert.search_rule_alerts | | +| legacy.legacySearchUserEvents | v1alpha | | | +| legacy.legacyStreamDetectionAlerts | v1alpha | | | +| legacy.legacyTestRuleStreaming | v1alpha | | | +| legacy.legacyUpdateAlert | v1alpha | chronicle.rule_alert.update_alert | | +| listAllFindingsRefinementDeployments | v1alpha | | | +| logTypes.create | v1alpha | | | +| logTypes.generateEventTypesSuggestions | v1alpha | | | +| logTypes.get | v1alpha | | | +| logTypes.getLogTypeSetting | v1alpha | | | +| logTypes.legacySubmitParserExtension | v1alpha | | | +| logTypes.list | v1alpha | | | +| logTypes.logs.export | v1alpha | | | +| logTypes.logs.get | v1alpha | | | +| logTypes.logs.import | v1alpha | chronicle.log_ingest.ingest_log | secops log ingest | +| logTypes.logs.list | v1alpha | | | +| logTypes.parserExtensions.activate | v1alpha | chronicle.parser_extension.activate_parser_extension | secops parser-extension activate | +| logTypes.parserExtensions.create | v1alpha | chronicle.parser_extension.create_parser_extension | secops parser-extension create | +| logTypes.parserExtensions.delete | v1alpha | 
chronicle.parser_extension.delete_parser_extension | secops parser-extension delete | +| logTypes.parserExtensions.extensionValidationReports.get | v1alpha | | | +| logTypes.parserExtensions.extensionValidationReports.list | v1alpha | | | +| logTypes.parserExtensions.extensionValidationReports.validationErrors.list | v1alpha | | | +| logTypes.parserExtensions.get | v1alpha | chronicle.parser_extension.get_parser_extension | secops parser-extension get | +| logTypes.parserExtensions.list | v1alpha | chronicle.parser_extension.list_parser_extensions | secops parser-extension list | +| logTypes.parserExtensions.validationReports.get | v1alpha | | | +| logTypes.parserExtensions.validationReports.parsingErrors.list | v1alpha | | | +| logTypes.parsers.activate | v1alpha | chronicle.parser.activate_parser | secops parser activate | +| logTypes.parsers.activateReleaseCandidateParser | v1alpha | chronicle.parser.activate_release_candidate | secops parser activate-rc | +| logTypes.parsers.copy | v1alpha | chronicle.parser.copy_parser | secops parser copy | +| logTypes.parsers.create | v1alpha | chronicle.parser.create_parser | secops parser create | +| logTypes.parsers.deactivate | v1alpha | chronicle.parser.deactivate_parser | secops parser deactivate | +| logTypes.parsers.delete | v1alpha | chronicle.parser.delete_parser | secops parser delete | +| logTypes.parsers.get | v1alpha | chronicle.parser.get_parser | secops parser get | +| logTypes.parsers.list | v1alpha | chronicle.parser.list_parsers | secops parser list | +| logTypes.parsers.validationReports.get | v1alpha | | | +| logTypes.parsers.validationReports.parsingErrors.list | v1alpha | | | +| logTypes.patch | v1alpha | | | +| logTypes.runParser | v1alpha | chronicle.parser.run_parser | secops parser run | +| logTypes.updateLogTypeSetting | v1alpha | | | +| logs.classify | v1alpha | | | +| nativeDashboards.addChart | v1alpha | chronicle.dashboard.add_chart | secops dashboard add-chart | +| nativeDashboards.create | v1alpha | chronicle.dashboard.create_dashboard | secops dashboard create | +| nativeDashboards.delete | v1alpha | chronicle.dashboard.delete_dashboard | secops dashboard delete | +| nativeDashboards.duplicate | v1alpha | chronicle.dashboard.duplicate_dashboard | secops dashboard duplicate | +| nativeDashboards.duplicateChart | v1alpha | | | +| nativeDashboards.editChart | v1alpha | chronicle.dashboard.edit_chart | secops dashboard edit-chart | +| nativeDashboards.export | v1alpha | chronicle.dashboard.export_dashboard | secops dashboard export | +| nativeDashboards.get | v1alpha | chronicle.dashboard.get_dashboard | secops dashboard get | +| nativeDashboards.import | v1alpha | chronicle.dashboard.import_dashboard | secops dashboard import | +| nativeDashboards.list | v1alpha | chronicle.dashboard.list_dashboards | secops dashboard list | +| nativeDashboards.patch | v1alpha | chronicle.dashboard.update_dashboard | secops dashboard update | +| nativeDashboards.removeChart | v1alpha | chronicle.dashboard.remove_chart | secops dashboard remove-chart | +| operations.cancel | v1alpha | | | +| operations.delete | v1alpha | | | +| operations.get | v1alpha | | | +| operations.list | v1alpha | | | +| operations.streamSearch | v1alpha | | | +| queryProductSourceStats | v1alpha | | | +| referenceLists.create | v1alpha | | | +| referenceLists.get | v1alpha | | | +| referenceLists.list | v1alpha | | | +| referenceLists.patch | v1alpha | | | +| report | v1alpha | | | +| ruleExecutionErrors.list | v1alpha | chronicle.rule_detection.list_errors | 
| +| rules.create | v1alpha | | | +| rules.delete | v1alpha | | | +| rules.deployments.list | v1alpha | | | +| rules.get | v1alpha | | | +| rules.getDeployment | v1alpha | | | +| rules.list | v1alpha | | | +| rules.listRevisions | v1alpha | | | +| rules.patch | v1alpha | | | +| rules.retrohunts.create | v1alpha | | | +| rules.retrohunts.get | v1alpha | | | +| rules.retrohunts.list | v1alpha | | | +| rules.updateDeployment | v1alpha | | | +| searchEntities | v1alpha | | | +| searchRawLogs | v1alpha | | | +| summarizeEntitiesFromQuery | v1alpha | chronicle.entity.summarize_entity | secops entity | +| summarizeEntity | v1alpha | chronicle.entity.summarize_entity | | +| testFindingsRefinement | v1alpha | | | +| translateUdmQuery | v1alpha | chronicle.nl_search.translate_nl_to_udm | | +| translateYlRule | v1alpha | | | +| udmSearch | v1alpha | chronicle.search.search_udm | secops search | +| undelete | v1alpha | | | +| updateBigQueryExport | v1alpha | | | +| updateRiskConfig | v1alpha | | | +| users.clearConversationHistory | v1alpha | | | +| users.conversations.create | v1alpha | chronicle.gemini.create_conversation | | +| users.conversations.delete | v1alpha | | | +| users.conversations.get | v1alpha | | | +| users.conversations.list | v1alpha | | | +| users.conversations.messages.create | v1alpha | chronicle.gemini.query_gemini | secops gemini | +| users.conversations.messages.delete | v1alpha | | | +| users.conversations.messages.get | v1alpha | | | +| users.conversations.messages.list | v1alpha | | | +| users.conversations.messages.patch | v1alpha | | | +| users.conversations.patch | v1alpha | | | +| users.getPreferenceSet | v1alpha | chronicle.gemini.opt_in_to_gemini | secops gemini --opt-in | +| users.searchQueries.create | v1alpha | | | +| users.searchQueries.delete | v1alpha | | | +| users.searchQueries.get | v1alpha | | | +| users.searchQueries.list | v1alpha | | | +| users.searchQueries.patch | v1alpha | | | +| users.updatePreferenceSet | v1alpha | | | +| validateQuery | v1alpha | chronicle.validate.validate_query | | +| verifyReferenceList | v1alpha | | | +| verifyRuleText | v1alpha | chronicle.rule_validation.validate_rule | secops rule validate | +| watchlists.create | v1alpha | | | +| watchlists.delete | v1alpha | | | +| watchlists.entities.add | v1alpha | | | +| watchlists.entities.batchAdd | v1alpha | | | +| watchlists.entities.batchRemove | v1alpha | | | +| watchlists.entities.remove | v1alpha | | | +| watchlists.get | v1alpha | | | +| watchlists.list | v1alpha | | | +| watchlists.listEntities | v1alpha | | | +| watchlists.patch | v1alpha | | | From 4c8c728e59152879b92589dd695f3fdd88768492 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Sat, 22 Nov 2025 16:38:09 +0000 Subject: [PATCH 03/48] feat: Add get watchlist function --- api_module_mapping.md | 2 +- src/secops/chronicle/__init__.py | 1 + src/secops/chronicle/client.py | 18 ++++++++++++++++++ src/secops/chronicle/utils/request_utils.py | 3 ++- src/secops/chronicle/watchlist.py | 21 ++++++++++++++++++++- 5 files changed, 42 insertions(+), 3 deletions(-) diff --git a/api_module_mapping.md b/api_module_mapping.md index 8a252ab3..e02a653b 100644 --- a/api_module_mapping.md +++ b/api_module_mapping.md @@ -40,7 +40,7 @@ wrapper module and its respective CLI command (if available). 
| rules.updateDeployment | v1 | chronicle.rule.enable_rule | secops rule enable | | watchlists.create | v1 | | | | watchlists.delete | v1 | | | -| watchlists.get | v1 | | | +| watchlists.get | v1 | chronicle.watchlist.get_watchlist | | | watchlists.list | v1 | chronicle.watchlist.list_watchlists | | | watchlists.patch | v1 | | | | dataAccessLabels.create | v1beta | | | diff --git a/src/secops/chronicle/__init__.py b/src/secops/chronicle/__init__.py index b4acb8fe..1bbb4841 100644 --- a/src/secops/chronicle/__init__.py +++ b/src/secops/chronicle/__init__.py @@ -165,6 +165,7 @@ from secops.chronicle.validate import validate_query from secops.chronicle.watchlist import ( list_watchlists, + get_watchlist, ) __all__ = [ diff --git a/src/secops/chronicle/client.py b/src/secops/chronicle/client.py index b9e94094..b90c2ce1 100644 --- a/src/secops/chronicle/client.py +++ b/src/secops/chronicle/client.py @@ -260,6 +260,7 @@ from secops.chronicle.validate import validate_query as _validate_query from secops.chronicle.watchlist import ( list_watchlists as _list_watchlists, + get_watchlist as _get_watchlist, ) from secops.exceptions import SecOpsError @@ -576,6 +577,23 @@ def list_watchlists( """ return _list_watchlists(self, page_size, page_token) + def get_watchlist( + self, + watchlist_id: str, + ) -> Dict[str, Any]: + """Get a specific watchlist by ID. + + Args: + watchlist_id: ID of the watchlist to retrieve + + Returns: + Watchlist + + Raises: + APIError: If the API request fails + """ + return _get_watchlist(self, watchlist_id) + def get_stats( self, query: str, diff --git a/src/secops/chronicle/utils/request_utils.py b/src/secops/chronicle/utils/request_utils.py index 773b6c05..79903249 100644 --- a/src/secops/chronicle/utils/request_utils.py +++ b/src/secops/chronicle/utils/request_utils.py @@ -17,6 +17,7 @@ from typing import Dict, Any, Optional from secops.exceptions import APIError + def paginated_request( client, base_url: str, @@ -77,4 +78,4 @@ def paginated_request( if not next_token: break - return {items_key: results} \ No newline at end of file + return {items_key: results} diff --git a/src/secops/chronicle/watchlist.py b/src/secops/chronicle/watchlist.py index 71dd0538..6bb710df 100644 --- a/src/secops/chronicle/watchlist.py +++ b/src/secops/chronicle/watchlist.py @@ -19,12 +19,13 @@ from secops.exceptions import APIError, SecOpsError from secops.chronicle.utils.request_utils import paginated_request + def list_watchlists( client, page_size: Optional[str] = None, page_token: Optional[str] = None, ) -> Dict[str, Any]: - """ Get a list of all watchlists + """Get a list of all watchlists Args: client: ChronicleClient instance @@ -45,3 +46,21 @@ def list_watchlists( page_size=page_size, page_token=page_token, ) + + +def get_watchlist(client, watchlist_id: str) -> Dict[str, Any]: + """Get a specific watchlist by ID + + Args: + client: ChronicleClient instance + watchlist_id: ID of the watchlist to retrieve + + Returns: + Watchlist + + Raises: + APIError: If the API request fails + """ + return client.session.get( + f"{client.base_v1_url}/{client.instance_id}/watchlists/{watchlist_id}", + ).json() From d6e61128b285856c15dfee1f8a90e29fc7e516a2 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Sun, 23 Nov 2025 21:04:42 +0000 Subject: [PATCH 04/48] feat: Add delete watchlist function --- api_module_mapping.md | 2 +- src/secops/chronicle/__init__.py | 3 +++ src/secops/chronicle/client.py | 23 +++++++++++++++++++++++ src/secops/chronicle/watchlist.py | 28 +++++++++++++++++++++++++++- 4 files 
changed, 54 insertions(+), 2 deletions(-) diff --git a/api_module_mapping.md b/api_module_mapping.md index e02a653b..b568756c 100644 --- a/api_module_mapping.md +++ b/api_module_mapping.md @@ -39,7 +39,7 @@ wrapper module and its respective CLI command (if available). | rules.retrohunts.list | v1 | | | | rules.updateDeployment | v1 | chronicle.rule.enable_rule | secops rule enable | | watchlists.create | v1 | | | -| watchlists.delete | v1 | | | +| watchlists.delete | v1 | chronicle.watchlist.delete_watchlist | | | watchlists.get | v1 | chronicle.watchlist.get_watchlist | | | watchlists.list | v1 | chronicle.watchlist.list_watchlists | | | watchlists.patch | v1 | | | diff --git a/src/secops/chronicle/__init__.py b/src/secops/chronicle/__init__.py index 1bbb4841..f41453c1 100644 --- a/src/secops/chronicle/__init__.py +++ b/src/secops/chronicle/__init__.py @@ -166,6 +166,7 @@ from secops.chronicle.watchlist import ( list_watchlists, get_watchlist, + delete_watchlist, ) __all__ = [ @@ -310,4 +311,6 @@ "replace_data_table_rows", # Watchlist "list_watchlists", + "get_watchlist", + "delete_watchlist", ] diff --git a/src/secops/chronicle/client.py b/src/secops/chronicle/client.py index b90c2ce1..fdac2076 100644 --- a/src/secops/chronicle/client.py +++ b/src/secops/chronicle/client.py @@ -261,6 +261,7 @@ from secops.chronicle.watchlist import ( list_watchlists as _list_watchlists, get_watchlist as _get_watchlist, + delete_watchlist as _delete_watchlist, ) from secops.exceptions import SecOpsError @@ -594,6 +595,28 @@ def get_watchlist( """ return _get_watchlist(self, watchlist_id) + def delete_watchlist( + self, + watchlist_id: str, + force: Optional[bool] = None, + ) -> Dict[str, Any]: + """Delete a watchlist by ID. + + Args: + watchlist_id: ID of the watchlist to delete + force: Optional. If set to true, any entities under this + watchlist will also be deleted. + (Otherwise, the request will only work if the + watchlist has no entities.) + + Returns: + Deleted watchlist + + Raises: + APIError: If the API request fails + """ + return _delete_watchlist(self, watchlist_id, force) + def get_stats( self, query: str, diff --git a/src/secops/chronicle/watchlist.py b/src/secops/chronicle/watchlist.py index 6bb710df..ef2ee138 100644 --- a/src/secops/chronicle/watchlist.py +++ b/src/secops/chronicle/watchlist.py @@ -49,7 +49,7 @@ def list_watchlists( def get_watchlist(client, watchlist_id: str) -> Dict[str, Any]: - """Get a specific watchlist by ID + """Get a watchlist by ID Args: client: ChronicleClient instance @@ -64,3 +64,29 @@ def get_watchlist(client, watchlist_id: str) -> Dict[str, Any]: return client.session.get( f"{client.base_v1_url}/{client.instance_id}/watchlists/{watchlist_id}", ).json() + + +def delete_watchlist( + client, watchlist_id: str, force: Optional[bool] = None +) -> Dict[str, Any]: + """Delete a watchlist by ID + + Args: + client: ChronicleClient instance + watchlist_id: ID of the watchlist to delete + force: Optional. If set to true, any entities under this + watchlist will also be deleted. + (Otherwise, the request will only work if the + watchlist has no entities.) 
+ + Returns: + Deleted watchlist + + Raises: + APIError: If the API request fails + """ + params = {"force": force} + return client.session.delete( + f"{client.base_v1_url}/{client.instance_id}/watchlists/{watchlist_id}", + params=params, + ).json() From a2384cfe133816ca440cdb73bcc19d98ee75824b Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Mon, 1 Dec 2025 20:35:30 +0000 Subject: [PATCH 05/48] feat: Add create watchlist function --- src/secops/chronicle/__init__.py | 2 ++ src/secops/chronicle/client.py | 26 +++++++++++++++++++++++ src/secops/chronicle/watchlist.py | 35 +++++++++++++++++++++++++++++++ 3 files changed, 63 insertions(+) diff --git a/src/secops/chronicle/__init__.py b/src/secops/chronicle/__init__.py index f41453c1..69e63a66 100644 --- a/src/secops/chronicle/__init__.py +++ b/src/secops/chronicle/__init__.py @@ -167,6 +167,7 @@ list_watchlists, get_watchlist, delete_watchlist, + create_watchlist, ) __all__ = [ @@ -313,4 +314,5 @@ "list_watchlists", "get_watchlist", "delete_watchlist", + "create_watchlist", ] diff --git a/src/secops/chronicle/client.py b/src/secops/chronicle/client.py index fdac2076..7c25d7ea 100644 --- a/src/secops/chronicle/client.py +++ b/src/secops/chronicle/client.py @@ -262,6 +262,7 @@ list_watchlists as _list_watchlists, get_watchlist as _get_watchlist, delete_watchlist as _delete_watchlist, + create_watchlist as _create_watchlist, ) from secops.exceptions import SecOpsError @@ -617,6 +618,31 @@ def delete_watchlist( """ return _delete_watchlist(self, watchlist_id, force) + def create_watchlist( + self, + name: str, + display_name: str, + multiplying_factor: float, + description: Optional[str] = None, + ) -> Dict[str, Any]: + """Create a watchlist + + Args: + name: Name of the watchlist + display_name: Display name of the watchlist + multiplying_factor: Multiplying factor for the watchlist + description: Optional. Description of the watchlist + + Returns: + Created watchlist + + Raises: + APIError: If the API request fails + """ + return _create_watchlist( + self, name, display_name, multiplying_factor, description + ) + def get_stats( self, query: str, diff --git a/src/secops/chronicle/watchlist.py b/src/secops/chronicle/watchlist.py index ef2ee138..0c31b091 100644 --- a/src/secops/chronicle/watchlist.py +++ b/src/secops/chronicle/watchlist.py @@ -90,3 +90,38 @@ def delete_watchlist( f"{client.base_v1_url}/{client.instance_id}/watchlists/{watchlist_id}", params=params, ).json() + + +def create_watchlist( + client, + name: str, + display_name: str, + multiplying_factor: float, + description: Optional[str] = None, +) -> Dict[str, Any]: + """Create a watchlist + + Args: + client: ChronicleClient instance + name: Name of the watchlist + display_name: Display name of the watchlist + multiplying_factor: Multiplying factor for the watchlist + description: Optional. 
Description of the watchlist + + Returns: + Created watchlist + + Raises: + APIError: If the API request fails + """ + + return client.session.post( + f"{client.base_v1_url}/{client.instance_id}/watchlists", + json={ + "name": name, + "displayName": display_name, + "multiplyingFactor": multiplying_factor, + "description": description, + "entityPopulationMechanism": {"manual": {}}, + }, + ).json() From b7187cb02e1a0ca2a00c5a33fe575f3ecd94014c Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Mon, 1 Dec 2025 20:52:39 +0000 Subject: [PATCH 06/48] feat: Update pagination helper to return a list if the API endpoint returns a list --- src/secops/chronicle/utils/request_utils.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/secops/chronicle/utils/request_utils.py b/src/secops/chronicle/utils/request_utils.py index 79903249..b6a5b730 100644 --- a/src/secops/chronicle/utils/request_utils.py +++ b/src/secops/chronicle/utils/request_utils.py @@ -14,7 +14,7 @@ # """Helper functions for Chronicle.""" -from typing import Dict, Any, Optional +from typing import Dict, Any, Optional, List, Union from secops.exceptions import APIError @@ -27,7 +27,7 @@ def paginated_request( page_size: Optional[int] = None, page_token: Optional[str] = None, extra_params: Optional[Dict[str, Any]] = None, -) -> Dict[str, list[Any]]: +) -> Union[Dict[str, List[Any]], List[Any]]: """ Helper to get items from endpoints that use pagination. @@ -43,7 +43,9 @@ def paginated_request( extra_params: extra query params to include on every request Returns: - List of items from the paginated collection. + Union[Dict[str, List[Any]], List[Any]]: List of items from the + paginated collection. If the API returns a dictionary, it will + return the dictionary. Otherwise, it will return the list of items. Raises: APIError: If the HTTP request fails. @@ -78,4 +80,7 @@ def paginated_request( if not next_token: break + # Return a list if the API returns a list, otherwise return a dict + if isinstance(data, list): + return results return {items_key: results} From dfdb98edb1d66f66dd1326e38359461274be47e7 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Sat, 6 Dec 2025 13:53:00 +0000 Subject: [PATCH 07/48] chore: Remove unused imports --- src/secops/chronicle/watchlist.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/secops/chronicle/watchlist.py b/src/secops/chronicle/watchlist.py index 0c31b091..d1ac7cb7 100644 --- a/src/secops/chronicle/watchlist.py +++ b/src/secops/chronicle/watchlist.py @@ -14,9 +14,8 @@ # """Watchlist functionality for Chronicle.""" -from typing import Dict, Any, List, Optional +from typing import Dict, Any, Optional -from secops.exceptions import APIError, SecOpsError from secops.chronicle.utils.request_utils import paginated_request From 9caa984f0429b0687012198d94a036312c6a2c38 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Sat, 6 Dec 2025 13:53:07 +0000 Subject: [PATCH 08/48] chore: Update mappings --- api_module_mapping.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/api_module_mapping.md b/api_module_mapping.md index b568756c..fcd73c5b 100644 --- a/api_module_mapping.md +++ b/api_module_mapping.md @@ -38,11 +38,11 @@ wrapper module and its respective CLI command (if available). 
| rules.retrohunts.get | v1 | chronicle.rule_retrohunt.get_retrohunt | | | rules.retrohunts.list | v1 | | | | rules.updateDeployment | v1 | chronicle.rule.enable_rule | secops rule enable | -| watchlists.create | v1 | | | +| watchlists.create | v1 | chronicle.watchlist.create_watchlist | | | watchlists.delete | v1 | chronicle.watchlist.delete_watchlist | | | watchlists.get | v1 | chronicle.watchlist.get_watchlist | | | watchlists.list | v1 | chronicle.watchlist.list_watchlists | | -| watchlists.patch | v1 | | | +| watchlists.patch | v1 | | | | dataAccessLabels.create | v1beta | | | | dataAccessLabels.delete | v1beta | | | | dataAccessLabels.get | v1beta | | | From a01ea2a6c58ece1b0c018d3960134697ba3cf607 Mon Sep 17 00:00:00 2001 From: MisterSeajay Date: Mon, 1 Dec 2025 21:32:48 +0000 Subject: [PATCH 09/48] fix Issue-148 and apply black formatting --- src/secops/chronicle/parser.py | 67 ++++++++++++++-------------------- 1 file changed, 28 insertions(+), 39 deletions(-) diff --git a/src/secops/chronicle/parser.py b/src/secops/chronicle/parser.py index 082cfd40..ec05bac2 100644 --- a/src/secops/chronicle/parser.py +++ b/src/secops/chronicle/parser.py @@ -14,10 +14,10 @@ # """Parser management functionality for Chronicle.""" -from typing import Dict, Any, List, Optional -from secops.exceptions import APIError import base64 +from typing import Any, Dict, List, Optional +from secops.exceptions import APIError # Constants for size limits MAX_LOG_SIZE = 10 * 1024 * 1024 # 10MB per log @@ -26,7 +26,9 @@ def activate_parser( - client, log_type: str, id: str # pylint: disable=redefined-builtin + client, + log_type: str, + id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: """Activate a custom parser. @@ -55,7 +57,9 @@ def activate_parser( def activate_release_candidate_parser( - client, log_type: str, id: str # pylint: disable=redefined-builtin + client, + log_type: str, + id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: """Activate the release candidate parser making it live for that customer. @@ -84,7 +88,9 @@ def activate_release_candidate_parser( def copy_parser( - client, log_type: str, id: str # pylint: disable=redefined-builtin + client, + log_type: str, + id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: """Makes a copy of a prebuilt parser. @@ -100,8 +106,7 @@ def copy_parser( APIError: If the API request fails """ url = ( - f"{client.base_url}/{client.instance_id}/logTypes/{log_type}" - f"/parsers/{id}:copy" + f"{client.base_url}/{client.instance_id}/logTypes/{log_type}/parsers/{id}:copy" ) body = {} response = client.session.post(url, json=body) @@ -148,7 +153,9 @@ def create_parser( def deactivate_parser( - client, log_type: str, id: str # pylint: disable=redefined-builtin + client, + log_type: str, + id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: """Deactivate a custom parser. @@ -196,10 +203,7 @@ def delete_parser( Raises: APIError: If the API request fails """ - url = ( - f"{client.base_url}/{client.instance_id}/logTypes/{log_type}" - f"/parsers/{id}" - ) + url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}/parsers/{id}" params = {"force": force} response = client.session.delete(url, params=params) @@ -210,7 +214,9 @@ def delete_parser( def get_parser( - client, log_type: str, id: str # pylint: disable=redefined-builtin + client, + log_type: str, + id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: """Get a Parser by ID. 
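Amid the Black reformatting in this commit, the substantive Issue-148 change is the `list_parsers` pagination fix a few hunks below: the API returns the camelCase key `nextPageToken`, so the old `next_page_token` lookup never matched and the loop silently stopped after the first page. The fix also assigns the token to the `page_token` local that the per-iteration `params` dict is rebuilt from, rather than mutating a `params` dict that the next iteration overwrites. A minimal standalone sketch of the corrected loop, with illustrative names and a requests-style `session` assumed:

```python
def fetch_all_pages(session, url: str, items_key: str = "parsers") -> list:
    """Drain a Chronicle-style paginated endpoint into a single list."""
    items, page_token = [], None
    while True:
        # Rebuild params each iteration from the current token
        params = {"pageSize": 100}
        if page_token:
            params["pageToken"] = page_token
        data = session.get(url, params=params).json()
        items.extend(data.get(items_key, []))
        # The key is camelCase: looking up "next_page_token" here would
        # always miss and truncate results to the first page (Issue-148).
        page_token = data.get("nextPageToken")
        if not page_token:
            return items
```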
@@ -225,10 +231,7 @@ def get_parser( Raises: APIError: If the API request fails """ - url = ( - f"{client.base_url}/{client.instance_id}/logTypes/{log_type}" - f"/parsers/{id}" - ) + url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}/parsers/{id}" response = client.session.get(url) if response.status_code != 200: @@ -263,10 +266,7 @@ def list_parsers( parsers = [] while more: - url = ( - f"{client.base_url}/{client.instance_id}" - f"/logTypes/{log_type}/parsers" - ) + url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}/parsers" params = { "pageSize": page_size, @@ -284,8 +284,8 @@ def list_parsers( if "parsers" in data: parsers.extend(data["parsers"]) - if "next_page_token" in data: - params["pageToken"] = data["next_page_token"] + if "nextPageToken" in data: + page_token = data["nextPageToken"] else: more = False @@ -364,27 +364,19 @@ def run_parser( # Check number of logs if len(logs) > MAX_LOGS: - raise ValueError( - f"Number of logs ({len(logs)}) exceeds maximum of {MAX_LOGS}" - ) + raise ValueError(f"Number of logs ({len(logs)}) exceeds maximum of {MAX_LOGS}") # Validate parser_extension_code type if provided - if parser_extension_code is not None and not isinstance( - parser_extension_code, str - ): + if parser_extension_code is not None and not isinstance(parser_extension_code, str): raise TypeError( "parser_extension_code must be a string or None, got " f"{type(parser_extension_code).__name__}" ) # Build request - url = ( - f"{client.base_url}/{client.instance_id}/logTypes/{log_type}:runParser" - ) + url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}:runParser" - parser = { - "cbn": base64.b64encode(parser_code.encode("utf-8")).decode("utf-8") - } + parser = {"cbn": base64.b64encode(parser_code.encode("utf-8")).decode("utf-8")} parser_extension = None if parser_extension_code: @@ -397,10 +389,7 @@ def run_parser( body = { "parser": parser, "parser_extension": parser_extension, - "log": [ - base64.b64encode(log.encode("utf-8")).decode("utf-8") - for log in logs - ], + "log": [base64.b64encode(log.encode("utf-8")).decode("utf-8") for log in logs], "statedump_allowed": statedump_allowed, } From e70359611fec6d43aaa7f4401f0a862918ed49f0 Mon Sep 17 00:00:00 2001 From: MisterSeajay Date: Tue, 2 Dec 2025 15:45:53 +0000 Subject: [PATCH 10/48] run black formatter --- src/secops/chronicle/parser.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/src/secops/chronicle/parser.py b/src/secops/chronicle/parser.py index ec05bac2..e28444af 100644 --- a/src/secops/chronicle/parser.py +++ b/src/secops/chronicle/parser.py @@ -105,9 +105,7 @@ def copy_parser( Raises: APIError: If the API request fails """ - url = ( - f"{client.base_url}/{client.instance_id}/logTypes/{log_type}/parsers/{id}:copy" - ) + url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}/parsers/{id}:copy" body = {} response = client.session.post(url, json=body) @@ -364,19 +362,27 @@ def run_parser( # Check number of logs if len(logs) > MAX_LOGS: - raise ValueError(f"Number of logs ({len(logs)}) exceeds maximum of {MAX_LOGS}") + raise ValueError( + f"Number of logs ({len(logs)}) exceeds maximum of {MAX_LOGS}" + ) # Validate parser_extension_code type if provided - if parser_extension_code is not None and not isinstance(parser_extension_code, str): + if parser_extension_code is not None and not isinstance( + parser_extension_code, str + ): raise TypeError( "parser_extension_code must be a string or None, got " 
f"{type(parser_extension_code).__name__}" ) # Build request - url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}:runParser" + url = ( + f"{client.base_url}/{client.instance_id}/logTypes/{log_type}:runParser" + ) - parser = {"cbn": base64.b64encode(parser_code.encode("utf-8")).decode("utf-8")} + parser = { + "cbn": base64.b64encode(parser_code.encode("utf-8")).decode("utf-8") + } parser_extension = None if parser_extension_code: @@ -389,7 +395,10 @@ def run_parser( body = { "parser": parser, "parser_extension": parser_extension, - "log": [base64.b64encode(log.encode("utf-8")).decode("utf-8") for log in logs], + "log": [ + base64.b64encode(log.encode("utf-8")).decode("utf-8") + for log in logs + ], "statedump_allowed": statedump_allowed, } From 98319fea3fb15b32d71979e50c7dadb935a2ed40 Mon Sep 17 00:00:00 2001 From: MisterSeajay Date: Tue, 2 Dec 2025 16:37:41 +0000 Subject: [PATCH 11/48] break long urls below 80 char line limit --- src/secops/chronicle/parser.py | 46 +++++++++++++++------------------- 1 file changed, 20 insertions(+), 26 deletions(-) diff --git a/src/secops/chronicle/parser.py b/src/secops/chronicle/parser.py index e28444af..7fba12b3 100644 --- a/src/secops/chronicle/parser.py +++ b/src/secops/chronicle/parser.py @@ -44,8 +44,8 @@ def activate_parser( APIError: If the API request fails """ url = ( - f"{client.base_url}/{client.instance_id}/logTypes/{log_type}" - f"/parsers/{id}:activate" + f"{client.base_url}/{client.instance_id}" + f"/logTypes/{log_type}/parsers/{id}:activate" ) body = {} response = client.session.post(url, json=body) @@ -75,8 +75,8 @@ def activate_release_candidate_parser( APIError: If the API request fails """ url = ( - f"{client.base_url}/{client.instance_id}/logTypes/{log_type}" - f"/parsers/{id}:activateReleaseCandidateParser" + f"{client.base_url}/{client.instance_id}" + f"/logTypes/{log_type}/parsers/{id}:activateReleaseCandidateParser" ) body = {} response = client.session.post(url, json=body) @@ -105,7 +105,10 @@ def copy_parser( Raises: APIError: If the API request fails """ - url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}/parsers/{id}:copy" + url = ( + f"{client.base_url}/{client.instance_id}" + f"/logTypes/{log_type}/parsers/{id}:copy" + ) body = {} response = client.session.post(url, json=body) @@ -169,8 +172,8 @@ def deactivate_parser( APIError: If the API request fails """ url = ( - f"{client.base_url}/{client.instance_id}/logTypes/{log_type}" - f"/parsers/{id}:deactivate" + f"{client.base_url}/{client.instance_id}" + f"/logTypes/{log_type}/parsers/{id}:deactivate" ) body = {} response = client.session.post(url, json=body) @@ -201,7 +204,7 @@ def delete_parser( Raises: APIError: If the API request fails """ - url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}/parsers/{id}" + url = f"{client.base_url}/{client.instance_id}" f"/logTypes/{log_type}/parsers/{id}" params = {"force": force} response = client.session.delete(url, params=params) @@ -229,7 +232,8 @@ def get_parser( Raises: APIError: If the API request fails """ - url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}/parsers/{id}" + url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}" + url += f"/parsers/{id}" response = client.session.get(url) if response.status_code != 200: @@ -264,7 +268,8 @@ def list_parsers( parsers = [] while more: - url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}/parsers" + url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}" + url += "/parsers" params 
= { "pageSize": page_size, @@ -362,27 +367,19 @@ def run_parser( # Check number of logs if len(logs) > MAX_LOGS: - raise ValueError( - f"Number of logs ({len(logs)}) exceeds maximum of {MAX_LOGS}" - ) + raise ValueError(f"Number of logs ({len(logs)}) exceeds maximum of {MAX_LOGS}") # Validate parser_extension_code type if provided - if parser_extension_code is not None and not isinstance( - parser_extension_code, str - ): + if parser_extension_code is not None and not isinstance(parser_extension_code, str): raise TypeError( "parser_extension_code must be a string or None, got " f"{type(parser_extension_code).__name__}" ) # Build request - url = ( - f"{client.base_url}/{client.instance_id}/logTypes/{log_type}:runParser" - ) + url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}:runParser" - parser = { - "cbn": base64.b64encode(parser_code.encode("utf-8")).decode("utf-8") - } + parser = {"cbn": base64.b64encode(parser_code.encode("utf-8")).decode("utf-8")} parser_extension = None if parser_extension_code: @@ -395,10 +392,7 @@ def run_parser( body = { "parser": parser, "parser_extension": parser_extension, - "log": [ - base64.b64encode(log.encode("utf-8")).decode("utf-8") - for log in logs - ], + "log": [base64.b64encode(log.encode("utf-8")).decode("utf-8") for log in logs], "statedump_allowed": statedump_allowed, } From 6da83aebd86ee42f16618b439ae3951ddcfdff18 Mon Sep 17 00:00:00 2001 From: MisterSeajay Date: Tue, 2 Dec 2025 16:37:53 +0000 Subject: [PATCH 12/48] break long urls below 80 char line limit --- src/secops/chronicle/parser.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/secops/chronicle/parser.py b/src/secops/chronicle/parser.py index 7fba12b3..253c6ae0 100644 --- a/src/secops/chronicle/parser.py +++ b/src/secops/chronicle/parser.py @@ -204,7 +204,8 @@ def delete_parser( Raises: APIError: If the API request fails """ - url = f"{client.base_url}/{client.instance_id}" f"/logTypes/{log_type}/parsers/{id}" + url = f"{client.base_url}/{client.instance_id}" + url += f"/logTypes/{log_type}/parsers/{id}" params = {"force": force} response = client.session.delete(url, params=params) @@ -232,8 +233,8 @@ def get_parser( Raises: APIError: If the API request fails """ - url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}" - url += f"/parsers/{id}" + url = f"{client.base_url}/{client.instance_id}" + url += f"/logTypes/{log_type}/parsers/{id}" response = client.session.get(url) if response.status_code != 200: @@ -268,8 +269,8 @@ def list_parsers( parsers = [] while more: - url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}" - url += "/parsers" + url = f"{client.base_url}/{client.instance_id}" + url += f"/logTypes/{log_type}/parsers" params = { "pageSize": page_size, @@ -377,7 +378,8 @@ def run_parser( ) # Build request - url = f"{client.base_url}/{client.instance_id}/logTypes/{log_type}:runParser" + url = f"{client.base_url}/{client.instance_id}" + url += f"/logTypes/{log_type}:runParser" parser = {"cbn": base64.b64encode(parser_code.encode("utf-8")).decode("utf-8")} From 7a09a9a41bc508983c98af402441d0a76c312f64 Mon Sep 17 00:00:00 2001 From: MisterSeajay Date: Tue, 2 Dec 2025 17:00:39 +0000 Subject: [PATCH 13/48] Use ChronicleClient type hint --- src/secops/chronicle/parser.py | 40 +++++++++++++++++++++------------- 1 file changed, 25 insertions(+), 15 deletions(-) diff --git a/src/secops/chronicle/parser.py b/src/secops/chronicle/parser.py index 253c6ae0..d9419742 100644 --- a/src/secops/chronicle/parser.py 
+++ b/src/secops/chronicle/parser.py @@ -15,8 +15,9 @@ """Parser management functionality for Chronicle.""" import base64 -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union +from secops.chronicle.client import ChronicleClient from secops.exceptions import APIError # Constants for size limits @@ -26,7 +27,7 @@ def activate_parser( - client, + client: ChronicleClient, log_type: str, id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: @@ -57,7 +58,7 @@ def activate_parser( def activate_release_candidate_parser( - client, + client: ChronicleClient, log_type: str, id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: @@ -88,7 +89,7 @@ def activate_release_candidate_parser( def copy_parser( - client, + client: ChronicleClient, log_type: str, id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: @@ -119,7 +120,7 @@ def copy_parser( def create_parser( - client, + client: ChronicleClient, log_type: str, parser_code: str, validated_on_empty_logs: bool = True, @@ -154,7 +155,7 @@ def create_parser( def deactivate_parser( - client, + client: ChronicleClient, log_type: str, id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: @@ -185,7 +186,7 @@ def deactivate_parser( def delete_parser( - client, + client: ChronicleClient, log_type: str, id: str, # pylint: disable=redefined-builtin force: bool = False, @@ -216,7 +217,7 @@ def delete_parser( def get_parser( - client, + client: ChronicleClient, log_type: str, id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: @@ -244,10 +245,10 @@ def get_parser( def list_parsers( - client, + client: ChronicleClient, log_type: str = "-", page_size: int = 100, - page_token: str = None, + page_token: Optional[Union[str, None]] = None, filter: str = None, # pylint: disable=redefined-builtin ) -> List[Any]: """List parsers. @@ -297,7 +298,7 @@ def list_parsers( def run_parser( - client: "ChronicleClient", + client: ChronicleClient, log_type: str, parser_code: str, parser_extension_code: Optional[str], @@ -368,10 +369,14 @@ def run_parser( # Check number of logs if len(logs) > MAX_LOGS: - raise ValueError(f"Number of logs ({len(logs)}) exceeds maximum of {MAX_LOGS}") + raise ValueError( + f"Number of logs ({len(logs)}) exceeds maximum of {MAX_LOGS}" + ) # Validate parser_extension_code type if provided - if parser_extension_code is not None and not isinstance(parser_extension_code, str): + if parser_extension_code is not None and not isinstance( + parser_extension_code, str + ): raise TypeError( "parser_extension_code must be a string or None, got " f"{type(parser_extension_code).__name__}" @@ -381,7 +386,9 @@ def run_parser( url = f"{client.base_url}/{client.instance_id}" url += f"/logTypes/{log_type}:runParser" - parser = {"cbn": base64.b64encode(parser_code.encode("utf-8")).decode("utf-8")} + parser = { + "cbn": base64.b64encode(parser_code.encode("utf-8")).decode("utf-8") + } parser_extension = None if parser_extension_code: @@ -394,7 +401,10 @@ def run_parser( body = { "parser": parser, "parser_extension": parser_extension, - "log": [base64.b64encode(log.encode("utf-8")).decode("utf-8") for log in logs], + "log": [ + base64.b64encode(log.encode("utf-8")).decode("utf-8") + for log in logs + ], "statedump_allowed": statedump_allowed, } From 5f4a115df78368df34c21cc78302908869e50e85 Mon Sep 17 00:00:00 2001 From: MisterSeajay Date: Tue, 2 Dec 2025 17:03:01 +0000 Subject: [PATCH 14/48] Revert client: ChronicleClient type hint. 
:-( --- src/secops/chronicle/parser.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/src/secops/chronicle/parser.py b/src/secops/chronicle/parser.py index d9419742..77a14040 100644 --- a/src/secops/chronicle/parser.py +++ b/src/secops/chronicle/parser.py @@ -17,7 +17,6 @@ import base64 from typing import Any, Dict, List, Optional, Union -from secops.chronicle.client import ChronicleClient from secops.exceptions import APIError # Constants for size limits @@ -27,7 +26,7 @@ def activate_parser( - client: ChronicleClient, + client: "ChronicleClient", log_type: str, id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: @@ -58,7 +57,7 @@ def activate_parser( def activate_release_candidate_parser( - client: ChronicleClient, + client: "ChronicleClient", log_type: str, id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: @@ -89,7 +88,7 @@ def activate_release_candidate_parser( def copy_parser( - client: ChronicleClient, + client: "ChronicleClient", log_type: str, id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: @@ -120,7 +119,7 @@ def copy_parser( def create_parser( - client: ChronicleClient, + client: "ChronicleClient", log_type: str, parser_code: str, validated_on_empty_logs: bool = True, @@ -155,7 +154,7 @@ def create_parser( def deactivate_parser( - client: ChronicleClient, + client: "ChronicleClient", log_type: str, id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: @@ -186,7 +185,7 @@ def deactivate_parser( def delete_parser( - client: ChronicleClient, + client: "ChronicleClient", log_type: str, id: str, # pylint: disable=redefined-builtin force: bool = False, @@ -217,7 +216,7 @@ def delete_parser( def get_parser( - client: ChronicleClient, + client: "ChronicleClient", log_type: str, id: str, # pylint: disable=redefined-builtin ) -> Dict[str, Any]: @@ -245,7 +244,7 @@ def get_parser( def list_parsers( - client: ChronicleClient, + client: "ChronicleClient", log_type: str = "-", page_size: int = 100, page_token: Optional[Union[str, None]] = None, @@ -298,7 +297,7 @@ def list_parsers( def run_parser( - client: ChronicleClient, + client: "ChronicleClient", log_type: str, parser_code: str, parser_extension_code: Optional[str], From e88f45642514f884e30bfb8042ac2d7e321ccb0a Mon Sep 17 00:00:00 2001 From: MisterSeajay Date: Tue, 2 Dec 2025 17:15:17 +0000 Subject: [PATCH 15/48] update unit tests for paginated lists --- tests/chronicle/test_parser.py | 163 +++++++++++++++++++++++++++------ 1 file changed, 136 insertions(+), 27 deletions(-) diff --git a/tests/chronicle/test_parser.py b/tests/chronicle/test_parser.py index 99dc5986..e734972f 100644 --- a/tests/chronicle/test_parser.py +++ b/tests/chronicle/test_parser.py @@ -15,10 +15,15 @@ """Tests for Chronicle parser functions.""" import base64 -import pytest from unittest.mock import Mock, patch + +import pytest + from secops.chronicle.client import ChronicleClient from secops.chronicle.parser import ( + MAX_LOG_SIZE, + MAX_LOGS, + MAX_TOTAL_SIZE, activate_parser, activate_release_candidate_parser, copy_parser, @@ -28,9 +33,6 @@ get_parser, list_parsers, run_parser, - MAX_LOG_SIZE, - MAX_LOGS, - MAX_TOTAL_SIZE, ) from secops.exceptions import APIError @@ -100,7 +102,9 @@ def test_activate_parser_error(chronicle_client, mock_error_response): # --- activate_release_candidate_parser Tests --- -def test_activate_release_candidate_parser_success(chronicle_client, mock_response): +def test_activate_release_candidate_parser_success( + chronicle_client, 
mock_response +): """Test activate_release_candidate_parser function for success.""" log_type = "SOME_LOG_TYPE" parser_id = "pa_67890" @@ -118,7 +122,9 @@ def test_activate_release_candidate_parser_success(chronicle_client, mock_respon assert result == {} -def test_activate_release_candidate_parser_error(chronicle_client, mock_error_response): +def test_activate_release_candidate_parser_error( + chronicle_client, mock_error_response +): """Test activate_release_candidate_parser function for API error.""" log_type = "SOME_LOG_TYPE" parser_id = "pa_67890" @@ -127,7 +133,9 @@ def test_activate_release_candidate_parser_error(chronicle_client, mock_error_re chronicle_client.session, "post", return_value=mock_error_response ): with pytest.raises(APIError) as exc_info: - activate_release_candidate_parser(chronicle_client, log_type, parser_id) + activate_release_candidate_parser( + chronicle_client, log_type, parser_id + ) assert "Failed to activate parser: Error message" in str(exc_info.value) @@ -166,7 +174,9 @@ def test_copy_parser_error(chronicle_client, mock_error_response): # --- create_parser Tests --- -def test_create_parser_success_default_validation(chronicle_client, mock_response): +def test_create_parser_success_default_validation( + chronicle_client, mock_response +): """Test create_parser function for success with default validated_on_empty_logs.""" log_type = "NIX_SYSTEM" parser_code = "filter {}" @@ -186,14 +196,18 @@ def test_create_parser_success_default_validation(chronicle_client, mock_respons mock_post.assert_called_once_with( expected_url, json={ - "cbn": base64.b64encode(parser_code.encode("utf-8")).decode("utf-8"), + "cbn": base64.b64encode(parser_code.encode("utf-8")).decode( + "utf-8" + ), "validated_on_empty_logs": True, }, ) assert result == expected_parser_info -def test_create_parser_success_with_validation_false(chronicle_client, mock_response): +def test_create_parser_success_with_validation_false( + chronicle_client, mock_response +): """Test create_parser function for success with validated_on_empty_logs=False.""" log_type = "NIX_SYSTEM" parser_code = "filter {}" @@ -208,14 +222,19 @@ def test_create_parser_success_with_validation_false(chronicle_client, mock_resp chronicle_client.session, "post", return_value=mock_response ) as mock_post: result = create_parser( - chronicle_client, log_type, parser_code, validated_on_empty_logs=False + chronicle_client, + log_type, + parser_code, + validated_on_empty_logs=False, ) expected_url = f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logTypes/{log_type}/parsers" mock_post.assert_called_once_with( expected_url, json={ - "cbn": base64.b64encode(parser_code.encode("utf-8")).decode("utf-8"), + "cbn": base64.b64encode(parser_code.encode("utf-8")).decode( + "utf-8" + ), "validated_on_empty_logs": False, }, ) @@ -262,7 +281,9 @@ def test_deactivate_parser_error(chronicle_client, mock_error_response): ): with pytest.raises(APIError) as exc_info: deactivate_parser(chronicle_client, log_type, parser_id) - assert "Failed to deactivate parser: Error message" in str(exc_info.value) + assert "Failed to deactivate parser: Error message" in str( + exc_info.value + ) # --- delete_parser Tests --- @@ -278,7 +299,9 @@ def test_delete_parser_success_no_force(chronicle_client, mock_response): result = delete_parser(chronicle_client, log_type, parser_id) expected_url = f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logTypes/{log_type}/parsers/{parser_id}" - mock_delete.assert_called_once_with(expected_url, 
params={"force": False}) + mock_delete.assert_called_once_with( + expected_url, params={"force": False} + ) assert result == {} @@ -291,10 +314,14 @@ def test_delete_parser_success_with_force(chronicle_client, mock_response): with patch.object( chronicle_client.session, "delete", return_value=mock_response ) as mock_delete: - result = delete_parser(chronicle_client, log_type, parser_id, force=True) + result = delete_parser( + chronicle_client, log_type, parser_id, force=True + ) expected_url = f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logTypes/{log_type}/parsers/{parser_id}" - mock_delete.assert_called_once_with(expected_url, params={"force": True}) + mock_delete.assert_called_once_with( + expected_url, params={"force": True} + ) assert result == {} @@ -362,7 +389,8 @@ def test_list_parsers_single_page_success(chronicle_client, mock_response): expected_url = f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logTypes/{log_type}/parsers" mock_get.assert_called_once_with( - expected_url, params={"pageSize": 100, "pageToken": None, "filter": None} + expected_url, + params={"pageSize": 100, "pageToken": None, "filter": None}, ) assert result == expected_parsers @@ -381,7 +409,8 @@ def test_list_parsers_no_parsers_success(chronicle_client, mock_response): expected_url = f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logTypes/{log_type}/parsers" mock_get.assert_called_once_with( - expected_url, params={"pageSize": 100, "pageToken": None, "filter": None} + expected_url, + params={"pageSize": 100, "pageToken": None, "filter": None}, ) assert result == [] @@ -430,6 +459,72 @@ def test_list_parsers_with_optional_params(chronicle_client, mock_response): assert result == expected_parsers +def test_list_parsers_multi_page_pagination(chronicle_client, mock_response): + """Test list_parsers function with multi-page pagination (Issue 148). + + This test validates that the pagination fix correctly handles the + 'nextPageToken' field (not 'next_page_token') returned by the API. 
+ """ + log_type = "WINDOWS" + + # First page of parsers with nextPageToken + first_page_parsers = [ + {"name": "pa_windows_1", "id": "pa_windows_1"}, + {"name": "pa_windows_2", "id": "pa_windows_2"}, + ] + + # Second page of parsers without nextPageToken (last page) + second_page_parsers = [ + {"name": "pa_windows_3", "id": "pa_windows_3"}, + ] + + # Mock responses for each page + first_response = Mock() + first_response.status_code = 200 + first_response.json.return_value = { + "parsers": first_page_parsers, + "nextPageToken": "page2_token", + } + + second_response = Mock() + second_response.status_code = 200 + second_response.json.return_value = { + "parsers": second_page_parsers, + # No nextPageToken - this is the last page + } + + with patch.object( + chronicle_client.session, + "get", + side_effect=[first_response, second_response], + ) as mock_get: + result = list_parsers(chronicle_client, log_type=log_type, page_size=2) + + # Verify we made two API calls (one per page) + assert mock_get.call_count == 2 + + # Verify first call + expected_url = ( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}" + f"/logTypes/{log_type}/parsers" + ) + first_call = mock_get.call_args_list[0] + assert first_call[0][0] == expected_url + assert first_call[1]["params"]["pageSize"] == 2 + assert first_call[1]["params"]["pageToken"] is None + + # Verify second call uses the nextPageToken from first response + second_call = mock_get.call_args_list[1] + assert second_call[0][0] == expected_url + assert second_call[1]["params"]["pageSize"] == 2 + assert second_call[1]["params"]["pageToken"] == "page2_token" + + # Verify all parsers from both pages are returned + expected_all_parsers = first_page_parsers + second_page_parsers + assert result == expected_all_parsers + assert len(result) == 3 + + # --- run_parser Tests --- def test_run_parser_success(chronicle_client, mock_response): """Test run_parser function for success.""" @@ -473,9 +568,11 @@ def test_run_parser_success(chronicle_client, mock_response): assert request_body["parser"]["cbn"] == base64.b64encode( parser_code.encode("utf8") ).decode("utf-8") - assert request_body["parser_extension"]["cbn_snippet"] == base64.b64encode( - parser_extension_code.encode("utf8") - ).decode("utf-8") + assert request_body["parser_extension"][ + "cbn_snippet" + ] == base64.b64encode(parser_extension_code.encode("utf8")).decode( + "utf-8" + ) assert len(request_body["log"]) == 2 assert request_body["log"][0] == base64.b64encode( logs[0].encode("utf8") @@ -584,7 +681,9 @@ def test_run_parser_error(chronicle_client, mock_error_response): logs=logs, ) # Check for the new detailed error message format - assert "Failed to evaluate parser for log type 'WINDOWS'" in str(exc_info.value) + assert "Failed to evaluate parser for log type 'WINDOWS'" in str( + exc_info.value + ) assert "Bad request" in str(exc_info.value) @@ -718,7 +817,9 @@ def test_run_parser_validation_invalid_extension_type(chronicle_client): parser_extension_code=123, # type: ignore logs=["test log"], ) - assert "parser_extension_code must be a string or None" in str(exc_info.value) + assert "parser_extension_code must be a string or None" in str( + exc_info.value + ) def test_run_parser_detailed_error_400(chronicle_client, mock_response): @@ -726,7 +827,9 @@ def test_run_parser_detailed_error_400(chronicle_client, mock_response): mock_response.status_code = 400 mock_response.text = "Invalid log type: INVALID_TYPE" - with patch.object(chronicle_client.session, "post", return_value=mock_response): + 
with patch.object( + chronicle_client.session, "post", return_value=mock_response + ): with pytest.raises(APIError) as exc_info: run_parser( chronicle_client, @@ -736,7 +839,9 @@ def test_run_parser_detailed_error_400(chronicle_client, mock_response): logs=["test log"], ) error_msg = str(exc_info.value) - assert "Failed to evaluate parser for log type 'INVALID_TYPE'" in error_msg + assert ( + "Failed to evaluate parser for log type 'INVALID_TYPE'" in error_msg + ) assert "Bad request" in error_msg assert "Log type 'INVALID_TYPE' may not be valid" in error_msg @@ -746,7 +851,9 @@ def test_run_parser_detailed_error_404(chronicle_client, mock_response): mock_response.status_code = 404 mock_response.text = "Not found" - with patch.object(chronicle_client.session, "post", return_value=mock_response): + with patch.object( + chronicle_client.session, "post", return_value=mock_response + ): with pytest.raises(APIError) as exc_info: run_parser( chronicle_client, @@ -764,7 +871,9 @@ def test_run_parser_detailed_error_413(chronicle_client, mock_response): mock_response.status_code = 413 mock_response.text = "Request entity too large" - with patch.object(chronicle_client.session, "post", return_value=mock_response): + with patch.object( + chronicle_client.session, "post", return_value=mock_response + ): with pytest.raises(APIError) as exc_info: run_parser( chronicle_client, From 2cfa97dd78c37690c5436875de2da9e791a3e765 Mon Sep 17 00:00:00 2001 From: MisterSeajay Date: Tue, 2 Dec 2025 17:30:35 +0000 Subject: [PATCH 16/48] make long url string construction consistent --- src/secops/chronicle/parser.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/src/secops/chronicle/parser.py b/src/secops/chronicle/parser.py index 77a14040..3bc6c1b0 100644 --- a/src/secops/chronicle/parser.py +++ b/src/secops/chronicle/parser.py @@ -204,8 +204,10 @@ def delete_parser( Raises: APIError: If the API request fails """ - url = f"{client.base_url}/{client.instance_id}" - url += f"/logTypes/{log_type}/parsers/{id}" + url = ( + f"{client.base_url}/{client.instance_id}" + f"/logTypes/{log_type}/parsers/{id}" + ) params = {"force": force} response = client.session.delete(url, params=params) @@ -233,8 +235,10 @@ def get_parser( Raises: APIError: If the API request fails """ - url = f"{client.base_url}/{client.instance_id}" - url += f"/logTypes/{log_type}/parsers/{id}" + url = ( + f"{client.base_url}/{client.instance_id}" + f"/logTypes/{log_type}/parsers/{id}" + ) response = client.session.get(url) if response.status_code != 200: @@ -269,8 +273,10 @@ def list_parsers( parsers = [] while more: - url = f"{client.base_url}/{client.instance_id}" - url += f"/logTypes/{log_type}/parsers" + url = ( + f"{client.base_url}/{client.instance_id}" + f"/logTypes/{log_type}/parsers" + ) params = { "pageSize": page_size, @@ -382,8 +388,10 @@ def run_parser( ) # Build request - url = f"{client.base_url}/{client.instance_id}" - url += f"/logTypes/{log_type}:runParser" + url = ( + f"{client.base_url}/{client.instance_id}" + f"/logTypes/{log_type}:runParser" + ) parser = { "cbn": base64.b64encode(parser_code.encode("utf-8")).decode("utf-8") From ccaee1990affa74726710dd4340f32bc579a9477 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Thu, 4 Dec 2025 14:02:58 +0530 Subject: [PATCH 17/48] chore: added page_size condition. Added documentation. Fixed tests. 
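Under the contract this commit introduces (and documents in the README hunk below), passing `page_size` switches `list_parsers` into manual-pagination mode and returns the raw API dict. A hypothetical caller-side loop that drains all pages under that contract, assuming a configured `chronicle` client:

```python
def iter_parsers(chronicle, log_type: str = "-", page_size: int = 50):
    """Yield parsers page by page using manual pagination."""
    page_token = None
    while True:
        # With page_size set, list_parsers returns the raw response,
        # including nextPageToken while more pages remain.
        response = chronicle.list_parsers(
            log_type=log_type, page_size=page_size, page_token=page_token
        )
        yield from response.get("parsers", [])
        page_token = response.get("nextPageToken")
        if not page_token:
            break
```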
--- README.md | 9 +++- src/secops/chronicle/client.py | 18 +++++--- src/secops/chronicle/parser.py | 32 +++++++++----- tests/chronicle/test_parser.py | 77 ++++++++++++++++++++++++++-------- 4 files changed, 101 insertions(+), 35 deletions(-) diff --git a/README.md b/README.md index 493dce8e..acaccf99 100644 --- a/README.md +++ b/README.md @@ -1221,13 +1221,20 @@ print(f"Parser ID: {parser_id}") Retrieve, list, copy, activate/deactivate, and delete parsers: ```python -# List all parsers +# List all parsers (returns complete list) parsers = chronicle.list_parsers() for parser in parsers: parser_id = parser.get("name", "").split("/")[-1] state = parser.get("state") print(f"Parser ID: {parser_id}, State: {state}") +# Manual pagination: get raw API response with nextPageToken +response = chronicle.list_parsers(page_size=50) +parsers = response.get("parsers", []) +next_token = response.get("nextPageToken") +# Use next_token for subsequent calls: +# response = chronicle.list_parsers(page_size=50, page_token=next_token) + log_type = "WINDOWS_AD" # Get specific parser diff --git a/src/secops/chronicle/client.py b/src/secops/chronicle/client.py index 7c25d7ea..46907b74 100644 --- a/src/secops/chronicle/client.py +++ b/src/secops/chronicle/client.py @@ -1959,20 +1959,26 @@ def get_parser( def list_parsers( self, log_type: str = "-", - page_size: int = 100, - page_token: str = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, filter: str = None, # pylint: disable=redefined-builtin - ) -> List[Any]: + ) -> Union[List[Any], Dict[str, Any]]: """List parsers. Args: log_type: Log type to filter by - page_size: The maximum number of parsers to return - page_token: A page token, received from a previous ListParsers call + page_size: The maximum number of parsers to return per page. + If provided, returns raw API response with pagination info. + If None (default), auto-paginates and returns all parsers. + page_token: A page token, received from a previous ListParsers + call. filter: Optional filter expression Returns: - List of parser dictionaries + If page_size is None: list of all parsers + (auto-paginated). + If page_size is provided: the raw API response dict containing + that page of parsers and a nextPageToken when more are available. Raises: APIError: If the API request fails diff --git a/src/secops/chronicle/parser.py b/src/secops/chronicle/parser.py index 3bc6c1b0..0401e837 100644 --- a/src/secops/chronicle/parser.py +++ b/src/secops/chronicle/parser.py @@ -250,21 +250,25 @@ def get_parser( def list_parsers( client: "ChronicleClient", log_type: str = "-", - page_size: int = 100, - page_token: Optional[Union[str, None]] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, filter: str = None, # pylint: disable=redefined-builtin -) -> List[Any]: +) -> Union[List[Any], Dict[str, Any]]: """List parsers. Args: client: ChronicleClient instance log_type: Log type to filter by - page_size: The maximum number of parsers to return - page_token: A page token, received from a previous ListParsers call + page_size: The maximum number of parsers to return per page. + If provided, returns raw API response with pagination info. + If None (default), auto-paginates and returns all parsers. + page_token: A page token, received from a previous ListParsers call. + filter: Optional filter expression Returns: - List of parser dictionaries + If page_size is None: list of all parsers (auto-paginated). + If page_size is provided: the raw API response dict containing + that page of parsers and a nextPageToken when more are available.
Raises: APIError: If the API request fails @@ -278,11 +282,14 @@ def list_parsers( f"/logTypes/{log_type}/parsers" ) - params = { - "pageSize": page_size, - "pageToken": page_token, - "filter": filter, - } + params = {} + + if page_size: + params["pageSize"] = page_size + if page_token: + params["pageToken"] = page_token + if filter: + params["filter"] = filter response = client.session.get(url, params=params) @@ -291,6 +298,9 @@ def list_parsers( data = response.json() + if page_size is not None: + return data + if "parsers" in data: parsers.extend(data["parsers"]) diff --git a/tests/chronicle/test_parser.py b/tests/chronicle/test_parser.py index e734972f..ee7b6307 100644 --- a/tests/chronicle/test_parser.py +++ b/tests/chronicle/test_parser.py @@ -390,7 +390,7 @@ def test_list_parsers_single_page_success(chronicle_client, mock_response): expected_url = f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logTypes/{log_type}/parsers" mock_get.assert_called_once_with( expected_url, - params={"pageSize": 100, "pageToken": None, "filter": None}, + params={}, ) assert result == expected_parsers @@ -410,7 +410,7 @@ def test_list_parsers_no_parsers_success(chronicle_client, mock_response): expected_url = f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logTypes/{log_type}/parsers" mock_get.assert_called_once_with( expected_url, - params={"pageSize": 100, "pageToken": None, "filter": None}, + params={}, ) assert result == [] @@ -427,14 +427,20 @@ def test_list_parsers_error(chronicle_client, mock_error_response): assert "Failed to list parsers: Error message" in str(exc_info.value) -def test_list_parsers_with_optional_params(chronicle_client, mock_response): - """Test list_parsers function with custom page_size, page_token, and filter.""" +def test_list_parsers_with_page_size_returns_raw_response( + chronicle_client, mock_response +): + """Test list_parsers returns raw API response when page_size is provided.""" log_type = "CUSTOM_LOG_TYPE" page_size = 50 page_token = "custom_token_xyz" filter_query = "name=contains('custom')" expected_parsers = [{"name": "pa_custom_1"}] - mock_response.json.return_value = {"parsers": expected_parsers} + expected_response = { + "parsers": expected_parsers, + "nextPageToken": "next_token_abc", + } + mock_response.json.return_value = expected_response with patch.object( chronicle_client.session, "get", return_value=mock_response @@ -447,7 +453,10 @@ def test_list_parsers_with_optional_params(chronicle_client, mock_response): filter=filter_query, ) - expected_url = f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logTypes/{log_type}/parsers" + expected_url = ( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}" + f"/logTypes/{log_type}/parsers" + ) mock_get.assert_called_once_with( expected_url, params={ @@ -456,14 +465,16 @@ def test_list_parsers_with_optional_params(chronicle_client, mock_response): "filter": filter_query, }, ) - assert result == expected_parsers + # With page_size provided, returns raw response dict + assert result == expected_response + assert "nextPageToken" in result -def test_list_parsers_multi_page_pagination(chronicle_client, mock_response): - """Test list_parsers function with multi-page pagination (Issue 148). +def test_list_parsers_auto_pagination(chronicle_client): + """Test list_parsers auto-paginates when page_size is None (default). - This test validates that the pagination fix correctly handles the - 'nextPageToken' field (not 'next_page_token') returned by the API. 
+ This test validates that the pagination correctly handles the + 'nextPageToken' field returned by the API and fetches all pages. """ log_type = "WINDOWS" @@ -498,33 +509,65 @@ def test_list_parsers_multi_page_pagination(chronicle_client, mock_response): "get", side_effect=[first_response, second_response], ) as mock_get: - result = list_parsers(chronicle_client, log_type=log_type, page_size=2) + # No page_size means auto-pagination + result = list_parsers(chronicle_client, log_type=log_type) # Verify we made two API calls (one per page) assert mock_get.call_count == 2 - # Verify first call + # Verify first call uses default page size of 100 expected_url = ( f"{chronicle_client.base_url}/{chronicle_client.instance_id}" f"/logTypes/{log_type}/parsers" ) first_call = mock_get.call_args_list[0] assert first_call[0][0] == expected_url - assert first_call[1]["params"]["pageSize"] == 2 - assert first_call[1]["params"]["pageToken"] is None # Verify second call uses the nextPageToken from first response second_call = mock_get.call_args_list[1] assert second_call[0][0] == expected_url - assert second_call[1]["params"]["pageSize"] == 2 assert second_call[1]["params"]["pageToken"] == "page2_token" - # Verify all parsers from both pages are returned + # Verify all parsers from both pages are returned as a list expected_all_parsers = first_page_parsers + second_page_parsers assert result == expected_all_parsers assert len(result) == 3 +def test_list_parsers_manual_pagination_single_page( + chronicle_client, mock_response +): + """Test list_parsers returns raw response for manual pagination.""" + log_type = "MANUAL_LOG_TYPE" + page_size = 10 + expected_parsers = [{"name": "pa_manual_1"}] + expected_response = { + "parsers": expected_parsers, + "nextPageToken": "next_page_token", + } + mock_response.json.return_value = expected_response + + with patch.object( + chronicle_client.session, "get", return_value=mock_response + ) as mock_get: + result = list_parsers( + chronicle_client, log_type=log_type, page_size=page_size + ) + + expected_url = ( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}" + f"/logTypes/{log_type}/parsers" + ) + mock_get.assert_called_once_with( + expected_url, + params={"pageSize": page_size}, + ) + # Returns raw response dict, not just the parsers list + assert result == expected_response + assert "parsers" in result + assert "nextPageToken" in result + + # --- run_parser Tests --- def test_run_parser_success(chronicle_client, mock_response): """Test run_parser function for success.""" From 9f8c6af63fbb3d08e88798dd77b658412566fa7c Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Thu, 4 Dec 2025 17:24:52 +0530 Subject: [PATCH 18/48] chore: fixed cli for parser list --- src/secops/cli/commands/parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/secops/cli/commands/parser.py b/src/secops/cli/commands/parser.py index ae98771a..0ca058c8 100644 --- a/src/secops/cli/commands/parser.py +++ b/src/secops/cli/commands/parser.py @@ -345,12 +345,12 @@ def handle_parser_run_command(args, chronicle): else: # If no parser code provided, # try to find an active parser for the log type - parsers = chronicle.list_parsers( + parser_list_response = chronicle.list_parsers( args.log_type, page_size=1, - page_token=None, filter="STATE=ACTIVE", ) + parsers = parser_list_response.get("parsers", []) if len(parsers) < 1: raise SecOpsError( "No parser file provided and an active parser could not " From 
f252f4e6c08e0f04803df175f222ad7950955810 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Mon, 8 Dec 2025 14:08:15 +0530 Subject: [PATCH 19/48] chore: added changelog. updated project version --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c16b02a9..d1b0ae70 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,12 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.27.2] - 2025-12-08 ### Updated +- Parser list method to handle pagination properly + - Method auto-paginates and returns all parsers when no page size is provided. + - When a page size is provided, the method returns the raw response with the next page token. + ## [0.27.1] - 2025-12-05 ### Updated - Updated Chronicle client to expose API version param for following: diff --git a/pyproject.toml b/pyproject.toml index 3266dd1d..20f35768 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "secops" -version = "0.27.1" +version = "0.27.2" description = "Python SDK for wrapping the Google SecOps API for common use cases" readme = "README.md" requires-python = ">=3.7" From 3d5cd689774f2ee3b9295f6491bd68935441c2b8 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Sat, 6 Dec 2025 14:06:39 +0000 Subject: [PATCH 20/48] refactor: Update to match new base url generation --- src/secops/chronicle/watchlist.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff
--git a/src/secops/chronicle/utils/request_utils.py b/src/secops/chronicle/utils/request_utils.py index b6a5b730..d0054404 100644 --- a/src/secops/chronicle/utils/request_utils.py +++ b/src/secops/chronicle/utils/request_utils.py @@ -15,10 +15,12 @@ """Helper functions for Chronicle.""" from typing import Dict, Any, Optional, List, Union + from secops.exceptions import APIError +from secops.chronicle.models import APIVersion -def paginated_request( +def chronicle_paginated_request( client, base_url: str, path: str, @@ -84,3 +86,63 @@ if isinstance(data, list): return results return {items_key: results} + + +def chronicle_request( + client, + method: str, + endpoint_path: str, + *, + api_version: APIVersion = APIVersion.V1, + params: Optional[Dict[str, Any]] = None, + json: Optional[Dict[str, Any]] = None, + expected_status: int = 200, + error_message: Optional[str] = None, +) -> Dict[str, Any]: + """Perform an HTTP request and return JSON, raising APIError on failure. + + Args: + client: ChronicleClient instance + method: HTTP method, e.g. 'GET', 'POST', 'PATCH' + endpoint_path: URL path after {base_url}/{instance_id}/ + api_version: API version to use + params: Optional query parameters + json: Optional JSON body + expected_status: Expected HTTP status code (default: 200) + error_message: Optional base error message to include on failure + + Returns: + Parsed JSON response. + + Raises: + APIError: If the request fails, returns a non-JSON body, or status + code does not match expected_status. + """ + url = f"{client.base_url(api_version)}/{client.instance_id}/{endpoint_path}" + response = client.session.request( + method=method, url=url, params=params, json=json ) + + # Try to parse JSON even on error, so we can get more details + try: + data = response.json() + except ValueError: + data = None + + if response.status_code != expected_status: + base_msg = error_message or "API request failed" + if data is not None: + raise APIError( + f"{base_msg}: status={response.status_code}, response={data}" + ) from None + + raise APIError( + f"{base_msg}: status={response.status_code}, response_text={response.text}" + ) from None + + if data is None: + raise APIError( + f"Expected JSON response from {url} but got non-JSON body: {response.text}" + ) + + return data diff --git a/src/secops/chronicle/watchlist.py b/src/secops/chronicle/watchlist.py index 8c81c602..50081f04 100644 --- a/src/secops/chronicle/watchlist.py +++ b/src/secops/chronicle/watchlist.py @@ -17,7 +17,10 @@ from typing import Dict, Any, Optional from secops.chronicle.models import APIVersion -from secops.chronicle.utils.request_utils import paginated_request +from secops.chronicle.utils.request_utils import ( + chronicle_paginated_request, + chronicle_request, +) def list_watchlists( @@ -38,7 +41,7 @@ Raises: APIError: If the API request fails """ - return paginated_request( + return chronicle_paginated_request( client, base_url=client.base_url(APIVersion.V1), path="watchlists", @@ -61,9 +64,12 @@ def get_watchlist(client, watchlist_id: str) -> Dict[str, Any]: Raises: APIError: If the API request fails """ - return client.session.get( - f"{client.base_url(APIVersion.V1)}/{client.instance_id}/watchlists/{watchlist_id}", - ).json() + return chronicle_request( + client, + method="GET", + endpoint_path=f"watchlists/{watchlist_id}", + api_version=APIVersion.V1, + ) def delete_watchlist( @@ -86,10 +92,13 @@ APIError: If the API request fails """ params = {"force":
force} - return client.session.delete( - f"{client.base_url(APIVersion.V1)}/{client.instance_id}/watchlists/{watchlist_id}", + return chronicle_request( + client, + method="DELETE", + endpoint_path=f"watchlists/{watchlist_id}", + api_version=APIVersion.V1, params=params, - ).json() + ) def create_watchlist( @@ -114,9 +123,11 @@ def create_watchlist( Raises: APIError: If the API request fails """ - - return client.session.post( - f"{client.base_url(APIVersion.V1)}/{client.instance_id}/watchlists", + return chronicle_request( + client, + method="POST", + endpoint_path="watchlists", + api_version=APIVersion.V1, json={ "name": name, "displayName": display_name, @@ -124,4 +135,4 @@ def create_watchlist( "description": description, "entityPopulationMechanism": {"manual": {}}, }, - ).json() + ) From f4911925b85364cd2bd8054025c6ef96d71057bd Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Mon, 8 Dec 2025 21:08:47 +0000 Subject: [PATCH 22/48] feature: Tests for watchlist functions --- tests/chronicle/test_watchlist.py | 298 ++++++++++++++++++++++++++++++ 1 file changed, 298 insertions(+) create mode 100644 tests/chronicle/test_watchlist.py diff --git a/tests/chronicle/test_watchlist.py b/tests/chronicle/test_watchlist.py new file mode 100644 index 00000000..893d0e8b --- /dev/null +++ b/tests/chronicle/test_watchlist.py @@ -0,0 +1,298 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
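Since the refactor above routes every watchlist call through `chronicle_request`, callers get uniform error handling for free. A minimal sketch of consuming that behavior (the wrapper name here is hypothetical; the helper signature is taken from the diff above):

```python
# Hypothetical caller: turn API failures into None instead of exceptions.
from secops.chronicle.models import APIVersion
from secops.chronicle.utils.request_utils import chronicle_request
from secops.exceptions import APIError


def fetch_watchlist_or_none(client, watchlist_id: str):
    """Illustrative only; `client` is assumed to be a ChronicleClient."""
    try:
        return chronicle_request(
            client,
            method="GET",
            endpoint_path=f"watchlists/{watchlist_id}",
            api_version=APIVersion.V1,
            error_message=f"Failed to get watchlist {watchlist_id}",
        )
    except APIError as exc:
        print(f"Watchlist lookup failed: {exc}")
        return None
```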
+# +"""Tests for Chronicle watchlist functions.""" + +from typing import Any, Dict +from unittest.mock import Mock, patch + +import pytest + +from secops.chronicle.client import ChronicleClient +from secops.chronicle.models import APIVersion +from secops.chronicle.watchlist import ( + list_watchlists, + get_watchlist, + delete_watchlist, + create_watchlist, +) +from secops.exceptions import APIError + + +@pytest.fixture +def chronicle_client(): + """Create a Chronicle client for testing.""" + with patch("secops.auth.SecOpsAuth") as mock_auth: + mock_session = Mock() + mock_session.headers = {} + mock_auth.return_value.session = mock_session + return ChronicleClient( + customer_id="test-customer", + project_id="test-project", + default_api_version=APIVersion.V1, + ) + + +@pytest.fixture +def mock_response() -> Mock: + """Create a mock API response object.""" + mock = Mock() + mock.status_code = 200 + mock.json.return_value = {} + return mock + + +@pytest.fixture +def mock_error_response() -> Mock: + """Create a mock error API response object.""" + mock = Mock() + mock.status_code = 400 + mock.text = "Error message" + mock.raise_for_status.side_effect = Exception("API Error") + return mock + + +# -- list_watchlists tests -- + + +def test_list_watchlists_success(chronicle_client): + """Test list_watchlists delegates to chronicle_paginated_request.""" + expected: Dict[str, Any] = { + "watchlists": [ + {"name": "watchlist1"}, + {"name": "watchlist2"}, + ] + } + + with patch( + "secops.chronicle.watchlist.chronicle_paginated_request", + return_value=expected, + ) as mock_paginated: + result = list_watchlists( + chronicle_client, + page_size="10", + page_token="next-token", + ) + + assert result == expected + + mock_paginated.assert_called_once_with( + chronicle_client, + base_url=chronicle_client.base_url(APIVersion.V1), + path="watchlists", + items_key="watchlists", + page_size="10", + page_token="next-token", + ) + + +def test_list_watchlists_default_args(chronicle_client): + """Test list_watchlists with default pagination args.""" + expected: Dict[str, Any] = {"watchlists": []} + + with patch( + "secops.chronicle.watchlist.chronicle_paginated_request", + return_value=expected, + ) as mock_paginated: + result = list_watchlists(chronicle_client) + + assert result == expected + + mock_paginated.assert_called_once_with( + chronicle_client, + base_url=chronicle_client.base_url(APIVersion.V1), + path="watchlists", + items_key="watchlists", + page_size=None, + page_token=None, + ) + + +def test_list_watchlists_error(chronicle_client): + """Test list_watchlists propagates APIError from helper.""" + with patch( + "secops.chronicle.watchlist.chronicle_paginated_request", + side_effect=APIError("Failed to list watchlists"), + ): + with pytest.raises(APIError) as exc_info: + list_watchlists(chronicle_client) + + assert "Failed to list watchlists" in str(exc_info.value) + + +# -- get_watchlist tests -- + + +def test_get_watchlist_success(chronicle_client): + """Test get_watchlist returns expected result.""" + expected = { + "name": "test-watchlist-id", + "displayName": "test-watchlist", + "multiplyingFactor": 1, + "description": "test-description", + } + + with patch( + "secops.chronicle.watchlist.chronicle_request", + return_value=expected, + ) as mock_request: + result = get_watchlist(chronicle_client, "test-watchlist-id") + + assert result == expected + + mock_request.assert_called_once_with( + chronicle_client, + method="GET", + endpoint_path="watchlists/test-watchlist-id", + 
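Note that the tests patch `secops.chronicle.watchlist.chronicle_request`, the name as looked up at the call site, not `secops.chronicle.utils.request_utils.chronicle_request` where it is defined. A reduced sketch of that pattern (stub values are illustrative):

```python
from unittest.mock import patch

from secops.chronicle import watchlist

# watchlist.py imported the helper into its own namespace, so that is the
# name unittest.mock must replace for the stub to be seen.
with patch(
    "secops.chronicle.watchlist.chronicle_request",
    return_value={"name": "watchlists/stub-id"},
) as mock_request:
    # The client argument is never touched once the helper is stubbed out.
    result = watchlist.get_watchlist(object(), "stub-id")

assert result == {"name": "watchlists/stub-id"}
assert mock_request.call_count == 1
```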
api_version=APIVersion.V1, + ) + + +def test_get_watchlist_error(chronicle_client): + """Test get_watchlist raises APIError on error.""" + with patch( + "secops.chronicle.watchlist.chronicle_request", + side_effect=APIError("Failed to get watchlist test-watchlist-id"), + ): + with pytest.raises(APIError) as exc_info: + get_watchlist(chronicle_client, "test-watchlist-id") + + assert "Failed to get watchlist" in str(exc_info.value) + + +# -- delete_watchlist tests -- + + +def test_delete_watchlist_success(chronicle_client): + """Test delete_watchlist calls helper and returns response JSON.""" + expected: Dict[str, Any] = {} + + with patch( + "secops.chronicle.watchlist.chronicle_request", + return_value=expected, + ) as mock_request: + result = delete_watchlist(chronicle_client, "watchlist-123") + + assert result == expected + + mock_request.assert_called_once_with( + chronicle_client, + method="DELETE", + endpoint_path="watchlists/watchlist-123", + api_version=APIVersion.V1, + params={"force": None}, + ) + + +def test_delete_watchlist_force_true(chronicle_client): + """Test delete_watchlist with force=True.""" + expected: Dict[str, Any] = {} + + with patch( + "secops.chronicle.watchlist.chronicle_request", + return_value=expected, + ) as mock_request: + result = delete_watchlist( + chronicle_client, + "watchlist-123", + force=True, + ) + + assert result == expected + + mock_request.assert_called_once_with( + chronicle_client, + method="DELETE", + endpoint_path="watchlists/watchlist-123", + api_version=APIVersion.V1, + params={"force": True}, + ) + + +# -- create_watchlist tests -- + + +def test_create_watchlist_success(chronicle_client): + """Test create_watchlist calls helper and returns response JSON.""" + expected = { + "name": "watchlist-123", + "displayName": "My Watchlist", + "multiplyingFactor": 1.5, + "description": "Test description", + } + + with patch( + "secops.chronicle.watchlist.chronicle_request", + return_value=expected, + ) as mock_request: + result = create_watchlist( + chronicle_client, + name="watchlist-123", + display_name="My Watchlist", + multiplying_factor=1.5, + description="Test description", + ) + + assert result == expected + + mock_request.assert_called_once_with( + chronicle_client, + method="POST", + endpoint_path="watchlists", + api_version=APIVersion.V1, + json={ + "name": "watchlist-123", + "displayName": "My Watchlist", + "multiplyingFactor": 1.5, + "description": "Test description", + "entityPopulationMechanism": {"manual": {}}, + }, + ) + + +def test_create_watchlist_without_description(chronicle_client): + """Test create_watchlist when description is None.""" + expected = { + "name": "watchlist-123", + "displayName": "My Watchlist", + "multiplyingFactor": 2.0, + "description": None, + } + + with patch( + "secops.chronicle.watchlist.chronicle_request", + return_value=expected, + ) as mock_request: + result = create_watchlist( + chronicle_client, + name="watchlist-123", + display_name="My Watchlist", + multiplying_factor=2.0, + ) + + assert result == expected + + mock_request.assert_called_once_with( + chronicle_client, + method="POST", + endpoint_path="watchlists", + api_version=APIVersion.V1, + json={ + "name": "watchlist-123", + "displayName": "My Watchlist", + "multiplyingFactor": 2.0, + "description": None, + "entityPopulationMechanism": {"manual": {}}, + }, + ) From 1a75c46efa89141336163a310916be7a7b055581 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Mon, 8 Dec 2025 21:36:12 +0000 Subject: [PATCH 23/48] feature: Implement CLI arguments for 
watchlists --- api_module_mapping.md | 8 ++++---- src/secops/chronicle/models.py | 1 + src/secops/cli/cli_client.py | 2 ++ 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/api_module_mapping.md b/api_module_mapping.md index fcd73c5b..d2c7681b 100644 --- a/api_module_mapping.md +++ b/api_module_mapping.md @@ -38,10 +38,10 @@ wrapper module and its respective CLI command (if available). | rules.retrohunts.get | v1 | chronicle.rule_retrohunt.get_retrohunt | | | rules.retrohunts.list | v1 | | | | rules.updateDeployment | v1 | chronicle.rule.enable_rule | secops rule enable | -| watchlists.create | v1 | chronicle.watchlist.create_watchlist | | -| watchlists.delete | v1 | chronicle.watchlist.delete_watchlist | | -| watchlists.get | v1 | chronicle.watchlist.get_watchlist | | -| watchlists.list | v1 | chronicle.watchlist.list_watchlists | | +| watchlists.create | v1 | chronicle.watchlist.create_watchlist | secops watchlist create | +| watchlists.delete | v1 | chronicle.watchlist.delete_watchlist | secops watchlist delete | +| watchlists.get | v1 | chronicle.watchlist.get_watchlist | secops watchlist get | +| watchlists.list | v1 | chronicle.watchlist.list_watchlists | secops watchlist list | | watchlists.patch | v1 | | | | dataAccessLabels.create | v1beta | | | | dataAccessLabels.delete | v1beta | | | diff --git a/src/secops/chronicle/models.py b/src/secops/chronicle/models.py index 229fac30..e17f334f 100644 --- a/src/secops/chronicle/models.py +++ b/src/secops/chronicle/models.py @@ -26,6 +26,7 @@ if sys.version_info >= (3, 11): from enum import StrEnum else: + class StrEnum(str, Enum): """String enum implementation for Python versions before 3.11.""" diff --git a/src/secops/cli/cli_client.py b/src/secops/cli/cli_client.py index ddbfdd61..35776113 100644 --- a/src/secops/cli/cli_client.py +++ b/src/secops/cli/cli_client.py @@ -31,6 +31,7 @@ from secops.cli.commands.dashboard_query import setup_dashboard_query_command from secops.cli.commands.forwarder import setup_forwarder_command from secops.cli.commands.curated_rule import setup_curated_rules_command +from secops.cli.commands.watchlist import setup_watchlist_command from secops.cli.utils.common_args import ( add_common_args, add_chronicle_args, @@ -179,6 +180,7 @@ def build_parser() -> argparse.ArgumentParser: setup_help_command(subparsers) setup_dashboard_command(subparsers) setup_dashboard_query_command(subparsers) + setup_watchlist_command(subparsers) return parser From 599005d3abab2f59c5aa08ef30daa840d528e0dc Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Mon, 8 Dec 2025 21:46:11 +0000 Subject: [PATCH 24/48] feature: Implement CLI arguments for watchlists --- src/secops/cli/commands/watchlist.py | 138 ++++++++++++ tests/cli/test_watchlist_cli_integration.py | 223 ++++++++++++++++++++ 2 files changed, 361 insertions(+) create mode 100644 src/secops/cli/commands/watchlist.py create mode 100644 tests/cli/test_watchlist_cli_integration.py diff --git a/src/secops/cli/commands/watchlist.py b/src/secops/cli/commands/watchlist.py new file mode 100644 index 00000000..689b8b10 --- /dev/null +++ b/src/secops/cli/commands/watchlist.py @@ -0,0 +1,138 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
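The `StrEnum` backport touched in models.py above works because members of a `str` subclass compare equal to plain strings. A small illustration; the `APIVersion` values shown are assumptions for demonstration, and the real enum lives in `secops/chronicle/models.py`:

```python
import sys
from enum import Enum

if sys.version_info >= (3, 11):
    from enum import StrEnum
else:

    class StrEnum(str, Enum):
        """String enum implementation for Python versions before 3.11."""


class APIVersion(StrEnum):
    V1ALPHA = "v1alpha"  # values assumed for illustration
    V1 = "v1"


# Members behave like strings in comparisons and formatting:
assert APIVersion.V1 == "v1"
assert f"version={APIVersion.V1}" == "version=v1"
```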
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Google SecOps CLI watchlist commands""" + +import sys + +from secops.cli.utils.formatters import output_formatter +from secops.cli.utils.common_args import ( + add_time_range_args, + add_pagination_args, +) + + +def setup_watchlist_command(subparsers): + """Setup watchlist command""" + watchlist_parser = subparsers.add_parser( + "watchlist", + help="Manage Chronicle watchlists", + ) + lvl1 = watchlist_parser.add_subparsers( + dest="watchlist_command", help="Watchlist command" + ) + + # list command + list_parser = lvl1.add_parser("list", help="List watchlists") + add_time_range_args(list_parser) + add_pagination_args(list_parser) + list_parser.set_defaults(func=handle_watchlist_list_command) + + # get command + get_parser = lvl1.add_parser("get", help="Get watchlist by ID") + get_parser.add_argument( + "--watchlist-id", + type=str, + help="ID of watchlist to get", + dest="watchlist_id", + required=True, + ) + get_parser.set_defaults(func=handle_watchlist_get_command) + + # delete command + delete_parser = lvl1.add_parser("delete", help="Delete watchlist by ID") + delete_parser.add_argument( + "--watchlist-id", + type=str, + help="ID of the watchlist to delete", + dest="watchlist_id", + required=True, + ) + delete_parser.set_defaults(func=handle_watchlist_delete_command) + + # create command + create_parser = lvl1.add_parser("create", help="Create watchlist") + create_parser.add_argument( + "--name", type=str, help="Watchlist name", dest="name", required=True + ) + create_parser.add_argument( + "--display-name", + type=str, + help="Watchlist display name", + dest="display_name", + required=True, + ) + create_parser.add_argument( + "--multiplying-factor", + type=float, + help="Watchlist multiplying factor", + dest="multiplying_factor", + required=True, + ) + create_parser.add_argument( + "--description", + type=str, + help="Watchlist description", + dest="description", + required=False, + ) + create_parser.set_defaults(func=handle_watchlist_create_command) + + +def handle_watchlist_list_command(args, chronicle): + """List watchlists""" + try: + out = chronicle.list_watchlists( + page_size=getattr(args, "page_size", None), + page_token=getattr(args, "page_token", None), + ) + output_formatter(out, getattr(args, "output", "json")) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error listing watchlists: {e}", file=sys.stderr) + sys.exit(1) + + +def handle_watchlist_get_command(args, chronicle): + """Get watchlist by ID""" + try: + out = chronicle.get_watchlist(args.watchlist_id) + output_formatter(out, getattr(args, "output", "json")) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error getting watchlist: {e}", file=sys.stderr) + sys.exit(1) + + +def handle_watchlist_delete_command(args, chronicle): + """Delete watchlist by ID""" + try: + out = chronicle.delete_watchlist(args.watchlist_id) + output_formatter(out, getattr(args, "output", "json")) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error deleting watchlist: {e}", file=sys.stderr) + sys.exit(1) + + +def 
handle_watchlist_create_command(args, chronicle): + """Create watchlist""" + try: + out = chronicle.create_watchlist( + name=args.name, + display_name=args.display_name, + multiplying_factor=args.multiplying_factor, + description=args.description, + ) + output_formatter(out, getattr(args, "output", "json")) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error creating watchlist: {e}", file=sys.stderr) + sys.exit(1) diff --git a/tests/cli/test_watchlist_cli_integration.py b/tests/cli/test_watchlist_cli_integration.py new file mode 100644 index 00000000..746350bd --- /dev/null +++ b/tests/cli/test_watchlist_cli_integration.py @@ -0,0 +1,223 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""CLI Integration tests for watchlist functionality in Chronicle. + +These tests require valid credentials and API access. +""" + +import json +import subprocess +from datetime import datetime, timezone + +import pytest + + +@pytest.mark.integration +def test_cli_watchlist_list_and_get(cli_env, common_args): + """Test CLI commands for listing and getting watchlists. + + Args: + cli_env: Environment variables for CLI execution. + common_args: Common CLI arguments. + """ + print("\nTesting watchlist list and get commands") + + # 1. List watchlists + print("1. Listing watchlists") + list_cmd = ( + ["secops"] + + common_args + + ["watchlist", "list"] + ) + + list_result = subprocess.run( + list_cmd, + env=cli_env, + capture_output=True, + text=True, + ) + + # Ensure command succeeded + assert list_result.returncode == 0, f"Command failed: {list_result.stderr}" + + # Parse output + data = json.loads(list_result.stdout) + assert isinstance(data, dict), "Expected dict response from watchlist list" + assert "watchlists" in data, "Missing 'watchlists' key in response" + + watchlists = data["watchlists"] + assert isinstance(watchlists, list), "Expected 'watchlists' to be a list" + assert len(watchlists) > 0, "Expected at least one watchlist" + + first_watchlist = watchlists[0] + assert "name" in first_watchlist, "Missing 'name' in watchlist" + assert "displayName" in first_watchlist, "Missing 'displayName' in watchlist" + + # Extract watchlist ID (name is a resource path, ID is last component) + watchlist_name = first_watchlist["name"] + watchlist_id = watchlist_name.split("/")[-1] + display_name = first_watchlist["displayName"] + + print(f"Found watchlist: {display_name} (ID: {watchlist_id})") + + # 2. Get specific watchlist by ID + print("\n2. 
Getting specific watchlist by ID") + get_cmd = ( + ["secops"] + + common_args + + [ + "watchlist", + "get", + "--watchlist-id", + watchlist_id, + ] + ) + + get_result = subprocess.run( + get_cmd, + env=cli_env, + capture_output=True, + text=True, + ) + + assert get_result.returncode == 0, f"Command failed: {get_result.stderr}" + + watchlist_data = json.loads(get_result.stdout) + assert isinstance(watchlist_data, dict), "Expected dict response from watchlist get" + assert watchlist_data.get("name") == watchlist_name, "Watchlist name doesn't match" + assert ( + watchlist_data.get("displayName") == display_name + ), "Watchlist display name doesn't match" + + +@pytest.mark.integration +def test_cli_watchlist_create_and_delete(cli_env, common_args): + """Test CLI commands for creating and deleting a watchlist. + + Args: + cli_env: Environment variables for CLI execution. + common_args: Common CLI arguments. + """ + print("\nTesting watchlist create and delete commands") + + # Use a timestamped name to avoid collisions + ts = datetime.now(timezone.utc).strftime("%Y%m%d%H%M%S") + watchlist_name = f"secops-test-watchlist-{ts}" + display_name = f"SecOps Test Watchlist {ts}" + multiplying_factor = 1.5 + description = "Integration test watchlist" + + # 1. Create watchlist + print("1. Creating watchlist") + create_cmd = ( + ["secops"] + + common_args + + [ + "watchlist", + "create", + "--name", + watchlist_name, + "--display-name", + display_name, + "--multiplying-factor", + str(multiplying_factor), + "--description", + description, + ] + ) + + create_result = subprocess.run( + create_cmd, + env=cli_env, + capture_output=True, + text=True, + ) + + assert create_result.returncode == 0, f"Create failed: {create_result.stderr}" + + created_data = json.loads(create_result.stdout) + assert isinstance(created_data, dict), "Expected dict response from watchlist create" + assert created_data.get("name"), "Missing 'name' in created watchlist" + assert ( + created_data.get("displayName") == display_name + ), "Created watchlist display name mismatch" + + created_name = created_data["name"] + created_id = created_name.split("/")[-1] + print(f"Created watchlist: {display_name} (ID: {created_id})") + + # 2. Get created watchlist to verify + print("\n2. Verifying created watchlist via get command") + get_cmd = ( + ["secops"] + + common_args + + [ + "watchlist", + "get", + "--watchlist-id", + created_id, + ] + ) + + get_result = subprocess.run( + get_cmd, + env=cli_env, + capture_output=True, + text=True, + ) + + assert get_result.returncode == 0, f"Get failed: {get_result.stderr}" + + get_data = json.loads(get_result.stdout) + assert get_data.get("name") == created_name, "Get watchlist name mismatch" + assert ( + get_data.get("displayName") == display_name + ), "Get watchlist display name mismatch" + + # 3. Delete created watchlist + print("\n3. 
Deleting created watchlist")
+    delete_cmd = (
+        ["secops"]
+        + common_args
+        + [
+            "watchlist",
+            "delete",
+            "--watchlist-id",
+            created_id,
+        ]
+    )
+
+    delete_result = subprocess.run(
+        delete_cmd,
+        env=cli_env,
+        capture_output=True,
+        text=True,
+    )
+
+    assert delete_result.returncode == 0, f"Delete failed: {delete_result.stderr}"
+
+    # Response from delete may be empty or contain metadata; just ensure it's valid JSON
+    if delete_result.stdout.strip():
+        delete_data = json.loads(delete_result.stdout)
+        assert isinstance(
+            delete_data, dict
+        ), "Expected dict or empty response from delete"
+
+    print(f"Successfully deleted watchlist {created_id}")
+
+
+if __name__ == "__main__":
+    # Allow running directly
+    pytest.main(["-v", __file__, "-m", "integration"])

From 5e3be2ef0f532ef6b0e1090fae582305a9fe03d7 Mon Sep 17 00:00:00 2001
From: PaperMtn
Date: Tue, 9 Dec 2025 20:57:11 +0000
Subject: [PATCH 25/48] chore: Update READMEs

---
 CLI.md    | 30 ++++++++++++++++++++++++++++++
 README.md | 39 +++++++++++++++++++++++++++++++++++++++
 2 files changed, 69 insertions(+)

diff --git a/CLI.md b/CLI.md
index 63a9efca..670c10c0 100644
--- a/CLI.md
+++ b/CLI.md
@@ -485,6 +485,36 @@ secops parser-extension activate --log-type OKTA --id "1234567890"
 secops parser-extension delete --log-type OKTA --id "1234567890"
 ```
 
+### Watchlist Management
+
+List watchlists:
+
+```bash
+# List all watchlists
+secops watchlist list
+
+# List watchlists with pagination
+secops watchlist list --page-size 50
+```
+
+Get watchlist details:
+
+```bash
+secops watchlist get --watchlist-id "abc-123-def"
+```
+
+Create a new watchlist:
+
+```bash
+secops watchlist create --name "my_watchlist" --display-name "my_watchlist" --description "My watchlist description" --multiplying-factor 1.5
+```
+
+Delete a watchlist:
+
+```bash
+secops watchlist delete --watchlist-id "abc-123-def"
+```
+
 ### Rule Management
 
 List detection rules:
diff --git a/README.md b/README.md
index acaccf99..7b8031e7 100644
--- a/README.md
+++ b/README.md
@@ -1458,6 +1458,45 @@ extension_id = "1234567890"
 chronicle.delete_parser_extension(log_type, extension_id)
 ```
 
+## Watchlist Management
+
+### Creating a Watchlist
+
+Create a new watchlist:
+
+```python
+watchlist = chronicle.create_watchlist(
+    name="my_watchlist",
+    display_name="my_watchlist",
+    multiplying_factor=1.5,
+    description="My new watchlist"
+)
+```
+
+### Deleting a Watchlist
+
+Delete a watchlist by ID:
+
+```python
+chronicle.delete_watchlist("abc-123-def", force=True)
+```
+
+### Getting a Watchlist
+
+Get a watchlist by ID:
+
+```python
+watchlist = chronicle.get_watchlist("abc-123-def")
+```
+
+### List all Watchlists
+
+List all watchlists:
+
+```python
+watchlists = chronicle.list_watchlists()
+```
+
 ## Rule Management
 
 The SDK provides comprehensive support for managing Chronicle detection rules:

From 2fde8b611d04ed9b9883e0c67e6ad62a00799cea Mon Sep 17 00:00:00 2001
From: PaperMtn
Date: Tue, 9 Dec 2025 21:06:09 +0000
Subject: [PATCH 26/48] chore: Fix PyLint issues

---
 src/secops/chronicle/utils/request_utils.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/secops/chronicle/utils/request_utils.py b/src/secops/chronicle/utils/request_utils.py
index d0054404..45dca789 100644
--- a/src/secops/chronicle/utils/request_utils.py
+++ b/src/secops/chronicle/utils/request_utils.py
@@ -137,12 +137,14 @@ def chronicle_request(
         ) from None
 
         raise APIError(
-            f"{base_msg}: status={response.status_code}, response_text={response.text}"
+            f"{base_msg}: 
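The README snippets in PATCH 25 above show single calls; a slightly fuller sketch of listing with the SDK (client setup mirrors this repo's examples; IDs are placeholders). Note that `chronicle_paginated_request` auto-paginates when no `page_size` is given, and its return value does not surface `nextPageToken`:

```python
from secops import SecOpsClient

# Placeholder identifiers; substitute real values.
client = SecOpsClient()
chronicle = client.chronicle(
    customer_id="your-customer-uuid",
    project_id="your-gcp-project",
    region="us",
)

# No page_size: the SDK walks every page and returns all watchlists.
all_watchlists = chronicle.list_watchlists()
for wl in all_watchlists.get("watchlists", []):
    # Resource names look like ".../watchlists/<id>"; the ID is the
    # final path component, as the integration tests above also assume.
    print(wl.get("displayName"), wl.get("name", "").split("/")[-1])

# Explicit page_size: a single page is requested and returned as-is.
first_page = chronicle.list_watchlists(page_size=50)
```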
status={response.status_code}," + f" response_text={response.text}" ) from None if data is None: raise APIError( - f"Expected JSON response from {url} but got non-JSON body: {response.text}" + f"Expected JSON response from {url}" + f" but got non-JSON body: {response.text}" ) return data From 61959a97a58ec793beb7ab1717abe7d7f0f44761 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Wed, 10 Dec 2025 20:59:56 +0000 Subject: [PATCH 27/48] fix: Missing imports --- src/secops/chronicle/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/secops/chronicle/client.py b/src/secops/chronicle/client.py index 0b954ed9..6523f17b 100644 --- a/src/secops/chronicle/client.py +++ b/src/secops/chronicle/client.py @@ -18,7 +18,7 @@ from collections.abc import Iterator from datetime import datetime from enum import Enum -from typing import Any, Literal, Union +from typing import Any, Literal, Union, Optional, Dict from google.auth.transport import requests as google_auth_requests From b40b57a1f03c02184cd456658fbb884e36899601 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Wed, 10 Dec 2025 21:14:21 +0000 Subject: [PATCH 28/48] fix: Missing imports --- src/secops/cli/cli_client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/secops/cli/cli_client.py b/src/secops/cli/cli_client.py index 8570c2b2..ec3620df 100644 --- a/src/secops/cli/cli_client.py +++ b/src/secops/cli/cli_client.py @@ -30,6 +30,7 @@ from secops.cli.commands.search import setup_search_command from secops.cli.commands.stats import setup_stats_command from secops.cli.commands.udm_search import setup_udm_search_view_command +from secops.cli.commands.watchlist import setup_watchlist_command from secops.cli.utils.common_args import add_chronicle_args, add_common_args from secops.cli.utils.config_utils import load_config from secops.exceptions import AuthenticationError, SecOpsError From 19daf69a05f4ef654ed54cd0242e6f10447a5798 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Tue, 2 Dec 2025 14:18:11 +0530 Subject: [PATCH 29/48] feat: added support for log processing pipeline methods --- examples/log_processing_pipelines_example.py | 404 ++++++++++++++++++ src/secops/chronicle/__init__.py | 23 + src/secops/chronicle/client.py | 216 ++++++++++ .../chronicle/log_processing_pipelines.py | 370 ++++++++++++++++ 4 files changed, 1013 insertions(+) create mode 100644 examples/log_processing_pipelines_example.py create mode 100644 src/secops/chronicle/log_processing_pipelines.py diff --git a/examples/log_processing_pipelines_example.py b/examples/log_processing_pipelines_example.py new file mode 100644 index 00000000..9a6a1c02 --- /dev/null +++ b/examples/log_processing_pipelines_example.py @@ -0,0 +1,404 @@ +#!/usr/bin/env python3 +"""Example usage of the Google SecOps SDK for Log Processing Pipelines.""" + +import argparse +import json +import time +import uuid + +from secops import SecOpsClient + + +def get_client(project_id, customer_id, region): + """Initialize and return the Chronicle client. + + Args: + project_id: Google Cloud Project ID. + customer_id: Chronicle Customer ID (UUID). + region: Chronicle region (us or eu). + + Returns: + Chronicle client instance. 
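For a quick interactive run of the pipeline examples that follow, the helper above can be driven directly; a sketch assuming this example module's functions are importable and using placeholder identifiers:

```python
# Placeholders only; substitute a real project ID, customer UUID, and region.
chronicle = get_client(
    project_id="my-gcp-project",
    customer_id="00000000-0000-0000-0000-000000000000",
    region="us",
)

# Each example function below accepts the initialized client.
example_list_pipelines(chronicle)
```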
+ """ + client = SecOpsClient() + chronicle = client.chronicle( + customer_id=customer_id, project_id=project_id, region=region + ) + return chronicle + + +def example_list_pipelines(chronicle): + """Example 1: List Log Processing Pipelines.""" + print("\n=== Example 1: List Log Processing Pipelines ===") + + try: + # List all pipelines + response = chronicle.list_log_processing_pipelines() + pipelines = response.get("logProcessingPipelines", []) + + print(f"\nFound {len(pipelines)} pipeline(s)") + + if pipelines: + print("\nSample pipeline details:") + sample_pipeline = pipelines[0] + print(f"Name: {sample_pipeline.get('name')}") + print(f"Display Name: {sample_pipeline.get('displayName')}") + print(f"Description: {sample_pipeline.get('description', 'N/A')}") + + # Extract pipeline ID from the name + pipeline_id = sample_pipeline.get("name", "").split("/")[-1] + print(f"Pipeline ID: {pipeline_id}") + + # Print processor count + processors = sample_pipeline.get("processors", []) + print(f"Number of processors: {len(processors)}") + else: + print("No pipelines found in your Chronicle instance.") + + except Exception as e: + print(f"Error listing pipelines: {e}") + + +def example_create_and_get_pipeline(chronicle): + """Example 2: Create and Get Pipeline.""" + print("\n=== Example 2: Create and Get Pipeline ===") + + # Generate unique pipeline name + unique_id = str(uuid.uuid4())[:8] + display_name = f"Test Pipeline {unique_id}" + + # Define a simple filter processor pipeline + pipeline_config = { + "displayName": display_name, + "description": "Example pipeline created by SDK", + "processors": [ + { + "filterProcessor": { + "include": {"logMatchType": "LOG_MATCH_TYPE_UNSPECIFIED"}, + "errorMode": "ERROR_MODE_UNSPECIFIED", + } + } + ], + "customMetadata": [ + {"key": "environment", "value": "test"}, + {"key": "created_by", "value": "sdk_example"}, + ], + } + + created_pipeline = None + + try: + # Create the pipeline + print(f"\nCreating pipeline: {display_name}") + created_pipeline = chronicle.create_log_processing_pipeline( + pipeline=pipeline_config + ) + + # Extract pipeline ID from the name + pipeline_id = created_pipeline.get("name", "").split("/")[-1] + + print(f"Pipeline created successfully!") + print(f"Pipeline ID: {pipeline_id}") + print(f"Display Name: {created_pipeline.get('displayName')}") + + # Wait for pipeline to be fully created + time.sleep(2) + + # Get the pipeline to verify it was created + print(f"\nRetrieving pipeline details for ID: {pipeline_id}") + retrieved_pipeline = chronicle.get_log_processing_pipeline(pipeline_id) + + print("Pipeline details retrieved:") + print(f"Name: {retrieved_pipeline.get('name')}") + print(f"Display Name: {retrieved_pipeline.get('displayName')}") + print(f"Description: {retrieved_pipeline.get('description', 'N/A')}") + + except Exception as e: + print(f"Error creating or getting pipeline: {e}") + + finally: + # Clean up: delete the pipeline if it was created + if created_pipeline: + try: + pipeline_id = created_pipeline.get("name", "").split("/")[-1] + print(f"\nCleaning up: Deleting pipeline ID: {pipeline_id}") + chronicle.delete_log_processing_pipeline(pipeline_id) + print("Pipeline deleted successfully") + except Exception as e: + print(f"Warning: Failed to delete test pipeline: {e}") + + +def example_update_pipeline(chronicle): + """Example 3: Update (Patch) Pipeline.""" + print("\n=== Example 3: Update Pipeline ===") + + # Generate unique pipeline name + unique_id = str(uuid.uuid4())[:8] + display_name = f"Test Pipeline {unique_id}" + 
+ # Initial pipeline configuration + pipeline_config = { + "displayName": display_name, + "description": "Original description", + "processors": [ + { + "filterProcessor": { + "include": {"logMatchType": "LOG_MATCH_TYPE_UNSPECIFIED"}, + "errorMode": "ERROR_MODE_UNSPECIFIED", + } + } + ], + } + + created_pipeline = None + + try: + # Create the pipeline + print(f"\nCreating pipeline to update: {display_name}") + created_pipeline = chronicle.create_log_processing_pipeline( + pipeline=pipeline_config + ) + + pipeline_id = created_pipeline.get("name", "").split("/")[-1] + print(f"Pipeline created with ID: {pipeline_id}") + + # Wait for pipeline to be fully created + time.sleep(2) + + # Update the pipeline with new display name and description + updated_pipeline_config = { + "name": created_pipeline.get("name"), + "displayName": f"Updated {display_name}", + "description": "Updated description via SDK", + } + + print("\nUpdating pipeline...") + updated_pipeline = chronicle.patch_log_processing_pipeline( + pipeline_id=pipeline_id, + pipeline=updated_pipeline_config, + update_mask="displayName,description", + ) + + print("Pipeline updated successfully!") + print(f"New Display Name: {updated_pipeline.get('displayName')}") + print(f"New Description: {updated_pipeline.get('description', 'N/A')}") + + except Exception as e: + print(f"Error updating pipeline: {e}") + + finally: + # Clean up: delete the pipeline if it was created + if created_pipeline: + try: + pipeline_id = created_pipeline.get("name", "").split("/")[-1] + print(f"\nCleaning up: Deleting pipeline ID: {pipeline_id}") + chronicle.delete_log_processing_pipeline(pipeline_id) + print("Pipeline deleted successfully") + except Exception as e: + print(f"Warning: Failed to delete test pipeline: {e}") + + +def example_stream_association(chronicle): + """Example 4: Associate and Dissociate Streams.""" + print("\n=== Example 4: Associate and Dissociate Streams ===") + + # Generate unique pipeline name + unique_id = str(uuid.uuid4())[:8] + display_name = f"Test Pipeline {unique_id}" + + # Pipeline configuration + pipeline_config = { + "displayName": display_name, + "description": "Pipeline for stream association example", + "processors": [ + { + "filterProcessor": { + "include": {"logMatchType": "LOG_MATCH_TYPE_UNSPECIFIED"}, + "errorMode": "ERROR_MODE_UNSPECIFIED", + } + } + ], + } + + created_pipeline = None + + try: + # Create the pipeline + print(f"\nCreating pipeline: {display_name}") + created_pipeline = chronicle.create_log_processing_pipeline( + pipeline=pipeline_config + ) + + pipeline_id = created_pipeline.get("name", "").split("/")[-1] + print(f"Pipeline created with ID: {pipeline_id}") + + # Wait for pipeline to be fully created + time.sleep(2) + + # Define streams to associate + # Note: Replace with actual log type or feed ID from environment + streams = [{"logType": "WINEVTLOG"}] + + print("\nAssociating streams with pipeline...") + print(f"Streams: {json.dumps(streams, indent=2)}") + + chronicle.associate_streams(pipeline_id=pipeline_id, streams=streams) + print("Streams associated successfully!") + + # Wait a moment + time.sleep(2) + + # Dissociate the streams + print("\nDissociating streams from pipeline...") + chronicle.dissociate_streams(pipeline_id=pipeline_id, streams=streams) + print("Streams dissociated successfully!") + + except Exception as e: + print(f"Error in stream association operations: {e}") + print( + "Note: Make sure the log type or feed ID exists " + "in your environment." 
+ ) + + finally: + # Clean up: delete the pipeline if it was created + if created_pipeline: + try: + pipeline_id = created_pipeline.get("name", "").split("/")[-1] + print(f"\nCleaning up: Deleting pipeline ID: {pipeline_id}") + chronicle.delete_log_processing_pipeline(pipeline_id) + print("Pipeline deleted successfully") + except Exception as e: + print(f"Warning: Failed to delete test pipeline: {e}") + + +def example_test_pipeline(chronicle): + """Example 5: Test Pipeline with Sample Logs.""" + print("\n=== Example 5: Test Pipeline ===") + + # Define a pipeline configuration to test + pipeline_config = { + "displayName": "Test Pipeline (Not Created)", + "processors": [ + { + "filterProcessor": { + "include": {"logMatchType": "LOG_MATCH_TYPE_UNSPECIFIED"}, + "errorMode": "ERROR_MODE_UNSPECIFIED", + } + } + ], + } + + # Sample input logs + input_logs = [ + {"logText": "Sample log entry 1"}, + {"logText": "Sample log entry 2"}, + ] + + try: + print("\nTesting pipeline configuration...") + print(f"Pipeline: {pipeline_config['displayName']}") + print(f"Number of input logs: {len(input_logs)}") + + result = chronicle.test_pipeline( + pipeline=pipeline_config, input_logs=input_logs + ) + + processed_logs = result.get("logs", []) + print(f"\nProcessed {len(processed_logs)} log(s)") + + if processed_logs: + print("\nFirst processed log:") + print(json.dumps(processed_logs[0], indent=2)) + + except Exception as e: + print(f"Error testing pipeline: {e}") + print( + "Note: This example uses simplified log structure. " + "Actual logs may need more fields." + ) + + +def example_fetch_associated_pipeline(chronicle): + """Example 6: Fetch Pipeline Associated with a Stream.""" + print("\n=== Example 6: Fetch Associated Pipeline ===") + + # Define a stream to query + # Note: Replace with actual log type or feed ID from your environment + stream = {"logType": "WINEVTLOG"} + + try: + print(f"\nFetching pipeline for stream: {json.dumps(stream)}") + result = chronicle.fetch_associated_pipeline(stream=stream) + + if result: + print("\nAssociated pipeline found:") + print(f"Name: {result.get('name')}") + print(f"Display Name: {result.get('displayName')}") + print(f"Description: {result.get('description', 'N/A')}") + else: + print("No pipeline associated with this stream.") + + except Exception as e: + print(f"Error fetching associated pipeline: {e}") + print( + "Note: Make sure the stream exists and has an " + "associated pipeline." + ) + + +# Map of example functions +EXAMPLES = { + "1": example_list_pipelines, + "2": example_create_and_get_pipeline, + "3": example_update_pipeline, + "4": example_stream_association, + "5": example_test_pipeline, + "6": example_fetch_associated_pipeline, +} + + +def main(): + """Main function to run examples.""" + parser = argparse.ArgumentParser( + description="Run Chronicle Log Processing Pipeline examples" + ) + parser.add_argument( + "--project_id", required=True, help="Google Cloud Project ID" + ) + parser.add_argument( + "--customer_id", required=True, help="Chronicle Customer ID (UUID)" + ) + parser.add_argument( + "--region", default="us", help="Chronicle region (us or eu)" + ) + parser.add_argument( + "--example", + "-e", + help=( + "Example number to run (1-6). " + "If not specified, runs all examples." + ), + ) + + args = parser.parse_args() + + # Initialize the client + chronicle = get_client(args.project_id, args.customer_id, args.region) + + if args.example: + if args.example not in EXAMPLES: + print( + f"Invalid example number. 
Available examples: " + f"{', '.join(EXAMPLES.keys())}" + ) + return + EXAMPLES[args.example](chronicle) + else: + # Run all examples in order + for example_num in sorted(EXAMPLES.keys()): + EXAMPLES[example_num](chronicle) + + +if __name__ == "__main__": + main() diff --git a/src/secops/chronicle/__init__.py b/src/secops/chronicle/__init__.py index 69e63a66..b8fe7dc4 100644 --- a/src/secops/chronicle/__init__.py +++ b/src/secops/chronicle/__init__.py @@ -78,6 +78,18 @@ is_valid_log_type, search_log_types, ) +from secops.chronicle.log_processing_pipelines import ( + associate_streams, + create_log_processing_pipeline, + delete_log_processing_pipeline, + dissociate_streams, + fetch_associated_pipeline, + fetch_sample_logs_by_streams, + get_log_processing_pipeline, + list_log_processing_pipelines, + patch_log_processing_pipeline, + test_pipeline, +) from secops.chronicle.models import ( AlertCount, AlertState, @@ -310,6 +322,17 @@ "update_data_table", "update_data_table_rows", "replace_data_table_rows", + # Log Processing Pipelines + "list_log_processing_pipelines", + "get_log_processing_pipeline", + "create_log_processing_pipeline", + "patch_log_processing_pipeline", + "delete_log_processing_pipeline", + "associate_streams", + "dissociate_streams", + "fetch_associated_pipeline", + "fetch_sample_logs_by_streams", + "test_pipeline", # Watchlist "list_watchlists", "get_watchlist", diff --git a/src/secops/chronicle/client.py b/src/secops/chronicle/client.py index 6523f17b..aa5dc39a 100644 --- a/src/secops/chronicle/client.py +++ b/src/secops/chronicle/client.py @@ -114,6 +114,36 @@ ) from secops.chronicle.log_types import is_valid_log_type as _is_valid_log_type from secops.chronicle.log_types import search_log_types as _search_log_types +from secops.chronicle.log_processing_pipelines import ( + associate_streams as _associate_streams, +) +from secops.chronicle.log_processing_pipelines import ( + create_log_processing_pipeline as _create_log_processing_pipeline, +) +from secops.chronicle.log_processing_pipelines import ( + delete_log_processing_pipeline as _delete_log_processing_pipeline, +) +from secops.chronicle.log_processing_pipelines import ( + dissociate_streams as _dissociate_streams, +) +from secops.chronicle.log_processing_pipelines import ( + fetch_associated_pipeline as _fetch_associated_pipeline, +) +from secops.chronicle.log_processing_pipelines import ( + fetch_sample_logs_by_streams as _fetch_sample_logs_by_streams, +) +from secops.chronicle.log_processing_pipelines import ( + get_log_processing_pipeline as _get_log_processing_pipeline, +) +from secops.chronicle.log_processing_pipelines import ( + list_log_processing_pipelines as _list_log_processing_pipelines, +) +from secops.chronicle.log_processing_pipelines import ( + patch_log_processing_pipeline as _patch_log_processing_pipeline, +) +from secops.chronicle.log_processing_pipelines import ( + test_pipeline as _test_pipeline, +) from secops.chronicle.models import ( APIVersion, CaseList, @@ -1391,6 +1421,192 @@ def delete_feed( """ return _delete_feed(self, feed_id, api_version) + # Log Processing Pipeline methods + + def list_log_processing_pipelines( + self, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + filter_expr: Optional[str] = None, + ) -> Dict[str, Any]: + """Lists log processing pipelines. + + Args: + page_size: Maximum number of pipelines to return. + page_token: Page token for pagination. + filter_expr: Filter expression to restrict results. 
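Unlike the watchlist helper, this method returns the raw response, so `nextPageToken` is available for manual paging. A usage sketch (the filter string is illustrative; the API accepts AIP-160 expressions per the module docstrings later in this patch):

```python
def print_pipelines(chronicle, name_filter: str) -> None:
    """Page through pipelines, printing display name and resource name."""
    token = None
    while True:
        page = chronicle.list_log_processing_pipelines(
            page_size=25,
            page_token=token,
            filter_expr=name_filter,  # illustrative AIP-160 expression
        )
        for pipeline in page.get("logProcessingPipelines", []):
            print(pipeline.get("displayName"), pipeline.get("name"))
        token = page.get("nextPageToken")
        if not token:
            break
```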
+ + Returns: + Dictionary containing pipelines and pagination info. + + Raises: + APIError: If the API request fails. + """ + return _list_log_processing_pipelines( + self, page_size, page_token, filter_expr + ) + + def get_log_processing_pipeline(self, pipeline_id: str) -> Dict[str, Any]: + """Gets a log processing pipeline by ID. + + Args: + pipeline_id: ID of the pipeline to retrieve. + + Returns: + Dictionary containing pipeline information. + + Raises: + APIError: If the API request fails. + """ + return _get_log_processing_pipeline(self, pipeline_id) + + def create_log_processing_pipeline( + self, + pipeline: Dict[str, Any], + pipeline_id: Optional[str] = None, + ) -> Dict[str, Any]: + """Creates a new log processing pipeline. + + Args: + pipeline: Pipeline configuration dict. + pipeline_id: Optional ID for the pipeline. + + Returns: + Dictionary containing the created pipeline. + + Raises: + APIError: If the API request fails. + """ + return _create_log_processing_pipeline(self, pipeline, pipeline_id) + + def patch_log_processing_pipeline( + self, + pipeline_id: str, + pipeline: Dict[str, Any], + update_mask: Optional[str] = None, + ) -> Dict[str, Any]: + """Updates a log processing pipeline. + + Args: + pipeline_id: ID of the pipeline to update. + pipeline: Pipeline configuration with fields to update. + update_mask: Optional comma-separated list of fields. + + Returns: + Dictionary containing the updated pipeline. + + Raises: + APIError: If the API request fails. + """ + return _patch_log_processing_pipeline( + self, pipeline_id, pipeline, update_mask + ) + + def delete_log_processing_pipeline( + self, pipeline_id: str, etag: Optional[str] = None + ) -> Dict[str, Any]: + """Deletes a log processing pipeline. + + Args: + pipeline_id: ID of the pipeline to delete. + etag: Optional etag for optimistic concurrency control. + + Returns: + Empty dictionary on success. + + Raises: + APIError: If the API request fails. + """ + return _delete_log_processing_pipeline(self, pipeline_id, etag) + + def associate_streams( + self, pipeline_id: str, streams: List[Dict[str, Any]] + ) -> Dict[str, Any]: + """Associates streams with a pipeline. + + Args: + pipeline_id: ID of the pipeline. + streams: List of stream dicts. + + Returns: + Empty dictionary on success. + + Raises: + APIError: If the API request fails. + """ + return _associate_streams(self, pipeline_id, streams) + + def dissociate_streams( + self, pipeline_id: str, streams: List[Dict[str, Any]] + ) -> Dict[str, Any]: + """Dissociates streams from a pipeline. + + Args: + pipeline_id: ID of the pipeline. + streams: List of stream dicts. + + Returns: + Empty dictionary on success. + + Raises: + APIError: If the API request fails. + """ + return _dissociate_streams(self, pipeline_id, streams) + + def fetch_associated_pipeline( + self, stream: Dict[str, Any] + ) -> Dict[str, Any]: + """Fetches the pipeline associated with a stream. + + Args: + stream: Stream dict (logType or feedId). + + Returns: + Dictionary containing the associated pipeline. + + Raises: + APIError: If the API request fails. + """ + return _fetch_associated_pipeline(self, stream) + + def fetch_sample_logs_by_streams( + self, + streams: List[Dict[str, Any]], + sample_logs_count: Optional[int] = None, + ) -> Dict[str, Any]: + """Fetches sample logs for specified streams. + + Args: + streams: List of stream dicts. + sample_logs_count: Number of sample logs per stream. + + Returns: + Dictionary containing sample logs. + + Raises: + APIError: If the API request fails. 
+ """ + return _fetch_sample_logs_by_streams(self, streams, sample_logs_count) + + def test_pipeline( + self, + pipeline: Dict[str, Any], + input_logs: List[Dict[str, Any]], + ) -> Dict[str, Any]: + """Tests a pipeline with input logs. + + Args: + pipeline: Pipeline configuration to test. + input_logs: List of log objects to process. + + Returns: + Dictionary containing processed logs. + + Raises: + APIError: If the API request fails. + """ + return _test_pipeline(self, pipeline, input_logs) + def list_rules( self, view: str | None = "FULL", diff --git a/src/secops/chronicle/log_processing_pipelines.py b/src/secops/chronicle/log_processing_pipelines.py new file mode 100644 index 00000000..b3ce3af0 --- /dev/null +++ b/src/secops/chronicle/log_processing_pipelines.py @@ -0,0 +1,370 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Provides log processing pipeline management for Chronicle.""" + +from typing import Any, Dict, List, Optional + +from secops.exceptions import APIError + + +def list_log_processing_pipelines( + client, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + filter_expr: Optional[str] = None, +) -> Dict[str, Any]: + """Lists log processing pipelines. + + Args: + client: ChronicleClient instance. + page_size: Maximum number of pipelines to return. If not + specified, server determines the number. + page_token: Page token from a previous list call to retrieve + the next page. + filter_expr: Filter expression (AIP-160) to restrict results. + + Returns: + Dictionary containing: + - logProcessingPipelines: List of pipeline dicts + - nextPageToken: Token for next page (if more results exist) + + Raises: + APIError: If the API request fails. + """ + url = f"{client.base_url}/{client.instance_id}/logProcessingPipelines" + + params: Dict[str, Any] = {} + if page_size is not None: + params["pageSize"] = page_size + if page_token: + params["pageToken"] = page_token + if filter_expr: + params["filter"] = filter_expr + + response = client.session.get(url, params=params) + if response.status_code != 200: + raise APIError( + f"Failed to list log processing pipelines: {response.text}" + ) + + return response.json() + + +def get_log_processing_pipeline(client, pipeline_id: str) -> Dict[str, Any]: + """Gets a log processing pipeline by ID. + + Args: + client: ChronicleClient instance. + pipeline_id: ID of the pipeline to retrieve. + + Returns: + Dictionary containing pipeline information. + + Raises: + APIError: If the API request fails. + """ + url = ( + f"{client.base_url}/{client.instance_id}/" + f"logProcessingPipelines/{pipeline_id}" + ) + + response = client.session.get(url) + if response.status_code != 200: + raise APIError( + f"Failed to get log processing pipeline: {response.text}" + ) + + return response.json() + + +def create_log_processing_pipeline( + client, + pipeline: Dict[str, Any], + pipeline_id: Optional[str] = None, +) -> Dict[str, Any]: + """Creates a new log processing pipeline. 
+ + Args: + client: ChronicleClient instance. + pipeline: LogProcessingPipeline configuration dict containing: + - displayName: Display name for the pipeline + - description: Optional description + - processors: List of processor configurations + - customMetadata: Optional custom metadata list + pipeline_id: Optional ID for the pipeline. If omitted, server + assigns a unique ID. + + Returns: + Dictionary containing the created pipeline. + + Raises: + APIError: If the API request fails. + """ + url = f"{client.base_url}/{client.instance_id}/logProcessingPipelines" + + params: Dict[str, Any] = {} + if pipeline_id: + params["logProcessingPipelineId"] = pipeline_id + + response = client.session.post(url, json=pipeline, params=params) + if response.status_code != 200: + raise APIError( + f"Failed to create log processing pipeline: {response.text}" + ) + + return response.json() + + +def patch_log_processing_pipeline( + client, + pipeline_id: str, + pipeline: Dict[str, Any], + update_mask: Optional[str] = None, +) -> Dict[str, Any]: + """Updates a log processing pipeline. + + Args: + client: ChronicleClient instance. + pipeline_id: ID of the pipeline to update. + pipeline: LogProcessingPipeline configuration dict with fields + to update. + update_mask: Optional comma-separated list of fields to update + (e.g., "displayName,description"). If not included, all + fields with default/non-default values will be overwritten. + + Returns: + Dictionary containing the updated pipeline. + + Raises: + APIError: If the API request fails. + """ + url = ( + f"{client.base_url}/{client.instance_id}/" + f"logProcessingPipelines/{pipeline_id}" + ) + + params: Dict[str, Any] = {} + if update_mask: + params["updateMask"] = update_mask + + response = client.session.patch(url, json=pipeline, params=params) + if response.status_code != 200: + raise APIError( + f"Failed to patch log processing pipeline: {response.text}" + ) + + return response.json() + + +def delete_log_processing_pipeline( + client, pipeline_id: str, etag: Optional[str] = None +) -> Dict[str, Any]: + """Deletes a log processing pipeline. + + Args: + client: ChronicleClient instance. + pipeline_id: ID of the pipeline to delete. + etag: Optional etag value. If provided, deletion only succeeds + if the resource's current etag matches this value. + + Returns: + Empty dictionary on successful deletion. + + Raises: + APIError: If the API request fails. + """ + url = ( + f"{client.base_url}/{client.instance_id}/" + f"logProcessingPipelines/{pipeline_id}" + ) + + params: Dict[str, Any] = {} + if etag: + params["etag"] = etag + + response = client.session.delete(url, params=params) + if response.status_code != 200: + raise APIError( + f"Failed to delete log processing pipeline: {response.text}" + ) + + return response.json() + + +def associate_streams( + client, pipeline_id: str, streams: List[Dict[str, Any]] +) -> Dict[str, Any]: + """Associates streams with a log processing pipeline. + + Args: + client: ChronicleClient instance. + pipeline_id: ID of the pipeline to associate streams with. + streams: List of stream dicts. Each stream can be: + - {"logType": "LOG_TYPE_NAME"} or + - {"feedId": "FEED_ID"} + + Returns: + Empty dictionary on success. + + Raises: + APIError: If the API request fails. 
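Because delete accepts an `etag`, a read-then-delete sequence gives optimistic concurrency. A sketch, assuming the pipeline payload carries an `etag` field (implied by the parameter, not shown in this patch):

```python
def delete_pipeline_if_unchanged(chronicle, pipeline_id: str) -> None:
    """Delete only if nobody modified the pipeline since our read."""
    pipeline = chronicle.get_log_processing_pipeline(pipeline_id)
    # The "etag" field name is an assumption based on the delete parameter.
    chronicle.delete_log_processing_pipeline(
        pipeline_id, etag=pipeline.get("etag")
    )
```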
+ """ + url = ( + f"{client.base_url}/{client.instance_id}/" + f"logProcessingPipelines/{pipeline_id}:associateStreams" + ) + + body = {"streams": streams} + + response = client.session.post(url, json=body) + if response.status_code != 200: + raise APIError(f"Failed to associate streams: {response.text}") + + return response.json() + + +def dissociate_streams( + client, pipeline_id: str, streams: List[Dict[str, Any]] +) -> Dict[str, Any]: + """Dissociates streams from a log processing pipeline. + + Args: + client: ChronicleClient instance. + pipeline_id: ID of the pipeline to dissociate streams from. + streams: List of stream dicts. Each stream can be: + - {"logType": "LOG_TYPE_NAME"} or + - {"feedId": "FEED_ID"} + + Returns: + Empty dictionary on success. + + Raises: + APIError: If the API request fails. + """ + url = ( + f"{client.base_url}/{client.instance_id}/" + f"logProcessingPipelines/{pipeline_id}:dissociateStreams" + ) + + body = {"streams": streams} + + response = client.session.post(url, json=body) + if response.status_code != 200: + raise APIError(f"Failed to dissociate streams: {response.text}") + + return response.json() + + +def fetch_associated_pipeline(client, stream: Dict[str, Any]) -> Dict[str, Any]: + """Fetches the pipeline associated with a specific stream. + + Args: + client: ChronicleClient instance. + stream: Stream dict, can be: + - {"logType": "LOG_TYPE_NAME"} or + - {"feedId": "FEED_ID"} + + Returns: + Dictionary containing the associated pipeline. + + Raises: + APIError: If the API request fails. + """ + url = ( + f"{client.base_url}/{client.instance_id}/" + f"logProcessingPipelines:fetchAssociatedPipeline" + ) + + params = {"stream": stream} + + response = client.session.get(url, params=params) + if response.status_code != 200: + raise APIError(f"Failed to fetch associated pipeline: {response.text}") + + return response.json() + + +def fetch_sample_logs_by_streams( + client, + streams: List[Dict[str, Any]], + sample_logs_count: Optional[int] = None, +) -> Dict[str, Any]: + """Fetches sample logs for specified streams. + + Args: + client: ChronicleClient instance. + streams: List of stream dicts. Each stream can be: + - {"logType": "LOG_TYPE_NAME"} or + - {"feedId": "FEED_ID"} + sample_logs_count: Number of sample logs to fetch per stream. + Default is 100. Max is 1000 or 4MB per stream. + + Returns: + Dictionary containing: + - logs: List of log objects + - sampleLogs: List of base64-encoded log strings (deprecated) + + Raises: + APIError: If the API request fails. + """ + url = ( + f"{client.base_url}/{client.instance_id}/" + f"logProcessingPipelines:fetchSampleLogsByStreams" + ) + + body: Dict[str, Any] = {"streams": streams} + if sample_logs_count is not None: + body["sampleLogsCount"] = sample_logs_count + + response = client.session.post(url, json=body) + if response.status_code != 200: + raise APIError( + f"Failed to fetch sample logs by streams: {response.text}" + ) + + return response.json() + + +def test_pipeline( + client, + pipeline: Dict[str, Any], + input_logs: List[Dict[str, Any]], +) -> Dict[str, Any]: + """Tests a log processing pipeline with input logs. + + Args: + client: ChronicleClient instance. + pipeline: LogProcessingPipeline configuration to test. + input_logs: List of log objects to process through the pipeline. + + Returns: + Dictionary containing: + - logs: List of processed log objects + + Raises: + APIError: If the API request fails. 
+ """ + url = ( + f"{client.base_url}/{client.instance_id}/" + f"logProcessingPipelines:testPipeline" + ) + + body = {"logProcessingPipeline": pipeline, "inputLogs": input_logs} + + response = client.session.post(url, json=body) + if response.status_code != 200: + raise APIError(f"Failed to test pipeline: {response.text}") + + return response.json() From 8a2f74a76f87a9778dc036272a47ebe2db0e0375 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Thu, 11 Dec 2025 11:51:49 +0530 Subject: [PATCH 30/48] chore: updated for p310 syntax --- src/secops/chronicle/client.py | 50 ++++++++-------- .../chronicle/log_processing_pipelines.py | 60 +++++++++---------- 2 files changed, 55 insertions(+), 55 deletions(-) diff --git a/src/secops/chronicle/client.py b/src/secops/chronicle/client.py index aa5dc39a..01c8aa5a 100644 --- a/src/secops/chronicle/client.py +++ b/src/secops/chronicle/client.py @@ -1425,10 +1425,10 @@ def delete_feed( def list_log_processing_pipelines( self, - page_size: Optional[int] = None, - page_token: Optional[str] = None, - filter_expr: Optional[str] = None, - ) -> Dict[str, Any]: + page_size: int | None = None, + page_token: str | None = None, + filter_expr: str | None = None, + ) -> dict[str, Any]: """Lists log processing pipelines. Args: @@ -1446,7 +1446,7 @@ def list_log_processing_pipelines( self, page_size, page_token, filter_expr ) - def get_log_processing_pipeline(self, pipeline_id: str) -> Dict[str, Any]: + def get_log_processing_pipeline(self, pipeline_id: str) -> dict[str, Any]: """Gets a log processing pipeline by ID. Args: @@ -1462,9 +1462,9 @@ def get_log_processing_pipeline(self, pipeline_id: str) -> Dict[str, Any]: def create_log_processing_pipeline( self, - pipeline: Dict[str, Any], - pipeline_id: Optional[str] = None, - ) -> Dict[str, Any]: + pipeline: dict[str, Any], + pipeline_id: str | None = None, + ) -> dict[str, Any]: """Creates a new log processing pipeline. Args: @@ -1482,9 +1482,9 @@ def create_log_processing_pipeline( def patch_log_processing_pipeline( self, pipeline_id: str, - pipeline: Dict[str, Any], - update_mask: Optional[str] = None, - ) -> Dict[str, Any]: + pipeline: dict[str, Any], + update_mask: str | None = None, + ) -> dict[str, Any]: """Updates a log processing pipeline. Args: @@ -1503,8 +1503,8 @@ def patch_log_processing_pipeline( ) def delete_log_processing_pipeline( - self, pipeline_id: str, etag: Optional[str] = None - ) -> Dict[str, Any]: + self, pipeline_id: str, etag: str | None = None + ) -> dict[str, Any]: """Deletes a log processing pipeline. Args: @@ -1520,8 +1520,8 @@ def delete_log_processing_pipeline( return _delete_log_processing_pipeline(self, pipeline_id, etag) def associate_streams( - self, pipeline_id: str, streams: List[Dict[str, Any]] - ) -> Dict[str, Any]: + self, pipeline_id: str, streams: list[dict[str, Any]] + ) -> dict[str, Any]: """Associates streams with a pipeline. Args: @@ -1537,8 +1537,8 @@ def associate_streams( return _associate_streams(self, pipeline_id, streams) def dissociate_streams( - self, pipeline_id: str, streams: List[Dict[str, Any]] - ) -> Dict[str, Any]: + self, pipeline_id: str, streams: list[dict[str, Any]] + ) -> dict[str, Any]: """Dissociates streams from a pipeline. 
Args: @@ -1554,8 +1554,8 @@ def dissociate_streams( return _dissociate_streams(self, pipeline_id, streams) def fetch_associated_pipeline( - self, stream: Dict[str, Any] - ) -> Dict[str, Any]: + self, stream: dict[str, Any] + ) -> dict[str, Any]: """Fetches the pipeline associated with a stream. Args: @@ -1571,9 +1571,9 @@ def fetch_associated_pipeline( def fetch_sample_logs_by_streams( self, - streams: List[Dict[str, Any]], - sample_logs_count: Optional[int] = None, - ) -> Dict[str, Any]: + streams: list[dict[str, Any]], + sample_logs_count: int | None = None, + ) -> dict[str, Any]: """Fetches sample logs for specified streams. Args: @@ -1590,9 +1590,9 @@ def fetch_sample_logs_by_streams( def test_pipeline( self, - pipeline: Dict[str, Any], - input_logs: List[Dict[str, Any]], - ) -> Dict[str, Any]: + pipeline: dict[str, Any], + input_logs: list[dict[str, Any]], + ) -> dict[str, Any]: """Tests a pipeline with input logs. Args: diff --git a/src/secops/chronicle/log_processing_pipelines.py b/src/secops/chronicle/log_processing_pipelines.py index b3ce3af0..815f06b3 100644 --- a/src/secops/chronicle/log_processing_pipelines.py +++ b/src/secops/chronicle/log_processing_pipelines.py @@ -14,17 +14,17 @@ # """Provides log processing pipeline management for Chronicle.""" -from typing import Any, Dict, List, Optional +from typing import Any from secops.exceptions import APIError def list_log_processing_pipelines( client, - page_size: Optional[int] = None, - page_token: Optional[str] = None, - filter_expr: Optional[str] = None, -) -> Dict[str, Any]: + page_size: int | None = None, + page_token: str | None = None, + filter_expr: str | None = None, +) -> dict[str, Any]: """Lists log processing pipelines. Args: @@ -45,7 +45,7 @@ def list_log_processing_pipelines( """ url = f"{client.base_url}/{client.instance_id}/logProcessingPipelines" - params: Dict[str, Any] = {} + params: dict[str, Any] = {} if page_size is not None: params["pageSize"] = page_size if page_token: @@ -62,7 +62,7 @@ def list_log_processing_pipelines( return response.json() -def get_log_processing_pipeline(client, pipeline_id: str) -> Dict[str, Any]: +def get_log_processing_pipeline(client, pipeline_id: str) -> dict[str, Any]: """Gets a log processing pipeline by ID. Args: @@ -91,9 +91,9 @@ def get_log_processing_pipeline(client, pipeline_id: str) -> Dict[str, Any]: def create_log_processing_pipeline( client, - pipeline: Dict[str, Any], - pipeline_id: Optional[str] = None, -) -> Dict[str, Any]: + pipeline: dict[str, Any], + pipeline_id: str | None = None, +) -> dict[str, Any]: """Creates a new log processing pipeline. Args: @@ -114,7 +114,7 @@ def create_log_processing_pipeline( """ url = f"{client.base_url}/{client.instance_id}/logProcessingPipelines" - params: Dict[str, Any] = {} + params: dict[str, Any] = {} if pipeline_id: params["logProcessingPipelineId"] = pipeline_id @@ -130,9 +130,9 @@ def create_log_processing_pipeline( def patch_log_processing_pipeline( client, pipeline_id: str, - pipeline: Dict[str, Any], - update_mask: Optional[str] = None, -) -> Dict[str, Any]: + pipeline: dict[str, Any], + update_mask: str | None = None, +) -> dict[str, Any]: """Updates a log processing pipeline. 
Args: @@ -155,7 +155,7 @@ def patch_log_processing_pipeline( f"logProcessingPipelines/{pipeline_id}" ) - params: Dict[str, Any] = {} + params: dict[str, Any] = {} if update_mask: params["updateMask"] = update_mask @@ -169,8 +169,8 @@ def patch_log_processing_pipeline( def delete_log_processing_pipeline( - client, pipeline_id: str, etag: Optional[str] = None -) -> Dict[str, Any]: + client, pipeline_id: str, etag: str | None = None +) -> dict[str, Any]: """Deletes a log processing pipeline. Args: @@ -190,7 +190,7 @@ def delete_log_processing_pipeline( f"logProcessingPipelines/{pipeline_id}" ) - params: Dict[str, Any] = {} + params: dict[str, Any] = {} if etag: params["etag"] = etag @@ -204,8 +204,8 @@ def delete_log_processing_pipeline( def associate_streams( - client, pipeline_id: str, streams: List[Dict[str, Any]] -) -> Dict[str, Any]: + client, pipeline_id: str, streams: list[dict[str, Any]] +) -> dict[str, Any]: """Associates streams with a log processing pipeline. Args: @@ -236,8 +236,8 @@ def associate_streams( def dissociate_streams( - client, pipeline_id: str, streams: List[Dict[str, Any]] -) -> Dict[str, Any]: + client, pipeline_id: str, streams: list[dict[str, Any]] +) -> dict[str, Any]: """Dissociates streams from a log processing pipeline. Args: @@ -267,7 +267,7 @@ def dissociate_streams( return response.json() -def fetch_associated_pipeline(client, stream: Dict[str, Any]) -> Dict[str, Any]: +def fetch_associated_pipeline(client, stream: dict[str, Any]) -> dict[str, Any]: """Fetches the pipeline associated with a specific stream. Args: @@ -298,9 +298,9 @@ def fetch_associated_pipeline(client, stream: Dict[str, Any]) -> Dict[str, Any]: def fetch_sample_logs_by_streams( client, - streams: List[Dict[str, Any]], - sample_logs_count: Optional[int] = None, -) -> Dict[str, Any]: + streams: list[dict[str, Any]], + sample_logs_count: int | None = None, +) -> dict[str, Any]: """Fetches sample logs for specified streams. Args: @@ -324,7 +324,7 @@ def fetch_sample_logs_by_streams( f"logProcessingPipelines:fetchSampleLogsByStreams" ) - body: Dict[str, Any] = {"streams": streams} + body: dict[str, Any] = {"streams": streams} if sample_logs_count is not None: body["sampleLogsCount"] = sample_logs_count @@ -339,9 +339,9 @@ def fetch_sample_logs_by_streams( def test_pipeline( client, - pipeline: Dict[str, Any], - input_logs: List[Dict[str, Any]], -) -> Dict[str, Any]: + pipeline: dict[str, Any], + input_logs: list[dict[str, Any]], +) -> dict[str, Any]: """Tests a log processing pipeline with input logs. 
Args: From fd3d470916a2e3c374c21ffa5e7d5f5f21bf1cb2 Mon Sep 17 00:00:00 2001 From: Michel Oosterhof Date: Tue, 9 Dec 2025 09:21:59 +0800 Subject: [PATCH 31/48] Fix: Display help message for commands without arguments --- src/secops/cli/commands/config.py | 3 ++- src/secops/cli/commands/curated_rule.py | 13 ++++++++++--- src/secops/cli/commands/dashboard.py | 4 +++- src/secops/cli/commands/dashboard_query.py | 4 +++- src/secops/cli/commands/data_table.py | 1 + src/secops/cli/commands/export.py | 1 + src/secops/cli/commands/feed.py | 1 + src/secops/cli/commands/forwarder.py | 3 +++ src/secops/cli/commands/log.py | 3 ++- src/secops/cli/commands/parser.py | 1 + src/secops/cli/commands/parser_extension.py | 3 ++- src/secops/cli/commands/reference_list.py | 1 + src/secops/cli/commands/rule.py | 1 + src/secops/cli/commands/rule_exclusion.py | 1 + 14 files changed, 32 insertions(+), 8 deletions(-) diff --git a/src/secops/cli/commands/config.py b/src/secops/cli/commands/config.py index 4ac10859..a8d76e8a 100644 --- a/src/secops/cli/commands/config.py +++ b/src/secops/cli/commands/config.py @@ -33,8 +33,9 @@ def setup_config_command(subparsers): "config", help="Manage CLI configuration" ) config_subparsers = config_parser.add_subparsers( - help="Config command", required=True + help="Config command" ) + config_parser.set_defaults(func=lambda args: config_parser.print_help()) # Set config command set_parser = config_subparsers.add_parser( diff --git a/src/secops/cli/commands/curated_rule.py b/src/secops/cli/commands/curated_rule.py index cc19ae51..872c7596 100644 --- a/src/secops/cli/commands/curated_rule.py +++ b/src/secops/cli/commands/curated_rule.py @@ -96,7 +96,8 @@ def setup_curated_rules_command(subparsers): # ---- rule-set ---- rule_set = lvl1.add_parser("rule-set", help="Manage curated rule sets") - rule_set_subparser = rule_set.add_subparsers(dest="rset_cmd", required=True) + rule_set_subparser = rule_set.add_subparsers(dest="rset_cmd") + rule_set.set_defaults(func=lambda args, _: rule_set.print_help()) rule_set_list = rule_set_subparser.add_parser( "list", help="List curated rule sets" @@ -117,7 +118,10 @@ def setup_curated_rules_command(subparsers): "rule-set-category", help="Manage curated rule set categories" ) rule_set_cat_subparser = rule_set_cat.add_subparsers( - dest="rcat_cmd", required=True + dest="rcat_cmd" + ) + rule_set_cat.set_defaults( + func=lambda args, _: rule_set_cat.print_help() ) rule_set_cat_list = rule_set_cat_subparser.add_parser( @@ -141,7 +145,10 @@ def setup_curated_rules_command(subparsers): "rule-set-deployment", help="Manage curated rule set deployments" ) rule_set_deployment_subparser = rule_set_deployment.add_subparsers( - dest="rdep_cmd", required=True + dest="rdep_cmd" + ) + rule_set_deployment.set_defaults( + func=lambda args, _: rule_set_deployment.print_help() ) rule_set_deployment_list = rule_set_deployment_subparser.add_parser( diff --git a/src/secops/cli/commands/dashboard.py b/src/secops/cli/commands/dashboard.py index db34d0c6..3331ab39 100644 --- a/src/secops/cli/commands/dashboard.py +++ b/src/secops/cli/commands/dashboard.py @@ -30,7 +30,9 @@ def setup_dashboard_command(subparsers): dashboard_subparsers = dashboard_parser.add_subparsers( dest="dashboard_command", help="Dashboard command to execute", - required=True, + ) + dashboard_parser.set_defaults( + func=lambda args, _: dashboard_parser.print_help() ) # List dashboards diff --git a/src/secops/cli/commands/dashboard_query.py b/src/secops/cli/commands/dashboard_query.py index 
7078082e..829b4925 100644 --- a/src/secops/cli/commands/dashboard_query.py +++ b/src/secops/cli/commands/dashboard_query.py @@ -28,7 +28,9 @@ def setup_dashboard_query_command(subparsers): dashboard_query_subparsers = dashboard_query_parser.add_subparsers( dest="dashboard_query_command", help="Dashboard query command to execute", - required=True, + ) + dashboard_query_parser.set_defaults( + func=lambda args, _: dashboard_query_parser.print_help() ) # Execute query diff --git a/src/secops/cli/commands/data_table.py b/src/secops/cli/commands/data_table.py index 3ef8b123..3190126c 100644 --- a/src/secops/cli/commands/data_table.py +++ b/src/secops/cli/commands/data_table.py @@ -27,6 +27,7 @@ def setup_data_table_command(subparsers): dt_subparsers = dt_parser.add_subparsers( dest="dt_command", help="Data table command" ) + dt_parser.set_defaults(func=lambda args, _: dt_parser.print_help()) # List data tables command list_parser = dt_subparsers.add_parser("list", help="List data tables") diff --git a/src/secops/cli/commands/export.py b/src/secops/cli/commands/export.py index db43ace3..5ffd2f28 100644 --- a/src/secops/cli/commands/export.py +++ b/src/secops/cli/commands/export.py @@ -30,6 +30,7 @@ def setup_export_command(subparsers): export_subparsers = export_parser.add_subparsers( dest="export_command", help="Export command" ) + export_parser.set_defaults(func=lambda args, _: export_parser.print_help()) # List available log types command log_types_parser = export_subparsers.add_parser( diff --git a/src/secops/cli/commands/feed.py b/src/secops/cli/commands/feed.py index 3e25016a..c15a6022 100644 --- a/src/secops/cli/commands/feed.py +++ b/src/secops/cli/commands/feed.py @@ -25,6 +25,7 @@ def setup_feed_command(subparsers): feed_subparsers = feed_parser.add_subparsers( dest="feed_command", help="Feed command" ) + feed_parser.set_defaults(func=lambda args, _: feed_parser.print_help()) # List feeds command list_parser = feed_subparsers.add_parser("list", help="List feeds") diff --git a/src/secops/cli/commands/forwarder.py b/src/secops/cli/commands/forwarder.py index 0836fc4a..f441d076 100644 --- a/src/secops/cli/commands/forwarder.py +++ b/src/secops/cli/commands/forwarder.py @@ -29,6 +29,9 @@ def setup_forwarder_command(subparsers): forwarder_subparsers = forwarder_parser.add_subparsers( dest="forwarder_command", help="Forwarder command" ) + forwarder_parser.set_defaults( + func=lambda args, _: forwarder_parser.print_help() + ) # Create forwarder command create_parser = forwarder_subparsers.add_parser( diff --git a/src/secops/cli/commands/log.py b/src/secops/cli/commands/log.py index 8bcd693d..044759d9 100644 --- a/src/secops/cli/commands/log.py +++ b/src/secops/cli/commands/log.py @@ -24,8 +24,9 @@ def setup_log_command(subparsers): """Set up the log command parser.""" log_parser = subparsers.add_parser("log", help="Ingest logs") log_subparsers = log_parser.add_subparsers( - help="Log command", required=True + help="Log command" ) + log_parser.set_defaults(func=lambda args, _: log_parser.print_help()) # Ingest log command ingest_parser = log_subparsers.add_parser("ingest", help="Ingest raw logs") diff --git a/src/secops/cli/commands/parser.py b/src/secops/cli/commands/parser.py index 1e8ffd82..a9574bea 100644 --- a/src/secops/cli/commands/parser.py +++ b/src/secops/cli/commands/parser.py @@ -18,6 +18,7 @@ def setup_parser_command(subparsers): parser_subparsers = parser_parser.add_subparsers( dest="parser_command", help="Parser command" ) + parser_parser.set_defaults(func=lambda args, _: 
parser_parser.print_help()) # --- Activate Parser Command --- activate_parser_sub = parser_subparsers.add_parser( diff --git a/src/secops/cli/commands/parser_extension.py b/src/secops/cli/commands/parser_extension.py index 4e542dff..c6cdabb0 100644 --- a/src/secops/cli/commands/parser_extension.py +++ b/src/secops/cli/commands/parser_extension.py @@ -32,7 +32,8 @@ def setup_parser_extension_command(subparsers: Any) -> None: "parser-extension", help="Manage parser extensions", ) - parser_ext_sub = parser_ext.add_subparsers(dest="subcommand", required=True) + parser_ext_sub = parser_ext.add_subparsers(dest="subcommand") + parser_ext.set_defaults(func=lambda args, _: parser_ext.print_help()) # Create parser extension create = parser_ext_sub.add_parser( diff --git a/src/secops/cli/commands/reference_list.py b/src/secops/cli/commands/reference_list.py index 74b8d042..98d989fc 100644 --- a/src/secops/cli/commands/reference_list.py +++ b/src/secops/cli/commands/reference_list.py @@ -31,6 +31,7 @@ def setup_reference_list_command(subparsers): rl_subparsers = rl_parser.add_subparsers( dest="rl_command", help="Reference list command" ) + rl_parser.set_defaults(func=lambda args, _: rl_parser.print_help()) # List reference lists command list_parser = rl_subparsers.add_parser("list", help="List reference lists") diff --git a/src/secops/cli/commands/rule.py b/src/secops/cli/commands/rule.py index ab460b7d..78c825c1 100644 --- a/src/secops/cli/commands/rule.py +++ b/src/secops/cli/commands/rule.py @@ -32,6 +32,7 @@ def setup_rule_command(subparsers): rule_subparsers = rule_parser.add_subparsers( dest="rule_command", help="Rule command" ) + rule_parser.set_defaults(func=lambda args, _: rule_parser.print_help()) # List rules command list_parser = rule_subparsers.add_parser("list", help="List rules") diff --git a/src/secops/cli/commands/rule_exclusion.py b/src/secops/cli/commands/rule_exclusion.py index aeb2b07c..a634f7bd 100644 --- a/src/secops/cli/commands/rule_exclusion.py +++ b/src/secops/cli/commands/rule_exclusion.py @@ -32,6 +32,7 @@ def setup_rule_exclusion_command(subparsers): re_subparsers = re_parser.add_subparsers( dest="re_command", help="Rule exclusion command" ) + re_parser.set_defaults(func=lambda args, _: re_parser.print_help()) # Create rule exclusion command create_parser = re_subparsers.add_parser( From fcb4d01a3037de9dd44932d85c3461f2c2c2e4ea Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Wed, 10 Dec 2025 18:08:17 +0530 Subject: [PATCH 32/48] chore: added help print for remaining commands --- src/secops/cli/commands/curated_rule.py | 14 ++++++-------- src/secops/cli/commands/search.py | 10 +++++++--- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/src/secops/cli/commands/curated_rule.py b/src/secops/cli/commands/curated_rule.py index 872c7596..f2ebd57a 100644 --- a/src/secops/cli/commands/curated_rule.py +++ b/src/secops/cli/commands/curated_rule.py @@ -29,11 +29,13 @@ def setup_curated_rules_command(subparsers): top = subparsers.add_parser( "curated-rule", help="Manage curated rules and rule sets" ) - lvl1 = top.add_subparsers(dest="curated_cmd", required=True) + lvl1 = top.add_subparsers(dest="curated_cmd") + top.set_defaults(func=lambda args, _: top.print_help()) # ---- rules ---- rules = lvl1.add_parser("rule", help="Manage curated rules") - rules_sp = rules.add_subparsers(dest="rule_cmd", required=True) + rules_sp = rules.add_subparsers(dest="rule_cmd") + rules.set_defaults(func=lambda args, _: 
rules.print_help()) rules_list = rules_sp.add_parser("list", help="List curated rules") add_pagination_args(rules_list) @@ -117,12 +119,8 @@ def setup_curated_rules_command(subparsers): rule_set_cat = lvl1.add_parser( "rule-set-category", help="Manage curated rule set categories" ) - rule_set_cat_subparser = rule_set_cat.add_subparsers( - dest="rcat_cmd" - ) - rule_set_cat.set_defaults( - func=lambda args, _: rule_set_cat.print_help() - ) + rule_set_cat_subparser = rule_set_cat.add_subparsers(dest="rcat_cmd") + rule_set_cat.set_defaults(func=lambda args, _: rule_set_cat.print_help()) rule_set_cat_list = rule_set_cat_subparser.add_parser( "list", help="List curated rule set categories" diff --git a/src/secops/cli/commands/search.py b/src/secops/cli/commands/search.py index acc15b53..2c9c257a 100644 --- a/src/secops/cli/commands/search.py +++ b/src/secops/cli/commands/search.py @@ -31,13 +31,17 @@ def setup_search_command(subparsers): subparsers: Subparsers object to add to """ search_parser = subparsers.add_parser("search", help="Search UDM events") - search_parser.add_argument("--query", help="UDM query string") - search_parser.add_argument( + + # Create mutually exclusive group for query types + query_group = search_parser.add_mutually_exclusive_group(required=True) + query_group.add_argument("--query", help="UDM query string") + query_group.add_argument( "--nl-query", "--nl_query", dest="nl_query", help="Natural language query", ) + search_parser.add_argument( "--max-events", "--max_events", @@ -81,7 +85,7 @@ def handle_search_command(args, chronicle): start_time, end_time = get_time_range(args) try: - if args.csv and args.fields: + if args.csv and args.fields and args.query: fields = [f.strip() for f in args.fields.split(",")] result = chronicle.fetch_udm_search_csv( query=args.query, From d7bbf529c7108f28dccfa825cfc6c31c6f7bbdae Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Thu, 11 Dec 2025 12:11:27 +0530 Subject: [PATCH 33/48] chore: fixed search arg check. format and linting fix. 
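
The --query/--nl-query mutually exclusive group is no longer required at parse
time; the check moves into handle_search_command, so a bare `search` invocation
exits with a clear error instead of an argparse usage failure. A sketch of the
resulting behavior, assuming the installed console entry point is named
`secops` (the entry point name is not shown in this patch):

    $ secops search
    Error: One of --query or --nl-query is required

    $ secops search --query 'metadata.event_type = "NETWORK_CONNECTION"'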
--- src/secops/cli/commands/config.py | 4 +--- src/secops/cli/commands/log.py | 4 +--- src/secops/cli/commands/search.py | 9 ++++++++- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/secops/cli/commands/config.py b/src/secops/cli/commands/config.py index a8d76e8a..04e99930 100644 --- a/src/secops/cli/commands/config.py +++ b/src/secops/cli/commands/config.py @@ -32,9 +32,7 @@ def setup_config_command(subparsers): config_parser = subparsers.add_parser( "config", help="Manage CLI configuration" ) - config_subparsers = config_parser.add_subparsers( - help="Config command" - ) + config_subparsers = config_parser.add_subparsers(help="Config command") config_parser.set_defaults(func=lambda args: config_parser.print_help()) # Set config command diff --git a/src/secops/cli/commands/log.py b/src/secops/cli/commands/log.py index 044759d9..0b8890cc 100644 --- a/src/secops/cli/commands/log.py +++ b/src/secops/cli/commands/log.py @@ -23,9 +23,7 @@ def setup_log_command(subparsers): """Set up the log command parser.""" log_parser = subparsers.add_parser("log", help="Ingest logs") - log_subparsers = log_parser.add_subparsers( - help="Log command" - ) + log_subparsers = log_parser.add_subparsers(help="Log command") log_parser.set_defaults(func=lambda args, _: log_parser.print_help()) # Ingest log command diff --git a/src/secops/cli/commands/search.py b/src/secops/cli/commands/search.py index 2c9c257a..cd58514b 100644 --- a/src/secops/cli/commands/search.py +++ b/src/secops/cli/commands/search.py @@ -33,7 +33,7 @@ def setup_search_command(subparsers): search_parser = subparsers.add_parser("search", help="Search UDM events") # Create mutually exclusive group for query types - query_group = search_parser.add_mutually_exclusive_group(required=True) + query_group = search_parser.add_mutually_exclusive_group() query_group.add_argument("--query", help="UDM query string") query_group.add_argument( "--nl-query", @@ -82,6 +82,13 @@ def handle_search_command(args, chronicle): args: Command line arguments chronicle: Chronicle client """ + # Require query or nl_query + if not args.query and not args.nl_query: + print( + "\nError: One of --query or --nl-query is required", file=sys.stderr + ) + sys.exit(1) + start_time, end_time = get_time_range(args) try: From ec8c3eaf18a712b4f97c6cf02f201a1dafd778ef Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Thu, 11 Dec 2025 14:18:31 +0530 Subject: [PATCH 34/48] chore: added changelog. updated project version --- CHANGELOG.md | 4 ++++ pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9adeed2e..5d985ea5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,10 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.28.1] - 2025-12-11 +### Updated +- CLI to show help when required sub-command/argument not provided. + ## [0.28.0] - 2025-12-10 ### Updated - Minimum python version support to 3.10 from 3.9 as python 3.9 has reached its end of life. 
diff --git a/pyproject.toml b/pyproject.toml index 59308c5f..4388a25d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "secops" -version = "0.28.0" +version = "0.28.1" description = "Python SDK for wrapping the Google SecOps API for common use cases" readme = "README.md" requires-python = ">=3.10" From 6f0c26318a192d2100ca22cbeb3d9e461a533726 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Thu, 11 Dec 2025 18:02:42 +0530 Subject: [PATCH 35/48] chore: added CLI support --- src/secops/cli/cli_client.py | 4 + src/secops/cli/commands/log_processing.py | 342 ++++++++++++++++++++++ src/secops/cli/utils/input_utils.py | 79 +++++ 3 files changed, 425 insertions(+) create mode 100644 src/secops/cli/commands/log_processing.py create mode 100644 src/secops/cli/utils/input_utils.py diff --git a/src/secops/cli/cli_client.py b/src/secops/cli/cli_client.py index ec3620df..e3185129 100644 --- a/src/secops/cli/cli_client.py +++ b/src/secops/cli/cli_client.py @@ -22,6 +22,9 @@ from secops.cli.commands.help import setup_help_command from secops.cli.commands.iocs import setup_iocs_command from secops.cli.commands.log import setup_log_command +from secops.cli.commands.log_processing import ( + setup_log_processing_command, +) from secops.cli.commands.parser import setup_parser_command from secops.cli.commands.parser_extension import setup_parser_extension_command from secops.cli.commands.reference_list import setup_reference_list_command @@ -159,6 +162,7 @@ def build_parser() -> argparse.ArgumentParser: setup_entity_command(subparsers) setup_iocs_command(subparsers) setup_log_command(subparsers) + setup_log_processing_command(subparsers) setup_parser_command(subparsers) setup_parser_extension_command(subparsers) setup_feed_command(subparsers) diff --git a/src/secops/cli/commands/log_processing.py b/src/secops/cli/commands/log_processing.py new file mode 100644 index 00000000..25b5ca7d --- /dev/null +++ b/src/secops/cli/commands/log_processing.py @@ -0,0 +1,342 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +"""Google SecOps CLI log processing pipeline commands""" + +import sys + +from secops.cli.utils.formatters import output_formatter +from secops.cli.utils.input_utils import load_json_or_file + + +def setup_log_processing_command(subparsers): + """Set up the log-processing command parser.""" + log_processing_parser = subparsers.add_parser( + "log-processing", help="Manage log processing pipelines" + ) + log_processing_subparsers = log_processing_parser.add_subparsers( + dest="log_processing_command", help="Log processing command" + ) + log_processing_parser.set_defaults( + func=lambda args, _: log_processing_parser.print_help() + ) + + # List pipelines command + list_parser = log_processing_subparsers.add_parser( + "list", help="List log processing pipelines" + ) + list_parser.add_argument( + "--page-size", + "--page_size", + dest="page_size", + type=int, + help="Maximum number of pipelines to return", + ) + list_parser.add_argument( + "--page-token", + "--page_token", + dest="page_token", + help="Page token for pagination", + ) + list_parser.add_argument( + "--filter", help="Filter expression to restrict results" + ) + list_parser.set_defaults(func=handle_list_command) + + # Get pipeline command + get_parser = log_processing_subparsers.add_parser( + "get", help="Get a log processing pipeline" + ) + get_parser.add_argument("--id", required=True, help="Pipeline ID") + get_parser.set_defaults(func=handle_get_command) + + # Create pipeline command + create_parser = log_processing_subparsers.add_parser( + "create", help="Create a log processing pipeline" + ) + create_parser.add_argument( + "--pipeline", + required=True, + help="Pipeline config as JSON string or file path", + ) + create_parser.add_argument("--id", help="Optional pipeline ID") + create_parser.set_defaults(func=handle_create_command) + + # Update pipeline command + update_parser = log_processing_subparsers.add_parser( + "update", help="Update a log processing pipeline" + ) + update_parser.add_argument("--id", required=True, help="Pipeline ID") + update_parser.add_argument( + "--pipeline", + required=True, + help="Pipeline config as JSON string or file path", + ) + update_parser.add_argument( + "--update-mask", + "--update_mask", + dest="update_mask", + help="Comma-separated list of fields to update", + ) + update_parser.set_defaults(func=handle_update_command) + + # Delete pipeline command + delete_parser = log_processing_subparsers.add_parser( + "delete", help="Delete a log processing pipeline" + ) + delete_parser.add_argument("--id", required=True, help="Pipeline ID") + delete_parser.add_argument( + "--etag", help="Optional etag for concurrency control" + ) + delete_parser.set_defaults(func=handle_delete_command) + + # Associate streams command + associate_streams_parser = log_processing_subparsers.add_parser( + "associate-streams", help="Associate streams with a pipeline" + ) + associate_streams_parser.add_argument( + "--id", required=True, help="Pipeline ID" + ) + associate_streams_parser.add_argument( + "--streams", + required=True, + help="JSON array of stream objects or file path", + ) + associate_streams_parser.set_defaults(func=handle_associate_streams_command) + + # Dissociate streams command + dissociate_streams_parser = log_processing_subparsers.add_parser( + "dissociate-streams", help="Dissociate streams from a pipeline" + ) + dissociate_streams_parser.add_argument( + "--id", required=True, help="Pipeline ID" + ) + dissociate_streams_parser.add_argument( + "--streams", + required=True, + help="JSON array of stream 
objects or file path", + ) + dissociate_streams_parser.set_defaults( + func=handle_dissociate_streams_command + ) + + # Fetch associated pipeline command + fetch_associated_parser = log_processing_subparsers.add_parser( + "fetch-associated", help="Fetch pipeline associated with a stream" + ) + fetch_associated_parser.add_argument( + "--stream", + required=True, + help="Stream object as JSON string or file path", + ) + fetch_associated_parser.set_defaults(func=handle_fetch_associated_command) + + # Fetch sample logs command + fetch_sample_logs_parser = log_processing_subparsers.add_parser( + "fetch-sample-logs", help="Fetch sample logs by streams" + ) + fetch_sample_logs_parser.add_argument( + "--streams", + required=True, + help="JSON array of stream objects or file path", + ) + fetch_sample_logs_parser.add_argument( + "--count", type=int, help="Number of sample logs per stream (max 1000)" + ) + fetch_sample_logs_parser.set_defaults(func=handle_fetch_sample_logs_command) + + # Test pipeline command + test_parser = log_processing_subparsers.add_parser( + "test", help="Test a pipeline with input logs" + ) + test_parser.add_argument( + "--pipeline", + required=True, + help="Pipeline config as JSON or file path", + ) + test_parser.add_argument( + "--input-logs", + "--input_logs", + dest="input_logs", + required=True, + help="Input logs as JSON array or file path", + ) + test_parser.set_defaults(func=handle_test_command) + + +def handle_list_command(args, chronicle): + """Handle list log processing pipelines command.""" + try: + result = chronicle.list_log_processing_pipelines( + page_size=args.page_size, + page_token=args.page_token, + filter_expr=args.filter, + ) + output_formatter(result, args.output) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error: {e}", file=sys.stderr) + sys.exit(1) + + +def handle_get_command(args, chronicle): + """Handle get log processing pipeline command.""" + try: + result = chronicle.get_log_processing_pipeline(args.id) + output_formatter(result, args.output) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error: {e}", file=sys.stderr) + sys.exit(1) + + +def handle_create_command(args, chronicle): + """Handle create log processing pipeline command.""" + try: + pipeline_config = load_json_or_file(args.pipeline) + + if not isinstance(pipeline_config, dict): + print("Error: pipeline must be a JSON object", file=sys.stderr) + sys.exit(1) + + result = chronicle.create_log_processing_pipeline( + pipeline=pipeline_config, pipeline_id=args.id + ) + output_formatter(result, args.output) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error: {e}", file=sys.stderr) + sys.exit(1) + + +def handle_update_command(args, chronicle): + """Handle update log processing pipeline command.""" + try: + pipeline_config = load_json_or_file(args.pipeline) + + if not isinstance(pipeline_config, dict): + print("Error: pipeline must be a JSON object", file=sys.stderr) + sys.exit(1) + + result = chronicle.patch_log_processing_pipeline( + pipeline_id=args.id, + pipeline=pipeline_config, + update_mask=args.update_mask, + ) + output_formatter(result, args.output) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error: {e}", file=sys.stderr) + sys.exit(1) + + +def handle_delete_command(args, chronicle): + """Handle delete log processing pipeline command.""" + try: + result = chronicle.delete_log_processing_pipeline( + pipeline_id=args.id, etag=args.etag + ) + output_formatter(result, 
args.output) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error: {e}", file=sys.stderr) + sys.exit(1) + + +def handle_associate_streams_command(args, chronicle): + """Handle associate streams command.""" + try: + streams = load_json_or_file(args.streams) + + if not isinstance(streams, list): + print("Error: streams must be a JSON array", file=sys.stderr) + sys.exit(1) + + result = chronicle.associate_streams( + pipeline_id=args.id, streams=streams + ) + output_formatter(result, args.output) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error: {e}", file=sys.stderr) + sys.exit(1) + + +def handle_dissociate_streams_command(args, chronicle): + """Handle dissociate streams command.""" + try: + streams = load_json_or_file(args.streams) + + if not isinstance(streams, list): + print("Error: streams must be a JSON array", file=sys.stderr) + sys.exit(1) + + result = chronicle.dissociate_streams( + pipeline_id=args.id, streams=streams + ) + output_formatter(result, args.output) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error: {e}", file=sys.stderr) + sys.exit(1) + + +def handle_fetch_associated_command(args, chronicle): + """Handle fetch associated pipeline command.""" + try: + stream = load_json_or_file(args.stream) + + if not isinstance(stream, dict): + print("Error: stream must be a JSON object", file=sys.stderr) + sys.exit(1) + + result = chronicle.fetch_associated_pipeline(stream=stream) + output_formatter(result, args.output) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error: {e}", file=sys.stderr) + sys.exit(1) + + +def handle_fetch_sample_logs_command(args, chronicle): + """Handle fetch sample logs by streams command.""" + try: + streams = load_json_or_file(args.streams) + + if not isinstance(streams, list): + print("Error: streams must be a JSON array", file=sys.stderr) + sys.exit(1) + + result = chronicle.fetch_sample_logs_by_streams( + streams=streams, sample_logs_count=args.count + ) + output_formatter(result, args.output) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error: {e}", file=sys.stderr) + sys.exit(1) + + +def handle_test_command(args, chronicle): + """Handle test pipeline command.""" + try: + pipeline = load_json_or_file(args.pipeline) + input_logs = load_json_or_file(args.input_logs) + + if not isinstance(pipeline, dict): + print("Error: pipeline must be a JSON object", file=sys.stderr) + sys.exit(1) + + if not isinstance(input_logs, list): + print("Error: input_logs must be a JSON array", file=sys.stderr) + sys.exit(1) + + result = chronicle.test_pipeline( + pipeline=pipeline, input_logs=input_logs + ) + output_formatter(result, args.output) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error: {e}", file=sys.stderr) + sys.exit(1) diff --git a/src/secops/cli/utils/input_utils.py b/src/secops/cli/utils/input_utils.py new file mode 100644 index 00000000..f810c72a --- /dev/null +++ b/src/secops/cli/utils/input_utils.py @@ -0,0 +1,79 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Google SecOps CLI input utilities""" + +import json +import sys +from pathlib import Path +from typing import Any + + +def load_json_or_file(value: str) -> Any: + """Load JSON from string or file path. + + Args: + value: JSON string or file path + + Returns: + Parsed JSON object (dict, list, etc.) + + Raises: + SystemExit: If file not found or JSON parsing fails + """ + try: + file_path = Path(value) + if file_path.exists() and file_path.is_file(): + with open(file_path, encoding="utf-8") as f: + return json.load(f) + except json.JSONDecodeError as e: + print(f"Error parsing JSON from file: {e}", file=sys.stderr) + sys.exit(1) + except Exception as e: + print(f"Error reading file: {e}", file=sys.stderr) + sys.exit(1) + + try: + return json.loads(value) + except json.JSONDecodeError as e: + print( + f"Error: Not a valid JSON string or file path: {value}", + file=sys.stderr, + ) + print(f"JSON parse error: {e}", file=sys.stderr) + sys.exit(1) + + +def load_string_or_file(value: str) -> str: + """Load string content from direct value or file path. + + Args: + value: String content or file path + + Returns: + String content + + Raises: + SystemExit: If file exists but cannot be read + """ + try: + file_path = Path(value) + if file_path.exists() and file_path.is_file(): + with open(file_path, encoding="utf-8") as f: + return f.read() + except Exception as e: + print(f"Error reading file: {e}", file=sys.stderr) + sys.exit(1) + + return value From 555ac13301b3d6d167c945f076d8192a5a4084cd Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Fri, 12 Dec 2025 18:25:01 +0530 Subject: [PATCH 36/48] chore: fixed examples. 
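
The example pipeline configs are updated to use a concrete filter processor (a
REGEXP include matching ".*" with IGNORE error mode) in place of the
*_UNSPECIFIED placeholder enums, and the test_pipeline example now builds its
input logs as structured Log resources instead of bare {"logText": ...} dicts.
For reference, a minimal sketch of the log shape the updated example
constructs (values are illustrative):

    import base64
    from datetime import datetime, timezone

    now = datetime.now(timezone.utc).isoformat()
    input_log = {
        "data": base64.b64encode(b"sample raw log line").decode("utf-8"),
        "logEntryTime": now,
        "collectionTime": now,
    }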
--- examples/log_processing_pipelines_example.py | 78 ++++++++++++++++--- .../chronicle/log_processing_pipelines.py | 16 ++-- 2 files changed, 77 insertions(+), 17 deletions(-) diff --git a/examples/log_processing_pipelines_example.py b/examples/log_processing_pipelines_example.py index 9a6a1c02..97a7c5f4 100644 --- a/examples/log_processing_pipelines_example.py +++ b/examples/log_processing_pipelines_example.py @@ -2,9 +2,11 @@ """Example usage of the Google SecOps SDK for Log Processing Pipelines.""" import argparse +import base64 import json import time import uuid +from datetime import datetime, timezone from secops import SecOpsClient @@ -74,8 +76,11 @@ def example_create_and_get_pipeline(chronicle): "processors": [ { "filterProcessor": { - "include": {"logMatchType": "LOG_MATCH_TYPE_UNSPECIFIED"}, - "errorMode": "ERROR_MODE_UNSPECIFIED", + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", } } ], @@ -143,8 +148,11 @@ def example_update_pipeline(chronicle): "processors": [ { "filterProcessor": { - "include": {"logMatchType": "LOG_MATCH_TYPE_UNSPECIFIED"}, - "errorMode": "ERROR_MODE_UNSPECIFIED", + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", } } ], @@ -170,6 +178,7 @@ def example_update_pipeline(chronicle): "name": created_pipeline.get("name"), "displayName": f"Updated {display_name}", "description": "Updated description via SDK", + "processors": created_pipeline.get("processors"), } print("\nUpdating pipeline...") @@ -213,8 +222,11 @@ def example_stream_association(chronicle): "processors": [ { "filterProcessor": { - "include": {"logMatchType": "LOG_MATCH_TYPE_UNSPECIFIED"}, - "errorMode": "ERROR_MODE_UNSPECIFIED", + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", } } ], @@ -282,17 +294,30 @@ def example_test_pipeline(chronicle): "processors": [ { "filterProcessor": { - "include": {"logMatchType": "LOG_MATCH_TYPE_UNSPECIFIED"}, - "errorMode": "ERROR_MODE_UNSPECIFIED", + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", } } ], } - # Sample input logs + # Sample input logs with proper Log resource structure + current_time = datetime.now(timezone.utc).isoformat() + input_logs = [ - {"logText": "Sample log entry 1"}, - {"logText": "Sample log entry 2"}, + { + "data": base64.b64encode(b"Sample log entry 1").decode("utf-8"), + "logEntryTime": current_time, + "collectionTime": current_time, + }, + { + "data": base64.b64encode(b"Sample log entry 2").decode("utf-8"), + "logEntryTime": current_time, + "collectionTime": current_time, + }, ] try: @@ -347,6 +372,34 @@ def example_fetch_associated_pipeline(chronicle): ) +def example_fetch_sample_logs(chronicle): + """Example 7: Fetch Sample Logs by Streams.""" + print("\n=== Example 7: Fetch Sample Logs by Streams ===") + + # Define streams to fetch sample logs from + # Note: Replace with actual log type or feed ID from your environment + streams = [{"logType": "WINEVTLOG"}] + + try: + print(f"\nFetching sample logs for streams: {json.dumps(streams)}") + result = chronicle.fetch_sample_logs_by_streams( + streams=streams, sample_logs_count=5 + ) + + logs = result.get("logs", []) + print(f"\nFetched {len(logs)} sample log(s)") + + if logs: + print("\nFirst sample log:") + print(json.dumps(logs[0], indent=2)) + else: + print("No sample logs available for the specified streams.") + + except Exception as e: + print(f"Error fetching sample logs: {e}") + print("Note: Make 
sure the streams exist and have ingested logs.") + + # Map of example functions EXAMPLES = { "1": example_list_pipelines, @@ -355,6 +408,7 @@ def example_fetch_associated_pipeline(chronicle): "4": example_stream_association, "5": example_test_pipeline, "6": example_fetch_associated_pipeline, + "7": example_fetch_sample_logs, } @@ -376,7 +430,7 @@ def main(): "--example", "-e", help=( - "Example number to run (1-6). " + "Example number to run (1-7). " "If not specified, runs all examples." ), ) diff --git a/src/secops/chronicle/log_processing_pipelines.py b/src/secops/chronicle/log_processing_pipelines.py index 815f06b3..ccd73360 100644 --- a/src/secops/chronicle/log_processing_pipelines.py +++ b/src/secops/chronicle/log_processing_pipelines.py @@ -150,10 +150,13 @@ def patch_log_processing_pipeline( Raises: APIError: If the API request fails. """ - url = ( - f"{client.base_url}/{client.instance_id}/" - f"logProcessingPipelines/{pipeline_id}" - ) + if "/projects/" not in pipeline_id: + url = ( + f"{client.base_url}/{client.instance_id}/" + f"logProcessingPipelines/{pipeline_id}" + ) + else: + url = f"{client.base_url}/{pipeline_id}" params: dict[str, Any] = {} if update_mask: @@ -287,7 +290,10 @@ def fetch_associated_pipeline(client, stream: dict[str, Any]) -> dict[str, Any]: f"logProcessingPipelines:fetchAssociatedPipeline" ) - params = {"stream": stream} + # Pass stream fields as separate query parameters with stream. prefix + params = {} + for key, value in stream.items(): + params[f"stream.{key}"] = value response = client.session.get(url, params=params) if response.status_code != 200: From fca20f6336a9549a151cf0e550e4fab26b8671d4 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Mon, 15 Dec 2025 14:46:56 +0530 Subject: [PATCH 37/48] chore: added unit tests. updated pipeline id handling. --- .../chronicle/log_processing_pipelines.py | 47 +- .../chronicle/test_log_processing_pipeline.py | 706 ++++++++++++++++++ 2 files changed, 735 insertions(+), 18 deletions(-) create mode 100644 tests/chronicle/test_log_processing_pipeline.py diff --git a/src/secops/chronicle/log_processing_pipelines.py b/src/secops/chronicle/log_processing_pipelines.py index ccd73360..2b818d10 100644 --- a/src/secops/chronicle/log_processing_pipelines.py +++ b/src/secops/chronicle/log_processing_pipelines.py @@ -75,10 +75,13 @@ def get_log_processing_pipeline(client, pipeline_id: str) -> dict[str, Any]: Raises: APIError: If the API request fails. """ - url = ( - f"{client.base_url}/{client.instance_id}/" - f"logProcessingPipelines/{pipeline_id}" - ) + if not pipeline_id.startswith("projects/"): + url = ( + f"{client.base_url}/{client.instance_id}/" + f"logProcessingPipelines/{pipeline_id}" + ) + else: + url = f"{client.base_url}/{pipeline_id}" response = client.session.get(url) if response.status_code != 200: @@ -150,7 +153,7 @@ def patch_log_processing_pipeline( Raises: APIError: If the API request fails. """ - if "/projects/" not in pipeline_id: + if not pipeline_id.startswith("projects/"): url = ( f"{client.base_url}/{client.instance_id}/" f"logProcessingPipelines/{pipeline_id}" @@ -188,10 +191,13 @@ def delete_log_processing_pipeline( Raises: APIError: If the API request fails. 
""" - url = ( - f"{client.base_url}/{client.instance_id}/" - f"logProcessingPipelines/{pipeline_id}" - ) + if not pipeline_id.startswith("projects/"): + url = ( + f"{client.base_url}/{client.instance_id}/" + f"logProcessingPipelines/{pipeline_id}" + ) + else: + url = f"{client.base_url}/{pipeline_id}" params: dict[str, Any] = {} if etag: @@ -224,11 +230,13 @@ def associate_streams( Raises: APIError: If the API request fails. """ - url = ( - f"{client.base_url}/{client.instance_id}/" - f"logProcessingPipelines/{pipeline_id}:associateStreams" - ) - + if not pipeline_id.startswith("projects/"): + url = ( + f"{client.base_url}/{client.instance_id}/" + f"logProcessingPipelines/{pipeline_id}:associateStreams" + ) + else: + url = f"{client.base_url}/{pipeline_id}:associateStreams" body = {"streams": streams} response = client.session.post(url, json=body) @@ -256,10 +264,13 @@ def dissociate_streams( Raises: APIError: If the API request fails. """ - url = ( - f"{client.base_url}/{client.instance_id}/" - f"logProcessingPipelines/{pipeline_id}:dissociateStreams" - ) + if not pipeline_id.startswith("projects/"): + url = ( + f"{client.base_url}/{client.instance_id}/" + f"logProcessingPipelines/{pipeline_id}:dissociateStreams" + ) + else: + url = f"{client.base_url}/{pipeline_id}:dissociateStreams" body = {"streams": streams} diff --git a/tests/chronicle/test_log_processing_pipeline.py b/tests/chronicle/test_log_processing_pipeline.py new file mode 100644 index 00000000..8e3f10f4 --- /dev/null +++ b/tests/chronicle/test_log_processing_pipeline.py @@ -0,0 +1,706 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +"""Tests for Chronicle log processing pipeline functions.""" + +import pytest +from unittest.mock import Mock, patch + +from secops.chronicle.client import ChronicleClient +from secops.chronicle.log_processing_pipelines import ( + list_log_processing_pipelines, + get_log_processing_pipeline, + create_log_processing_pipeline, + patch_log_processing_pipeline, + delete_log_processing_pipeline, + associate_streams, + dissociate_streams, + fetch_associated_pipeline, + fetch_sample_logs_by_streams, + test_pipeline as pipeline_test_function, +) +from secops.exceptions import APIError + + +@pytest.fixture +def chronicle_client(): + """Create a Chronicle client for testing.""" + with patch("secops.auth.SecOpsAuth") as mock_auth: + mock_session = Mock() + mock_session.headers = {} + mock_auth.return_value.session = mock_session + return ChronicleClient( + customer_id="test-customer", project_id="test-project" + ) + + +@pytest.fixture +def mock_response(): + """Create a mock API response.""" + mock = Mock() + mock.status_code = 200 + mock.json.return_value = { + "name": "projects/test-project/locations/us/instances/test-customer/logProcessingPipelines/pipeline_12345", + "displayName": "Test Pipeline", + "description": "Test pipeline description", + "processors": [{"filterProcessor": {"include": {}}}], + } + return mock + + +@pytest.fixture +def mock_error_response(): + """Create a mock error API response.""" + mock = Mock() + mock.status_code = 400 + mock.text = "Error message" + return mock + + +def test_list_log_processing_pipelines(chronicle_client, mock_response): + """Test list_log_processing_pipelines function.""" + mock_response.json.return_value = { + "logProcessingPipelines": [ + {"name": "pipeline1"}, + {"name": "pipeline2"}, + ] + } + + with patch.object( + chronicle_client.session, "get", return_value=mock_response + ) as mock_get: + result = list_log_processing_pipelines(chronicle_client) + + mock_get.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines", + params={}, + ) + assert result == mock_response.json.return_value + + +def test_list_log_processing_pipelines_with_params( + chronicle_client, mock_response +): + """Test list_log_processing_pipelines with pagination and filter.""" + mock_response.json.return_value = { + "logProcessingPipelines": [{"name": "pipeline1"}], + "nextPageToken": "token123", + } + + with patch.object( + chronicle_client.session, "get", return_value=mock_response + ) as mock_get: + result = list_log_processing_pipelines( + chronicle_client, + page_size=50, + page_token="prev_token", + filter_expr='displayName="Test"', + ) + + mock_get.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines", + params={ + "pageSize": 50, + "pageToken": "prev_token", + "filter": 'displayName="Test"', + }, + ) + assert result == mock_response.json.return_value + + +def test_list_log_processing_pipelines_error( + chronicle_client, mock_error_response +): + """Test list_log_processing_pipelines with error response.""" + with patch.object( + chronicle_client.session, "get", return_value=mock_error_response + ): + with pytest.raises(APIError) as exc_info: + list_log_processing_pipelines(chronicle_client) + + assert "Failed to list log processing pipelines" in str(exc_info.value) + + +def test_get_log_processing_pipeline(chronicle_client, mock_response): + """Test get_log_processing_pipeline function.""" + pipeline_id = "pipeline_12345" + + with patch.object( + 
chronicle_client.session, "get", return_value=mock_response + ) as mock_get: + result = get_log_processing_pipeline(chronicle_client, pipeline_id) + + mock_get.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines/{pipeline_id}" + ) + assert result == mock_response.json.return_value + + +def test_get_log_processing_pipeline_error( + chronicle_client, mock_error_response +): + """Test get_log_processing_pipeline with error response.""" + pipeline_id = "pipeline_12345" + + with patch.object( + chronicle_client.session, "get", return_value=mock_error_response + ): + with pytest.raises(APIError) as exc_info: + get_log_processing_pipeline(chronicle_client, pipeline_id) + + assert "Failed to get log processing pipeline" in str(exc_info.value) + + +def test_create_log_processing_pipeline(chronicle_client, mock_response): + """Test create_log_processing_pipeline function.""" + pipeline_config = { + "displayName": "Test Pipeline", + "description": "Test description", + "processors": [{"filterProcessor": {"include": {}}}], + } + + with patch.object( + chronicle_client.session, "post", return_value=mock_response + ) as mock_post: + result = create_log_processing_pipeline( + chronicle_client, pipeline_config + ) + + mock_post.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines", + json=pipeline_config, + params={}, + ) + assert result == mock_response.json.return_value + + +def test_create_log_processing_pipeline_with_id( + chronicle_client, mock_response +): + """Test create_log_processing_pipeline with custom pipeline ID.""" + pipeline_config = { + "displayName": "Test Pipeline", + "processors": [{"filterProcessor": {"include": {}}}], + } + pipeline_id = "custom_pipeline_id" + + with patch.object( + chronicle_client.session, "post", return_value=mock_response + ) as mock_post: + result = create_log_processing_pipeline( + chronicle_client, pipeline_config, pipeline_id=pipeline_id + ) + + mock_post.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines", + json=pipeline_config, + params={"logProcessingPipelineId": pipeline_id}, + ) + assert result == mock_response.json.return_value + + +def test_create_log_processing_pipeline_error( + chronicle_client, mock_error_response +): + """Test create_log_processing_pipeline with error response.""" + pipeline_config = {"displayName": "Test Pipeline"} + + with patch.object( + chronicle_client.session, "post", return_value=mock_error_response + ): + with pytest.raises(APIError) as exc_info: + create_log_processing_pipeline(chronicle_client, pipeline_config) + + assert "Failed to create log processing pipeline" in str(exc_info.value) + + +def test_patch_log_processing_pipeline(chronicle_client, mock_response): + """Test patch_log_processing_pipeline function.""" + pipeline_id = "pipeline_12345" + pipeline_config = { + "name": "projects/test-project/locations/us/instances/test-customer/logProcessingPipelines/pipeline_12345", + "displayName": "Updated Pipeline", + "processors": [{"filterProcessor": {"include": {}}}], + } + + with patch.object( + chronicle_client.session, "patch", return_value=mock_response + ) as mock_patch: + result = patch_log_processing_pipeline( + chronicle_client, pipeline_id, pipeline_config + ) + + mock_patch.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines/{pipeline_id}", + json=pipeline_config, + params={}, + 
) + assert result == mock_response.json.return_value + + +def test_patch_log_processing_pipeline_with_update_mask( + chronicle_client, mock_response +): + """Test patch_log_processing_pipeline with update mask.""" + pipeline_id = "pipeline_12345" + pipeline_config = { + "name": "projects/test-project/locations/us/instances/test-customer/logProcessingPipelines/pipeline_12345", + "displayName": "Updated Pipeline", + } + update_mask = "displayName,description" + + with patch.object( + chronicle_client.session, "patch", return_value=mock_response + ) as mock_patch: + result = patch_log_processing_pipeline( + chronicle_client, + pipeline_id, + pipeline_config, + update_mask=update_mask, + ) + + mock_patch.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines/{pipeline_id}", + json=pipeline_config, + params={"updateMask": update_mask}, + ) + assert result == mock_response.json.return_value + + +def test_patch_log_processing_pipeline_with_full_name( + chronicle_client, mock_response +): + """Test patch_log_processing_pipeline with full resource name.""" + full_name = "projects/test-project/locations/us/instances/test-customer/logProcessingPipelines/pipeline_12345" + pipeline_config = { + "name": full_name, + "displayName": "Updated Pipeline", + } + + with patch.object( + chronicle_client.session, "patch", return_value=mock_response + ) as mock_patch: + result = patch_log_processing_pipeline( + chronicle_client, full_name, pipeline_config + ) + + mock_patch.assert_called_once_with( + f"{chronicle_client.base_url}/{full_name}", + json=pipeline_config, + params={}, + ) + assert result == mock_response.json.return_value + + +def test_patch_log_processing_pipeline_error( + chronicle_client, mock_error_response +): + """Test patch_log_processing_pipeline with error response.""" + pipeline_id = "pipeline_12345" + pipeline_config = {"displayName": "Updated Pipeline"} + + with patch.object( + chronicle_client.session, "patch", return_value=mock_error_response + ): + with pytest.raises(APIError) as exc_info: + patch_log_processing_pipeline( + chronicle_client, pipeline_id, pipeline_config + ) + + assert "Failed to patch log processing pipeline" in str(exc_info.value) + + +def test_delete_log_processing_pipeline(chronicle_client, mock_response): + """Test delete_log_processing_pipeline function.""" + pipeline_id = "pipeline_12345" + mock_response.json.return_value = {} + + with patch.object( + chronicle_client.session, "delete", return_value=mock_response + ) as mock_delete: + result = delete_log_processing_pipeline(chronicle_client, pipeline_id) + + mock_delete.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines/{pipeline_id}", + params={}, + ) + assert result == {} + + +def test_delete_log_processing_pipeline_with_etag( + chronicle_client, mock_response +): + """Test delete_log_processing_pipeline with etag.""" + pipeline_id = "pipeline_12345" + etag = "etag_value_123" + mock_response.json.return_value = {} + + with patch.object( + chronicle_client.session, "delete", return_value=mock_response + ) as mock_delete: + result = delete_log_processing_pipeline( + chronicle_client, pipeline_id, etag=etag + ) + + mock_delete.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines/{pipeline_id}", + params={"etag": etag}, + ) + assert result == {} + + +def test_delete_log_processing_pipeline_error( + chronicle_client, mock_error_response +): + 
"""Test delete_log_processing_pipeline with error response.""" + pipeline_id = "pipeline_12345" + + with patch.object( + chronicle_client.session, "delete", return_value=mock_error_response + ): + with pytest.raises(APIError) as exc_info: + delete_log_processing_pipeline(chronicle_client, pipeline_id) + + assert "Failed to delete log processing pipeline" in str(exc_info.value) + + +def test_associate_streams(chronicle_client, mock_response): + """Test associate_streams function.""" + pipeline_id = "pipeline_12345" + streams = [{"logType": "WINEVTLOG"}, {"feedId": "feed_123"}] + mock_response.json.return_value = {} + + with patch.object( + chronicle_client.session, "post", return_value=mock_response + ) as mock_post: + result = associate_streams(chronicle_client, pipeline_id, streams) + + mock_post.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines/{pipeline_id}:associateStreams", + json={"streams": streams}, + ) + assert result == {} + + +def test_associate_streams_error(chronicle_client, mock_error_response): + """Test associate_streams with error response.""" + pipeline_id = "pipeline_12345" + streams = [{"logType": "WINEVTLOG"}] + + with patch.object( + chronicle_client.session, "post", return_value=mock_error_response + ): + with pytest.raises(APIError) as exc_info: + associate_streams(chronicle_client, pipeline_id, streams) + + assert "Failed to associate streams" in str(exc_info.value) + + +def test_associate_streams_empty_list(chronicle_client, mock_response): + """Test associate_streams with empty streams list.""" + pipeline_id = "pipeline_12345" + streams = [] + mock_response.json.return_value = {} + + with patch.object( + chronicle_client.session, "post", return_value=mock_response + ) as mock_post: + result = associate_streams(chronicle_client, pipeline_id, streams) + + mock_post.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines/{pipeline_id}:associateStreams", + json={"streams": []}, + ) + assert result == {} + + +def test_dissociate_streams(chronicle_client, mock_response): + """Test dissociate_streams function.""" + pipeline_id = "pipeline_12345" + streams = [{"logType": "WINEVTLOG"}, {"feedId": "feed_123"}] + mock_response.json.return_value = {} + + with patch.object( + chronicle_client.session, "post", return_value=mock_response + ) as mock_post: + result = dissociate_streams(chronicle_client, pipeline_id, streams) + + mock_post.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines/{pipeline_id}:dissociateStreams", + json={"streams": streams}, + ) + assert result == {} + + +def test_dissociate_streams_error(chronicle_client, mock_error_response): + """Test dissociate_streams with error response.""" + pipeline_id = "pipeline_12345" + streams = [{"logType": "WINEVTLOG"}] + + with patch.object( + chronicle_client.session, "post", return_value=mock_error_response + ): + with pytest.raises(APIError) as exc_info: + dissociate_streams(chronicle_client, pipeline_id, streams) + + assert "Failed to dissociate streams" in str(exc_info.value) + + +def test_fetch_associated_pipeline_with_log_type( + chronicle_client, mock_response +): + """Test fetch_associated_pipeline with logType.""" + stream = {"logType": "WINEVTLOG"} + + with patch.object( + chronicle_client.session, "get", return_value=mock_response + ) as mock_get: + result = fetch_associated_pipeline(chronicle_client, stream) + + 
mock_get.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines:fetchAssociatedPipeline", + params={"stream.logType": "WINEVTLOG"}, + ) + assert result == mock_response.json.return_value + + +def test_fetch_associated_pipeline_with_feed_id( + chronicle_client, mock_response +): + """Test fetch_associated_pipeline with feedId.""" + stream = {"feedId": "feed_123"} + + with patch.object( + chronicle_client.session, "get", return_value=mock_response + ) as mock_get: + result = fetch_associated_pipeline(chronicle_client, stream) + + mock_get.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines:fetchAssociatedPipeline", + params={"stream.feedId": "feed_123"}, + ) + assert result == mock_response.json.return_value + + +def test_fetch_associated_pipeline_with_multiple_fields( + chronicle_client, mock_response +): + """Test fetch_associated_pipeline with multiple stream fields.""" + stream = {"logType": "WINEVTLOG", "namespace": "test"} + + with patch.object( + chronicle_client.session, "get", return_value=mock_response + ) as mock_get: + result = fetch_associated_pipeline(chronicle_client, stream) + + mock_get.assert_called_once() + call_args = mock_get.call_args + assert "stream.logType" in call_args[1]["params"] + assert "stream.namespace" in call_args[1]["params"] + assert result == mock_response.json.return_value + + +def test_fetch_associated_pipeline_error(chronicle_client, mock_error_response): + """Test fetch_associated_pipeline with error response.""" + stream = {"logType": "WINEVTLOG"} + + with patch.object( + chronicle_client.session, "get", return_value=mock_error_response + ): + with pytest.raises(APIError) as exc_info: + fetch_associated_pipeline(chronicle_client, stream) + + assert "Failed to fetch associated pipeline" in str(exc_info.value) + + +def test_fetch_sample_logs_by_streams(chronicle_client, mock_response): + """Test fetch_sample_logs_by_streams function.""" + streams = [{"logType": "WINEVTLOG"}, {"feedId": "feed_123"}] + mock_response.json.return_value = { + "logs": [{"data": "log1"}, {"data": "log2"}] + } + + with patch.object( + chronicle_client.session, "post", return_value=mock_response + ) as mock_post: + result = fetch_sample_logs_by_streams(chronicle_client, streams) + + mock_post.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines:fetchSampleLogsByStreams", + json={"streams": streams}, + ) + assert result == mock_response.json.return_value + + +def test_fetch_sample_logs_by_streams_with_count( + chronicle_client, mock_response +): + """Test fetch_sample_logs_by_streams with sample count.""" + streams = [{"logType": "WINEVTLOG"}] + sample_logs_count = 50 + mock_response.json.return_value = {"logs": []} + + with patch.object( + chronicle_client.session, "post", return_value=mock_response + ) as mock_post: + result = fetch_sample_logs_by_streams( + chronicle_client, streams, sample_logs_count=sample_logs_count + ) + + mock_post.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines:fetchSampleLogsByStreams", + json={"streams": streams, "sampleLogsCount": sample_logs_count}, + ) + assert result == mock_response.json.return_value + + +def test_fetch_sample_logs_by_streams_error( + chronicle_client, mock_error_response +): + """Test fetch_sample_logs_by_streams with error response.""" + streams = [{"logType": "WINEVTLOG"}] + + with patch.object( 
+ chronicle_client.session, "post", return_value=mock_error_response + ): + with pytest.raises(APIError) as exc_info: + fetch_sample_logs_by_streams(chronicle_client, streams) + + assert "Failed to fetch sample logs by streams" in str(exc_info.value) + + +def test_fetch_sample_logs_by_streams_empty_streams( + chronicle_client, mock_response +): + """Test fetch_sample_logs_by_streams with empty streams list.""" + streams = [] + mock_response.json.return_value = {"logs": []} + + with patch.object( + chronicle_client.session, "post", return_value=mock_response + ) as mock_post: + result = fetch_sample_logs_by_streams(chronicle_client, streams) + + mock_post.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines:fetchSampleLogsByStreams", + json={"streams": []}, + ) + assert result == mock_response.json.return_value + + +def test_test_pipeline(chronicle_client, mock_response): + """Test test_pipeline function.""" + pipeline_config = { + "displayName": "Test Pipeline", + "processors": [{"filterProcessor": {"include": {}}}], + } + input_logs = [ + {"data": "bG9nMQ==", "logEntryTime": "2024-01-01T00:00:00Z"}, + {"data": "bG9nMg==", "logEntryTime": "2024-01-01T00:00:01Z"}, + ] + mock_response.json.return_value = {"logs": input_logs} + + with patch.object( + chronicle_client.session, "post", return_value=mock_response + ) as mock_post: + result = pipeline_test_function( + chronicle_client, pipeline_config, input_logs + ) + + mock_post.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines:testPipeline", + json={ + "logProcessingPipeline": pipeline_config, + "inputLogs": input_logs, + }, + ) + assert result == mock_response.json.return_value + + +def test_test_pipeline_error(chronicle_client, mock_error_response): + """Test test_pipeline with error response.""" + pipeline_config = {"displayName": "Test Pipeline"} + input_logs = [{"data": "bG9nMQ=="}] + + with patch.object( + chronicle_client.session, "post", return_value=mock_error_response + ): + with pytest.raises(APIError) as exc_info: + pipeline_test_function( + chronicle_client, pipeline_config, input_logs + ) + + assert "Failed to test pipeline" in str(exc_info.value) + + +def test_test_pipeline_empty_logs(chronicle_client, mock_response): + """Test test_pipeline with empty input logs.""" + pipeline_config = { + "displayName": "Test Pipeline", + "processors": [{"filterProcessor": {"include": {}}}], + } + input_logs = [] + mock_response.json.return_value = {"logs": []} + + with patch.object( + chronicle_client.session, "post", return_value=mock_response + ) as mock_post: + result = pipeline_test_function( + chronicle_client, pipeline_config, input_logs + ) + + mock_post.assert_called_once_with( + f"{chronicle_client.base_url}/{chronicle_client.instance_id}/logProcessingPipelines:testPipeline", + json={ + "logProcessingPipeline": pipeline_config, + "inputLogs": [], + }, + ) + assert result == mock_response.json.return_value + + +def test_test_pipeline_with_complex_processors(chronicle_client, mock_response): + """Test test_pipeline with complex processor configuration.""" + pipeline_config = { + "displayName": "Complex Pipeline", + "processors": [ + { + "filterProcessor": { + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*error.*"], + } + } + }, + { + "transformProcessor": { + "fields": [{"field": "message", "transformation": "upper"}] + } + }, + ], + } + input_logs = [{"data": "bG9nMQ==", "logEntryTime": 
"2024-01-01T00:00:00Z"}] + mock_response.json.return_value = {"logs": input_logs} + + with patch.object( + chronicle_client.session, "post", return_value=mock_response + ) as mock_post: + result = pipeline_test_function( + chronicle_client, pipeline_config, input_logs + ) + + mock_post.assert_called_once() + call_kwargs = mock_post.call_args[1] + assert call_kwargs["json"]["logProcessingPipeline"] == pipeline_config + assert result == mock_response.json.return_value From 4fc897761afaaf1d7952d477657c68e04c5b3d26 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Tue, 16 Dec 2025 16:22:09 +0530 Subject: [PATCH 38/48] chore: added integration tests. Added documentation. --- CLI.md | 181 +++++ README.md | 246 +++++++ .../chronicle/log_processing_pipelines.py | 2 +- ...est_log_processing_pipeline_integration.py | 406 +++++++++++ tests/cli/test_log_processing_integration.py | 655 ++++++++++++++++++ 5 files changed, 1489 insertions(+), 1 deletion(-) create mode 100644 tests/chronicle/test_log_processing_pipeline_integration.py create mode 100644 tests/cli/test_log_processing_integration.py diff --git a/CLI.md b/CLI.md index 670c10c0..e1e66227 100644 --- a/CLI.md +++ b/CLI.md @@ -325,6 +325,187 @@ secops log generate-udm-mapping \ --compress-array-fields "false" ``` +### Log Processing Pipelines + +Chronicle log processing pipelines allow you to transform, filter, and enrich log data before it is stored in Chronicle. Common use cases include removing empty key-value pairs, redacting sensitive data, adding ingestion labels, filtering logs by field values, and extracting host information. Pipelines can be associated with log types (with optional collector IDs) and feeds, providing flexible control over your data ingestion workflow. + +The CLI provides comprehensive commands for managing pipelines, associating streams, testing configurations, and fetching sample logs. 
+
+#### List pipelines
+
+```bash
+# List all log processing pipelines
+secops log-processing list
+
+# List with pagination
+secops log-processing list --page-size 50
+
+# List with filter expression
+secops log-processing list --filter "displayName:production*"
+
+# List with pagination token
+secops log-processing list --page-size 50 --page-token "next_page_token"
+```
+
+#### Get pipeline details
+
+```bash
+# Get a specific pipeline by ID
+secops log-processing get --id "1234567890"
+```
+
+#### Create a pipeline
+
+```bash
+# Create from inline JSON
+secops log-processing create --pipeline '{"displayName":"My Pipeline","description":"Filters error logs","processors":[{"filterProcessor":{"include":{"logMatchType":"REGEXP","logBodies":[".*error.*"]},"errorMode":"IGNORE"}}]}'
+
+# Create from JSON file
+secops log-processing create --pipeline pipeline_config.json
+```
+
+Example `pipeline_config.json`:
+```json
+{
+  "displayName": "Production Pipeline",
+  "description": "Filters and transforms production logs",
+  "processors": [
+    {
+      "filterProcessor": {
+        "include": {
+          "logMatchType": "REGEXP",
+          "logBodies": [".*error.*", ".*warning.*"]
+        },
+        "errorMode": "IGNORE"
+      }
+    }
+  ],
+  "customMetadata": [
+    {"key": "environment", "value": "production"},
+    {"key": "team", "value": "security"}
+  ]
+}
+```
+
+#### Update a pipeline
+
+```bash
+# Update from JSON file with update mask
+secops log-processing update --id "1234567890" --pipeline updated_config.json --update-mask "description"
+
+# Update from inline JSON
+secops log-processing update --id "1234567890" --pipeline '{"description":"Updated description"}' --update-mask "description"
+```
+
+#### Delete a pipeline
+
+```bash
+# Delete a pipeline by ID
+secops log-processing delete --id "1234567890"
+
+# Delete with etag for concurrency control
+secops log-processing delete --id "1234567890" --etag "etag_value"
+```
+
+#### Associate streams with a pipeline
+
+Associate log streams (by log type or feed) with a pipeline:
+
+```bash
+# Associate by log type (inline)
+secops log-processing associate-streams --id "1234567890" --streams '[{"logType":"WINEVTLOG"},{"logType":"LINUX"}]'
+
+# Associate by feed ID
+secops log-processing associate-streams --id "1234567890" --streams '[{"feed":"feed-uuid-1"},{"feed":"feed-uuid-2"}]'
+
+# Associate by log type (from file)
+secops log-processing associate-streams --id "1234567890" --streams streams.json
+```
+
+Example `streams.json`:
+```json
+[
+  {"logType": "WINEVTLOG"},
+  {"logType": "LINUX"},
+  {"logType": "OKTA"}
+]
+```
+
+#### Dissociate streams from a pipeline
+
+```bash
+# Dissociate streams (from file)
+secops log-processing dissociate-streams --id "1234567890" --streams streams.json
+
+# Dissociate streams (inline)
+secops log-processing dissociate-streams --id "1234567890" --streams '[{"logType":"WINEVTLOG"}]'
+```
+
+#### Fetch associated pipeline
+
+Find which pipeline is associated with a specific stream:
+
+```bash
+# Find pipeline for a log type (inline)
+secops log-processing fetch-associated --stream '{"logType":"WINEVTLOG"}'
+
+# Find pipeline for a feed
+secops log-processing fetch-associated --stream '{"feed":"feed-uuid"}'
+
+# Find pipeline for a log type (from file)
+secops log-processing fetch-associated --stream stream_query.json
+```
+
+Example `stream_query.json`:
+```json
+{
+  "logType": "WINEVTLOG"
+}
+```
+
+#### Fetch sample logs
+
+Retrieve sample logs for specific streams:
+
+```bash
+# Fetch sample logs for log types (from file)
+secops log-processing 
fetch-sample-logs --streams streams.json --count 10 + +# Fetch sample logs (inline) +secops log-processing fetch-sample-logs --streams '[{"logType":"WINEVTLOG"},{"logType":"LINUX"}]' --count 5 + +# Fetch sample logs for feeds +secops log-processing fetch-sample-logs --streams '[{"feed":"feed-uuid"}]' --count 10 +``` + +#### Test a pipeline + +Test a pipeline configuration against sample logs before deployment: + +```bash +# Test with inline JSON +secops log-processing test --pipeline '{"displayName":"Test","processors":[{"filterProcessor":{"include":{"logMatchType":"REGEXP","logBodies":[".*"]},"errorMode":"IGNORE"}}]}' --input-logs input_logs.json + +# Test with files +secops log-processing test --pipeline pipeline_config.json --input-logs test_logs.json +``` + +Example `input_logs.json` (logs must have base64-encoded data): +```json +[ + { + "data": "U2FtcGxlIGxvZyBlbnRyeQ==", + "logEntryTime": "2024-01-01T00:00:00Z", + "collectionTime": "2024-01-01T00:00:00Z" + }, + { + "data": "QW5vdGhlciBsb2cgZW50cnk=", + "logEntryTime": "2024-01-01T00:01:00Z", + "collectionTime": "2024-01-01T00:01:00Z" + } +] +``` + ### Parser Management Parsers in Chronicle are used to process and normalize raw log data into UDM (Unified Data Model) format. The CLI provides comprehensive parser management capabilities. diff --git a/README.md b/README.md index 7b8031e7..ec5ac8f7 100644 --- a/README.md +++ b/README.md @@ -518,6 +518,252 @@ chronicle.delete_forwarder(forwarder_id="1234567890") print("Forwarder deleted successfully") ``` +### Log Processing Pipelines + +Chronicle log processing pipelines allow you to transform, filter, and enrich log data before it is stored in Chronicle. Common use cases include removing empty key-value pairs, redacting sensitive data, adding ingestion labels, filtering logs by field values, and extracting host information. Pipelines can be associated with log types (with optional collector IDs) and feeds, providing flexible control over your data ingestion workflow. + +The SDK provides comprehensive methods for managing pipelines, associating streams, testing configurations, and fetching sample logs. 
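+
+As a quick orientation, the full lifecycle looks roughly like this (a minimal sketch using only the methods documented below; the stream and processor values are placeholders):
+
+```python
+# Create a pipeline, attach a log-type stream, then clean up
+pipeline = chronicle.create_log_processing_pipeline(
+    pipeline={
+        "displayName": "Quickstart Pipeline",
+        "processors": [
+            {
+                "filterProcessor": {
+                    "include": {"logMatchType": "REGEXP", "logBodies": [".*"]},
+                    "errorMode": "IGNORE",
+                }
+            }
+        ],
+    }
+)
+pipeline_id = pipeline["name"].split("/")[-1]
+
+chronicle.associate_streams(
+    pipeline_id=pipeline_id, streams=[{"logType": "WINEVTLOG"}]
+)
+chronicle.dissociate_streams(
+    pipeline_id=pipeline_id, streams=[{"logType": "WINEVTLOG"}]
+)
+chronicle.delete_log_processing_pipeline(pipeline_id)
+```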
+ +#### List pipelines + +Retrieve all log processing pipelines in your Chronicle instance: + +```python +# Get all pipelines +result = chronicle.list_log_processing_pipelines() +pipelines = result.get("logProcessingPipelines", []) + +for pipeline in pipelines: + pipeline_id = pipeline["name"].split("/")[-1] + print(f"Pipeline: {pipeline['displayName']} (ID: {pipeline_id})") + +# List with pagination +result = chronicle.list_log_processing_pipelines( + page_size=50, + page_token="next_page_token" +) +``` + +#### Get pipeline details + +Retrieve details about a specific pipeline: + +```python +# Get pipeline by ID +pipeline_id = "1234567890" +pipeline = chronicle.get_log_processing_pipeline(pipeline_id) + +print(f"Name: {pipeline['displayName']}") +print(f"Description: {pipeline.get('description', 'N/A')}") +print(f"Processors: {len(pipeline.get('processors', []))}") +``` + +#### Create a pipeline + +Create a new log processing pipeline with processors: + +```python +# Define pipeline configuration +pipeline_config = { + "displayName": "My Custom Pipeline", + "description": "Filters and transforms application logs", + "processors": [ + { + "filterProcessor": { + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*error.*", ".*warning.*"], + }, + "errorMode": "IGNORE", + } + } + ], + "customMetadata": [ + {"key": "environment", "value": "production"}, + {"key": "team", "value": "security"} + ] +} + +# Create the pipeline (server generates ID) +created_pipeline = chronicle.create_log_processing_pipeline( + pipeline=pipeline_config +) + +pipeline_id = created_pipeline["name"].split("/")[-1] +print(f"Created pipeline with ID: {pipeline_id}") +``` + +#### Update a pipeline + +Update an existing pipeline's configuration: + +```python +# Get the existing pipeline first +pipeline = chronicle.get_log_processing_pipeline(pipeline_id) + +# Update specific fields +updated_config = { + "name": pipeline["name"], + "description": "Updated description", + "processors": pipeline["processors"] +} + +# Patch with update mask +updated_pipeline = chronicle.patch_log_processing_pipeline( + pipeline_id=pipeline_id, + pipeline=updated_config, + update_mask="description" +) + +print(f"Updated: {updated_pipeline['displayName']}") +``` + +#### Delete a pipeline + +Delete an existing pipeline: + +```python +# Delete by ID +chronicle.delete_log_processing_pipeline(pipeline_id) +print("Pipeline deleted successfully") + +# Delete with etag for concurrency control +chronicle.delete_log_processing_pipeline( + pipeline_id=pipeline_id, + etag="etag_value" +) +``` + +#### Associate streams with a pipeline + +Associate log streams (by log type or feed) with a pipeline: + +```python +# Associate by log type +streams = [ + {"logType": "WINEVTLOG"}, + {"logType": "LINUX"} +] + +chronicle.associate_streams( + pipeline_id=pipeline_id, + streams=streams +) +print("Streams associated successfully") + +# Associate by feed ID +feed_streams = [ + {"feed": "feed-uuid-1"}, + {"feed": "feed-uuid-2"} +] + +chronicle.associate_streams( + pipeline_id=pipeline_id, + streams=feed_streams +) +``` + +#### Dissociate streams from a pipeline + +Remove stream associations from a pipeline: + +```python +# Dissociate streams +streams = [{"logType": "WINEVTLOG"}] + +chronicle.dissociate_streams( + pipeline_id=pipeline_id, + streams=streams +) +print("Streams dissociated successfully") +``` + +#### Fetch associated pipeline + +Find which pipeline is associated with a specific stream: + +```python +# Find pipeline for a log type +stream_query = 
{"logType": "WINEVTLOG"} +associated = chronicle.fetch_associated_pipeline(stream=stream_query) + +if associated: + print(f"Associated pipeline: {associated['name']}") +else: + print("No pipeline associated with this stream") + +# Find pipeline for a feed +feed_query = {"feed": "feed-uuid"} +associated = chronicle.fetch_associated_pipeline(stream=feed_query) +``` + +#### Fetch sample logs + +Retrieve sample logs for specific streams: + +```python +# Fetch sample logs for log types +streams = [ + {"logType": "WINEVTLOG"}, + {"logType": "LINUX"} +] + +result = chronicle.fetch_sample_logs_by_streams( + streams=streams, + sample_logs_count=10 +) + +for log in result.get("logs", []): + print(f"Log: {log}") +``` + +#### Test a pipeline + +Test a pipeline configuration against sample logs before deployment: + +```python +import base64 +from datetime import datetime, timezone + +# Define pipeline to test +pipeline_config = { + "displayName": "Test Pipeline", + "processors": [ + { + "filterProcessor": { + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", + } + } + ] +} + +# Create test logs with base64-encoded data +current_time = datetime.now(timezone.utc).isoformat() +log_data = base64.b64encode(b"Sample log entry").decode("utf-8") + +input_logs = [ + { + "data": log_data, + "logEntryTime": current_time, + "collectionTime": current_time, + } +] + +# Test the pipeline +result = chronicle.test_pipeline( + pipeline=pipeline_config, + input_logs=input_logs +) + +print(f"Processed {len(result.get('logs', []))} logs") +for processed_log in result.get("logs", []): + print(f"Result: {processed_log}") +``` + 5. Use custom timestamps: ```python from datetime import datetime, timedelta, timezone diff --git a/src/secops/chronicle/log_processing_pipelines.py b/src/secops/chronicle/log_processing_pipelines.py index 2b818d10..da33818a 100644 --- a/src/secops/chronicle/log_processing_pipelines.py +++ b/src/secops/chronicle/log_processing_pipelines.py @@ -341,7 +341,7 @@ def fetch_sample_logs_by_streams( f"logProcessingPipelines:fetchSampleLogsByStreams" ) - body: dict[str, Any] = {"streams": streams} + body = {"streams": streams} if sample_logs_count is not None: body["sampleLogsCount"] = sample_logs_count diff --git a/tests/chronicle/test_log_processing_pipeline_integration.py b/tests/chronicle/test_log_processing_pipeline_integration.py new file mode 100644 index 00000000..ac49ca19 --- /dev/null +++ b/tests/chronicle/test_log_processing_pipeline_integration.py @@ -0,0 +1,406 @@ +#!/usr/bin/env python3 +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Integration tests for log processing pipeline endpoints. + +These tests require valid credentials and API access. 
+""" +import pytest +import time +import uuid +import base64 +from datetime import datetime, timezone +from secops import SecOpsClient +from ..config import CHRONICLE_CONFIG, SERVICE_ACCOUNT_JSON +from secops.exceptions import APIError + + +@pytest.mark.integration +def test_log_processing_pipeline_crud_workflow(): + """Test CRUD workflow for log processing pipelines.""" + client = SecOpsClient(service_account_info=SERVICE_ACCOUNT_JSON) + chronicle = client.chronicle(**CHRONICLE_CONFIG) + + # Generate unique display name + unique_id = str(uuid.uuid4())[:8] + display_name = f"Test Pipeline {unique_id}" + + # Pipeline configuration + pipeline_config = { + "displayName": display_name, + "description": "Integration test pipeline", + "processors": [ + { + "filterProcessor": { + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", + } + } + ], + } + + created_pipeline = None + + try: + # Test CREATE + print(f"Creating pipeline: {display_name}") + created_pipeline = chronicle.create_log_processing_pipeline( + pipeline=pipeline_config + ) + + # Extract pipeline ID from the name + pipeline_id = created_pipeline.get("name", "").split("/")[-1] + + assert created_pipeline is not None + assert "name" in created_pipeline + assert created_pipeline.get("displayName") == display_name + print(f"Pipeline created: {created_pipeline['name']}") + + # Wait for pipeline to be fully created + time.sleep(2) + + # Test GET + print(f"Getting pipeline: {pipeline_id}") + retrieved_pipeline = chronicle.get_log_processing_pipeline(pipeline_id) + assert retrieved_pipeline is not None + assert retrieved_pipeline.get("displayName") == display_name + print(f"Pipeline retrieved: {retrieved_pipeline['name']}") + + # Test LIST + print("Listing pipelines") + list_result = chronicle.list_log_processing_pipelines(page_size=10) + assert "logProcessingPipelines" in list_result + pipelines = list_result["logProcessingPipelines"] + pipeline_ids = [p["name"].split("/")[-1] for p in pipelines] + assert pipeline_id in pipeline_ids + print(f"Found {len(pipelines)} pipelines") + + # Test PATCH + updated_display_name = f"Updated Pipeline {unique_id}" + updated_config = { + "name": created_pipeline.get("name"), + "displayName": updated_display_name, + "description": "Updated description", + "processors": created_pipeline.get("processors"), + } + print(f"Updating pipeline: {pipeline_id}") + updated_pipeline = chronicle.patch_log_processing_pipeline( + pipeline_id=pipeline_id, + pipeline=updated_config, + update_mask="displayName,description", + ) + assert updated_pipeline is not None + assert updated_pipeline.get("displayName") == updated_display_name + print(f"Pipeline updated: {updated_pipeline['displayName']}") + + # Verify update + time.sleep(2) + verified_pipeline = chronicle.get_log_processing_pipeline(pipeline_id) + assert verified_pipeline.get("displayName") == updated_display_name + print("Pipeline update verified") + + except APIError as e: + print(f"Pipeline CRUD test failed: {str(e)}") + pytest.fail(f"Pipeline CRUD test failed due to API error: {str(e)}") + + finally: + # Test DELETE - cleanup + if created_pipeline: + try: + print(f"Deleting pipeline: {pipeline_id}") + chronicle.delete_log_processing_pipeline(pipeline_id) + print("Pipeline deleted successfully") + + # Verify deletion + time.sleep(2) + try: + chronicle.get_log_processing_pipeline(pipeline_id) + pytest.fail("Pipeline still exists after deletion") + except APIError: + print("Pipeline deletion verified") + + except APIError as 
e: + print(f"Warning: Failed to delete test pipeline: {str(e)}") + + +@pytest.mark.integration +def test_log_processing_pipeline_stream_operations(): + """Test stream association and dissociation workflow.""" + client = SecOpsClient(service_account_info=SERVICE_ACCOUNT_JSON) + chronicle = client.chronicle(**CHRONICLE_CONFIG) + + # Generate unique display name + unique_id = str(uuid.uuid4())[:8] + display_name = f"Stream Test Pipeline {unique_id}" + + # Pipeline configuration + pipeline_config = { + "displayName": display_name, + "description": "Integration test for stream operations", + "processors": [ + { + "filterProcessor": { + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", + } + } + ], + } + + created_pipeline = None + + try: + # Create pipeline + print(f"Creating pipeline for stream test: {display_name}") + created_pipeline = chronicle.create_log_processing_pipeline( + pipeline=pipeline_config + ) + + # Extract pipeline ID from the name + pipeline_id = created_pipeline.get("name", "").split("/")[-1] + assert created_pipeline is not None + print(f"Pipeline created: {created_pipeline['name']}") + time.sleep(2) + + # Test ASSOCIATE STREAMS + streams = [{"logType": "WINEVTLOG"}] + print(f"Associating streams to pipeline: {pipeline_id}") + associate_result = chronicle.associate_streams( + pipeline_id=pipeline_id, streams=streams + ) + assert associate_result is not None + print("Streams associated successfully") + time.sleep(2) + + # Test FETCH ASSOCIATED PIPELINE + print("Fetching associated pipeline by stream") + stream_query = {"logType": "WINEVTLOG"} + associated_pipeline = chronicle.fetch_associated_pipeline( + stream=stream_query + ) + assert associated_pipeline is not None + print(f"Associated pipeline: {associated_pipeline.get('name', 'N/A')}") + + # Test DISSOCIATE STREAMS + print(f"Dissociating streams from pipeline: {pipeline_id}") + dissociate_result = chronicle.dissociate_streams( + pipeline_id=pipeline_id, streams=streams + ) + assert dissociate_result is not None + print("Streams dissociated successfully") + + except APIError as e: + print(f"Stream operations test failed: {str(e)}") + pytest.fail(f"Stream operations test failed due to API error: {str(e)}") + + finally: + # Cleanup + if created_pipeline: + try: + print(f"Deleting pipeline: {pipeline_id}") + chronicle.delete_log_processing_pipeline(pipeline_id) + print("Pipeline deleted successfully") + except APIError as e: + print(f"Warning: Failed to delete test pipeline: {str(e)}") + + +@pytest.mark.integration +def test_fetch_sample_logs_by_streams(): + """Test fetching sample logs by streams.""" + client = SecOpsClient(service_account_info=SERVICE_ACCOUNT_JSON) + chronicle = client.chronicle(**CHRONICLE_CONFIG) + + # Generate unique display name + unique_id = str(uuid.uuid4())[:8] + display_name = f"Test Sample Logs Pipeline {unique_id}" + + # Pipeline configuration + pipeline_config = { + "displayName": display_name, + "description": "Pipeline for testing sample logs", + "processors": [ + { + "filterProcessor": { + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", + } + } + ], + } + + pipeline_id = None + try: + # Create the pipeline + print(f"Creating pipeline: {display_name}") + created_pipeline = chronicle.create_log_processing_pipeline( + pipeline=pipeline_config + ) + pipeline_id = created_pipeline["name"].split("/")[-1] + print(f"Created pipeline with ID: {pipeline_id}") + + # Associate CS_EDR log type with the pipeline + 
streams = [{"logType": "CS_EDR"}] + print(f"Associating streams: {streams}") + chronicle.associate_streams(pipeline_id=pipeline_id, streams=streams) + print("Streams associated successfully") + + # Wait briefly for association to propagate + time.sleep(10) + + # Fetch sample logs for the log type + print(f"Fetching sample logs for streams: {streams}") + result = chronicle.fetch_sample_logs_by_streams( + streams=streams, sample_logs_count=5 + ) + + assert result is not None + if not result or ("logs" not in result and "sampleLogs" not in result): + pytest.skip("No sample logs found for CS_EDR log type") + + logs = result.get("logs", result.get("sampleLogs", [])) + print(f"Fetched sample logs: {len(logs)} logs") + assert len(logs) > 0, "Expected at least one sample log" + + except APIError as e: + print(f"Fetch sample logs test failed: {str(e)}") + pytest.skip( + f"Fetch sample logs test skipped due to API error: {str(e)}" + ) + + finally: + # Cleanup: Delete the created pipeline + if pipeline_id: + try: + print(f"Deleting pipeline: {pipeline_id}") + chronicle.delete_log_processing_pipeline(pipeline_id) + print("Test pipeline deleted successfully") + except APIError as e: + print(f"Warning: Failed to delete test pipeline: {str(e)}") + + +@pytest.mark.integration +def test_pipeline_testing_functionality(): + """Test the test_pipeline functionality.""" + client = SecOpsClient(service_account_info=SERVICE_ACCOUNT_JSON) + chronicle = client.chronicle(**CHRONICLE_CONFIG) + + # Pipeline configuration for testing + pipeline_config = { + "displayName": "Test Pipeline Config", + "processors": [ + { + "filterProcessor": { + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", + } + } + ], + } + + # Create test input logs + current_time = datetime.now(timezone.utc).isoformat() + log_data_1 = base64.b64encode(b"Sample log line 1").decode("utf-8") + log_data_2 = base64.b64encode(b"Sample log line 2").decode("utf-8") + + input_logs = [ + { + "data": log_data_1, + "logEntryTime": current_time, + "collectionTime": current_time, + }, + { + "data": log_data_2, + "logEntryTime": current_time, + "collectionTime": current_time, + }, + ] + + try: + print("Testing pipeline with input logs") + print(f"Pipeline: {pipeline_config['displayName']}") + print(f"Number of input logs: {len(input_logs)}") + + result = chronicle.test_pipeline( + pipeline=pipeline_config, input_logs=input_logs + ) + + assert result is not None + assert "logs" in result + + processed_logs = result.get("logs", []) + print(f"Pipeline test completed: {len(processed_logs)} logs processed") + + if processed_logs: + print("First processed log data present") + assert len(processed_logs) > 0 + + except APIError as e: + print(f"Test pipeline functionality failed: {str(e)}") + pytest.skip( + f"Test pipeline functionality skipped due to API error: {str(e)}" + ) + + +@pytest.mark.integration +def test_list_pipelines_with_pagination(): + """Test listing pipelines with pagination.""" + client = SecOpsClient(service_account_info=SERVICE_ACCOUNT_JSON) + chronicle = client.chronicle(**CHRONICLE_CONFIG) + + try: + # Get first page with small page size + print("Fetching first page of pipelines") + first_page = chronicle.list_log_processing_pipelines(page_size=1) + + assert first_page is not None + assert "logProcessingPipelines" in first_page + pipelines = first_page.get("logProcessingPipelines", []) + print(f"First page: {len(pipelines)} pipelines") + + # If there's a next page token, fetch next page + next_token = 
first_page.get("nextPageToken") + if next_token: + print("Fetching second page of pipelines") + second_page = chronicle.list_log_processing_pipelines( + page_size=1, page_token=next_token + ) + assert second_page is not None + pipelines_2 = second_page.get("logProcessingPipelines", []) + print(f"Second page: {len(pipelines_2)} pipelines") + + # Verify pagination works correctly + if pipelines and pipelines_2: + assert pipelines[0].get("name") != pipelines_2[0].get("name") + print("Pagination verified successfully") + else: + print("No second page available for pagination test") + + except APIError as e: + print(f"List pipelines pagination test failed: {str(e)}") + pytest.skip( + f"List pipelines pagination test skipped due to API error: {str(e)}" + ) diff --git a/tests/cli/test_log_processing_integration.py b/tests/cli/test_log_processing_integration.py new file mode 100644 index 00000000..1493ef4b --- /dev/null +++ b/tests/cli/test_log_processing_integration.py @@ -0,0 +1,655 @@ +"""Integration tests for the SecOps CLI log processing commands.""" + +import base64 +import json +import os +import subprocess +import time +import uuid +from datetime import datetime, timezone + +import pytest + +from tests.config import CHRONICLE_CONFIG + + +@pytest.mark.integration +def test_cli_log_processing_crud_workflow(cli_env, common_args, tmp_path): + """Test the log processing pipeline create, update, and delete.""" + unique_id = str(uuid.uuid4())[:8] + display_name = f"Test Pipeline {unique_id}" + + pipeline_config = { + "displayName": display_name, + "description": "CLI integration test pipeline", + "processors": [ + { + "filterProcessor": { + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", + } + } + ], + } + + config_file = tmp_path / "pipeline_config.json" + config_file.write_text(json.dumps(pipeline_config)) + + pipeline_id = None + + try: + create_cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "create", + "--pipeline", + str(config_file), + ] + ) + + create_result = subprocess.run( + create_cmd, env=cli_env, capture_output=True, text=True + ) + + assert create_result.returncode == 0 + + pipeline_data = json.loads(create_result.stdout) + assert "name" in pipeline_data + pipeline_id = pipeline_data["name"].split("/")[-1] + print(f"Created pipeline with ID: {pipeline_id}") + + time.sleep(2) + + updated_display_name = f"Updated Pipeline {unique_id}" + updated_config = { + "name": pipeline_data.get("name"), + "displayName": updated_display_name, + "description": "Updated CLI integration test pipeline", + "processors": pipeline_data.get("processors"), + } + + updated_config_file = tmp_path / "updated_pipeline_config.json" + updated_config_file.write_text(json.dumps(updated_config)) + + update_cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "update", + "--id", + pipeline_id, + "--pipeline", + str(updated_config_file), + "--update-mask", + "displayName,description", + ] + ) + + update_result = subprocess.run( + update_cmd, env=cli_env, capture_output=True, text=True + ) + + assert update_result.returncode == 0 + + updated_pipeline = json.loads(update_result.stdout) + assert updated_pipeline["displayName"] == updated_display_name + print(f"Updated pipeline to: {updated_display_name}") + + finally: + if pipeline_id: + delete_cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "delete", + "--id", + pipeline_id, + ] + ) + + delete_result = subprocess.run( + delete_cmd, env=cli_env, 
capture_output=True, text=True + ) + + if delete_result.returncode == 0: + print(f"Successfully deleted pipeline: {pipeline_id}") + else: + print(f"Failed to delete test pipeline: {delete_result.stderr}") + + +@pytest.mark.integration +def test_cli_log_processing_stream_operations(cli_env, common_args, tmp_path): + """Test stream association and dissociation commands.""" + unique_id = str(uuid.uuid4())[:8] + display_name = f"Stream Test Pipeline {unique_id}" + + pipeline_config = { + "displayName": display_name, + "description": "CLI test for stream operations", + "processors": [ + { + "filterProcessor": { + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", + } + } + ], + } + + config_file = tmp_path / "pipeline_config.json" + config_file.write_text(json.dumps(pipeline_config)) + + pipeline_id = None + + try: + create_cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "create", + "--pipeline", + str(config_file), + ] + ) + + create_result = subprocess.run( + create_cmd, env=cli_env, capture_output=True, text=True + ) + + assert create_result.returncode == 0 + + pipeline_data = json.loads(create_result.stdout) + pipeline_id = pipeline_data["name"].split("/")[-1] + print(f"Created pipeline with ID: {pipeline_id}") + + time.sleep(2) + + streams = [{"logType": "WINEVTLOG"}] + streams_file = tmp_path / "streams.json" + streams_file.write_text(json.dumps(streams)) + + associate_cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "associate-streams", + "--id", + pipeline_id, + "--streams", + str(streams_file), + ] + ) + + associate_result = subprocess.run( + associate_cmd, env=cli_env, capture_output=True, text=True + ) + + assert associate_result.returncode == 0 + print("Streams associated successfully") + + time.sleep(2) + + dissociate_cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "dissociate-streams", + "--id", + pipeline_id, + "--streams", + str(streams_file), + ] + ) + + dissociate_result = subprocess.run( + dissociate_cmd, env=cli_env, capture_output=True, text=True + ) + + assert dissociate_result.returncode == 0 + print("Streams dissociated successfully") + + finally: + if pipeline_id: + delete_cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "delete", + "--id", + pipeline_id, + ] + ) + + delete_result = subprocess.run( + delete_cmd, env=cli_env, capture_output=True, text=True + ) + + if delete_result.returncode == 0: + print(f"Successfully deleted pipeline: {pipeline_id}") + else: + print(f"Failed to delete test pipeline: {delete_result.stderr}") + + +@pytest.mark.integration +def test_cli_log_processing_fetch_associated(cli_env, common_args, tmp_path): + """Test fetch associated pipeline command.""" + unique_id = str(uuid.uuid4())[:8] + display_name = f"Fetch Test Pipeline {unique_id}" + + pipeline_config = { + "displayName": display_name, + "description": "CLI test for fetch associated", + "processors": [ + { + "filterProcessor": { + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", + } + } + ], + } + + config_file = tmp_path / "pipeline_config.json" + config_file.write_text(json.dumps(pipeline_config)) + + pipeline_id = None + + try: + create_cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "create", + "--pipeline", + str(config_file), + ] + ) + + create_result = subprocess.run( + create_cmd, env=cli_env, capture_output=True, text=True + ) + + assert create_result.returncode == 0 + + 
pipeline_data = json.loads(create_result.stdout) + pipeline_id = pipeline_data["name"].split("/")[-1] + print(f"Created pipeline with ID: {pipeline_id}") + + time.sleep(2) + + streams = [{"logType": "WINEVTLOG"}] + streams_file = tmp_path / "streams.json" + streams_file.write_text(json.dumps(streams)) + + associate_cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "associate-streams", + "--id", + pipeline_id, + "--streams", + str(streams_file), + ] + ) + + associate_result = subprocess.run( + associate_cmd, env=cli_env, capture_output=True, text=True + ) + + assert associate_result.returncode == 0 + print("Streams associated successfully") + + time.sleep(2) + + stream_query = {"logType": "WINEVTLOG"} + stream_file = tmp_path / "stream_query.json" + stream_file.write_text(json.dumps(stream_query)) + + fetch_cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "fetch-associated", + "--stream", + str(stream_file), + ] + ) + + fetch_result = subprocess.run( + fetch_cmd, env=cli_env, capture_output=True, text=True + ) + + assert fetch_result.returncode == 0 + + associated_pipeline = json.loads(fetch_result.stdout) + assert "name" in associated_pipeline + print(f"Fetched associated pipeline: {associated_pipeline['name']}") + + finally: + if pipeline_id: + delete_cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "delete", + "--id", + pipeline_id, + ] + ) + + delete_result = subprocess.run( + delete_cmd, env=cli_env, capture_output=True, text=True + ) + + if delete_result.returncode == 0: + print(f"Successfully deleted pipeline: {pipeline_id}") + else: + print(f"Failed to delete test pipeline: {delete_result.stderr}") + + +@pytest.mark.integration +def test_cli_log_processing_fetch_sample_logs(cli_env, common_args, tmp_path): + """Test fetch sample logs command.""" + # Generate unique display name + unique_id = str(uuid.uuid4())[:8] + display_name = f"CLI Test Sample Logs Pipeline {unique_id}" + + # Pipeline configuration + pipeline_config = { + "displayName": display_name, + "description": "CLI test pipeline for sample logs", + "processors": [ + { + "filterProcessor": { + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", + } + } + ], + } + + pipeline_config_file = tmp_path / "pipeline_config.json" + pipeline_config_file.write_text(json.dumps(pipeline_config)) + + pipeline_id = None + try: + # Create pipeline + create_cmd = ( + ["secops"] + + common_args + + [ + "log-processing", + "create", + "--pipeline", + str(pipeline_config_file), + ] + ) + + print(f"Creating pipeline: {display_name}") + create_result = subprocess.run( + create_cmd, env=cli_env, capture_output=True, text=True + ) + + if create_result.returncode != 0: + pytest.skip(f"Failed to create pipeline: {create_result.stderr}") + + created_pipeline = json.loads(create_result.stdout) + pipeline_id = created_pipeline["name"].split("/")[-1] + print(f"Created pipeline with ID: {pipeline_id}") + + # Associate CS_EDR log type with pipeline + streams = [{"logType": "CS_EDR"}] + streams_file = tmp_path / "streams.json" + streams_file.write_text(json.dumps(streams)) + + associate_cmd = ( + ["secops"] + + common_args + + [ + "log-processing", + "associate-streams", + "--id", + pipeline_id, + "--streams", + str(streams_file), + ] + ) + + print(f"Associating streams: {streams}") + associate_result = subprocess.run( + associate_cmd, env=cli_env, capture_output=True, text=True + ) + + if associate_result.returncode != 0: + pytest.skip( + 
f"Failed to associate streams: {associate_result.stderr}" + ) + + print("Streams associated successfully") + + # Wait for association to propagate + time.sleep(10) + + # Fetch sample logs + fetch_cmd = ( + ["secops"] + + common_args + + [ + "log-processing", + "fetch-sample-logs", + "--streams", + str(streams_file), + "--count", + "5", + ] + ) + + print(f"Fetching sample logs for streams: {streams}") + result = subprocess.run( + fetch_cmd, env=cli_env, capture_output=True, text=True + ) + + if result.returncode == 0: + output = json.loads(result.stdout) + if not output or ( + "logs" not in output and "sampleLogs" not in output + ): + pytest.skip("No sample logs available for CS_EDR log type") + + logs = output.get("logs", output.get("sampleLogs", [])) + print(f"Fetched sample logs: {len(logs)} logs") + assert len(logs) > 0, "Expected at least one sample log" + else: + pytest.skip(f"Fetch sample logs command skipped: {result.stderr}") + + finally: + # Cleanup: Delete the created pipeline + if pipeline_id: + delete_cmd = ( + ["secops"] + + common_args + + ["log-processing", "delete", "--id", pipeline_id] + ) + + print(f"Deleting pipeline: {pipeline_id}") + delete_result = subprocess.run( + delete_cmd, env=cli_env, capture_output=True, text=True + ) + + if delete_result.returncode == 0: + print("Test pipeline deleted successfully") + else: + print( + f"Warning: Failed to delete test pipeline: " + f"{delete_result.stderr}" + ) + + +@pytest.mark.integration +def test_cli_log_processing_test_pipeline(cli_env, common_args, tmp_path): + """Test the test pipeline command.""" + pipeline_config = { + "displayName": "Test Pipeline Config", + "processors": [ + { + "filterProcessor": { + "include": { + "logMatchType": "REGEXP", + "logBodies": [".*"], + }, + "errorMode": "IGNORE", + } + } + ], + } + + current_time = datetime.now(timezone.utc).isoformat() + log_data_1 = base64.b64encode(b"Sample log line 1").decode("utf-8") + log_data_2 = base64.b64encode(b"Sample log line 2").decode("utf-8") + + input_logs = [ + { + "data": log_data_1, + "logEntryTime": current_time, + "collectionTime": current_time, + }, + { + "data": log_data_2, + "logEntryTime": current_time, + "collectionTime": current_time, + }, + ] + + config_file = tmp_path / "pipeline_config.json" + config_file.write_text(json.dumps(pipeline_config)) + + logs_file = tmp_path / "input_logs.json" + logs_file.write_text(json.dumps(input_logs)) + + cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "test", + "--pipeline", + str(config_file), + "--input-logs", + str(logs_file), + ] + ) + + result = subprocess.run(cmd, env=cli_env, capture_output=True, text=True) + + if result.returncode == 0: + output = json.loads(result.stdout) + assert "logs" in output + print( + f"Pipeline test completed: {len(output.get('logs', []))} processed" + ) + else: + pytest.skip(f"Test pipeline command skipped: {result.stderr}") + + +@pytest.mark.integration +def test_cli_log_processing_list_with_pagination(cli_env, common_args): + """Test listing pipelines with pagination.""" + cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "list", + "--page-size", + "1", + ] + ) + + result = subprocess.run(cmd, env=cli_env, capture_output=True, text=True) + + assert result.returncode == 0 + + output = json.loads(result.stdout) + assert "logProcessingPipelines" in output + pipelines = output.get("logProcessingPipelines", []) + print(f"First page: {len(pipelines)} pipelines") + + if "nextPageToken" in output: + next_page_token = 
output["nextPageToken"] + + next_cmd = ( + [ + "secops", + ] + + common_args + + [ + "log-processing", + "list", + "--page-size", + "1", + "--page-token", + next_page_token, + ] + ) + + next_result = subprocess.run( + next_cmd, env=cli_env, capture_output=True, text=True + ) + + assert next_result.returncode == 0 + + next_output = json.loads(next_result.stdout) + assert "logProcessingPipelines" in next_output + next_pipelines = next_output.get("logProcessingPipelines", []) + print(f"Second page: {len(next_pipelines)} pipelines") From d5c0d99383f8cd89bfce15e40cc674c803df7973 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Tue, 16 Dec 2025 17:37:20 +0530 Subject: [PATCH 39/48] chore: updated doc string --- src/secops/chronicle/client.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/secops/chronicle/client.py b/src/secops/chronicle/client.py index 01c8aa5a..1ee62ccd 100644 --- a/src/secops/chronicle/client.py +++ b/src/secops/chronicle/client.py @@ -1468,7 +1468,11 @@ def create_log_processing_pipeline( """Creates a new log processing pipeline. Args: - pipeline: Pipeline configuration dict. + pipeline: Pipeline configuration dict containing: + - displayName: Display name for the pipeline + - description: Optional description + - processors: List of processor configurations + - customMetadata: Optional custom metadata list pipeline_id: Optional ID for the pipeline. Returns: @@ -1489,7 +1493,11 @@ def patch_log_processing_pipeline( Args: pipeline_id: ID of the pipeline to update. - pipeline: Pipeline configuration with fields to update. + pipeline: Pipeline configuration with fields to update containing: + - displayName: Display name for the pipeline + - description: Optional description + - processors: List of processor configurations + - customMetadata: Optional custom metadata list update_mask: Optional comma-separated list of fields. Returns: From 1d0400f3a0284ace52b5211be0a416a2ce94d8e1 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Wed, 17 Dec 2025 12:10:35 +0530 Subject: [PATCH 40/48] chore: renamed patch to update. added changelog. updated project version. refactored and formatting. --- CHANGELOG.md | 14 ++++++++++ README.md | 2 +- examples/log_processing_pipelines_example.py | 2 +- pyproject.toml | 2 +- src/secops/chronicle/__init__.py | 4 +-- src/secops/chronicle/client.py | 6 ++--- .../chronicle/log_processing_pipelines.py | 26 +++++++++++-------- src/secops/cli/commands/log_processing.py | 2 +- src/secops/cli/utils/input_utils.py | 4 +-- .../chronicle/test_log_processing_pipeline.py | 26 +++++++++---------- ...est_log_processing_pipeline_integration.py | 2 +- 11 files changed, 54 insertions(+), 36 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5d985ea5..90bfd2f7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,20 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## [0.29.0] - 2025-12-17 +### Added +- Support for following log/data processing pipeline methods: + - List pipelines + - Create pipeline + - Get pipeline details + - Update pipeline + - Delete pipeline + - Associate stream to pipeline + - Dissociate stream from pipeline + - Fetch associated pipeline using stream + - Fetch sample logs by stream + - Test pipeline + ## [0.28.1] - 2025-12-11 ### Updated - CLI to show help when required sub-command/argument not provided. diff --git a/README.md b/README.md index ec5ac8f7..160d3a5a 100644 --- a/README.md +++ b/README.md @@ -609,7 +609,7 @@ updated_config = { } # Patch with update mask -updated_pipeline = chronicle.patch_log_processing_pipeline( +updated_pipeline = chronicle.update_log_processing_pipeline( pipeline_id=pipeline_id, pipeline=updated_config, update_mask="description" diff --git a/examples/log_processing_pipelines_example.py b/examples/log_processing_pipelines_example.py index 97a7c5f4..4eff92ce 100644 --- a/examples/log_processing_pipelines_example.py +++ b/examples/log_processing_pipelines_example.py @@ -182,7 +182,7 @@ def example_update_pipeline(chronicle): } print("\nUpdating pipeline...") - updated_pipeline = chronicle.patch_log_processing_pipeline( + updated_pipeline = chronicle.update_log_processing_pipeline( pipeline_id=pipeline_id, pipeline=updated_pipeline_config, update_mask="displayName,description", diff --git a/pyproject.toml b/pyproject.toml index 4388a25d..897997ad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "secops" -version = "0.28.1" +version = "0.29.0" description = "Python SDK for wrapping the Google SecOps API for common use cases" readme = "README.md" requires-python = ">=3.10" diff --git a/src/secops/chronicle/__init__.py b/src/secops/chronicle/__init__.py index b8fe7dc4..e8e82d6c 100644 --- a/src/secops/chronicle/__init__.py +++ b/src/secops/chronicle/__init__.py @@ -87,7 +87,7 @@ fetch_sample_logs_by_streams, get_log_processing_pipeline, list_log_processing_pipelines, - patch_log_processing_pipeline, + update_log_processing_pipeline, test_pipeline, ) from secops.chronicle.models import ( @@ -326,7 +326,7 @@ "list_log_processing_pipelines", "get_log_processing_pipeline", "create_log_processing_pipeline", - "patch_log_processing_pipeline", + "update_log_processing_pipeline", "delete_log_processing_pipeline", "associate_streams", "dissociate_streams", diff --git a/src/secops/chronicle/client.py b/src/secops/chronicle/client.py index 1ee62ccd..22f07549 100644 --- a/src/secops/chronicle/client.py +++ b/src/secops/chronicle/client.py @@ -139,7 +139,7 @@ list_log_processing_pipelines as _list_log_processing_pipelines, ) from secops.chronicle.log_processing_pipelines import ( - patch_log_processing_pipeline as _patch_log_processing_pipeline, + update_log_processing_pipeline as _update_log_processing_pipeline, ) from secops.chronicle.log_processing_pipelines import ( test_pipeline as _test_pipeline, @@ -1483,7 +1483,7 @@ def create_log_processing_pipeline( """ return _create_log_processing_pipeline(self, pipeline, pipeline_id) - def patch_log_processing_pipeline( + def update_log_processing_pipeline( self, pipeline_id: str, pipeline: dict[str, Any], @@ -1506,7 +1506,7 @@ def patch_log_processing_pipeline( Raises: APIError: If the API request fails. 
""" - return _patch_log_processing_pipeline( + return _update_log_processing_pipeline( self, pipeline_id, pipeline, update_mask ) diff --git a/src/secops/chronicle/log_processing_pipelines.py b/src/secops/chronicle/log_processing_pipelines.py index da33818a..3d2779f7 100644 --- a/src/secops/chronicle/log_processing_pipelines.py +++ b/src/secops/chronicle/log_processing_pipelines.py @@ -20,7 +20,7 @@ def list_log_processing_pipelines( - client, + client: "ChronicleClient", page_size: int | None = None, page_token: str | None = None, filter_expr: str | None = None, @@ -62,7 +62,9 @@ def list_log_processing_pipelines( return response.json() -def get_log_processing_pipeline(client, pipeline_id: str) -> dict[str, Any]: +def get_log_processing_pipeline( + client: "ChronicleClient", pipeline_id: str +) -> dict[str, Any]: """Gets a log processing pipeline by ID. Args: @@ -93,7 +95,7 @@ def get_log_processing_pipeline(client, pipeline_id: str) -> dict[str, Any]: def create_log_processing_pipeline( - client, + client: "ChronicleClient", pipeline: dict[str, Any], pipeline_id: str | None = None, ) -> dict[str, Any]: @@ -130,8 +132,8 @@ def create_log_processing_pipeline( return response.json() -def patch_log_processing_pipeline( - client, +def update_log_processing_pipeline( + client: "ChronicleClient", pipeline_id: str, pipeline: dict[str, Any], update_mask: str | None = None, @@ -175,7 +177,7 @@ def patch_log_processing_pipeline( def delete_log_processing_pipeline( - client, pipeline_id: str, etag: str | None = None + client: "ChronicleClient", pipeline_id: str, etag: str | None = None ) -> dict[str, Any]: """Deletes a log processing pipeline. @@ -213,7 +215,7 @@ def delete_log_processing_pipeline( def associate_streams( - client, pipeline_id: str, streams: list[dict[str, Any]] + client: "ChronicleClient", pipeline_id: str, streams: list[dict[str, Any]] ) -> dict[str, Any]: """Associates streams with a log processing pipeline. @@ -247,7 +249,7 @@ def associate_streams( def dissociate_streams( - client, pipeline_id: str, streams: list[dict[str, Any]] + client: "ChronicleClient", pipeline_id: str, streams: list[dict[str, Any]] ) -> dict[str, Any]: """Dissociates streams from a log processing pipeline. @@ -281,7 +283,9 @@ def dissociate_streams( return response.json() -def fetch_associated_pipeline(client, stream: dict[str, Any]) -> dict[str, Any]: +def fetch_associated_pipeline( + client: "ChronicleClient", stream: dict[str, Any] +) -> dict[str, Any]: """Fetches the pipeline associated with a specific stream. 
Args: @@ -314,7 +318,7 @@ def fetch_associated_pipeline(client, stream: dict[str, Any]) -> dict[str, Any]: def fetch_sample_logs_by_streams( - client, + client: "ChronicleClient", streams: list[dict[str, Any]], sample_logs_count: int | None = None, ) -> dict[str, Any]: @@ -355,7 +359,7 @@ def fetch_sample_logs_by_streams( def test_pipeline( - client, + client: "ChronicleClient", pipeline: dict[str, Any], input_logs: list[dict[str, Any]], ) -> dict[str, Any]: diff --git a/src/secops/cli/commands/log_processing.py b/src/secops/cli/commands/log_processing.py index 25b5ca7d..489c0710 100644 --- a/src/secops/cli/commands/log_processing.py +++ b/src/secops/cli/commands/log_processing.py @@ -226,7 +226,7 @@ def handle_update_command(args, chronicle): print("Error: pipeline must be a JSON object", file=sys.stderr) sys.exit(1) - result = chronicle.patch_log_processing_pipeline( + result = chronicle.update_log_processing_pipeline( pipeline_id=args.id, pipeline=pipeline_config, update_mask=args.update_mask, diff --git a/src/secops/cli/utils/input_utils.py b/src/secops/cli/utils/input_utils.py index f810c72a..8e666a6a 100644 --- a/src/secops/cli/utils/input_utils.py +++ b/src/secops/cli/utils/input_utils.py @@ -40,7 +40,7 @@ def load_json_or_file(value: str) -> Any: except json.JSONDecodeError as e: print(f"Error parsing JSON from file: {e}", file=sys.stderr) sys.exit(1) - except Exception as e: + except Exception as e: # pylint: disable=broad-exception-caught print(f"Error reading file: {e}", file=sys.stderr) sys.exit(1) @@ -72,7 +72,7 @@ def load_string_or_file(value: str) -> str: if file_path.exists() and file_path.is_file(): with open(file_path, encoding="utf-8") as f: return f.read() - except Exception as e: + except Exception as e: # pylint: disable=broad-exception-caught print(f"Error reading file: {e}", file=sys.stderr) sys.exit(1) diff --git a/tests/chronicle/test_log_processing_pipeline.py b/tests/chronicle/test_log_processing_pipeline.py index 8e3f10f4..15a427ae 100644 --- a/tests/chronicle/test_log_processing_pipeline.py +++ b/tests/chronicle/test_log_processing_pipeline.py @@ -22,7 +22,7 @@ list_log_processing_pipelines, get_log_processing_pipeline, create_log_processing_pipeline, - patch_log_processing_pipeline, + update_log_processing_pipeline, delete_log_processing_pipeline, associate_streams, dissociate_streams, @@ -225,8 +225,8 @@ def test_create_log_processing_pipeline_error( assert "Failed to create log processing pipeline" in str(exc_info.value) -def test_patch_log_processing_pipeline(chronicle_client, mock_response): - """Test patch_log_processing_pipeline function.""" +def test_update_log_processing_pipeline(chronicle_client, mock_response): + """Test update_log_processing_pipeline function.""" pipeline_id = "pipeline_12345" pipeline_config = { "name": "projects/test-project/locations/us/instances/test-customer/logProcessingPipelines/pipeline_12345", @@ -237,7 +237,7 @@ def test_patch_log_processing_pipeline(chronicle_client, mock_response): with patch.object( chronicle_client.session, "patch", return_value=mock_response ) as mock_patch: - result = patch_log_processing_pipeline( + result = update_log_processing_pipeline( chronicle_client, pipeline_id, pipeline_config ) @@ -249,10 +249,10 @@ def test_patch_log_processing_pipeline(chronicle_client, mock_response): assert result == mock_response.json.return_value -def test_patch_log_processing_pipeline_with_update_mask( +def test_update_log_processing_pipeline_with_update_mask( chronicle_client, mock_response ): - """Test 
patch_log_processing_pipeline with update mask.""" + """Test update_log_processing_pipeline with update mask.""" pipeline_id = "pipeline_12345" pipeline_config = { "name": "projects/test-project/locations/us/instances/test-customer/logProcessingPipelines/pipeline_12345", @@ -263,7 +263,7 @@ def test_patch_log_processing_pipeline_with_update_mask( with patch.object( chronicle_client.session, "patch", return_value=mock_response ) as mock_patch: - result = patch_log_processing_pipeline( + result = update_log_processing_pipeline( chronicle_client, pipeline_id, pipeline_config, @@ -278,10 +278,10 @@ def test_patch_log_processing_pipeline_with_update_mask( assert result == mock_response.json.return_value -def test_patch_log_processing_pipeline_with_full_name( +def test_update_log_processing_pipeline_with_full_name( chronicle_client, mock_response ): - """Test patch_log_processing_pipeline with full resource name.""" + """Test update_log_processing_pipeline with full resource name.""" full_name = "projects/test-project/locations/us/instances/test-customer/logProcessingPipelines/pipeline_12345" pipeline_config = { "name": full_name, @@ -291,7 +291,7 @@ def test_patch_log_processing_pipeline_with_full_name( with patch.object( chronicle_client.session, "patch", return_value=mock_response ) as mock_patch: - result = patch_log_processing_pipeline( + result = update_log_processing_pipeline( chronicle_client, full_name, pipeline_config ) @@ -303,10 +303,10 @@ def test_patch_log_processing_pipeline_with_full_name( assert result == mock_response.json.return_value -def test_patch_log_processing_pipeline_error( +def test_update_log_processing_pipeline_error( chronicle_client, mock_error_response ): - """Test patch_log_processing_pipeline with error response.""" + """Test update_log_processing_pipeline with error response.""" pipeline_id = "pipeline_12345" pipeline_config = {"displayName": "Updated Pipeline"} @@ -314,7 +314,7 @@ def test_patch_log_processing_pipeline_error( chronicle_client.session, "patch", return_value=mock_error_response ): with pytest.raises(APIError) as exc_info: - patch_log_processing_pipeline( + update_log_processing_pipeline( chronicle_client, pipeline_id, pipeline_config ) diff --git a/tests/chronicle/test_log_processing_pipeline_integration.py b/tests/chronicle/test_log_processing_pipeline_integration.py index ac49ca19..8781b9a6 100644 --- a/tests/chronicle/test_log_processing_pipeline_integration.py +++ b/tests/chronicle/test_log_processing_pipeline_integration.py @@ -99,7 +99,7 @@ def test_log_processing_pipeline_crud_workflow(): "processors": created_pipeline.get("processors"), } print(f"Updating pipeline: {pipeline_id}") - updated_pipeline = chronicle.patch_log_processing_pipeline( + updated_pipeline = chronicle.update_log_processing_pipeline( pipeline_id=pipeline_id, pipeline=updated_config, update_mask="displayName,description", From b1c973fa72b4a910cea6329e0b39df5c03a2d3b7 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Wed, 17 Dec 2025 12:19:10 +0530 Subject: [PATCH 41/48] chore: added API mapping --- api_module_mapping.md | 583 +++++++++++++++++++++--------------------- 1 file changed, 296 insertions(+), 287 deletions(-) diff --git a/api_module_mapping.md b/api_module_mapping.md index d2c7681b..1986387d 100644 --- a/api_module_mapping.md +++ b/api_module_mapping.md @@ -1,7 +1,6 @@ # SecOps API Endpoint and SDK Wrapper Module Mapping -Following shows mapping between SecOps [REST 
Resource](https://cloud.google.com/chronicle/docs/reference/rest) and SDK -wrapper module and its respective CLI command (if available). +Following shows mapping between SecOps [REST Resource](https://cloud.google.com/chronicle/docs/reference/rest) and SDK wrapper module and its respective CLI command (if available). **Note:** All the REST resources mentioned have suffix `projects.locations.instances`. @@ -95,288 +94,298 @@ wrapper module and its respective CLI command (if available). | curatedRuleSetCategories.list | v1alpha | chronicle.rule_set.list_curated_rule_set_categories | secops curated-rule rule-set-category list | | curatedRules.get | v1alpha | chronicle.rule_set.get_curated_rule
chronicle.rule_set.get_curated_rule_by_name | secops curated-rule rule get | | curatedRules.list | v1alpha | chronicle.rule_set.list_curated_rules | secops curated-rule rule list | -| dashboardCharts.batchGet | v1alpha | | | -| dashboardCharts.get | v1alpha | chronicle.dashboard.get_chart | secops dashboard get-chart | -| dashboardQueries.execute | v1alpha | chronicle.dashboard_query.execute_query | secops dashboard-query execute | -| dashboardQueries.get | v1alpha | chronicle.dashboard_query.get_execute_query | secops dashboard-query get | -| dashboards.copy | v1alpha | | | -| dashboards.create | v1alpha | | | -| dashboards.delete | v1alpha | | | -| dashboards.get | v1alpha | | | -| dashboards.list | v1alpha | | | -| dataAccessLabels.create | v1alpha | | | -| dataAccessLabels.delete | v1alpha | | | -| dataAccessLabels.get | v1alpha | | | -| dataAccessLabels.list | v1alpha | | | -| dataAccessLabels.patch | v1alpha | | | -| dataAccessScopes.create | v1alpha | | | -| dataAccessScopes.delete | v1alpha | | | -| dataAccessScopes.get | v1alpha | | | -| dataAccessScopes.list | v1alpha | | | -| dataAccessScopes.patch | v1alpha | | | -| dataExports.cancel | v1alpha | chronicle.data_export.cancel_data_export | secops export cancel | -| dataExports.create | v1alpha | chronicle.data_export.create_data_export | secops export create | -| dataExports.fetchavailablelogtypes | v1alpha | chronicle.data_export.fetch_available_log_types | secops export log-types | -| dataExports.get | v1alpha | chronicle.data_export.get_data_export | secops export status | -| dataExports.list | v1alpha | chronicle.data_export.list_data_export | secops export list | -| dataExports.patch | v1alpha | chronicle.data_export.update_data_export | secops export update | -| dataTableOperationErrors.get | v1alpha | | | -| dataTables.create | v1alpha | chronicle.data_table.create_data_table | secops data-table create | -| dataTables.dataTableRows.bulkCreate | v1alpha | chronicle.data_table.create_data_table_rows | secops data-table add-rows | -| dataTables.dataTableRows.bulkCreateAsync | v1alpha | | | -| dataTables.dataTableRows.bulkGet | v1alpha | | | -| dataTables.dataTableRows.bulkReplace | v1alpha | chronicle.data_table.replace_data_table_rows | secops data-table replace-rows | -| dataTables.dataTableRows.bulkReplaceAsync | v1alpha | | | -| dataTables.dataTableRows.bulkUpdate | v1alpha | chronicle.data_table.update_data_table_rows | secops data-table update-rows | -| dataTables.dataTableRows.bulkUpdateAsync | v1alpha | | | -| dataTables.dataTableRows.create | v1alpha | | | -| dataTables.dataTableRows.delete | v1alpha | chronicle.data_table.delete_data_table_rows | secops data-table delete-rows | -| dataTables.dataTableRows.get | v1alpha | | | -| dataTables.dataTableRows.list | v1alpha | chronicle.data_table.list_data_table_rows | secops data-table list-rows | -| dataTables.dataTableRows.patch | v1alpha | | | -| dataTables.delete | v1alpha | chronicle.data_table.delete_data_table | secops data-table delete | -| dataTables.get | v1alpha | chronicle.data_table.get_data_table | secops data-table get | -| dataTables.list | v1alpha | chronicle.data_table.list_data_tables | secops data-table list | -| dataTables.patch | v1alpha | | | -| dataTables.upload | v1alpha | | | -| dataTaps.create | v1alpha | | | -| dataTaps.delete | v1alpha | | | -| dataTaps.get | v1alpha | | | -| dataTaps.list | v1alpha | | | -| dataTaps.patch | v1alpha | | | -| delete | v1alpha | | | -| enrichmentControls.create | v1alpha | | | -| enrichmentControls.delete | 
v1alpha | | | -| enrichmentControls.get | v1alpha | | | -| enrichmentControls.list | v1alpha | | | -| entities.get | v1alpha | | | -| entities.import | v1alpha | chronicle.log_ingest.import_entities | secops entity import | -| entities.modifyEntityRiskScore | v1alpha | | | -| entities.queryEntityRiskScoreModifications | v1alpha | | | -| entityRiskScores.query | v1alpha | | | -| errorNotificationConfigs.create | v1alpha | | | -| errorNotificationConfigs.delete | v1alpha | | | -| errorNotificationConfigs.get | v1alpha | | | -| errorNotificationConfigs.list | v1alpha | | | -| errorNotificationConfigs.patch | v1alpha | | | -| events.batchGet | v1alpha | | | -| events.get | v1alpha | | | -| events.import | v1alpha | chronicle.log_ingest.ingest_udm | secops log ingest-udm | -| extractSyslog | v1alpha | | | -| federationGroups.create | v1alpha | | | -| federationGroups.delete | v1alpha | | | -| federationGroups.get | v1alpha | | | -| federationGroups.list | v1alpha | | | -| federationGroups.patch | v1alpha | | | -| feedPacks.get | v1alpha | | | -| feedPacks.list | v1alpha | | | -| feedServiceAccounts.fetchServiceAccountForCustomer | v1alpha | | | -| feedSourceTypeSchemas.list | v1alpha | | | -| feedSourceTypeSchemas.logTypeSchemas.list | v1alpha | | | -| feeds.create | v1alpha | chronicle.feeds.create_feed | secops feed create | -| feeds.delete | v1alpha | chronicle.feeds.delete_feed | secops feed delete | -| feeds.disable | v1alpha | chronicle.feeds.disable_feed | secops feed disable | -| feeds.enable | v1alpha | chronicle.feeds.enable_feed | secops feed enable | -| feeds.generateSecret | v1alpha | chronicle.feeds.generate_secret | secops feed secret | -| feeds.get | v1alpha | chronicle.feeds.get_feed | secops feed get | -| feeds.importPushLogs | v1alpha | | | -| feeds.list | v1alpha | chronicle.feeds.list_feeds | secops feed list | -| feeds.patch | v1alpha | chronicle.feeds.update_feed | secops feed update | -| feeds.scheduleTransfer | v1alpha | | | -| fetchFederationAccess | v1alpha | | | -| findEntity | v1alpha | | | -| findEntityAlerts | v1alpha | | | -| findRelatedEntities | v1alpha | | | -| findUdmFieldValues | v1alpha | | | -| findingsGraph.exploreNode | v1alpha | | | -| findingsGraph.initializeGraph | v1alpha | | | -| findingsRefinements.computeFindingsRefinementActivity | v1alpha | chronicle.rule_exclusion.compute_rule_exclusion_activity | secops rule-exclusion compute-activity | -| findingsRefinements.create | v1alpha | chronicle.rule_exclusion.create_rule_exclusion | secops rule-exclusion create | -| findingsRefinements.get | v1alpha | chronicle.rule_exclusion.get_rule_exclusion | secops rule-exclusion get | -| findingsRefinements.getDeployment | v1alpha | chronicle.rule_exclusion.get_rule_exclusion_deployment | secops rule-exclusion get-deployment | -| findingsRefinements.list | v1alpha | chronicle.rule_exclusion.list_rule_exclusions | secops rule-exclusion list | -| findingsRefinements.patch | v1alpha | chronicle.rule_exclusion.patch_rule_exclusion | secops rule-exclusion update | -| findingsRefinements.updateDeployment | v1alpha | chronicle.rule_exclusion.update_rule_exclusion_deployment | secops rule-exclusion update-deployment | -| forwarders.collectors.create | v1alpha | | | -| forwarders.collectors.delete | v1alpha | | | -| forwarders.collectors.get | v1alpha | | | -| forwarders.collectors.list | v1alpha | | | -| forwarders.collectors.patch | v1alpha | | | -| forwarders.create | v1alpha | chronicle.log_ingest.create_forwarder | secops forwarder create | -| forwarders.delete | 
v1alpha | chronicle.log_ingest.delete_forwarder | secops forwarder delete | -| forwarders.generateForwarderFiles | v1alpha | | | -| forwarders.get | v1alpha | chronicle.log_ingest.get_forwarder | secops forwarder get | -| forwarders.importStatsEvents | v1alpha | | | -| forwarders.list | v1alpha | chronicle.log_ingest.list_forwarder | secops forwarder list | -| forwarders.patch | v1alpha | chronicle.log_ingest.update_forwarder | secops forwarder update | -| generateCollectionAgentAuth | v1alpha | | | -| generateSoarAuthJwt | v1alpha | | | -| generateUdmKeyValueMappings | v1alpha | | | -| generateWorkspaceConnectionToken | v1alpha | | | -| get | v1alpha | | | -| getBigQueryExport | v1alpha | | | -| getMultitenantDirectory | v1alpha | | | -| getRiskConfig | v1alpha | | | -| ingestionLogLabels.get | v1alpha | | | -| ingestionLogLabels.list | v1alpha | | | -| ingestionLogNamespaces.get | v1alpha | | | -| ingestionLogNamespaces.list | v1alpha | | | -| iocs.batchGet | v1alpha | | | -| iocs.findFirstAndLastSeen | v1alpha | | | -| iocs.get | v1alpha | | | -| iocs.getIocState | v1alpha | | | -| iocs.searchCuratedDetectionsForIoc | v1alpha | | | -| iocs.updateIocState | v1alpha | | | -| legacy.legacyBatchGetCases | v1alpha | chronicle.case.get_cases_from_list | secops case | -| legacy.legacyBatchGetCollections | v1alpha | | | -| legacy.legacyCreateOrUpdateCase | v1alpha | | | -| legacy.legacyCreateSoarAlert | v1alpha | | | -| legacy.legacyFetchAlertsView | v1alpha | chronicle.alert.get_alerts | secops alert | -| legacy.legacyFetchUdmSearchCsv | v1alpha | chronicle.udm_search.fetch_udm_search_csv | secops search --csv | -| legacy.legacyFetchUdmSearchView | v1alpha | chronicle.udm_search.fetch_udm_search_view | secops udm-search-view | -| legacy.legacyFindAssetEvents | v1alpha | | | -| legacy.legacyFindRawLogs | v1alpha | | | -| legacy.legacyFindUdmEvents | v1alpha | | | -| legacy.legacyGetAlert | v1alpha | chronicle.rule_alert.get_alert | | -| legacy.legacyGetCuratedRulesTrends | v1alpha | | | -| legacy.legacyGetDetection | v1alpha | | | -| legacy.legacyGetEventForDetection | v1alpha | | | -| legacy.legacyGetRuleCounts | v1alpha | | | -| legacy.legacyGetRulesTrends | v1alpha | | | -| legacy.legacyListCases | v1alpha | chronicle.case.get_cases | secops case --ids | -| legacy.legacyRunTestRule | v1alpha | chronicle.rule.run_rule_test | secops rule validate | -| legacy.legacySearchArtifactEvents | v1alpha | | | -| legacy.legacySearchArtifactIoCDetails | v1alpha | | | -| legacy.legacySearchAssetEvents | v1alpha | | | -| legacy.legacySearchCuratedDetections | v1alpha | | | -| legacy.legacySearchCustomerStats | v1alpha | | | -| legacy.legacySearchDetections | v1alpha | chronicle.rule_detection.list_detections | | -| legacy.legacySearchDomainsRecentlyRegistered | v1alpha | | | -| legacy.legacySearchDomainsTimingStats | v1alpha | | | -| legacy.legacySearchEnterpriseWideAlerts | v1alpha | | | -| legacy.legacySearchEnterpriseWideIoCs | v1alpha | chronicle.ioc.list_iocs | secops iocs | -| legacy.legacySearchFindings | v1alpha | | | -| legacy.legacySearchIngestionStats | v1alpha | | | -| legacy.legacySearchIoCInsights | v1alpha | | | -| legacy.legacySearchRawLogs | v1alpha | | | -| legacy.legacySearchRuleDetectionCountBuckets | v1alpha | | | -| legacy.legacySearchRuleDetectionEvents | v1alpha | | | -| legacy.legacySearchRuleResults | v1alpha | | | -| legacy.legacySearchRulesAlerts | v1alpha | chronicle.rule_alert.search_rule_alerts | | -| legacy.legacySearchUserEvents | v1alpha | | | -| 
legacy.legacyStreamDetectionAlerts | v1alpha | | | -| legacy.legacyTestRuleStreaming | v1alpha | | | -| legacy.legacyUpdateAlert | v1alpha | chronicle.rule_alert.update_alert | | -| listAllFindingsRefinementDeployments | v1alpha | | | -| logTypes.create | v1alpha | | | -| logTypes.generateEventTypesSuggestions | v1alpha | | | -| logTypes.get | v1alpha | | | -| logTypes.getLogTypeSetting | v1alpha | | | -| logTypes.legacySubmitParserExtension | v1alpha | | | -| logTypes.list | v1alpha | | | -| logTypes.logs.export | v1alpha | | | -| logTypes.logs.get | v1alpha | | | -| logTypes.logs.import | v1alpha | chronicle.log_ingest.ingest_log | secops log ingest | -| logTypes.logs.list | v1alpha | | | -| logTypes.parserExtensions.activate | v1alpha | chronicle.parser_extension.activate_parser_extension | secops parser-extension activate | -| logTypes.parserExtensions.create | v1alpha | chronicle.parser_extension.create_parser_extension | secops parser-extension create | -| logTypes.parserExtensions.delete | v1alpha | chronicle.parser_extension.delete_parser_extension | secops parser-extension delete | -| logTypes.parserExtensions.extensionValidationReports.get | v1alpha | | | -| logTypes.parserExtensions.extensionValidationReports.list | v1alpha | | | -| logTypes.parserExtensions.extensionValidationReports.validationErrors.list | v1alpha | | | -| logTypes.parserExtensions.get | v1alpha | chronicle.parser_extension.get_parser_extension | secops parser-extension get | -| logTypes.parserExtensions.list | v1alpha | chronicle.parser_extension.list_parser_extensions | secops parser-extension list | -| logTypes.parserExtensions.validationReports.get | v1alpha | | | -| logTypes.parserExtensions.validationReports.parsingErrors.list | v1alpha | | | -| logTypes.parsers.activate | v1alpha | chronicle.parser.activate_parser | secops parser activate | -| logTypes.parsers.activateReleaseCandidateParser | v1alpha | chronicle.parser.activate_release_candidate | secops parser activate-rc | -| logTypes.parsers.copy | v1alpha | chronicle.parser.copy_parser | secops parser copy | -| logTypes.parsers.create | v1alpha | chronicle.parser.create_parser | secops parser create | -| logTypes.parsers.deactivate | v1alpha | chronicle.parser.deactivate_parser | secops parser deactivate | -| logTypes.parsers.delete | v1alpha | chronicle.parser.delete_parser | secops parser delete | -| logTypes.parsers.get | v1alpha | chronicle.parser.get_parser | secops parser get | -| logTypes.parsers.list | v1alpha | chronicle.parser.list_parsers | secops parser list | -| logTypes.parsers.validationReports.get | v1alpha | | | -| logTypes.parsers.validationReports.parsingErrors.list | v1alpha | | | -| logTypes.patch | v1alpha | | | -| logTypes.runParser | v1alpha | chronicle.parser.run_parser | secops parser run | -| logTypes.updateLogTypeSetting | v1alpha | | | -| logs.classify | v1alpha | | | -| nativeDashboards.addChart | v1alpha | chronicle.dashboard.add_chart | secops dashboard add-chart | -| nativeDashboards.create | v1alpha | chronicle.dashboard.create_dashboard | secops dashboard create | -| nativeDashboards.delete | v1alpha | chronicle.dashboard.delete_dashboard | secops dashboard delete | -| nativeDashboards.duplicate | v1alpha | chronicle.dashboard.duplicate_dashboard | secops dashboard duplicate | -| nativeDashboards.duplicateChart | v1alpha | | | -| nativeDashboards.editChart | v1alpha | chronicle.dashboard.edit_chart | secops dashboard edit-chart | -| nativeDashboards.export | v1alpha | chronicle.dashboard.export_dashboard | secops 
dashboard export | -| nativeDashboards.get | v1alpha | chronicle.dashboard.get_dashboard | secops dashboard get | -| nativeDashboards.import | v1alpha | chronicle.dashboard.import_dashboard | secops dashboard import | -| nativeDashboards.list | v1alpha | chronicle.dashboard.list_dashboards | secops dashboard list | -| nativeDashboards.patch | v1alpha | chronicle.dashboard.update_dashboard | secops dashboard update | -| nativeDashboards.removeChart | v1alpha | chronicle.dashboard.remove_chart | secops dashboard remove-chart | -| operations.cancel | v1alpha | | | -| operations.delete | v1alpha | | | -| operations.get | v1alpha | | | -| operations.list | v1alpha | | | -| operations.streamSearch | v1alpha | | | -| queryProductSourceStats | v1alpha | | | -| referenceLists.create | v1alpha | | | -| referenceLists.get | v1alpha | | | -| referenceLists.list | v1alpha | | | -| referenceLists.patch | v1alpha | | | -| report | v1alpha | | | -| ruleExecutionErrors.list | v1alpha | chronicle.rule_detection.list_errors | | -| rules.create | v1alpha | | | -| rules.delete | v1alpha | | | -| rules.deployments.list | v1alpha | | | -| rules.get | v1alpha | | | -| rules.getDeployment | v1alpha | | | -| rules.list | v1alpha | | | -| rules.listRevisions | v1alpha | | | -| rules.patch | v1alpha | | | -| rules.retrohunts.create | v1alpha | | | -| rules.retrohunts.get | v1alpha | | | -| rules.retrohunts.list | v1alpha | | | -| rules.updateDeployment | v1alpha | | | -| searchEntities | v1alpha | | | -| searchRawLogs | v1alpha | | | -| summarizeEntitiesFromQuery | v1alpha | chronicle.entity.summarize_entity | secops entity | -| summarizeEntity | v1alpha | chronicle.entity.summarize_entity | | -| testFindingsRefinement | v1alpha | | | -| translateUdmQuery | v1alpha | chronicle.nl_search.translate_nl_to_udm | | -| translateYlRule | v1alpha | | | -| udmSearch | v1alpha | chronicle.search.search_udm | secops search | -| undelete | v1alpha | | | -| updateBigQueryExport | v1alpha | | | -| updateRiskConfig | v1alpha | | | -| users.clearConversationHistory | v1alpha | | | -| users.conversations.create | v1alpha | chronicle.gemini.create_conversation | | -| users.conversations.delete | v1alpha | | | -| users.conversations.get | v1alpha | | | -| users.conversations.list | v1alpha | | | -| users.conversations.messages.create | v1alpha | chronicle.gemini.query_gemini | secops gemini | -| users.conversations.messages.delete | v1alpha | | | -| users.conversations.messages.get | v1alpha | | | -| users.conversations.messages.list | v1alpha | | | -| users.conversations.messages.patch | v1alpha | | | -| users.conversations.patch | v1alpha | | | -| users.getPreferenceSet | v1alpha | chronicle.gemini.opt_in_to_gemini | secops gemini --opt-in | -| users.searchQueries.create | v1alpha | | | -| users.searchQueries.delete | v1alpha | | | -| users.searchQueries.get | v1alpha | | | -| users.searchQueries.list | v1alpha | | | -| users.searchQueries.patch | v1alpha | | | -| users.updatePreferenceSet | v1alpha | | | -| validateQuery | v1alpha | chronicle.validate.validate_query | | -| verifyReferenceList | v1alpha | | | -| verifyRuleText | v1alpha | chronicle.rule_validation.validate_rule | secops rule validate | -| watchlists.create | v1alpha | | | -| watchlists.delete | v1alpha | | | -| watchlists.entities.add | v1alpha | | | -| watchlists.entities.batchAdd | v1alpha | | | -| watchlists.entities.batchRemove | v1alpha | | | -| watchlists.entities.remove | v1alpha | | | -| watchlists.get | v1alpha | | | -| watchlists.list | v1alpha | | | -| 
watchlists.listEntities | v1alpha | | | -| watchlists.patch | v1alpha | | | +| dashboardCharts.batchGet |v1alpha| | | +|dashboardCharts.get |v1alpha|chronicle.dashboard.get_chart |secops dashboard get-chart | +|dashboardQueries.execute |v1alpha|chronicle.dashboard_query.execute_query |secops dashboard-query execute | +|dashboardQueries.get |v1alpha|chronicle.dashboard_query.get_execute_query |secops dashboard-query get | +|dashboards.copy |v1alpha| | | +|dashboards.create |v1alpha| | | +|dashboards.delete |v1alpha| | | +|dashboards.get |v1alpha| | | +|dashboards.list |v1alpha| | | +|dataAccessLabels.create |v1alpha| | | +|dataAccessLabels.delete |v1alpha| | | +|dataAccessLabels.get |v1alpha| | | +|dataAccessLabels.list |v1alpha| | | +|dataAccessLabels.patch |v1alpha| | | +|dataAccessScopes.create |v1alpha| | | +|dataAccessScopes.delete |v1alpha| | | +|dataAccessScopes.get |v1alpha| | | +|dataAccessScopes.list |v1alpha| | | +|dataAccessScopes.patch |v1alpha| | | +|dataExports.cancel |v1alpha|chronicle.data_export.cancel_data_export |secops export cancel | +|dataExports.create |v1alpha|chronicle.data_export.create_data_export |secops export create | +|dataExports.fetchavailablelogtypes |v1alpha|chronicle.data_export.fetch_available_log_types |secops export log-types | +|dataExports.get |v1alpha|chronicle.data_export.get_data_export |secops export status | +|dataExports.list |v1alpha|chronicle.data_export.list_data_export |secops export list | +|dataExports.patch |v1alpha|chronicle.data_export.update_data_export |secops export update | +|dataTableOperationErrors.get |v1alpha| | | +|dataTables.create |v1alpha|chronicle.data_table.create_data_table |secops data-table create | +|dataTables.dataTableRows.bulkCreate |v1alpha|chronicle.data_table.create_data_table_rows |secops data-table add-rows | +|dataTables.dataTableRows.bulkCreateAsync |v1alpha| | | +|dataTables.dataTableRows.bulkGet |v1alpha| | | +|dataTables.dataTableRows.bulkReplace |v1alpha|chronicle.data_table.replace_data_table_rows |secops data-table replace-rows | +|dataTables.dataTableRows.bulkReplaceAsync |v1alpha| | | +|dataTables.dataTableRows.bulkUpdate |v1alpha|chronicle.data_table.update_data_table_rows |secops data-table update-rows | +|dataTables.dataTableRows.bulkUpdateAsync |v1alpha| | | +|dataTables.dataTableRows.create |v1alpha| | | +|dataTables.dataTableRows.delete |v1alpha|chronicle.data_table.delete_data_table_rows |secops data-table delete-rows | +|dataTables.dataTableRows.get |v1alpha| | | +|dataTables.dataTableRows.list |v1alpha|chronicle.data_table.list_data_table_rows |secops data-table list-rows | +|dataTables.dataTableRows.patch |v1alpha| | | +|dataTables.delete |v1alpha|chronicle.data_table.delete_data_table |secops data-table delete | +|dataTables.get |v1alpha|chronicle.data_table.get_data_table |secops data-table get | +|dataTables.list |v1alpha|chronicle.data_table.list_data_tables |secops data-table list | +|dataTables.patch |v1alpha| | | +|dataTables.upload |v1alpha| | | +|dataTaps.create |v1alpha| | | +|dataTaps.delete |v1alpha| | | +|dataTaps.get |v1alpha| | | +|dataTaps.list |v1alpha| | | +|dataTaps.patch |v1alpha| | | +|delete |v1alpha| | | +|enrichmentControls.create |v1alpha| | | +|enrichmentControls.delete |v1alpha| | | +|enrichmentControls.get |v1alpha| | | +|enrichmentControls.list |v1alpha| | | +|entities.get |v1alpha| | | +|entities.import |v1alpha|chronicle.log_ingest.import_entities |secops entity import | +|entities.modifyEntityRiskScore |v1alpha| | | 
+|entities.queryEntityRiskScoreModifications |v1alpha| | | +|entityRiskScores.query |v1alpha| | | +|errorNotificationConfigs.create |v1alpha| | | +|errorNotificationConfigs.delete |v1alpha| | | +|errorNotificationConfigs.get |v1alpha| | | +|errorNotificationConfigs.list |v1alpha| | | +|errorNotificationConfigs.patch |v1alpha| | | +|events.batchGet |v1alpha| | | +|events.get |v1alpha| | | +|events.import |v1alpha|chronicle.log_ingest.ingest_udm |secops log ingest-udm | +|extractSyslog |v1alpha| | | +|federationGroups.create |v1alpha| | | +|federationGroups.delete |v1alpha| | | +|federationGroups.get |v1alpha| | | +|federationGroups.list |v1alpha| | | +|federationGroups.patch |v1alpha| | | +|feedPacks.get |v1alpha| | | +|feedPacks.list |v1alpha| | | +|feedServiceAccounts.fetchServiceAccountForCustomer |v1alpha| | | +|feedSourceTypeSchemas.list |v1alpha| | | +|feedSourceTypeSchemas.logTypeSchemas.list |v1alpha| | | +|feeds.create |v1alpha|chronicle.feeds.create_feed |secops feed create | +|feeds.delete |v1alpha|chronicle.feeds.delete_feed |secops feed delete | +|feeds.disable |v1alpha|chronicle.feeds.disable_feed |secops feed disable | +|feeds.enable |v1alpha|chronicle.feeds.enable_feed |secops feed enable | +|feeds.generateSecret |v1alpha|chronicle.feeds.generate_secret |secops feed secret | +|feeds.get |v1alpha|chronicle.feeds.get_feed |secops feed get | +|feeds.importPushLogs |v1alpha| | | +|feeds.list |v1alpha|chronicle.feeds.list_feeds |secops feed list | +|feeds.patch |v1alpha|chronicle.feeds.update_feed |secops feed update | +|feeds.scheduleTransfer |v1alpha| | | +|fetchFederationAccess |v1alpha| | | +|findEntity |v1alpha| | | +|findEntityAlerts |v1alpha| | | +|findRelatedEntities |v1alpha| | | +|findUdmFieldValues |v1alpha| | | +|findingsGraph.exploreNode |v1alpha| | | +|findingsGraph.initializeGraph |v1alpha| | | +|findingsRefinements.computeFindingsRefinementActivity |v1alpha|chronicle.rule_exclusion.compute_rule_exclusion_activity |secops rule-exclusion compute-activity | +|findingsRefinements.create |v1alpha|chronicle.rule_exclusion.create_rule_exclusion |secops rule-exclusion create | +|findingsRefinements.get |v1alpha|chronicle.rule_exclusion.get_rule_exclusion |secops rule-exclusion get | +|findingsRefinements.getDeployment |v1alpha|chronicle.rule_exclusion.get_rule_exclusion_deployment |secops rule-exclusion get-deployment | +|findingsRefinements.list |v1alpha|chronicle.rule_exclusion.list_rule_exclusions |secops rule-exclusion list | +|findingsRefinements.patch |v1alpha|chronicle.rule_exclusion.patch_rule_exclusion |secops rule-exclusion update | +|findingsRefinements.updateDeployment |v1alpha|chronicle.rule_exclusion.update_rule_exclusion_deployment |secops rule-exclusion update-deployment| +|forwarders.collectors.create |v1alpha| | | +|forwarders.collectors.delete |v1alpha| | | +|forwarders.collectors.get |v1alpha| | | +|forwarders.collectors.list |v1alpha| | | +|forwarders.collectors.patch |v1alpha| | | +|forwarders.create |v1alpha|chronicle.log_ingest.create_forwarder |secops forwarder create | +|forwarders.delete |v1alpha|chronicle.log_ingest.delete_forwarder |secops forwarder delete | +|forwarders.generateForwarderFiles |v1alpha| | | +|forwarders.get |v1alpha|chronicle.log_ingest.get_forwarder |secops forwarder get | +|forwarders.importStatsEvents |v1alpha| | | +|forwarders.list |v1alpha|chronicle.log_ingest.list_forwarder |secops forwarder list | +|forwarders.patch |v1alpha|chronicle.log_ingest.update_forwarder |secops forwarder update | +|generateCollectionAgentAuth 
|v1alpha| | | +|generateSoarAuthJwt |v1alpha| | | +|generateUdmKeyValueMappings |v1alpha| | | +|generateWorkspaceConnectionToken |v1alpha| | | +|get |v1alpha| | | +|getBigQueryExport |v1alpha| | | +|getMultitenantDirectory |v1alpha| | | +|getRiskConfig |v1alpha| | | +|ingestionLogLabels.get |v1alpha| | | +|ingestionLogLabels.list |v1alpha| | | +|ingestionLogNamespaces.get |v1alpha| | | +|ingestionLogNamespaces.list |v1alpha| | | +|iocs.batchGet |v1alpha| | | +|iocs.findFirstAndLastSeen |v1alpha| | | +|iocs.get |v1alpha| | | +|iocs.getIocState |v1alpha| | | +|iocs.searchCuratedDetectionsForIoc |v1alpha| | | +|iocs.updateIocState |v1alpha| | | +|legacy.legacyBatchGetCases |v1alpha|chronicle.case.get_cases_from_list |secops case | +|legacy.legacyBatchGetCollections |v1alpha| | | +|legacy.legacyCreateOrUpdateCase |v1alpha| | | +|legacy.legacyCreateSoarAlert |v1alpha| | | +|legacy.legacyFetchAlertsView |v1alpha|chronicle.alert.get_alerts |secops alert | +|legacy.legacyFetchUdmSearchCsv |v1alpha|chronicle.udm_search.fetch_udm_search_csv |secops search --csv | +|legacy.legacyFetchUdmSearchView |v1alpha|chronicle.udm_search.fetch_udm_search_view |secops udm-search-view | +|legacy.legacyFindAssetEvents |v1alpha| | | +|legacy.legacyFindRawLogs |v1alpha| | | +|legacy.legacyFindUdmEvents |v1alpha| | | +|legacy.legacyGetAlert |v1alpha|chronicle.rule_alert.get_alert | | +|legacy.legacyGetCuratedRulesTrends |v1alpha| | | +|legacy.legacyGetDetection |v1alpha| | | +|legacy.legacyGetEventForDetection |v1alpha| | | +|legacy.legacyGetRuleCounts |v1alpha| | | +|legacy.legacyGetRulesTrends |v1alpha| | | +|legacy.legacyListCases |v1alpha|chronicle.case.get_cases |secops case --ids | +|legacy.legacyRunTestRule |v1alpha|chronicle.rule.run_rule_test |secops rule validate | +|legacy.legacySearchArtifactEvents |v1alpha| | | +|legacy.legacySearchArtifactIoCDetails |v1alpha| | | +|legacy.legacySearchAssetEvents |v1alpha| | | +|legacy.legacySearchCuratedDetections |v1alpha| | | +|legacy.legacySearchCustomerStats |v1alpha| | | +|legacy.legacySearchDetections |v1alpha|chronicle.rule_detection.list_detections | | +|legacy.legacySearchDomainsRecentlyRegistered |v1alpha| | | +|legacy.legacySearchDomainsTimingStats |v1alpha| | | +|legacy.legacySearchEnterpriseWideAlerts |v1alpha| | | +|legacy.legacySearchEnterpriseWideIoCs |v1alpha|chronicle.ioc.list_iocs |secops iocs | +|legacy.legacySearchFindings |v1alpha| | | +|legacy.legacySearchIngestionStats |v1alpha| | | +|legacy.legacySearchIoCInsights |v1alpha| | | +|legacy.legacySearchRawLogs |v1alpha| | | +|legacy.legacySearchRuleDetectionCountBuckets |v1alpha| | | +|legacy.legacySearchRuleDetectionEvents |v1alpha| | | +|legacy.legacySearchRuleResults |v1alpha| | | +|legacy.legacySearchRulesAlerts |v1alpha|chronicle.rule_alert.search_rule_alerts | | +|legacy.legacySearchUserEvents |v1alpha| | | +|legacy.legacyStreamDetectionAlerts |v1alpha| | | +|legacy.legacyTestRuleStreaming |v1alpha| | | +|legacy.legacyUpdateAlert |v1alpha|chronicle.rule_alert.update_alert | | +|listAllFindingsRefinementDeployments |v1alpha| | | +|logTypes.create |v1alpha| | | +|logTypes.generateEventTypesSuggestions |v1alpha| | | +|logTypes.get |v1alpha| | | +|logTypes.getLogTypeSetting |v1alpha| | | +|logTypes.legacySubmitParserExtension |v1alpha| | | +|logTypes.list |v1alpha| | | +|logTypes.logs.export |v1alpha| | | +|logTypes.logs.get |v1alpha| | | +|logTypes.logs.import |v1alpha|chronicle.log_ingest.ingest_log |secops log ingest | +|logTypes.logs.list |v1alpha| | | +|logTypes.parserExtensions.activate 
|v1alpha|chronicle.parser_extension.activate_parser_extension |secops parser-extension activate | +|logTypes.parserExtensions.create |v1alpha|chronicle.parser_extension.create_parser_extension |secops parser-extension create | +|logTypes.parserExtensions.delete |v1alpha|chronicle.parser_extension.delete_parser_extension |secops parser-extension delete | +|logTypes.parserExtensions.extensionValidationReports.get |v1alpha| | | +|logTypes.parserExtensions.extensionValidationReports.list |v1alpha| | | +|logTypes.parserExtensions.extensionValidationReports.validationErrors.list |v1alpha| | | +|logTypes.parserExtensions.get |v1alpha|chronicle.parser_extension.get_parser_extension |secops parser-extension get | +|logTypes.parserExtensions.list |v1alpha|chronicle.parser_extension.list_parser_extensions |secops parser-extension list | +|logTypes.parserExtensions.validationReports.get |v1alpha| | | +|logTypes.parserExtensions.validationReports.parsingErrors.list |v1alpha| | | +|logTypes.parsers.activate |v1alpha|chronicle.parser.activate_parser |secops parser activate | +|logTypes.parsers.activateReleaseCandidateParser |v1alpha|chronicle.parser.activate_release_candidate |secops parser activate-rc | +|logTypes.parsers.copy |v1alpha|chronicle.parser.copy_parser |secops parser copy | +|logTypes.parsers.create |v1alpha|chronicle.parser.create_parser |secops parser create | +|logTypes.parsers.deactivate |v1alpha|chronicle.parser.deactivate_parser |secops parser deactivate | +|logTypes.parsers.delete |v1alpha|chronicle.parser.delete_parser |secops parser delete | +|logTypes.parsers.get |v1alpha|chronicle.parser.get_parser |secops parser get | +|logTypes.parsers.list |v1alpha|chronicle.parser.list_parsers |secops parser list | +|logTypes.parsers.validationReports.get |v1alpha| | | +|logTypes.parsers.validationReports.parsingErrors.list |v1alpha| | | +|logTypes.patch |v1alpha| | | +|logTypes.runParser |v1alpha|chronicle.parser.run_parser |secops parser run | +|logTypes.updateLogTypeSetting |v1alpha| | | +|logProcessingPipelines.associateStreams |v1alpha|chronicle.log_processing_pipelines.associate_streams |secops log-processing associate-streams| +|logProcessingPipelines.create |v1alpha|chronicle.log_processing_pipelines.create_log_processing_pipeline|secops log-processing create | +|logProcessingPipelines.delete |v1alpha|chronicle.log_processing_pipelines.delete_log_processing_pipeline|secops log-processing delete | +|logProcessingPipelines.dissociateStreams |v1alpha|chronicle.log_processing_pipelines.dissociate_streams |secops log-processing dissociate-streams| +|logProcessingPipelines.fetchAssociatedPipeline |v1alpha|chronicle.log_processing_pipelines.fetch_associated_pipeline|secops log-processing fetch-associated | +|logProcessingPipelines.fetchSampleLogsByStreams |v1alpha|chronicle.log_processing_pipelines.fetch_sample_logs_by_streams|secops log-processing fetch-sample-logs| +|logProcessingPipelines.get |v1alpha|chronicle.log_processing_pipelines.get_log_processing_pipeline|secops log-processing get | +|logProcessingPipelines.list |v1alpha|chronicle.log_processing_pipelines.list_log_processing_pipelines|secops log-processing list | +|logProcessingPipelines.patch |v1alpha|chronicle.log_processing_pipelines.update_log_processing_pipeline|secops log-processing update | +|logProcessingPipelines.testPipeline |v1alpha|chronicle.log_processing_pipelines.test_pipeline |secops log-processing test | +|logs.classify |v1alpha| | | +| nativeDashboards.addChart | v1alpha |chronicle.dashboard.add_chart |secops 
dashboard add-chart | +| nativeDashboards.create | v1alpha |chronicle.dashboard.create_dashboard |secops dashboard create | +| nativeDashboards.delete | v1alpha |chronicle.dashboard.delete_dashboard |secops dashboard delete | +| nativeDashboards.duplicate | v1alpha |chronicle.dashboard.duplicate_dashboard |secops dashboard duplicate | +| nativeDashboards.duplicateChart | v1alpha | | | +| nativeDashboards.editChart | v1alpha |chronicle.dashboard.edit_chart |secops dashboard edit-chart | +| nativeDashboards.export | v1alpha |chronicle.dashboard.export_dashboard |secops dashboard export | +| nativeDashboards.get | v1alpha |chronicle.dashboard.get_dashboard |secops dashboard get | +| nativeDashboards.import | v1alpha |chronicle.dashboard.import_dashboard |secops dashboard import | +| nativeDashboards.list | v1alpha |chronicle.dashboard.list_dashboards |secops dashboard list | +| nativeDashboards.patch | v1alpha |chronicle.dashboard.update_dashboard |secops dashboard update | +| nativeDashboards.removeChart | v1alpha |chronicle.dashboard.remove_chart |secops dashboard remove-chart | +|operations.cancel |v1alpha| | | +|operations.delete |v1alpha| | | +|operations.get |v1alpha| | | +|operations.list |v1alpha| | | +|operations.streamSearch |v1alpha| | | +|queryProductSourceStats |v1alpha| | | +|referenceLists.create |v1alpha| | | +|referenceLists.get |v1alpha| | | +|referenceLists.list |v1alpha| | | +|referenceLists.patch |v1alpha| | | +|report |v1alpha| | | +|ruleExecutionErrors.list |v1alpha|chronicle.rule_detection.list_errors | | +|rules.create |v1alpha| | | +|rules.delete |v1alpha| | | +|rules.deployments.list |v1alpha| | | +|rules.get |v1alpha| | | +|rules.getDeployment |v1alpha| | | +|rules.list |v1alpha| | | +|rules.listRevisions |v1alpha| | | +|rules.patch |v1alpha| | | +|rules.retrohunts.create |v1alpha| | | +|rules.retrohunts.get |v1alpha| | | +|rules.retrohunts.list |v1alpha| | | +|rules.updateDeployment |v1alpha| | | +|searchEntities |v1alpha| | | +|searchRawLogs |v1alpha| | | +|summarizeEntitiesFromQuery |v1alpha|chronicle.entity.summarize_entity |secops entity | +|summarizeEntity |v1alpha|chronicle.entity.summarize_entity | | +|testFindingsRefinement |v1alpha| | | +|translateUdmQuery |v1alpha|chronicle.nl_search.translate_nl_to_udm | | +|translateYlRule |v1alpha| | | +|udmSearch |v1alpha|chronicle.search.search_udm |secops search | +|undelete |v1alpha| | | +|updateBigQueryExport |v1alpha| | | +|updateRiskConfig |v1alpha| | | +|users.clearConversationHistory |v1alpha| | | +|users.conversations.create |v1alpha|chronicle.gemini.create_conversation | | +|users.conversations.delete |v1alpha| | | +|users.conversations.get |v1alpha| | | +|users.conversations.list |v1alpha| | | +|users.conversations.messages.create |v1alpha|chronicle.gemini.query_gemini |secops gemini | +|users.conversations.messages.delete |v1alpha| | | +|users.conversations.messages.get |v1alpha| | | +|users.conversations.messages.list |v1alpha| | | +|users.conversations.messages.patch |v1alpha| | | +|users.conversations.patch |v1alpha| | | +|users.getPreferenceSet |v1alpha|chronicle.gemini.opt_in_to_gemini |secops gemini --opt-in | +|users.searchQueries.create |v1alpha| | | +|users.searchQueries.delete |v1alpha| | | +|users.searchQueries.get |v1alpha| | | +|users.searchQueries.list |v1alpha| | | +|users.searchQueries.patch |v1alpha| | | +|users.updatePreferenceSet |v1alpha| | | +|validateQuery |v1alpha|chronicle.validate.validate_query | | +|verifyReferenceList |v1alpha| | | +|verifyRuleText 
|v1alpha|chronicle.rule_validation.validate_rule |secops rule validate | +|watchlists.create |v1alpha| | | +|watchlists.delete |v1alpha| | | +|watchlists.entities.add |v1alpha| | | +|watchlists.entities.batchAdd |v1alpha| | | +|watchlists.entities.batchRemove |v1alpha| | | +|watchlists.entities.remove |v1alpha| | | +|watchlists.get |v1alpha| | | +|watchlists.list |v1alpha| | | +|watchlists.listEntities |v1alpha| | | +|watchlists.patch |v1alpha| | | From ca956c8a7eac5a1454e57c17f2cbfc90dd870d0e Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Wed, 17 Dec 2025 15:07:22 +0530 Subject: [PATCH 42/48] chore: minor refactor --- examples/log_processing_pipelines_example.py | 14 ++++++++++ src/secops/chronicle/__init__.py | 28 ++++++++++---------- 2 files changed, 28 insertions(+), 14 deletions(-) diff --git a/examples/log_processing_pipelines_example.py b/examples/log_processing_pipelines_example.py index 4eff92ce..cbeff285 100644 --- a/examples/log_processing_pipelines_example.py +++ b/examples/log_processing_pipelines_example.py @@ -1,4 +1,18 @@ #!/usr/bin/env python3 +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# """Example usage of the Google SecOps SDK for Log Processing Pipelines.""" import argparse diff --git a/src/secops/chronicle/__init__.py b/src/secops/chronicle/__init__.py index e8e82d6c..a984c00e 100644 --- a/src/secops/chronicle/__init__.py +++ b/src/secops/chronicle/__init__.py @@ -72,12 +72,6 @@ list_forwarders, update_forwarder, ) -from secops.chronicle.log_types import ( - get_all_log_types, - get_log_type_description, - is_valid_log_type, - search_log_types, -) from secops.chronicle.log_processing_pipelines import ( associate_streams, create_log_processing_pipeline, @@ -87,8 +81,14 @@ fetch_sample_logs_by_streams, get_log_processing_pipeline, list_log_processing_pipelines, - update_log_processing_pipeline, test_pipeline, + update_log_processing_pipeline, +) +from secops.chronicle.log_types import ( + get_all_log_types, + get_log_type_description, + is_valid_log_type, + search_log_types, ) from secops.chronicle.models import ( AlertCount, @@ -149,18 +149,18 @@ from secops.chronicle.rule_retrohunt import create_retrohunt, get_retrohunt from secops.chronicle.rule_set import ( batch_update_curated_rule_set_deployments, - list_curated_rule_sets, - list_curated_rule_set_categories, - list_curated_rules, get_curated_rule, - get_curated_rule_set_category, + get_curated_rule_by_name, get_curated_rule_set, - list_curated_rule_set_deployments, + get_curated_rule_set_category, get_curated_rule_set_deployment, get_curated_rule_set_deployment_by_name, - get_curated_rule_by_name, - update_curated_rule_set_deployment, + list_curated_rule_set_categories, + list_curated_rule_set_deployments, + list_curated_rule_sets, + list_curated_rules, search_curated_detections, + update_curated_rule_set_deployment, ) from secops.chronicle.rule_validation import ValidationResult from secops.chronicle.search import search_udm 
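The rename from `patch_log_processing_pipeline` to `update_log_processing_pipeline` above spans the module, the client wrapper, the CLI handler, and the tests, and lines up with the `logProcessingPipelines.patch` → `secops log-processing update` row in the mapping table. A minimal usage sketch of the renamed wrapper follows, assuming a configured `ChronicleClient`; the credential and instance values are placeholders, not values taken from these patches:

```python
# Sketch only: mirrors the call made in the integration test above.
from secops import SecOpsClient

client = SecOpsClient()  # assumes application-default credentials

chronicle = client.chronicle(
    customer_id="your-customer-id",  # placeholder instance settings
    project_id="your-project-id",
    region="us",
)

updated = chronicle.update_log_processing_pipeline(
    pipeline_id="pipeline_12345",
    pipeline={
        "displayName": "Updated Pipeline",
        "description": "Updated via the renamed wrapper",
    },
    update_mask="displayName,description",
)
print(updated.get("displayName"))
```

The CLI path reaches the same wrapper: the updated `handle_update_command` forwards the parsed pipeline ID, pipeline JSON, and `update_mask` arguments to `chronicle.update_log_processing_pipeline`.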
From aa056782e9fcf9601cf263ef1de8d719e7235beb Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Thu, 18 Dec 2025 12:12:07 +0530 Subject: [PATCH 43/48] chore: minor refactoring and formatting. --- src/secops/chronicle/utils/request_utils.py | 12 +++++++----- src/secops/chronicle/watchlist.py | 20 +++++++++++++------- src/secops/cli/commands/watchlist.py | 7 ++++++- tests/chronicle/test_watchlist.py | 4 ++-- 4 files changed, 28 insertions(+), 15 deletions(-) diff --git a/src/secops/chronicle/utils/request_utils.py b/src/secops/chronicle/utils/request_utils.py index 45dca789..85eb9658 100644 --- a/src/secops/chronicle/utils/request_utils.py +++ b/src/secops/chronicle/utils/request_utils.py @@ -20,8 +20,11 @@ from secops.chronicle.models import APIVersion +DEFAULT_PAGE_SIZE = 1000 + + def chronicle_paginated_request( - client, + client: "ChronicleClient", base_url: str, path: str, items_key: str, @@ -30,8 +33,7 @@ def chronicle_paginated_request( page_token: Optional[str] = None, extra_params: Optional[Dict[str, Any]] = None, ) -> Union[Dict[str, List[Any]], List[Any]]: - """ - Helper to get items from endpoints that use pagination. + """Helper to get items from endpoints that use pagination. Args: client: ChronicleClient instance @@ -59,7 +61,7 @@ def chronicle_paginated_request( while True: # Build params each loop to prevent stale keys being # included in the next request - params = {"pageSize": 1000 if not page_size else page_size} + params = {"pageSize": DEFAULT_PAGE_SIZE if not page_size else page_size} if next_token: params["pageToken"] = next_token if extra_params: @@ -89,7 +91,7 @@ def chronicle_paginated_request( def chronicle_request( - client, + client: "ChronicleClient", method: str, endpoint_path: str, *, diff --git a/src/secops/chronicle/watchlist.py b/src/secops/chronicle/watchlist.py index 50081f04..41aa8cac 100644 --- a/src/secops/chronicle/watchlist.py +++ b/src/secops/chronicle/watchlist.py @@ -24,11 +24,11 @@ def list_watchlists( - client, - page_size: Optional[str] = None, + client: "ChronicleClient", + page_size: Optional[int] = None, page_token: Optional[str] = None, ) -> Dict[str, Any]: - """Get a list of all watchlists + """Get a list of watchlists. 
Args: client: ChronicleClient instance @@ -51,7 +51,9 @@ def list_watchlists( ) -def get_watchlist(client, watchlist_id: str) -> Dict[str, Any]: +def get_watchlist( + client: "ChronicleClient", watchlist_id: str +) -> Dict[str, Any]: """Get a watchlist by ID Args: @@ -73,7 +75,7 @@ def get_watchlist(client, watchlist_id: str) -> Dict[str, Any]: def delete_watchlist( - client, watchlist_id: str, force: Optional[bool] = None + client: "ChronicleClient", watchlist_id: str, force: Optional[bool] = None ) -> Dict[str, Any]: """Delete a watchlist by ID @@ -91,7 +93,11 @@ def delete_watchlist( Raises: APIError: If the API request fails """ - params = {"force": force} + params = {} + + if force is not None: + params["force"] = force + return chronicle_request( client, method="DELETE", @@ -102,7 +108,7 @@ def delete_watchlist( def create_watchlist( - client, + client: "ChronicleClient", name: str, display_name: str, multiplying_factor: float, diff --git a/src/secops/cli/commands/watchlist.py b/src/secops/cli/commands/watchlist.py index 689b8b10..e16a42ba 100644 --- a/src/secops/cli/commands/watchlist.py +++ b/src/secops/cli/commands/watchlist.py @@ -59,6 +59,11 @@ def setup_watchlist_command(subparsers): dest="watchlist_id", required=True, ) + delete_parser.add_argument( + "--force", + action="store_true", + help="Flag to remove entities under watchlist", + ) delete_parser.set_defaults(func=handle_watchlist_delete_command) # create command @@ -116,7 +121,7 @@ def handle_watchlist_get_command(args, chronicle): def handle_watchlist_delete_command(args, chronicle): """Delete watchlist by ID""" try: - out = chronicle.delete_watchlist(args.watchlist_id) + out = chronicle.delete_watchlist(args.watchlist_id, args.force) output_formatter(out, getattr(args, "output", "json")) except Exception as e: # pylint: disable=broad-exception-caught print(f"Error deleting watchlist: {e}", file=sys.stderr) diff --git a/tests/chronicle/test_watchlist.py b/tests/chronicle/test_watchlist.py index 893d0e8b..d3ec4461 100644 --- a/tests/chronicle/test_watchlist.py +++ b/tests/chronicle/test_watchlist.py @@ -81,7 +81,7 @@ def test_list_watchlists_success(chronicle_client): ) as mock_paginated: result = list_watchlists( chronicle_client, - page_size="10", + page_size=10, page_token="next-token", ) @@ -92,7 +92,7 @@ def test_list_watchlists_success(chronicle_client): base_url=chronicle_client.base_url(APIVersion.V1), path="watchlists", items_key="watchlists", - page_size="10", + page_size=10, page_token="next-token", ) From ebe94b5c3f57a3b7f48128799907ea83a45e6ec3 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Thu, 18 Dec 2025 12:15:03 +0530 Subject: [PATCH 44/48] chore: fixed unit test --- tests/chronicle/test_watchlist.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/chronicle/test_watchlist.py b/tests/chronicle/test_watchlist.py index d3ec4461..d391583b 100644 --- a/tests/chronicle/test_watchlist.py +++ b/tests/chronicle/test_watchlist.py @@ -191,7 +191,7 @@ def test_delete_watchlist_success(chronicle_client): method="DELETE", endpoint_path="watchlists/watchlist-123", api_version=APIVersion.V1, - params={"force": None}, + params={}, ) From 12fcebafe9aabd7eb332f5ae3b670314a5e35886 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Thu, 18 Dec 2025 14:54:28 +0530 Subject: [PATCH 45/48] chore: added support for watchlist update --- CLI.md | 13 ++ README.md | 15 ++ api_module_mapping.md | 2 +- 
src/secops/chronicle/__init__.py | 2 + src/secops/chronicle/client.py | 44 +++++ src/secops/chronicle/watchlist.py | 72 ++++++++ src/secops/cli/commands/watchlist.py | 86 +++++++++ tests/chronicle/test_watchlist.py | 157 ++++++++++++++++ tests/chronicle/test_watchlist_integration.py | 172 ++++++++++++++++++ tests/cli/test_watchlist_cli_integration.py | 94 ++++++++-- 10 files changed, 636 insertions(+), 21 deletions(-) create mode 100644 tests/chronicle/test_watchlist_integration.py diff --git a/CLI.md b/CLI.md index e1e66227..65945ac3 100644 --- a/CLI.md +++ b/CLI.md @@ -690,6 +690,19 @@ Create a new watchlist: secops watchlist create --name "my_watchlist" --display-name "my_watchlist" --description "My watchlist description" --multiplying-factor 1.5 ``` +Update a watchlist: + +```bash +# Update display name and description +secops watchlist update --watchlist-id "abc-123-def" --display-name "Updated Name" --description "Updated description" + +# Update multiplying factor and pin the watchlist +secops watchlist update --watchlist-id "abc-123-def" --multiplying-factor 2.0 --pinned true + +# Update entity population mechanism (JSON string or file path) +secops watchlist update --watchlist-id "abc-123-def" --entity-population-mechanism '{"manual": {}}' +``` + Delete a watchlist: ```bash diff --git a/README.md b/README.md index 160d3a5a..61d7b744 100644 --- a/README.md +++ b/README.md @@ -1719,6 +1719,21 @@ watchlist = chronicle.create_watchlist( ) ``` +### Updating a Watchlist + +Update a watchlist by ID: + +```python +updated_watchlist = chronicle.update_watchlist( + watchlist_id="abc-123-def", + display_name="Updated Watchlist Name", + description="Updated description", + multiplying_factor=2.0, + entity_population_mechanism={"manual": {}}, + watchlist_user_preferences={"pinned": True} +) +``` + ### Deleting a Watchlist Delete a watchlist by ID: diff --git a/api_module_mapping.md b/api_module_mapping.md index 1986387d..726b9307 100644 --- a/api_module_mapping.md +++ b/api_module_mapping.md @@ -41,7 +41,7 @@ Following shows mapping between SecOps [REST Resource](https://cloud.google.com/ | watchlists.delete | v1 | chronicle.watchlist.delete_watchlist | secops watchlist delete | | watchlists.get | v1 | chronicle.watchlist.get_watchlist | secops watchlist get | | watchlists.list | v1 | chronicle.watchlist.list_watchlists | secops watchlist list | -| watchlists.patch | v1 | | | +| watchlists.patch | v1 | chronicle.watchlist.update_watchlist | secops watchlist update | | dataAccessLabels.create | v1beta | | | | dataAccessLabels.delete | v1beta | | | | dataAccessLabels.get | v1beta | | | diff --git a/src/secops/chronicle/__init__.py b/src/secops/chronicle/__init__.py index a984c00e..b874c106 100644 --- a/src/secops/chronicle/__init__.py +++ b/src/secops/chronicle/__init__.py @@ -180,6 +180,7 @@ get_watchlist, delete_watchlist, create_watchlist, + update_watchlist, ) __all__ = [ @@ -338,4 +339,5 @@ "get_watchlist", "delete_watchlist", "create_watchlist", + "update_watchlist", ] diff --git a/src/secops/chronicle/client.py b/src/secops/chronicle/client.py index 22f07549..64364eb7 100644 --- a/src/secops/chronicle/client.py +++ b/src/secops/chronicle/client.py @@ -314,6 +314,7 @@ get_watchlist as _get_watchlist, delete_watchlist as _delete_watchlist, create_watchlist as _create_watchlist, + update_watchlist as _update_watchlist, ) from secops.exceptions import SecOpsError @@ -694,6 +695,49 @@ def create_watchlist( self, name, display_name, multiplying_factor, description ) + def update_watchlist( + 
self, + watchlist_id: str, + display_name: Optional[str] = None, + description: Optional[str] = None, + multiplying_factor: Optional[float] = None, + entity_population_mechanism: Optional[Dict[str, Any]] = None, + watchlist_user_preferences: Optional[Dict[str, Any]] = None, + update_mask: Optional[str] = None, + ) -> Dict[str, Any]: + """Update a watchlist. + + Args: + watchlist_id: ID of the watchlist to update. + display_name: Optional. Display name of the watchlist. + Must be 1-63 characters. + description: Optional. Description of the watchlist. + multiplying_factor: Optional. Weight applied to risk score + for entities in this watchlist. Default is 1.0. + entity_population_mechanism: Optional. Mechanism to populate + entities in the watchlist. Example: {"manual": {}}. + watchlist_user_preferences: Optional. User preferences for + watchlist configuration. Example: {"pinned": True}. + update_mask: Optional. Comma-separated list of fields to + update. If not provided, all non-None fields are updated. + + Returns: + Updated watchlist. + + Raises: + APIError: If the API request fails. + """ + return _update_watchlist( + self, + watchlist_id, + display_name, + description, + multiplying_factor, + entity_population_mechanism, + watchlist_user_preferences, + update_mask, + ) + def get_stats( self, query: str, diff --git a/src/secops/chronicle/watchlist.py b/src/secops/chronicle/watchlist.py index 41aa8cac..5ec0f049 100644 --- a/src/secops/chronicle/watchlist.py +++ b/src/secops/chronicle/watchlist.py @@ -142,3 +142,75 @@ def create_watchlist( "entityPopulationMechanism": {"manual": {}}, }, ) + + +def update_watchlist( + client: "ChronicleClient", + watchlist_id: str, + display_name: Optional[str] = None, + description: Optional[str] = None, + multiplying_factor: Optional[float] = None, + entity_population_mechanism: Optional[Dict[str, Any]] = None, + watchlist_user_preferences: Optional[Dict[str, Any]] = None, + update_mask: Optional[str] = None, +) -> Dict[str, Any]: + """Update a watchlist. + + Args: + client: ChronicleClient instance. + watchlist_id: ID of the watchlist to update. + display_name: Optional. Display name of the watchlist. + Must be 1-63 characters. + description: Optional. Description of the watchlist. + multiplying_factor: Optional. Weight applied to risk score + for entities in this watchlist. Default is 1.0. + entity_population_mechanism: Optional. Mechanism to populate + entities in the watchlist. Example: {"manual": {}}. + watchlist_user_preferences: Optional. User preferences for + watchlist configuration. Example: {"pinned": True}. + update_mask: Optional. Comma-separated list of fields to update. + If not provided, all non-None fields will be updated. + + Returns: + Updated watchlist. + + Raises: + APIError: If the API request fails. 
+ """ + body = {} + mask_fields = [] + + if display_name is not None: + body["displayName"] = display_name + mask_fields.append("display_name") + + if description is not None: + body["description"] = description + mask_fields.append("description") + + if multiplying_factor is not None: + body["multiplyingFactor"] = multiplying_factor + mask_fields.append("multiplying_factor") + + if entity_population_mechanism is not None: + body["entityPopulationMechanism"] = entity_population_mechanism + mask_fields.append("entity_population_mechanism") + + if watchlist_user_preferences is not None: + body["watchlistUserPreferences"] = watchlist_user_preferences + mask_fields.append("watchlist_user_preferences") + + params = {} + if update_mask is not None: + params["updateMask"] = update_mask + elif mask_fields: + params["updateMask"] = ",".join(mask_fields) + + return chronicle_request( + client, + method="PATCH", + endpoint_path=f"watchlists/{watchlist_id}", + api_version=APIVersion.V1, + params=params if params else None, + json=body, + ) diff --git a/src/secops/cli/commands/watchlist.py b/src/secops/cli/commands/watchlist.py index e16a42ba..303baf8d 100644 --- a/src/secops/cli/commands/watchlist.py +++ b/src/secops/cli/commands/watchlist.py @@ -21,6 +21,7 @@ add_time_range_args, add_pagination_args, ) +from secops.cli.utils.input_utils import load_json_or_file def setup_watchlist_command(subparsers): @@ -94,6 +95,60 @@ def setup_watchlist_command(subparsers): ) create_parser.set_defaults(func=handle_watchlist_create_command) + # update command + update_parser = lvl1.add_parser("update", help="Update watchlist by ID") + update_parser.add_argument( + "--watchlist-id", + type=str, + help="ID of the watchlist to update", + dest="watchlist_id", + required=True, + ) + update_parser.add_argument( + "--display-name", + type=str, + help="New display name for the watchlist", + dest="display_name", + required=False, + ) + update_parser.add_argument( + "--description", + type=str, + help="New description for the watchlist", + dest="description", + required=False, + ) + update_parser.add_argument( + "--multiplying-factor", + type=float, + help="New multiplying factor for the watchlist", + dest="multiplying_factor", + required=False, + ) + update_parser.add_argument( + "--pinned", + type=str, + choices=["true", "false"], + help="Pin or unpin the watchlist on dashboard", + dest="pinned", + required=False, + ) + update_parser.add_argument( + "--entity-population-mechanism", + type=str, + help="Entity population mechanism as JSON string or file path", + dest="entity_population_mechanism", + required=False, + ) + update_parser.add_argument( + "--update-mask", + type=str, + help="Comma-separated list of fields to update", + dest="update_mask", + required=False, + ) + update_parser.set_defaults(func=handle_watchlist_update_command) + def handle_watchlist_list_command(args, chronicle): """List watchlists""" @@ -141,3 +196,34 @@ def handle_watchlist_create_command(args, chronicle): except Exception as e: # pylint: disable=broad-exception-caught print(f"Error creating watchlist: {e}", file=sys.stderr) sys.exit(1) + + +def handle_watchlist_update_command(args, chronicle): + """Update watchlist by ID.""" + try: + # Build watchlist_user_preferences if pinned is provided + watchlist_user_preferences = None + if args.pinned is not None: + watchlist_user_preferences = { + "pinned": args.pinned.lower() == "true" + } + + # Parse entity_population_mechanism if provided + entity_population_mechanism = None + epm_value = 
getattr(args, "entity_population_mechanism", None) + if epm_value is not None: + entity_population_mechanism = load_json_or_file(epm_value) + + out = chronicle.update_watchlist( + watchlist_id=args.watchlist_id, + display_name=getattr(args, "display_name", None), + description=getattr(args, "description", None), + multiplying_factor=getattr(args, "multiplying_factor", None), + entity_population_mechanism=entity_population_mechanism, + watchlist_user_preferences=watchlist_user_preferences, + update_mask=getattr(args, "update_mask", None), + ) + output_formatter(out, getattr(args, "output", "json")) + except Exception as e: # pylint: disable=broad-exception-caught + print(f"Error updating watchlist: {e}", file=sys.stderr) + sys.exit(1) diff --git a/tests/chronicle/test_watchlist.py b/tests/chronicle/test_watchlist.py index d391583b..10e0d646 100644 --- a/tests/chronicle/test_watchlist.py +++ b/tests/chronicle/test_watchlist.py @@ -26,6 +26,7 @@ get_watchlist, delete_watchlist, create_watchlist, + update_watchlist, ) from secops.exceptions import APIError @@ -296,3 +297,159 @@ def test_create_watchlist_without_description(chronicle_client): "entityPopulationMechanism": {"manual": {}}, }, ) + + +# -- update_watchlist tests -- + + +def test_update_watchlist_success_all_fields(chronicle_client): + """Test update_watchlist with all fields provided.""" + expected = { + "name": "watchlist-123", + "displayName": "Updated Watchlist", + "description": "Updated description", + "multiplyingFactor": 2.5, + "entityPopulationMechanism": {"manual": {}}, + "watchlistUserPreferences": {"pinned": True}, + } + + with patch( + "secops.chronicle.watchlist.chronicle_request", + return_value=expected, + ) as mock_request: + result = update_watchlist( + chronicle_client, + watchlist_id="watchlist-123", + display_name="Updated Watchlist", + description="Updated description", + multiplying_factor=2.5, + entity_population_mechanism={"manual": {}}, + watchlist_user_preferences={"pinned": True}, + ) + + assert result == expected + + mock_request.assert_called_once_with( + chronicle_client, + method="PATCH", + endpoint_path="watchlists/watchlist-123", + api_version=APIVersion.V1, + params={ + "updateMask": ( + "display_name,description,multiplying_factor," + "entity_population_mechanism,watchlist_user_preferences" + ) + }, + json={ + "displayName": "Updated Watchlist", + "description": "Updated description", + "multiplyingFactor": 2.5, + "entityPopulationMechanism": {"manual": {}}, + "watchlistUserPreferences": {"pinned": True}, + }, + ) + + +def test_update_watchlist_single_field(chronicle_client): + """Test update_watchlist with only display_name.""" + expected = { + "name": "watchlist-123", + "displayName": "New Name", + } + + with patch( + "secops.chronicle.watchlist.chronicle_request", + return_value=expected, + ) as mock_request: + result = update_watchlist( + chronicle_client, + watchlist_id="watchlist-123", + display_name="New Name", + ) + + assert result == expected + + mock_request.assert_called_once_with( + chronicle_client, + method="PATCH", + endpoint_path="watchlists/watchlist-123", + api_version=APIVersion.V1, + params={"updateMask": "display_name"}, + json={"displayName": "New Name"}, + ) + + +def test_update_watchlist_explicit_update_mask(chronicle_client): + """Test update_watchlist with explicit update_mask overrides auto-mask.""" + expected = { + "name": "watchlist-123", + "displayName": "Updated Name", + "description": "Updated desc", + } + + with patch( + 
"secops.chronicle.watchlist.chronicle_request", + return_value=expected, + ) as mock_request: + result = update_watchlist( + chronicle_client, + watchlist_id="watchlist-123", + display_name="Updated Name", + description="Updated desc", + update_mask="display_name", + ) + + assert result == expected + + mock_request.assert_called_once_with( + chronicle_client, + method="PATCH", + endpoint_path="watchlists/watchlist-123", + api_version=APIVersion.V1, + params={"updateMask": "display_name"}, + json={ + "displayName": "Updated Name", + "description": "Updated desc", + }, + ) + + +def test_update_watchlist_no_fields(chronicle_client): + """Test update_watchlist with no optional fields (edge case).""" + expected = {"name": "watchlist-123"} + + with patch( + "secops.chronicle.watchlist.chronicle_request", + return_value=expected, + ) as mock_request: + result = update_watchlist( + chronicle_client, + watchlist_id="watchlist-123", + ) + + assert result == expected + + mock_request.assert_called_once_with( + chronicle_client, + method="PATCH", + endpoint_path="watchlists/watchlist-123", + api_version=APIVersion.V1, + params=None, + json={}, + ) + + +def test_update_watchlist_error(chronicle_client): + """Test update_watchlist raises APIError on failure.""" + with patch( + "secops.chronicle.watchlist.chronicle_request", + side_effect=APIError("Failed to update watchlist watchlist-123"), + ): + with pytest.raises(APIError) as exc_info: + update_watchlist( + chronicle_client, + watchlist_id="watchlist-123", + display_name="New Name", + ) + + assert "Failed to update watchlist" in str(exc_info.value) diff --git a/tests/chronicle/test_watchlist_integration.py b/tests/chronicle/test_watchlist_integration.py new file mode 100644 index 00000000..01fbbafe --- /dev/null +++ b/tests/chronicle/test_watchlist_integration.py @@ -0,0 +1,172 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Integration tests for Chronicle watchlist.""" +import pytest +from datetime import datetime, timezone +from secops import SecOpsClient +from ..config import CHRONICLE_CONFIG, SERVICE_ACCOUNT_JSON + + +@pytest.fixture(scope="module") +def chronicle(): + """Fixture to create a Chronicle client.""" + client = SecOpsClient(service_account_info=SERVICE_ACCOUNT_JSON) + return client.chronicle(**CHRONICLE_CONFIG) + + +@pytest.mark.integration +def test_watchlist_crud_workflow(chronicle): + """Test complete watchlist CRUD workflow including update.""" + + ts = datetime.now(timezone.utc).strftime("%Y%m%d%H%M%S") + watchlist_name = f"secops-test-watchlist-{ts}" + display_name = f"SecOps Test Watchlist {ts}" + description = ( + "Integration test watchlist - created by test_watchlist_integration.py" + ) + multiplying_factor = 1.0 + + created_watchlist = None + watchlist_id = None + + try: + # 1. Create watchlist + print("\n1. 
Creating watchlist...") + created_watchlist = chronicle.create_watchlist( + name=watchlist_name, + display_name=display_name, + multiplying_factor=multiplying_factor, + description=description, + ) + + assert isinstance(created_watchlist, dict) + assert "name" in created_watchlist + assert created_watchlist.get("displayName") == display_name + assert created_watchlist.get("description") == description + + watchlist_id = created_watchlist["name"].split("/")[-1] + print(f" Created watchlist: {display_name} (ID: {watchlist_id})") + + # 2. Get watchlist + print("\n2. Getting watchlist...") + fetched_watchlist = chronicle.get_watchlist(watchlist_id) + + assert isinstance(fetched_watchlist, dict) + assert fetched_watchlist.get("name") == created_watchlist["name"] + assert fetched_watchlist.get("displayName") == display_name + print(f" Fetched watchlist: {fetched_watchlist.get('displayName')}") + + # 3. Update watchlist - change display_name, description, multiplying_factor + print("\n3. Updating watchlist fields...") + updated_display_name = f"Updated Watchlist {ts}" + updated_description = "Updated description - integration test" + updated_multiplying_factor = 2.5 + + updated_watchlist = chronicle.update_watchlist( + watchlist_id=watchlist_id, + display_name=updated_display_name, + description=updated_description, + multiplying_factor=updated_multiplying_factor, + ) + + assert isinstance(updated_watchlist, dict) + assert updated_watchlist.get("displayName") == updated_display_name + assert updated_watchlist.get("description") == updated_description + assert ( + updated_watchlist.get("multiplyingFactor") + == updated_multiplying_factor + ) + print( + f" Updated display_name: {updated_watchlist.get('displayName')}" + ) + print(f" Updated description: {updated_watchlist.get('description')}") + print( + f" Updated multiplying_factor: " + f"{updated_watchlist.get('multiplyingFactor')}" + ) + + # 4. Update watchlist user preferences (pinned) + print("\n4. Updating watchlist user preferences (pinned=True)...") + pinned_watchlist = chronicle.update_watchlist( + watchlist_id=watchlist_id, + watchlist_user_preferences={"pinned": True}, + ) + + assert isinstance(pinned_watchlist, dict) + user_prefs = pinned_watchlist.get("watchlistUserPreferences", {}) + assert user_prefs.get("pinned") is True + print(f" Pinned: {user_prefs.get('pinned')}") + + # 5. List watchlists and verify our watchlist is present + print("\n5. Listing watchlists...") + watchlists_response = chronicle.list_watchlists(page_size=100) + + assert isinstance(watchlists_response, dict) + watchlists = watchlists_response.get("watchlists", []) + watchlist_names = [w.get("name") for w in watchlists] + assert created_watchlist["name"] in watchlist_names + print( + f" Found {len(watchlists)} watchlists, " + f"verified test watchlist is present" + ) + + # 6. Delete watchlist (cleanup) + print("\n6. Deleting watchlist...") + delete_result = chronicle.delete_watchlist(watchlist_id) + + assert isinstance(delete_result, dict) + print(f" Successfully deleted watchlist {watchlist_id}") + + # Verify deletion + print("\n7. 
Verifying deletion...") + try: + chronicle.get_watchlist(watchlist_id) + pytest.fail("Watchlist should have been deleted") + except Exception: + print(" Watchlist successfully deleted (get returned error)") + + except Exception as e: + # Cleanup on failure + if watchlist_id: + try: + print( + f"\nCleanup: Attempting to delete watchlist {watchlist_id}" + ) + chronicle.delete_watchlist(watchlist_id, force=True) + print("Cleanup: Successfully deleted watchlist") + except Exception as cleanup_error: + print(f"Cleanup failed: {cleanup_error}") + raise e + + +@pytest.mark.integration +def test_watchlist_list(chronicle): + """Test listing watchlists with pagination.""" + print("\nTesting watchlist list with pagination...") + + # List with small page size + result = chronicle.list_watchlists(page_size=1) + + assert isinstance(result, dict) + watchlists = result.get("watchlists", []) + assert isinstance(watchlists, list) + print(f"Listed {len(watchlists)} watchlist(s) with page_size=1") + + # If there's more data, verify pagination token exists + if len(watchlists) == 1: + # List all to check total count + all_result = chronicle.list_watchlists() + all_watchlists = all_result.get("watchlists", []) + print(f"Total watchlists available: {len(all_watchlists)}") diff --git a/tests/cli/test_watchlist_cli_integration.py b/tests/cli/test_watchlist_cli_integration.py index 746350bd..d63af973 100644 --- a/tests/cli/test_watchlist_cli_integration.py +++ b/tests/cli/test_watchlist_cli_integration.py @@ -36,11 +36,7 @@ def test_cli_watchlist_list_and_get(cli_env, common_args): # 1. List watchlists print("1. Listing watchlists") - list_cmd = ( - ["secops"] - + common_args - + ["watchlist", "list"] - ) + list_cmd = ["secops"] + common_args + ["watchlist", "list"] list_result = subprocess.run( list_cmd, @@ -63,7 +59,9 @@ def test_cli_watchlist_list_and_get(cli_env, common_args): first_watchlist = watchlists[0] assert "name" in first_watchlist, "Missing 'name' in watchlist" - assert "displayName" in first_watchlist, "Missing 'displayName' in watchlist" + assert ( + "displayName" in first_watchlist + ), "Missing 'displayName' in watchlist" # Extract watchlist ID (name is a resource path, ID is last component) watchlist_name = first_watchlist["name"] @@ -95,22 +93,26 @@ def test_cli_watchlist_list_and_get(cli_env, common_args): assert get_result.returncode == 0, f"Command failed: {get_result.stderr}" watchlist_data = json.loads(get_result.stdout) - assert isinstance(watchlist_data, dict), "Expected dict response from watchlist get" - assert watchlist_data.get("name") == watchlist_name, "Watchlist name doesn't match" + assert isinstance( + watchlist_data, dict + ), "Expected dict response from watchlist get" + assert ( + watchlist_data.get("name") == watchlist_name + ), "Watchlist name doesn't match" assert ( watchlist_data.get("displayName") == display_name ), "Watchlist display name doesn't match" @pytest.mark.integration -def test_cli_watchlist_create_and_delete(cli_env, common_args): - """Test CLI commands for creating and deleting a watchlist. +def test_cli_watchlist_create_update_delete(cli_env, common_args): + """Test CLI commands for creating, updating, and deleting a watchlist. Args: cli_env: Environment variables for CLI execution. common_args: Common CLI arguments. 
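+
+    Illustrative shape of the update invocation exercised below;
+    flag names come from setup_watchlist_command, and the values
+    are examples only:
+
+        secops watchlist update --watchlist-id <id> \
+            --display-name "Updated Watchlist" \
+            --multiplying-factor 2.5 --pinned true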
""" - print("\nTesting watchlist create and delete commands") + print("\nTesting watchlist create, update, and delete commands") # Use a timestamped name to avoid collisions ts = datetime.now(timezone.utc).strftime("%Y%m%d%H%M%S") @@ -145,10 +147,12 @@ def test_cli_watchlist_create_and_delete(cli_env, common_args): text=True, ) - assert create_result.returncode == 0, f"Create failed: {create_result.stderr}" + assert ( + create_result.returncode == 0 + ), f"Create failed: {create_result.stderr}" created_data = json.loads(create_result.stdout) - assert isinstance(created_data, dict), "Expected dict response from watchlist create" + assert isinstance(created_data, dict), "Expected dict response" assert created_data.get("name"), "Missing 'name' in created watchlist" assert ( created_data.get("displayName") == display_name @@ -158,8 +162,56 @@ def test_cli_watchlist_create_and_delete(cli_env, common_args): created_id = created_name.split("/")[-1] print(f"Created watchlist: {display_name} (ID: {created_id})") - # 2. Get created watchlist to verify - print("\n2. Verifying created watchlist via get command") + # 2. Update watchlist + print("\n2. Updating watchlist") + updated_display_name = f"Updated Watchlist {ts}" + updated_multiplying_factor = 2.5 + updated_description = "Updated integration test watchlist" + + update_cmd = ( + ["secops"] + + common_args + + [ + "watchlist", + "update", + "--watchlist-id", + created_id, + "--display-name", + updated_display_name, + "--multiplying-factor", + str(updated_multiplying_factor), + "--description", + updated_description, + "--pinned", + "true", + ] + ) + + update_result = subprocess.run( + update_cmd, + env=cli_env, + capture_output=True, + text=True, + ) + + assert ( + update_result.returncode == 0 + ), f"Update failed: {update_result.stderr}" + + update_data = json.loads(update_result.stdout) + assert isinstance(update_data, dict), "Expected dict response" + assert ( + update_data.get("displayName") == updated_display_name + ), "Updated display name mismatch" + assert ( + update_data.get("multiplyingFactor") == updated_multiplying_factor + ), "Updated multiplying factor mismatch" + user_prefs = update_data.get("watchlistUserPreferences", {}) + assert user_prefs.get("pinned") is True, "Watchlist should be pinned" + print(f"Updated watchlist: {updated_display_name}") + + # 3. Verify updates via get command + print("\n3. Verifying updates via get command") get_cmd = ( ["secops"] + common_args @@ -183,11 +235,12 @@ def test_cli_watchlist_create_and_delete(cli_env, common_args): get_data = json.loads(get_result.stdout) assert get_data.get("name") == created_name, "Get watchlist name mismatch" assert ( - get_data.get("displayName") == display_name + get_data.get("displayName") == updated_display_name ), "Get watchlist display name mismatch" + print("Verified updates successfully") - # 3. Delete created watchlist - print("\n3. Deleting created watchlist") + # 4. Delete created watchlist (cleanup) + print("\n4. 
Deleting created watchlist") delete_cmd = ( ["secops"] + common_args @@ -206,9 +259,10 @@ def test_cli_watchlist_create_and_delete(cli_env, common_args): text=True, ) - assert delete_result.returncode == 0, f"Delete failed: {delete_result.stderr}" + assert ( + delete_result.returncode == 0 + ), f"Delete failed: {delete_result.stderr}" - # Response from delete may be empty or contain metadata; just ensure it's valid JSON if delete_result.stdout.strip(): delete_data = json.loads(delete_result.stdout) assert isinstance( From 620d7ad8cc7148164a4c0dd53e3eb933550d7d75 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Mon, 22 Dec 2025 12:45:55 +0530 Subject: [PATCH 46/48] chore: fixed pagination token return for paginated request --- src/secops/chronicle/utils/request_utils.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/secops/chronicle/utils/request_utils.py b/src/secops/chronicle/utils/request_utils.py index 85eb9658..eb03babf 100644 --- a/src/secops/chronicle/utils/request_utils.py +++ b/src/secops/chronicle/utils/request_utils.py @@ -87,7 +87,12 @@ def chronicle_paginated_request( # Return a list if the API returns a list, otherwise return a dict if isinstance(data, list): return results - return {items_key: results} + response = {items_key: results} + + if data.get("nextPageToken"): + response["nextPageToken"] = data.get("nextPageToken") + + return response def chronicle_request( From 07cbb02c76031641ba9bba50d1502d45b193cfe9 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Mon, 22 Dec 2025 12:48:22 +0530 Subject: [PATCH 47/48] chore: added changelog. updated project version --- CHANGELOG.md | 9 +++++++++ pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 90bfd2f7..1b8217b1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,15 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## [0.30.0] - 2025-12-22 +### Added +- Support for following watchlist management methods: + - List watchlists + - Create watchlist + - Get watchlist details + - Update watchlist + - Delete watchlist + ## [0.29.0] - 2025-12-17 ### Added - Support for following log/data processing pipeline methods: diff --git a/pyproject.toml b/pyproject.toml index 897997ad..120e676d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "secops" -version = "0.29.0" +version = "0.30.0" description = "Python SDK for wrapping the Google SecOps API for common use cases" readme = "README.md" requires-python = ">=3.10" From bb81114835d0c7dcd044321c6fa03872cfccf4c5 Mon Sep 17 00:00:00 2001 From: Mihir Vala <179564180+mihirvala-crestdata@users.noreply.github.com> Date: Mon, 22 Dec 2025 15:25:31 +0530 Subject: [PATCH 48/48] chore: refactor and linting --- src/secops/chronicle/client.py | 32 ++++++++++----------- src/secops/chronicle/utils/request_utils.py | 18 ++++++------ src/secops/chronicle/watchlist.py | 32 ++++++++++----------- 3 files changed, 41 insertions(+), 41 deletions(-) diff --git a/src/secops/chronicle/client.py b/src/secops/chronicle/client.py index 64364eb7..c96aedc3 100644 --- a/src/secops/chronicle/client.py +++ b/src/secops/chronicle/client.py @@ -18,7 +18,7 @@ from collections.abc import Iterator from datetime import datetime from enum import Enum -from typing import Any, Literal, Union, Optional, Dict +from typing import Any, Literal, Union from google.auth.transport import requests as google_auth_requests @@ -614,9 +614,9 @@ def validate_query(self, query: str) -> dict[str, Any]: def list_watchlists( self, - page_size: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Dict[str, Any]: + page_size: int | None = None, + page_token: str | None = None, + ) -> dict[str, Any]: """Get a list of all watchlists. Args: @@ -634,7 +634,7 @@ def list_watchlists( def get_watchlist( self, watchlist_id: str, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Get a specific watchlist by ID. Args: @@ -651,8 +651,8 @@ def get_watchlist( def delete_watchlist( self, watchlist_id: str, - force: Optional[bool] = None, - ) -> Dict[str, Any]: + force: bool | None = None, + ) -> dict[str, Any]: """Delete a watchlist by ID. Args: @@ -675,8 +675,8 @@ def create_watchlist( name: str, display_name: str, multiplying_factor: float, - description: Optional[str] = None, - ) -> Dict[str, Any]: + description: str | None = None, + ) -> dict[str, Any]: """Create a watchlist Args: @@ -698,13 +698,13 @@ def create_watchlist( def update_watchlist( self, watchlist_id: str, - display_name: Optional[str] = None, - description: Optional[str] = None, - multiplying_factor: Optional[float] = None, - entity_population_mechanism: Optional[Dict[str, Any]] = None, - watchlist_user_preferences: Optional[Dict[str, Any]] = None, - update_mask: Optional[str] = None, - ) -> Dict[str, Any]: + display_name: str | None = None, + description: str | None = None, + multiplying_factor: float | None = None, + entity_population_mechanism: dict[str, Any] | None = None, + watchlist_user_preferences: dict[str, Any] | None = None, + update_mask: str | None = None, + ) -> dict[str, Any]: """Update a watchlist. 
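+
+        Note:
+            Only fields passed as non-None are sent, and the update
+            mask is derived from them unless ``update_mask`` is
+            given explicitly. Illustrative call (placeholder
+            values):
+
+                chronicle.update_watchlist(
+                    watchlist_id="watchlist-123",
+                    description="Tier-1 assets",
+                )
+                # PATCHes watchlists/watchlist-123?updateMask=description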
Args: diff --git a/src/secops/chronicle/utils/request_utils.py b/src/secops/chronicle/utils/request_utils.py index eb03babf..0cfe925e 100644 --- a/src/secops/chronicle/utils/request_utils.py +++ b/src/secops/chronicle/utils/request_utils.py @@ -14,7 +14,7 @@ # """Helper functions for Chronicle.""" -from typing import Dict, Any, Optional, List, Union +from typing import Any from secops.exceptions import APIError from secops.chronicle.models import APIVersion @@ -29,10 +29,10 @@ def chronicle_paginated_request( path: str, items_key: str, *, - page_size: Optional[int] = None, - page_token: Optional[str] = None, - extra_params: Optional[Dict[str, Any]] = None, -) -> Union[Dict[str, List[Any]], List[Any]]: + page_size: int | None = None, + page_token: str | None = None, + extra_params: dict[str, Any] | None = None, +) -> dict[str, list[Any]] | list[Any]: """Helper to get items from endpoints that use pagination. Args: @@ -101,11 +101,11 @@ def chronicle_request( endpoint_path: str, *, api_version: str = APIVersion.V1, - params: Optional[Dict[str, Any]] = None, - json: Optional[Dict[str, Any]] = None, + params: dict[str, Any] | None = None, + json: dict[str, Any] | None = None, expected_status: int = 200, - error_message: Optional[str] = None, -) -> Dict[str, Any]: + error_message: str | None = None, +) -> dict[str, Any]: """Perform an HTTP request and return JSON, raising APIError on failure. Args: diff --git a/src/secops/chronicle/watchlist.py b/src/secops/chronicle/watchlist.py index 5ec0f049..ee8327da 100644 --- a/src/secops/chronicle/watchlist.py +++ b/src/secops/chronicle/watchlist.py @@ -14,7 +14,7 @@ # """Watchlist functionality for Chronicle.""" -from typing import Dict, Any, Optional +from typing import Any from secops.chronicle.models import APIVersion from secops.chronicle.utils.request_utils import ( @@ -25,9 +25,9 @@ def list_watchlists( client: "ChronicleClient", - page_size: Optional[int] = None, - page_token: Optional[str] = None, -) -> Dict[str, Any]: + page_size: int | None = None, + page_token: str | None = None, +) -> dict[str, Any]: """Get a list of watchlists. 
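+
+    Note:
+        With ``page_size`` unset, all pages are fetched and
+        combined; with ``page_size`` set, a single page is returned
+        and the response includes ``nextPageToken`` whenever more
+        results remain.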
Args: @@ -53,7 +53,7 @@ def list_watchlists( def get_watchlist( client: "ChronicleClient", watchlist_id: str -) -> Dict[str, Any]: +) -> dict[str, Any]: """Get a watchlist by ID Args: @@ -75,8 +75,8 @@ def get_watchlist( def delete_watchlist( - client: "ChronicleClient", watchlist_id: str, force: Optional[bool] = None -) -> Dict[str, Any]: + client: "ChronicleClient", watchlist_id: str, force: bool | None = None +) -> dict[str, Any]: """Delete a watchlist by ID Args: @@ -112,8 +112,8 @@ def create_watchlist( name: str, display_name: str, multiplying_factor: float, - description: Optional[str] = None, -) -> Dict[str, Any]: + description: str | None = None, +) -> dict[str, Any]: """Create a watchlist Args: @@ -147,13 +147,13 @@ def create_watchlist( def update_watchlist( client: "ChronicleClient", watchlist_id: str, - display_name: Optional[str] = None, - description: Optional[str] = None, - multiplying_factor: Optional[float] = None, - entity_population_mechanism: Optional[Dict[str, Any]] = None, - watchlist_user_preferences: Optional[Dict[str, Any]] = None, - update_mask: Optional[str] = None, -) -> Dict[str, Any]: + display_name: str | None = None, + description: str | None = None, + multiplying_factor: float | None = None, + entity_population_mechanism: dict[str, Any] | None = None, + watchlist_user_preferences: dict[str, Any] | None = None, + update_mask: str | None = None, +) -> dict[str, Any]: """Update a watchlist. Args: