diff --git a/.gitignore b/.gitignore index 839210e..ec42fcd 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,5 @@ # ignore cloud credentials /bot/cloud-credentials.json + +__pycache__/ \ No newline at end of file diff --git a/cve-jira-processing/README.md b/cve-jira-processing/README.md new file mode 100644 index 0000000..f2aa565 --- /dev/null +++ b/cve-jira-processing/README.md @@ -0,0 +1,388 @@ +# CVE Jira Processing + +Tools for processing CVE Vulnerability issues in Jira. Queries Jira for unprocessed CVEs, creates tracking structures, and establishes dependency chains across versions. + +The script is **idempotent** - it can be run multiple times safely without creating duplicate issues. + +## Setup + +1. Install dependencies: + ```bash + pip install -r requirements.txt + ``` + +2. Set environment variables: + ```bash + export JIRA_API_TOKEN="your-api-token" + export JIRA_SERVER="https://issues.redhat.com" # optional, this is the default + ``` + +## Scripts + +| Script | Description | +|--------|-------------| +| `dup_cve.py` | Main script for CVE processing | +| `manage_task_cves.py` | Manage CVEs linked to a task (update fields, close, set release notes) | +| `inspect_issues.py` | Inspect issues - show status and links | +| `reset_cves.py` | Remove processed labels to allow reprocessing | + +## Usage + +### dup_cve.py - Main Processing Script + +```bash +# Dry run - see what would be done without making changes +python dup_cve.py --dry-run + +# Process CVEs for real +python dup_cve.py + +# Process only a specific CVE +python dup_cve.py --cve CVE-2024-1234 + +# Verbose output for debugging +python dup_cve.py --dry-run -v + +# Use a custom config file +python dup_cve.py --config /path/to/config.yaml +``` + +### inspect_issues.py - Inspect Issues + +```bash +# Inspect specific issues +python inspect_issues.py OCPBUGS-12345 OSASINFRA-67890 + +# Inspect issues from a file +python inspect_issues.py -f issues.txt + +# Combine both +python inspect_issues.py 
OCPBUGS-12345 -f more_issues.txt +``` + +### manage_task_cves.py - Manage CVEs Linked to a Task + +```bash +# See what CVEs are linked to a task +python manage_task_cves.py OSASINFRA-12345 --dry-run + +# Close all linked CVEs with defaults (status=Closed, resolution=Done) +python manage_task_cves.py OSASINFRA-12345 + +# Transition to a different status +python manage_task_cves.py OSASINFRA-12345 --status ON_QA + +# Close with custom comment and resolution +python manage_task_cves.py OSASINFRA-12345 --comment "Fixed in OCPBUGS-99999" --resolution "Fixed" + +# Set release note fields when closing (using defaults: type=CVE, status=Proposed) +python manage_task_cves.py OSASINFRA-12345 \ + --release-note-text "This CVE was addressed upstream." \ + --release-note-type \ + --release-note-status + +# Set release note fields with custom values +python manage_task_cves.py OSASINFRA-12345 \ + --release-note-text "This CVE was addressed upstream." \ + --release-note-type "Bug Fix" \ + --release-note-status "Done" + +# Update release notes without transitioning (no status change) +python manage_task_cves.py OSASINFRA-12345 --no-transition \ + --release-note-text "This CVE was addressed upstream." 
\ + --release-note-type \ + --release-note-status + +# Add a patch link to CVEs +python manage_task_cves.py OSASINFRA-12345 \ + --patch-url "https://github.com/org/repo/pull/123" \ + --patch-title "Fix for CVE-2024-1234" + +# Transition CVEs to POST and task to POST +python manage_task_cves.py OSASINFRA-12345 --status POST --task-status POST + +# Close CVEs and the parent task +python manage_task_cves.py OSASINFRA-12345 --close-task + +# Close CVEs and task with patch link on both +python manage_task_cves.py OSASINFRA-12345 --close-task \ + --patch-url "https://github.com/org/repo/pull/123" + +# Close task with custom resolution +python manage_task_cves.py OSASINFRA-12345 \ + --task-status Closed --task-resolution "Won't Fix" + +# Process only Bug issues (not Vulnerabilities) +python manage_task_cves.py OSASINFRA-12345 --bugs-only --status POST +``` + +### reset_cves.py - Reset Processed Labels + +```bash +# Dry run - see what would be reset +python reset_cves.py --dry-run + +# Actually remove labels +python reset_cves.py + +# Reset only a specific CVE +python reset_cves.py --cve CVE-2024-1234 + +# Reset issues from a file +python reset_cves.py -f issues.txt + +# Also remove dependency and duplicate links +python reset_cves.py -f issues.txt --remove-links + +# Transition issues back to ASSIGNED status +python reset_cves.py -f issues.txt --reassign + +# Full reset: remove label, links, and reassign +python reset_cves.py -f issues.txt --remove-links --reassign +``` + +## Processing Flow + +``` +┌─────────────────────────────────────────────────────────────┐ +│ 1. Query unprocessed CVEs │ +│ - Status: New or Assigned │ +│ - No processed label │ +│ - Matching configured downstream components │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 2. 
Group by {component}:{cve_id} │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 3. For each CVE group: │ +│ a. Find/create Epic in OSASINFRA │ +│ b. Query ALL CVEs for this CVE ID (version detection) │ +│ c. Determine version range: min → max CVE version │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 4. For each version with CVEs: │ +│ ├─ Create Task in OSASINFRA (with Epic Link) │ +│ ├─ Link Task to CVEs (is caused by) │ +│ ├─ Set Target Backport Versions = Affects Version/s │ +│ └─ Add processed label to all CVEs │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 5. Create VERIFIED bug for next version after max CVE: │ +│ ├─ Create Task in OSASINFRA (with Epic Link) │ +│ ├─ Create VERIFIED Bug in OCPBUGS │ +│ └─ Link Task → is related to → Bug │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 6. Create dependency chain (older depends on newer): │ +│ 4.12 → depends on → 4.13 → depends on → 4.14 │ +└─────────────────────────────────────────────────────────────┘ +``` + +## JQL Queries Used + +### 1. Initial Query - Find Unprocessed CVEs + +```sql +project = OCPBUGS AND type = Vulnerability +AND ("Downstream Component Name" ~ "comp1" OR "Downstream Component Name" ~ "comp2" ...) +AND labels != "{cve_processed_label}" +AND status IN (New, Assigned) +AND "CVE ID" ~ "{cve_filter}" -- only if --cve flag provided +``` + +### 2. Secondary Query - Find All Related CVEs + +```sql +project = OCPBUGS AND type = Vulnerability +AND "CVE ID" ~ "{cve_id}" +AND ("Downstream Component Name" ~ "comp1" OR ...) +AND (status != Closed OR resolution IN (Duplicate, "Won't Do", "Not a Bug")) +``` + +### 3. 
Find Existing Epic + +```sql +project = OSASINFRA AND type = Epic +AND summary ~ "\"{cve_id} - {component}\"" +``` + +### 4. Find Existing Task + +```sql +project = OSASINFRA AND type = Task +AND summary ~ "\"{cve_id} - {component} - {version}\"" +``` + +### 5. Find Existing Bug + +```sql +project = OCPBUGS AND type = Bug +AND summary ~ "\"{cve_id} - {component} - {version}\"" +``` + +## Issue Structure Created + +``` +OSASINFRA: +├── Epic: CVE-2024-1234 - openstack-cinder +│ ├── Task: CVE-2024-1234 - openstack-cinder - 4.12 (Epic Link) +│ │ └── is caused by: CVE (Vulnerability) for 4.12 +│ ├── Task: CVE-2024-1234 - openstack-cinder - 4.13 (Epic Link) +│ │ └── is caused by: CVE (Vulnerability) for 4.13 +│ └── Task: CVE-2024-1234 - openstack-cinder - 4.14 (Epic Link) +│ └── is related to: VERIFIED Bug (CVE fixed in earlier version) + +OCPBUGS (one VERIFIED bug for next version after max CVE): +└── Bug: CVE-2024-1234 - openstack-cinder - 4.14 (VERIFIED) + +Dependency Chain (older depends on newer): +CVE-4.12 → depends on → CVE-4.13 → depends on → Bug-4.14 +``` + +## Link Types Used + +| Source | Link Type | Target | Description | +|--------|-----------|--------|-------------| +| Task | Epic Link (field) | Epic | Tasks belong to the Epic | +| Task | is related to | Bug (ON_QA) | Task linked to created bug | +| Task | is caused by | CVE | Task is caused by the CVE | +| Older issue | depends on | Newer issue | Dependency chain | + +## Fields Set on Created Issues + +### Epics (in OSASINFRA) + +| Field | Value | +|-------|-------| +| Project | OSASINFRA | +| Issue Type | Epic | +| Summary | `{CVE ID} - {component}` | +| Epic Name | `{CVE ID} - {component}` | +| Security Level | Red Hat Employee | +| Assignee | User running the script | +| Description | `Grouping epic for bugs related to {CVE ID} - {component}` | + +### Tasks (in OSASINFRA) + +| Field | Value | +|-------|-------| +| Project | OSASINFRA | +| Issue Type | Task | +| Summary | `{CVE ID} - {component} - 
{version}` | +| Security Level | Red Hat Employee | +| Assignee | User running the script | +| Epic Link | Link to parent Epic | + +### VERIFIED Bugs (in OCPBUGS) + +| Field | Value | +|-------|-------| +| Project | OCPBUGS | +| Issue Type | Bug | +| Summary | `{CVE ID} - {component} - {version}` | +| Component/s | Inherited from source CVE issues | +| Target Version | `{version}.z` (e.g., 4.14.z) | +| Security Level | Red Hat Employee | +| Assignee | User running the script | +| Status | Transitioned to VERIFIED | + +## Version Range Logic + +- **CVE versions**: Only versions with reported CVEs get processed +- **VERIFIED bug**: One bug created for next version after max CVE version +- This indicates the CVE was fixed in an earlier version + +## Idempotency + +The script can be run multiple times safely: + +- **Existing Epics** are reused (searched by summary) +- **Existing Tasks** are reused (searched by summary) +- **Existing Bugs** are reused (searched by summary) +- **Already-processed CVEs** are identified by the processed label and skipped + +## Files + +| File | Description | +|------|-------------| +| `dup_cve.py` | Main script for CVE processing | +| `manage_task_cves.py` | Utility to manage CVEs linked to a task | +| `inspect_issues.py` | Utility to inspect issue status and links | +| `reset_cves.py` | Utility to remove processed labels | +| `lib/jira_client.py` | Jira API client wrapper | +| `lib/jira_formatter.py` | Field formatting utilities for Jira API requests | +| `config.yaml` | Configuration file (component mappings, labels) | + +## Configuration + +The `config.yaml` file contains: + +- **cve_processed_label**: Label applied to processed CVE issues +- **repo_to_component**: Mapping of downstream repository names to component names + +The script queries for Vulnerabilities matching the downstream components defined in `repo_to_component`. 
+ +To add support for new components, add entries to the `repo_to_component` section: + +```yaml +repo_to_component: + openshift4/ose-new-component-rhel9: openshift/new-component +``` + +## Example Output + +``` +2025-01-15 10:30:00 - Loaded config from config.yaml +2025-01-15 10:30:00 - Configured components: ['openshift4/ose-cloud-provider-openstack-rhel9', ...] +2025-01-15 10:30:01 - Found 50 numeric target versions +2025-01-15 10:30:01 - Querying Jira for Vulnerabilities... +2025-01-15 10:30:05 - Found 7 unprocessed Vulnerabilities +2025-01-15 10:30:05 - Found 2 CVE groups + +2025-01-15 10:30:05 - Processing CVE group: openstack-cinder:CVE-2024-1234 +2025-01-15 10:30:05 - CVE: CVE-2024-1234, Component: openstack-cinder +2025-01-15 10:30:05 - Found CVEs in versions: ['4.12', '4.13'] +2025-01-15 10:30:05 - Processing versions: ['4.12', '4.13', '4.14', '4.15', '4.16'] +2025-01-15 10:30:06 - Created Epic: OSASINFRA-99999 +2025-01-15 10:30:07 - Version 4.12: +2025-01-15 10:30:07 - Created task: OSASINFRA-100000 +2025-01-15 10:30:07 - 2 CVEs (1 new) +2025-01-15 10:30:07 - OCPBUGS-11111 (already processed) +2025-01-15 10:30:08 - Linked: OCPBUGS-22222 +2025-01-15 10:30:09 - Version 4.13: +2025-01-15 10:30:09 - Created task: OSASINFRA-100001 +2025-01-15 10:30:09 - 1 CVEs (1 new) +2025-01-15 10:30:09 - Linked: OCPBUGS-33333 +2025-01-15 10:30:10 - Version 4.14: +2025-01-15 10:30:10 - Created task: OSASINFRA-100002 +2025-01-15 10:30:10 - No CVEs, creating ON_QA bug +2025-01-15 10:30:11 - Created bug: OCPBUGS-100003 +2025-01-15 10:30:11 - Transitioned OCPBUGS-100003 to ON_QA +... 
+ +====================================================================== + SUMMARY (DRY RUN - no changes made) +====================================================================== + + TOTALS + ---------------------------------------- + Epics: 2 (1 existing, 1 to create) + Tasks: 10 (3 existing, 7 to create) + Bugs: 6 (0 existing, 6 to create) + CVEs: 5 to link + + DETAILS BY CVE GROUP + ---------------------------------------- + [CVE-2024-1234] openstack-cinder + Epic: OSASINFRA-99999 + Tasks (5): OSASINFRA-100000, OSASINFRA-100001, ... OSASINFRA-100004 + Bugs (3): OCPBUGS-100003, OCPBUGS-100004, OCPBUGS-100005 + CVEs (2): OCPBUGS-11111, OCPBUGS-22222 + +====================================================================== +``` diff --git a/cve-jira-processing/config.yaml b/cve-jira-processing/config.yaml new file mode 100644 index 0000000..aa9113f --- /dev/null +++ b/cve-jira-processing/config.yaml @@ -0,0 +1,22 @@ +# CVE Jira Processing Configuration + +# Label applied to processed CVE issues +cve_processed_label: "os-as-infra-cve-triaged" + +# Mapping of downstream repository names to upstream component names +# Used to group CVEs by component +repo_to_component: + # cloud-provider-openstack + openshift4/ose-openstack-cloud-controller-manager-rhel9: openshift/cloud-provider-openstack + openshift4/ose-openstack-cinder-csi-driver-rhel9: openshift/cloud-provider-openstack + openshift4/ose-openstack-cinder-csi-driver-rhel8: openshift/cloud-provider-openstack + openshift4/ose-csi-driver-manila-rhel8: openshift/cloud-provider-openstack + openshift4/ose-csi-driver-manila-rhel9: openshift/cloud-provider-openstack + + # csi-driver-manila-operator + openshift4/ose-csi-driver-manila-rhel9-operator: openshift/csi-driver-manila-operator + openshift4/ose-csi-driver-manila-rhel8-operator: openshift/csi-driver-manila-operator + + # openstack-cinder-csi-driver-operator + openshift4/ose-openstack-cinder-csi-driver-rhel9-operator: 
openshift/openstack-cinder-csi-driver-operator + openshift4/ose-openstack-cinder-csi-driver-rhel8-operator: openshift/openstack-cinder-csi-driver-operator diff --git a/cve-jira-processing/dup_cve.py b/cve-jira-processing/dup_cve.py new file mode 100644 index 0000000..f40e4cb --- /dev/null +++ b/cve-jira-processing/dup_cve.py @@ -0,0 +1,1153 @@ +#!/usr/bin/env python3 +"""CVE duplicate detection and handling for Jira issues.""" + +import argparse +import logging +import re +from collections import defaultdict +from dataclasses import dataclass +from pathlib import Path +from typing import Dict, List, Optional + +import yaml + +from lib.jira_client import JiraTool + + +logger = logging.getLogger(__name__) + +# Default config file location (same directory as script) +DEFAULT_CONFIG_PATH = Path(__file__).parent / "config.yaml" + + +def load_config(config_path: Path) -> dict: + """Load configuration from YAML file.""" + if not config_path.exists(): + raise FileNotFoundError(f"Config file not found: {config_path}") + + with open(config_path, "r") as f: + config = yaml.safe_load(f) + + # Validate required keys + required_keys = ["cve_processed_label", "repo_to_component"] + for key in required_keys: + if key not in config: + raise ValueError(f"Missing required config key: {key}") + + return config + + +@dataclass +class ComplexBug: + key: str + components: list + affected_version: list + cve_id: str + + +@dataclass +class VersionGroup: + """Group of bugs for a specific version within a CVE.""" + + version: str + bugs: List[ComplexBug] + + +@dataclass +class CVEGroup: + """Group of bugs for a CVE/component combination across versions.""" + + cve_id: str + component: str + components_list: list # Jira components for issue creation + version_groups: Dict[str, VersionGroup] + epic_key: Optional[str] = None + + +@dataclass +class ProcessedResult: + """Result of processing a CVE group.""" + + epic_key: str + cve_id: str + component: str + tasks: List[str] + bugs: List[str] + 
cves_linked: List[str] + + +def parse_numeric_version(version: str) -> Optional[tuple]: + """ + Parse a version string to extract numeric major.minor parts. + + Handles versions like "4.12", "4.15.z", "4.16.0", etc. + Returns tuple (major, minor) or None if not a valid numeric version. + """ + # Match X.Y optionally followed by .z or .0 or similar + match = re.match(r"^(\d+)\.(\d+)(?:\.\w+)?$", version) + if match: + return (int(match.group(1)), int(match.group(2))) + return None + + +def normalize_version(version: str) -> str: + """ + Normalize a version string to X.Y format. + + Strips suffixes like .z, .0, etc. + """ + parsed = parse_numeric_version(version) + if parsed: + return f"{parsed[0]}.{parsed[1]}" + return version + + +def get_sorted_versions(version_names: List[str]) -> List[str]: + """ + Filter and sort version names to only include numeric versions. + + Returns sorted unique normalized versions like ["4.12", "4.13", "4.14", + "4.15"]. + """ + numeric_versions = set() + for name in version_names: + parsed = parse_numeric_version(name) + if parsed: + # Normalize to X.Y format to deduplicate 4.14 and 4.14.z + numeric_versions.add(normalize_version(name)) + + return sorted(numeric_versions, key=lambda v: parse_numeric_version(v)) + + +def get_versions_in_range( + min_version: str, max_version: str, available_versions: List[str] +) -> List[str]: + """ + Get all versions between min and max (inclusive) from available versions. + + Args: + min_version: The minimum version (e.g., "4.12") + max_version: The maximum version (e.g., "4.17") + available_versions: Sorted list of available versions + + Returns: + List of versions in range, sorted. 
+ """ + min_parsed = parse_numeric_version(min_version) + max_parsed = parse_numeric_version(max_version) + + if not min_parsed or not max_parsed: + return [] + + result = [] + for version in available_versions: + parsed = parse_numeric_version(version) + if parsed and min_parsed <= parsed <= max_parsed: + result.append(version) + + return result + + +def group_issues_by_cve( + issues: List[dict], repo_to_component: Dict[str, str] +) -> Dict[str, CVEGroup]: + """ + Group issues by CVE and component, then by version within each group. + + Args: + issues: List of issue data dictionaries (each with 'key' field). + repo_to_component: Mapping of downstream repo names to component names. + + Returns a dict where keys are "component:cve_id" and values are + CVEGroup instances containing version-grouped bugs. + """ + # First pass: collect all bugs + temp_groups: Dict[str, Dict[str, List[ComplexBug]]] = defaultdict( + lambda: defaultdict(list) + ) + components_map: Dict[str, list] = {} + + for issue in issues: + bug_key = issue["key"] + downstream_component = issue.get("Downstream Component Name") + if downstream_component not in repo_to_component: + logger.warning( + "Skipping %s: unknown component %s",bug_key, downstream_component + ) + continue + + component = repo_to_component[downstream_component] + raw_version = issue["Affects Version/s"][0]["name"] + version = normalize_version(raw_version) + cve_id = issue["CVE ID"] + + group_key = f"{component}:{cve_id}" + bug = ComplexBug( + key=bug_key, + components=issue["Component/s"], + affected_version=issue["Affects Version/s"], + cve_id=cve_id, + ) + temp_groups[group_key][version].append(bug) + + # Store components for issue creation (use first bug's components) + if group_key not in components_map: + components_map[group_key] = issue["Component/s"] + + # Second pass: build CVEGroup objects + result = {} + for group_key, version_dict in temp_groups.items(): + component, cve_id = group_key.rsplit(":", 1) + version_groups = 
{ + ver: VersionGroup(version=ver, bugs=bugs) + for ver, bugs in version_dict.items() + } + result[group_key] = CVEGroup( + cve_id=cve_id, + component=component, + components_list=components_map[group_key], + version_groups=version_groups, + ) + + return result + + +# ============================================================================= +# Query functions +# ============================================================================= + + +def query_vulnerabilities( + client: JiraTool, + downstream_components: List[str], + cve_processed_label: str, + cve_filter: Optional[str] = None, +) -> List[dict]: + """ + Query Jira for all unprocessed Vulnerability issues for given components. + + Args: + client: Jira client. + downstream_components: List of downstream component names to query. + cve_processed_label: Label that marks already-processed CVEs. + cve_filter: Optional CVE ID to filter for (e.g., "CVE-2024-1234"). + + Returns: + List of issue data dictionaries. + """ + # Build component conditions with OR (only ~ operator is supported) + component_conditions = " OR ".join( + f'"Downstream Component Name" ~ "{c}"' for c in downstream_components + ) + + jql = ( + f"project = OCPBUGS AND type = Vulnerability " + f"AND ({component_conditions}) " + f'AND labels != "{cve_processed_label}" ' + f"AND status IN (New, ASSIGNED)" + ) + + # Add CVE ID filter if specified + if cve_filter: + jql += f' AND "CVE ID" ~ "{cve_filter}"' + + logger.info("Querying Jira for Vulnerabilities...") + logger.debug("JQL: %s", jql) + + # Fetch all fields in single query (optimized - 1 request instead of N+1) + fields = [ + "Affects Version/s", + "CVE ID", + "Downstream Component Name", + "Component/s", + "Issue Type", + "Labels", + ] + issues = client.search_issues_as_dicts( + jql, fields=fields, max_results=1000) + logger.info("Found %d unprocessed Vulnerabilities", len(issues)) + + return issues + + +# ============================================================================= +# 
Search functions +# ============================================================================= + + +def query_all_cves_for_cve_id( + client: JiraTool, cve_id: str, downstream_components: List[str] +) -> Dict[str, List[dict]]: + """ + Query for CVEs with a specific CVE ID, excluding closed/fixed ones. + + This finds all versions where this CVE exists across the given components. + Excludes CVEs that are closed (unless closed as Duplicate/Won't Do/Not a + Bug). + + Args: + client: Jira client. + cve_id: The CVE identifier (e.g., "CVE-2024-1234"). + downstream_components: List of downstream component names. + + Returns: + Dictionary mapping version -> list of CVE issue data. + """ + # Build component conditions + component_conditions = " OR ".join( + f'"Downstream Component Name" ~ "{c}"' for c in downstream_components + ) + + # Exclude closed CVEs unless they were closed as Duplicate/Won't Do/Not a + # Bug This way we still track versions with unresolved CVEs + jql = ( + f"project = OCPBUGS AND type = Vulnerability " + f'AND "CVE ID" ~ "{cve_id}" ' + f"AND ({component_conditions}) " + f"AND (status != Closed OR resolution IN " + f'(Duplicate, "Won\'t Do", "Not a Bug"))' + ) + + logger.debug("Querying all CVEs for %s: %s", cve_id, jql) + + # Fetch all fields in single query (optimized - 1 request instead of N+1) + fields = [ + "Affects Version/s", + "CVE ID", + "Downstream Component Name", + "Component/s", + "Labels", + ] + issues = client.search_issues_as_dicts( + jql, fields=fields, max_results=1000) + + result: Dict[str, List[dict]] = {} + for issue_data in issues: + # Extract version and normalize (e.g., "4.15.z" -> "4.15") + affects = issue_data.get("Affects Version/s", []) + if affects: + raw_version = affects[0]["name"] + version = normalize_version(raw_version) + if version not in result: + result[version] = [] + result[version].append(issue_data) + + return result + + +@dataclass +class ExistingIssuesCache: + """Cache of existing issues for a 
CVE/component.""" + + epic_key: Optional[str] = None + tasks_by_version: Dict[str, str] = None # version -> task key + bugs_by_version: Dict[str, str] = None # version -> bug key + + def __post_init__(self): + if self.tasks_by_version is None: + self.tasks_by_version = {} + if self.bugs_by_version is None: + self.bugs_by_version = {} + + +def find_all_existing_issues( + client: JiraTool, cve_id: str, component: str +) -> ExistingIssuesCache: + """ + Find all existing issues for a CVE/component in a single batch query. + + This is more efficient than individual lookups (1 query instead of N). + + Args: + client: Jira client. + cve_id: The CVE identifier. + component: The component name. + + Returns: + ExistingIssuesCache with found issues. + """ + cache = ExistingIssuesCache() + summary_prefix = f"{cve_id} - {component}" + escaped_prefix = summary_prefix.replace('"', '\\"') + + # Query OSASINFRA for epics and tasks + jql_osasinfra = ( + f"project = OSASINFRA AND type IN (Epic, Task) " + f'AND summary ~ "\\"{escaped_prefix}\\""' + ) + osasinfra_issues = client.search_issues( + jql_osasinfra, fields=["summary", "issuetype"], max_results=100 + ) + + for issue in osasinfra_issues: + issue_type = issue.fields.issuetype.name + summary = issue.fields.summary + + if issue_type == "Epic" and summary == summary_prefix: + cache.epic_key = issue.key + elif issue_type == "Task" and summary.startswith(summary_prefix + " - "): + # Extract version from "CVE-ID - component - version" + version = summary[len(summary_prefix) + 3:] + cache.tasks_by_version[version] = issue.key + + # Query OCPBUGS for bugs + jql_ocpbugs = ( + f"project = OCPBUGS AND type = Bug " f'AND summary ~ "\\"{escaped_prefix}\\""' + ) + ocpbugs_issues = client.search_issues( + jql_ocpbugs, fields=["summary"], max_results=100 + ) + + for issue in ocpbugs_issues: + summary = issue.fields.summary + if summary.startswith(summary_prefix + " - "): + version = summary[len(summary_prefix) + 3:] + 
cache.bugs_by_version[version] = issue.key + + logger.debug( + "Found existing: epic=%s, %d tasks, %d bugs", + cache.epic_key, + len(cache.tasks_by_version), + len(cache.bugs_by_version), + ) + return cache + + +# ============================================================================= +# Creation functions +# ============================================================================= + + +def find_or_create_epic( + client: JiraTool, + cve_id: str, + component: str, + cache: ExistingIssuesCache, + dry_run: bool = False, +) -> str: + """Find an existing Epic or create a new one for a CVE/component.""" + epic_summary = f"{cve_id} - {component}" + + # Check cache first + if cache.epic_key: + logger.info(" Found existing Epic: %s", cache.epic_key) + return cache.epic_key + + if dry_run: + logger.info(" [DRY RUN] Would create Epic: %s", epic_summary) + return f"OSASINFRA-NEW({epic_summary})" + + epic_description = ( + f"Tracking epic for {cve_id} affecting {component}.\n\n" + f"This epic groups all version-specific tasks for this CVE/component combination.\n" + f"Each task tracks the fix status for a specific OpenShift version." + ) + + epic = client.create_jira_issue( + { + "project": "OSASINFRA", + "Epic Name": epic_summary, + "summary": epic_summary, + "security level": "Red Hat Employee", + "assignee": client.get_current_user(), + "Description": epic_description, + }, + "Epic", + ) + logger.info(" Created Epic: %s", epic.key) + return epic.key + + +def create_task( + client: JiraTool, + cve_id: str, + component: str, + version: str, + epic_key: str, + cache: ExistingIssuesCache, + dry_run: bool = False, +) -> Optional[str]: + """ + Create a Task in OSASINFRA for tracking a CVE/component/version. + + Returns the task key or a descriptive placeholder in dry-run mode. 
+ """ + task_summary = f"{cve_id} - {component} - {version}" + + # Check cache first + existing = cache.tasks_by_version.get(version) + if existing: + logger.info(" Found existing task: %s", existing) + return existing + + if dry_run: + logger.info(" [DRY RUN] Would create task: %s", task_summary) + return f"OSASINFRA-NEW({task_summary})" + + task_description = ( + f"Tracking task for {cve_id} in {component} for OpenShift {version}.\n\n" + f"This task tracks the remediation status of the CVE for this specific version.\n" + f"Linked CVE issues (if any) represent the upstream vulnerability reports." + ) + + task_fields = { + "project": "OSASINFRA", + "summary": task_summary, + "security level": "Red Hat Employee", + "assignee": client.get_current_user(), + "Description": task_description, + } + + # Add epic link if epic exists (not a dry-run placeholder) + if epic_key and not is_dry_run_key(epic_key): + task_fields["Epic Link"] = epic_key + + task = client.create_jira_issue(task_fields, "Task") + logger.info(" Created task: %s", task.key) + return task.key + + +def create_verified_bug( + client: JiraTool, + cve_id: str, + component: str, + version: str, + components: List[str], + cache: ExistingIssuesCache, + is_latest_version: bool = False, + dry_run: bool = False, +) -> Optional[str]: + """ + Create a VERIFIED Bug in OCPBUGS for CVE verification. + + This bug indicates that the CVE was verified as not affecting this version. + + Returns the bug key or a descriptive placeholder in dry-run mode. 
+ """ + bug_summary = f"{cve_id} - {component} - {version}" + + # Check cache first + existing = cache.bugs_by_version.get(version) + if existing: + logger.info(" Found existing bug: %s", existing) + return existing + + # Latest version doesn't have .z suffix + target_version = version if is_latest_version else version + ".z" + + if dry_run: + logger.info(" [DRY RUN] Would create VERIFIED bug: %s", bug_summary) + return f"OCPBUGS-NEW({bug_summary})" + + bug_description = ( + f"Verification bug for {cve_id} in {component} for OpenShift {version}.\n\n" + f"This version was verified as not affected by the CVE.\n" + f"The fix was applied in a previous version and this version inherits it." + ) + + issue_fields = { + "project": "OCPBUGS", + "summary": bug_summary, + "Description": bug_description, + "component/s": components, + "security level": "Red Hat Employee", + "assignee": client.get_current_user(), + "target version": [target_version], + "Affects Version/s": [target_version], + "labels": ["bugwatcher-ignore"], + } + + bug = client.create_jira_issue(issue_fields, "Bug") + logger.info(" Created bug: %s", bug.key) + + # Transition to VERIFIED + client.transition_issue_status(bug.key, "VERIFIED") + logger.info(" Transitioned %s to VERIFIED", bug.key) + + return bug.key + + +# ============================================================================= +# Linking functions +# ============================================================================= + + +def is_dry_run_key(key: str) -> bool: + """Check if a key is a dry-run placeholder.""" + return "-NEW(" in key + + +def link_task_to_bug( + client: JiraTool, task_key: str, bug_key: str, dry_run: bool = False +) -> None: + """Link a task to its corresponding OCPBUGS bug.""" + if dry_run or is_dry_run_key(task_key) or is_dry_run_key(bug_key): + return + + client.link_issue("is related to", task_key, bug_key) + logger.debug(" Linked task %s to bug %s", task_key, bug_key) + + +def link_task_to_cves( + client: 
JiraTool, task_key: str, cve_keys: List[str], dry_run: bool = False +) -> None: + """Link a task to all related CVE issues.""" + if dry_run or is_dry_run_key(task_key): + return + + for cve_key in cve_keys: + # Task is caused by CVE: link_issue(type, A, B) creates A "is caused by" B + client.link_issue("is caused by", task_key, cve_key) + logger.debug(" Linked task %s is caused by CVE %s", task_key, cve_key) + + +@dataclass +class VersionIssue: + """Issue info for dependency linking.""" + + key: str + downstream_component: str + + +def link_version_dependencies( + client: JiraTool, + issues_by_version: Dict[str, List[VersionIssue]], + available_versions: List[str], + dry_run: bool = False, +) -> None: + """ + Link issues with 'depends on' relationship: newer depends on older. + + Issues can be CVEs (Vulnerabilities) or created bugs. + For example: 4.17 depends on 4.16 depends on 4.15 + Prioritizes linking to issues with the same downstream component. + Falls back to linking to one issue if no same-component match exists. 
+ """ + # Get versions in order (already sorted) + versions_with_issues = [ + v for v in available_versions if v in issues_by_version] + + if not versions_with_issues: + return + + if dry_run: + logger.info(" [DRY RUN] Dependency chain:") + for i, version in enumerate(versions_with_issues[:-1]): + next_version = versions_with_issues[i + 1] + current_issues = issues_by_version[version] + next_issues = issues_by_version[next_version] + logger.info( + " %d issues in %s -> depends on -> %d issues in %s", + len(current_issues), + version, + len(next_issues), + next_version, + ) + return + + for i, version in enumerate(versions_with_issues[:-1]): + next_version = versions_with_issues[i + 1] + current_issues = issues_by_version[version] + next_issues = issues_by_version[next_version] + + for current_issue in current_issues: + if is_dry_run_key(current_issue.key): + continue + + # Find issues in next version with same downstream component + same_component_issues = [ + ni + for ni in next_issues + if ni.downstream_component == current_issue.downstream_component + and not is_dry_run_key(ni.key) + ] + + if same_component_issues: + # Link to all issues with same component + for next_issue in same_component_issues: + # Older version depends on newer version + # link_issue(type, A, B) creates: A "depends on" B + client.link_issue( + "depends on", current_issue.key, next_issue.key) + logger.info( + " Linked %s (%s) depends on %s (%s) [same component]", + current_issue.key, + version, + next_issue.key, + next_version, + ) + else: + # Fall back to linking to first available issue in next version + fallback_issues = [ + ni for ni in next_issues if not is_dry_run_key(ni.key) + ] + if fallback_issues: + next_issue = fallback_issues[0] + # Older version depends on newer version + # link_issue(type, A, B) creates: A "depends on" B + client.link_issue( + "depends on", current_issue.key, next_issue.key) + logger.info( + " Linked %s (%s) depends on %s (%s) [fallback]", + current_issue.key, 
+ version, + next_issue.key, + next_version, + ) + + +# ============================================================================= +# Main processing +# ============================================================================= + + +def get_downstream_components_for_upstream( + upstream_component: str, repo_to_component: Dict[str, str] +) -> List[str]: + """ + Get all downstream component names that map to a specific upstream + component. + + Args: + upstream_component: The upstream component name + (e.g., "openstack-cinder"). + repo_to_component: Mapping of downstream repo names to upstream + components. + + Returns: + List of downstream component names that map to this upstream component. + """ + return [ + downstream + for downstream, upstream in repo_to_component.items() + if upstream == upstream_component + ] + + +def process_cve_group( + client: JiraTool, + cve_group: CVEGroup, + available_versions: List[str], + repo_to_component: Dict[str, str], + cve_processed_label: str, + dry_run: bool = False, +) -> ProcessedResult: + """ + Process a single CVE group. + + Creates: + 1. Epic in OSASINFRA + 2. Task per version in OSASINFRA (linked to CVEs or created bug) + 3. ON_QA Bug in OCPBUGS only for versions WITHOUT CVEs + 4. Dependency chain between issues (CVE or bug, newer depends on older) + 5. 
Links between tasks and their version's main issue + """ + logger.info("Processing CVE group: %s:%s", + cve_group.component, cve_group.cve_id) + logger.info(" CVE: %s, Component: %s", + cve_group.cve_id, cve_group.component) + + # Batch query for all existing issues (2 queries instead of N) + cache = find_all_existing_issues( + client, cve_group.cve_id, cve_group.component) + + # Find or create Epic + epic_key = find_or_create_epic( + client, cve_group.cve_id, cve_group.component, cache, dry_run + ) + + components = [x["name"] for x in cve_group.components_list] + + # Get downstream components relevant to this upstream component + relevant_downstream = get_downstream_components_for_upstream( + cve_group.component, repo_to_component + ) + + # Query for ALL CVEs with this CVE ID (processed and unprocessed) + # This gives us the complete picture of which versions have CVEs + # This is a read-only query, safe to run in dry-run mode + all_cves_by_version = query_all_cves_for_cve_id( + client, cve_group.cve_id, relevant_downstream + ) + logger.info(" Found CVEs in versions: %s", + list(all_cves_by_version.keys())) + + # Determine version range from all known CVEs + all_cve_versions = set(all_cves_by_version.keys()) + + if not all_cve_versions or not available_versions: + return ProcessedResult( + epic_key=epic_key, + cve_id=cve_group.cve_id, + component=cve_group.component, + tasks=[], + bugs=[], + cves_linked=[], + ) + + # Get min/max CVE versions + min_version = min( + all_cve_versions, key=lambda v: parse_numeric_version(v) or (0, 0) + ) + max_cve_version = max( + all_cve_versions, key=lambda v: parse_numeric_version(v) or (0, 0) + ) + + # Only process versions that have CVEs + cve_versions = get_versions_in_range( + min_version, max_cve_version, available_versions) + logger.info(" Processing CVE versions: %s", cve_versions) + + # Find next version after max CVE for VERIFIED bug + next_version = None + max_idx = available_versions.index(max_cve_version) if 
max_cve_version in available_versions else -1 + if max_idx >= 0 and max_idx + 1 < len(available_versions): + next_version = available_versions[max_idx + 1] + logger.info(" Will create VERIFIED bug for version: %s", next_version) + + all_versions = cve_versions + + tasks = [] + created_bugs = [] + # Track main issue per version (CVE or created bug) for dependency chain + main_issue_by_version = {} + all_cves_linked = [] + + # Process each version + for version in all_versions: + logger.info(" Version %s:", version) + + # Create or get task in OSASINFRA (linked to epic via Epic Link field) + task_key = create_task( + client, + cve_group.cve_id, + cve_group.component, + version, + epic_key, + cache, + dry_run, + ) + if task_key: + tasks.append(task_key) + + # Check if CVEs exist for this version + version_cves = all_cves_by_version.get(version, []) + + if version_cves: + # Sort CVEs by key for consistent ordering (lowest = oldest) + version_cves.sort(key=lambda x: x["key"]) + + # Separate processed and unprocessed for logging + unprocessed_cves = [] + for cve_data in version_cves: + labels = cve_data.get("Labels", []) or [] + if cve_processed_label not in labels: + unprocessed_cves.append(cve_data) + + logger.info( + " %d CVEs (%d new)", len( + version_cves), len(unprocessed_cves) + ) + + # Store all CVEs with downstream component for dependency chain + main_issue_by_version[version] = [ + VersionIssue( + key=cve["key"], + downstream_component=cve.get( + "Downstream Component Name", ""), + ) + for cve in version_cves + ] + + # Link all CVEs to task and add processed label + for cve_data in version_cves: + cve_key = cve_data["key"] + all_cves_linked.append(cve_key) + + # Check if already processed + labels = cve_data.get("Labels", []) or [] + already_processed = cve_processed_label in labels + + if already_processed: + logger.info(" %s (already processed)", cve_key) + else: + # Get affects version for Target Backport Versions + affects_version = cve_data.get("Affects 
Version/s", []) + + if dry_run: + logger.info( + " [DRY RUN] Would link and label: %s", cve_key) + if affects_version: + version_names = [v["name"] for v in affects_version] + logger.info( + " [DRY RUN] Would set Target Backport " + "Versions: %s", version_names) + else: + # Link to task + if task_key: + link_task_to_cves(client, task_key, [cve_key]) + + # Set Target Backport Versions to match Affects Version + if affects_version: + version_names = [v["name"] for v in affects_version] + client.update_issue( + cve_key, + {"Target Backport Versions": version_names} + ) + + # Add processed label + client.add_label(cve_key, cve_processed_label) + logger.info(" Linked: %s", cve_key) + + # Create VERIFIED bug for the next version after max CVE version + if next_version: + logger.info(" Version %s:", next_version) + logger.info(" Creating VERIFIED bug (CVE fixed in earlier version)") + + # Create task for this version + task_key = create_task( + client, + cve_group.cve_id, + cve_group.component, + next_version, + epic_key, + cache, + dry_run, + ) + if task_key: + tasks.append(task_key) + + # Create VERIFIED bug + is_latest = next_version == available_versions[-1] + bug_key = create_verified_bug( + client, + cve_group.cve_id, + cve_group.component, + next_version, + components, + cache, + is_latest, + dry_run, + ) + if bug_key: + created_bugs.append(bug_key) + main_issue_by_version[next_version] = [ + VersionIssue(key=bug_key, downstream_component="") + ] + + # Link task to created bug + if task_key: + link_task_to_bug(client, task_key, bug_key, dry_run) + + # Create dependency chain between main issues (CVEs or bugs) + link_version_dependencies( + client, main_issue_by_version, available_versions, dry_run + ) + + return ProcessedResult( + epic_key=epic_key, + cve_id=cve_group.cve_id, + component=cve_group.component, + tasks=tasks, + bugs=created_bugs, + cves_linked=all_cves_linked, + ) + + +def process_cve_groups( + client: JiraTool, + cve_groups: Dict[str, CVEGroup], + 
available_versions: List[str], + repo_to_component: Dict[str, str], + cve_processed_label: str, + dry_run: bool = False, +) -> List[ProcessedResult]: + """Process all CVE groups.""" + results = [] + + for _, cve_group in cve_groups.items(): + result = process_cve_group( + client, + cve_group, + available_versions, + repo_to_component, + cve_processed_label, + dry_run, + ) + results.append(result) + + return results + + +def print_summary(results: List[ProcessedResult], dry_run: bool = False): + """Print a summary of all processed CVE groups.""" + if not results: + print("\nNo CVE groups were processed") + return + + total_epics = len(results) + total_tasks = sum(len(r.tasks) for r in results) + total_bugs = sum(len(r.bugs) for r in results) + total_cves = sum(len(r.cves_linked) for r in results) + + # Count new vs existing issues + new_tasks = sum(1 for r in results for t in r.tasks if is_dry_run_key(t)) + existing_tasks = total_tasks - new_tasks + new_bugs = sum(1 for r in results for b in r.bugs if is_dry_run_key(b)) + existing_bugs = total_bugs - new_bugs + new_epics = sum(1 for r in results if is_dry_run_key(r.epic_key)) + existing_epics = total_epics - new_epics + + # Print summary header + print("") + print("=" * 70) + if dry_run: + print(" SUMMARY (DRY RUN - no changes made)") + else: + print(" SUMMARY") + print("=" * 70) + + # Totals section + print("") + print(" TOTALS") + print(" " + "-" * 40) + + if dry_run: + print( + f" Epics: {total_epics:3d} " + f"({existing_epics} existing, {new_epics} to create)" + ) + print( + f" Tasks: {total_tasks:3d} " + f"({existing_tasks} existing, {new_tasks} to create)" + ) + print( + f" Bugs: {total_bugs:3d} " + f"({existing_bugs} existing, {new_bugs} to create)" + ) + print(f" CVEs: {total_cves:3d} to link") + else: + print(f" Epics: {total_epics:3d}") + print(f" Tasks: {total_tasks:3d}") + print(f" Bugs: {total_bugs:3d}") + print(f" CVEs: {total_cves:3d} linked") + + # Per-group details + print("") + print(" DETAILS BY 
CVE GROUP") + print(" " + "-" * 40) + + for i, result in enumerate(results): + if i > 0: + print("") + + # Group header + print(f" [{result.cve_id}] {result.component}") + print(f" Epic: {result.epic_key}") + + # Tasks + if result.tasks: + print( + f" Tasks ({len(result.tasks)}): {', '.join(result.tasks)}") + + # Bugs + if result.bugs: + print(f" Bugs ({len(result.bugs)}): {', '.join(result.bugs)}") + + # CVEs + if result.cves_linked: + print( + f" CVEs ({len(result.cves_linked)}): " + f"{', '.join(result.cves_linked)}" + ) + + print("") + print("=" * 70) + + +def setup_logging(verbose: bool = False): + """Configure logging for the application.""" + level = logging.DEBUG if verbose else logging.INFO + logging.basicConfig( + level=level, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + +def main(): + parser = argparse.ArgumentParser( + description="Process CVE Vulnerability issues from Jira" + ) + parser.add_argument( + "--config", + type=Path, + default=DEFAULT_CONFIG_PATH, + help="Path to config file (default: config.yaml)", + ) + parser.add_argument( + "--cve", + dest="cve_filter", + metavar="CVE-ID", + help="Process only a specific CVE (e.g., CVE-2024-1234)", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Show what would be done without making changes", + ) + parser.add_argument( + "-v", "--verbose", action="store_true", help="Enable verbose/debug logging" + ) + args = parser.parse_args() + + setup_logging(args.verbose) + + # Load configuration + config = load_config(args.config) + cve_processed_label = config["cve_processed_label"] + repo_to_component = config["repo_to_component"] + logger.info("Loaded config from %s", args.config) + + # Get list of downstream components to query + downstream_components = list(repo_to_component.keys()) + logger.info("Configured components: %s", downstream_components) + + client = JiraTool() + + logger.info("Fetching available target versions...") + 
all_versions = client.get_field_allowed_values( + "OCPBUGS", "Bug", "Target Version") + available_versions = get_sorted_versions(all_versions) + logger.info("Found %d numeric target versions", len(available_versions)) + logger.debug("Available versions: %s", available_versions) + + # Query Jira for all unprocessed Vulnerabilities + if args.cve_filter: + logger.info("Filtering for CVE: %s", args.cve_filter) + issues = query_vulnerabilities( + client, downstream_components, cve_processed_label, cve_filter=args.cve_filter + ) + + if not issues: + logger.info("No unprocessed Vulnerabilities found") + return + + cve_groups = group_issues_by_cve(issues, repo_to_component) + logger.info("Found %d CVE groups", len(cve_groups)) + + results = process_cve_groups( + client, + cve_groups, + available_versions, + repo_to_component, + cve_processed_label, + dry_run=args.dry_run, + ) + print_summary(results, dry_run=args.dry_run) + + logger.info("Done") + + +if __name__ == "__main__": + main() diff --git a/cve-jira-processing/inspect_issues.py b/cve-jira-processing/inspect_issues.py new file mode 100755 index 0000000..f9bda26 --- /dev/null +++ b/cve-jira-processing/inspect_issues.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python3 +"""Inspect Jira issues - show status and links.""" + +import argparse +import logging +import sys +from pathlib import Path + +from lib.jira_client import JiraTool + +logger = logging.getLogger(__name__) + + +def read_issues_from_file(file_path: Path) -> list[str]: + """Read issue keys from a file (one per line).""" + issues = [] + with open(file_path, "r") as f: + for line in f: + line = line.strip() + # Skip empty lines and comments + if line and not line.startswith("#"): + # Handle comma-separated values on same line + for key in line.split(","): + key = key.strip() + if key: + issues.append(key) + return issues + + +def extract_name(field_value) -> str: + """Extract name from a Jira field value (handles dict or direct value).""" + if field_value is None: + 
return None + if isinstance(field_value, dict): + return field_value.get("name", str(field_value)) + return str(field_value) + + +def inspect_issue(client: JiraTool, issue_key: str) -> dict: + """Get status and links for an issue.""" + try: + issue_data = client.get_jira_issue( + issue_key, field_filter=["Status", "Summary", "Issue Type", "Resolution"] + ) + links = client.get_issue_links(issue_key) + + return { + "key": issue_key, + "summary": issue_data.get("Summary", "N/A"), + "type": extract_name(issue_data.get("Issue Type")) or "N/A", + "status": extract_name(issue_data.get("Status")) or "N/A", + "resolution": extract_name(issue_data.get("Resolution")), + "links": links, + "error": None, + } + except Exception as e: + return {"key": issue_key, "error": str(e)} + + +def format_links(links: list[dict]) -> list[str]: + """Format links for display.""" + formatted = [] + for link in links: + link_type = link.get("type", "unknown") + direction = link.get("direction", "unknown") + linked_issue = link.get("issue_key", "unknown") + + if direction == "inward": + formatted.append(f"← {link_type}: {linked_issue}") + else: + formatted.append(f"→ {link_type}: {linked_issue}") + + return formatted + + +def print_issue(issue_info: dict) -> None: + """Print issue information.""" + key = issue_info["key"] + + if issue_info.get("error"): + print(f"\n{key}: ERROR - {issue_info['error']}") + return + + status = issue_info["status"] + resolution = issue_info["resolution"] + issue_type = issue_info["type"] + summary = issue_info["summary"] + links = issue_info.get("links", []) + + status_str = status + if resolution and resolution != "None": + status_str = f"{status} ({resolution})" + + print(f"\n{key} [{issue_type}] - {status_str}") + print(f" Summary: {summary}") + + if links: + print(" Links:") + for link_str in format_links(links): + print(f" {link_str}") + else: + print(" Links: (none)") + + +def setup_logging(verbose: bool = False): + """Configure logging.""" + level = 
logging.DEBUG if verbose else logging.WARNING + logging.basicConfig( + level=level, + format="%(asctime)s - %(levelname)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + +def main(): + parser = argparse.ArgumentParser( + description="Inspect Jira issues - show status and links" + ) + parser.add_argument( + "issues", + nargs="*", + help="Issue keys to inspect (e.g., OCPBUGS-12345 OSASINFRA-67890)", + ) + parser.add_argument( + "-f", + "--file", + type=Path, + help="File containing issue keys (one per line)", + ) + parser.add_argument( + "-v", "--verbose", action="store_true", help="Enable verbose logging" + ) + args = parser.parse_args() + + setup_logging(args.verbose) + + # Collect issues from arguments and/or file + issues = list(args.issues) if args.issues else [] + + if args.file: + if not args.file.exists(): + print(f"Error: File not found: {args.file}", file=sys.stderr) + sys.exit(1) + issues.extend(read_issues_from_file(args.file)) + + if not issues: + parser.print_help() + print("\nError: No issue keys provided", file=sys.stderr) + sys.exit(1) + + print(f"Inspecting {len(issues)} issue(s)...") + + client = JiraTool() + + for issue_key in issues: + issue_info = inspect_issue(client, issue_key) + print_issue(issue_info) + + print() + + +if __name__ == "__main__": + main() diff --git a/cve-jira-processing/lib/__init__.py b/cve-jira-processing/lib/__init__.py new file mode 100644 index 0000000..24faa97 --- /dev/null +++ b/cve-jira-processing/lib/__init__.py @@ -0,0 +1,6 @@ +"""Jira client library.""" + +from .jira_client import JiraTool +from .jira_formatter import JiraFormatter + +__all__ = ["JiraTool", "JiraFormatter"] diff --git a/cve-jira-processing/lib/jira_client.py b/cve-jira-processing/lib/jira_client.py new file mode 100644 index 0000000..e18eb69 --- /dev/null +++ b/cve-jira-processing/lib/jira_client.py @@ -0,0 +1,442 @@ +"""Jira client for interacting with Jira API.""" + +import logging +import os +from typing import Dict, List, Any + +from 
jira import JIRA, Issue + +from .jira_formatter import JiraFormatter + + +logger = logging.getLogger(__name__) + +DEFAULT_SERVER = "https://issues.redhat.com" + + +class JiraTool: + """Jira API client wrapper.""" + + def __init__(self, api_token: str = None, server: str = None): + """ + Initialize Jira client. + + Args: + api_token: Jira API token. Defaults to JIRA_API_TOKEN env var. + server: Jira server URL. Defaults to JIRA_SERVER env var or + issues.redhat.com. + """ + if api_token is None: + api_token = os.environ.get("JIRA_API_TOKEN") + if not api_token: + raise ValueError("JIRA_API_TOKEN environment variable not set") + + if server is None: + server = os.environ.get("JIRA_SERVER", DEFAULT_SERVER) + + logger.debug("Connecting to Jira server: %s", server) + self.jira_client = JIRA(server=server, token_auth=api_token) + self._current_user = None + self._fields_cache = None # Lazy-loaded field metadata cache + logger.info("Connected to Jira server: %s", server) + + def _get_fields(self) -> List[Dict]: + """Get all fields from Jira (cached).""" + if self._fields_cache is None: + logger.debug("Fetching field metadata from Jira...") + self._fields_cache = self.jira_client.fields() + logger.debug("Cached %d fields", len(self._fields_cache)) + return self._fields_cache + + def get_current_user(self) -> str: + """Get the username of the currently authenticated user.""" + if self._current_user is None: + self._current_user = self.jira_client.current_user() + logger.debug("Current user: %s", self._current_user) + return self._current_user + + def get_fields_name_to_id(self) -> Dict[str, str]: + """Get mapping of field name (lowercase) to jira field id.""" + return {f["name"].lower(): f["id"] for f in self._get_fields()} + + def get_fields_id_to_name(self) -> Dict[str, str]: + """Get mapping of field id to jira field name.""" + return {f["id"]: f["name"] for f in self._get_fields()} + + def get_all_available_fields(self) -> List[str]: + """Get list of all available jira 
fields by their name.""" + return [f["name"] for f in self._get_fields()] + + def get_fields_id_to_types(self) -> Dict[str, str]: + """Get mapping of field id to jira field type.""" + return { + f["id"]: f.get("schema", {}).get("type", "unavailable") + for f in self._get_fields() + } + + def create_jira_issue(self, + issue_fields: Dict, + issue_type: str = "task" + ) -> Issue: + """ + Create a new Jira issue. + + Args: + issue_fields: Dictionary mapping field names to values. + issue_type: Type of issue to create (e.g., 'Task', 'Bug'). + + Returns: + The newly created Jira issue object. + """ + fields_ids_to_types = self.get_fields_id_to_types() + fields_names_to_id = self.get_fields_name_to_id() + issue = {} + + for field, value in issue_fields.items(): + field_id = fields_names_to_id.get(field.lower(), field) + field_type = fields_ids_to_types.get(field_id, "any") + formatter = getattr(JiraFormatter, field_type, JiraFormatter.any) + issue[field_id] = formatter(value) + + issue["issuetype"] = JiraFormatter.issue_type(issue_type.capitalize()) + logger.debug("Creating issue with fields: %s", issue) + new_issue = self.jira_client.create_issue(fields=issue) + logger.info("Created issue: %s", new_issue.key) + return new_issue + + def get_jira_issue( + self, + issue_key: str, + all_fields: bool = False, + field_filter: List[str] = None, + ) -> Dict[str, Any]: + """ + Retrieve details of a Jira issue by its key. + + Args: + issue_key: The key of the Jira issue to retrieve. + all_fields: If True, returns all available fields. + field_filter: List of field names to retrieve. + + Returns: + Dictionary containing the requested fields and their values. 
+ """ + fields = None + + if not all_fields: + if field_filter is None: + field_filter = ["assignee", "status", "description", "summary"] + + name_to_id = self.get_fields_name_to_id() + fields = ",".join([name_to_id.get(x.lower(), x) + for x in field_filter]) + + logger.debug("Fetching issue: %s (fields: %s)", issue_key, fields) + issue = self.jira_client.issue(issue_key, fields=fields) + logger.debug("Fetched issue: %s", issue_key) + return self.issue_to_dict(issue) + + def issue_to_dict(self, issue: Issue) -> Dict[str, Any]: + """Convert a Jira issue to a dictionary with field names as keys.""" + id_to_name = self.get_fields_id_to_name() + return {id_to_name.get(k, k): v for k, v in issue.raw.get( + "fields", {}).items()} + + def transition_issue_status( + self, issue: str, transition: str, resolution: str = None + ) -> None: + """Transition an issue to a new status with optional resolution.""" + logger.debug( + "Transitioning %s to %s (resolution: %s)", + issue, transition, resolution + ) + if resolution: + self.jira_client.transition_issue( + issue, transition=transition, resolution={"name": resolution} + ) + else: + self.jira_client.transition_issue(issue, transition=transition) + logger.info("Transitioned %s to %s", issue, transition) + + def get_issue_link_types(self) -> List[Dict[str, str]]: + """ + Get all available issue link types. + + Returns: + List of dicts with 'name', 'inward', and 'outward' keys. 
+ """ + link_types = self.jira_client.issue_link_types() + return [ + { + "name": lt.name, + "inward": lt.inward, + "outward": lt.outward, + } + for lt in link_types + ] + + def link_issue(self, link_type: str, issue_a: str, issue_b: str) -> None: + """Create a link between two issues.""" + logger.debug("Linking %s -> %s (%s)", issue_a, issue_b, link_type) + self.jira_client.create_issue_link( + type=link_type, inwardIssue=issue_a, outwardIssue=issue_b + ) + logger.info("Linked %s -> %s (%s)", issue_a, issue_b, link_type) + + def get_issue_links( + self, issue_key: str, link_type: str = None + ) -> List[Dict[str, Any]]: + """ + Get links for an issue. + + Args: + issue_key: The issue to get links for. + link_type: Optional filter by link type name. + + Returns: + List of dicts with 'id', 'type', 'inward_desc', 'outward_desc', + 'direction', and 'issue_key' keys. + """ + logger.debug("Getting links for %s", issue_key) + issue = self.jira_client.issue(issue_key, fields="issuelinks") + links = [] + + for link in issue.fields.issuelinks: + link_type_name = link.type.name + if link_type and link_type_name != link_type: + continue + + link_info = { + "id": link.id, + "type": link_type_name, + "inward_desc": getattr(link.type, "inward", ""), + "outward_desc": getattr(link.type, "outward", ""), + } + + if hasattr(link, "inwardIssue"): + link_info["direction"] = "inward" + link_info["issue_key"] = link.inwardIssue.key + links.append(link_info) + elif hasattr(link, "outwardIssue"): + link_info["direction"] = "outward" + link_info["issue_key"] = link.outwardIssue.key + links.append(link_info) + + logger.debug("Found %d links for %s", len(links), issue_key) + return links + + def delete_issue_link(self, link_id: str) -> None: + """ + Delete an issue link by its ID. + + Args: + link_id: The ID of the link to delete. 
+ """ + logger.debug("Deleting link %s", link_id) + self.jira_client.delete_issue_link(link_id) + logger.info("Deleted link %s", link_id) + + def add_remote_link( + self, issue_key: str, url: str, title: str = None + ) -> None: + """ + Add a remote/external link to an issue. + + Args: + issue_key: The issue to add the link to. + url: The URL of the external resource. + title: Optional title for the link (defaults to URL). + """ + if title is None: + title = url + logger.debug("Adding remote link to %s: %s", issue_key, url) + self.jira_client.add_remote_link( + issue_key, + destination={"url": url, "title": title}, + ) + logger.info("Added remote link to %s: %s", issue_key, title) + + def add_issue_comment(self, issue: str, comment: str) -> None: + """Add a comment to an issue.""" + logger.debug("Adding comment to %s", issue) + self.jira_client.add_comment(issue, comment) + logger.debug("Added comment to %s", issue) + + def search_issues( + self, jql: str, fields: List[str] = None, max_results: int = 50 + ) -> List[Issue]: + """ + Search for issues using JQL. + + Args: + jql: JQL query string. + fields: List of field names to retrieve. + max_results: Maximum number of results to return. + + Returns: + List of matching Jira issues. + """ + logger.debug("Searching issues with JQL: %s", jql) + field_ids = None + if fields: + name_to_id = self.get_fields_name_to_id() + field_ids = [name_to_id.get(f.lower(), f) for f in fields] + + issues = self.jira_client.search_issues( + jql, fields=field_ids, maxResults=max_results + ) + logger.debug("Found %d issues", len(issues)) + return issues + + def search_issues_as_dicts( + self, jql: str, fields: List[str], max_results: int = 1000 + ) -> List[Dict[str, Any]]: + """ + Search for issues and return as dictionaries with field names. + + This is more efficient than search_issues + get_jira_issue loop + as it fetches all data in a single request. + + Args: + jql: JQL query string. + fields: List of field names to retrieve. 
+ max_results: Maximum number of results to return. + + Returns: + List of dicts with 'key' and requested fields. + """ + issues = self.search_issues( + jql, fields=fields, max_results=max_results) + result = [] + for issue in issues: + issue_dict = self.issue_to_dict(issue) + issue_dict["key"] = issue.key + result.append(issue_dict) + return result + + def add_label(self, issue_key: str, label: str) -> None: + """ + Add a label to an issue. + + Args: + issue_key: The issue to update. + label: The label to add. + """ + logger.debug("Adding label '%s' to %s", label, issue_key) + issue = self.jira_client.issue(issue_key) + existing_labels = issue.fields.labels or [] + if label not in existing_labels: + issue.update(fields={"labels": existing_labels + [label]}) + logger.info("Added label '%s' to %s", label, issue_key) + else: + logger.debug("Label '%s' already exists on %s", label, issue_key) + + def remove_label(self, issue_key: str, label: str) -> None: + """ + Remove a label from an issue. + + Args: + issue_key: The issue to update. + label: The label to remove. + """ + logger.debug("Removing label '%s' from %s", label, issue_key) + issue = self.jira_client.issue(issue_key) + existing_labels = issue.fields.labels or [] + if label in existing_labels: + new_labels = [l for l in existing_labels if l != label] + issue.update(fields={"labels": new_labels}) + logger.info("Removed label '%s' from %s", label, issue_key) + else: + logger.debug("Label '%s' not found on %s", label, issue_key) + + def update_issue(self, issue_key: str, fields: Dict[str, Any]) -> None: + """ + Update fields on an issue. + + Args: + issue_key: The issue to update. + fields: Dictionary of field names to values. 
+ """ + if not fields: + return + + logger.debug("Updating %s with fields: %s", + issue_key, list(fields.keys())) + issue = self.jira_client.issue(issue_key) + + # Format fields using the same logic as create_jira_issue + fields_ids_to_types = self.get_fields_id_to_types() + fields_names_to_id = self.get_fields_name_to_id() + formatted = {} + + for field, value in fields.items(): + field_id = fields_names_to_id.get(field.lower(), field) + field_type = fields_ids_to_types.get(field_id, "any") + formatter = getattr(JiraFormatter, field_type, JiraFormatter.any) + formatted[field_id] = formatter(value) + + issue.update(fields=formatted) + logger.info("Updated %s: %s", issue_key, list(fields.keys())) + + def get_field_allowed_values( + self, project_key: str, issue_type_name: str, field_name: str + ) -> List[str]: + """ + Get allowed values for a field in a project/issue type context. + + Args: + project_key: The project key (e.g., 'OCPBUGS'). + issue_type_name: The issue type name (e.g., 'Bug'). + field_name: The field name (e.g., 'Target Version'). + + Returns: + List of allowed value names for the field. 
+ """ + logger.debug( + "Fetching allowed values for field '%s' in %s/%s", + field_name, + project_key, + issue_type_name, + ) + + field_name_lower = field_name.lower() + name_to_id = self.get_fields_name_to_id() + field_id = name_to_id.get(field_name_lower) + + if not field_id: + logger.warning("Field '%s' not found", field_name) + return [] + + # Get issue type ID from project + issue_types = self.jira_client.project_issue_types(project_key) + issue_type_id = None + for it in issue_types: + if it.name.lower() == issue_type_name.lower(): + issue_type_id = it.id + break + + if not issue_type_id: + logger.warning( + "Issue type '%s' not found in project %s", + issue_type_name, project_key + ) + return [] + + # Get fields for this issue type + fields_meta = self.jira_client.project_issue_fields( + project_key, issue_type_id) + + allowed_values = [] + for field_meta in fields_meta: + if field_meta.fieldId == field_id: + for value in field_meta.allowedValues or []: + name = getattr(value, "name", None) or getattr( + value, "value", None) + if name: + allowed_values.append(name) + break + + logger.debug( + "Found %d allowed values for '%s'", len(allowed_values), field_name + ) + return allowed_values diff --git a/cve-jira-processing/lib/jira_formatter.py b/cve-jira-processing/lib/jira_formatter.py new file mode 100644 index 0000000..8bdc993 --- /dev/null +++ b/cve-jira-processing/lib/jira_formatter.py @@ -0,0 +1,75 @@ +"""Jira field formatting utilities for API requests.""" + + +class JiraFormatter: + """Utility class for formatting field values for Jira API requests.""" + + @staticmethod + def options(value): + """Format a value for option/select type fields.""" + return {"value": value} + + @staticmethod + def option(value): + """Format a value for single option/select type fields.""" + return {"value": value} + + @staticmethod + def user(value): + """Format a value for user type fields.""" + return {"name": value} + + @staticmethod + def array(value): + """Format a 
list of values for array type fields.""" + return [{"name": v} for v in value] + + @staticmethod + def number(value): + """Format a value for numeric type fields.""" + return value + + @staticmethod + def string(value): + """Format a value for string type fields.""" + return value + + @staticmethod + def unavailable(value): + """Format a value for unavailable/unknown type fields.""" + return value + + @staticmethod + def any(value): + """Format a value for generic/any type fields.""" + return value + + @staticmethod + def project(value): + """Format a value for project type fields.""" + return {"key": value} + + @staticmethod + def version(value): + """Format a value for version type fields.""" + return {"name": value} + + @staticmethod + def datetime(value): + """Format a value for datetime type fields (expects ISO format).""" + return value + + @staticmethod + def issue_type(value): + """Format a value for issue type fields.""" + return {"name": value} + + @staticmethod + def securitylevel(value): + """Format a value for security level fields.""" + return {"name": value} + + @staticmethod + def security(value): + """Format a value for security level fields (alias).""" + return {"name": value} diff --git a/cve-jira-processing/manage_task_cves.py b/cve-jira-processing/manage_task_cves.py new file mode 100644 index 0000000..68c29c3 --- /dev/null +++ b/cve-jira-processing/manage_task_cves.py @@ -0,0 +1,460 @@ +#!/usr/bin/env python3 +"""Close all CVE Vulnerabilities linked to a task.""" + +import argparse +import logging +import sys + +from lib.jira_client import JiraTool + +logger = logging.getLogger(__name__) + +DEFAULT_RESOLUTION = "Done" +DEFAULT_STATUS = "Closed" + + +def get_linked_issues( + client: JiraTool, + task_key: str, + issue_type_filter: str = None, +) -> list[dict]: + """ + Get issues linked to a task. + + Finds issues linked via "is caused by" (Causality) or + "is related to" (Relates) relationships. + + Args: + client: Jira client. 
+ task_key: The task issue key. + issue_type_filter: Filter by issue type ("Vulnerability", "Bug", or + None for all). + + Returns: + List of dicts with 'key', 'summary', 'status', 'resolution', 'type' + for each linked issue. + """ + issue_keys = [] + + # Get "is caused by" links (Causality) - used for CVEs + causality_links = client.get_issue_links(task_key, link_type="Causality") + for link in causality_links: + if link.get("direction") == "inward": + issue_keys.append(link.get("issue_key")) + + # Get "is related to" links (Related) - used for bugs + relates_links = client.get_issue_links(task_key, link_type="Related") + for link in relates_links: + # Include both directions for "is related to" + issue_keys.append(link.get("issue_key")) + + if not issue_keys: + return [] + + # Remove duplicates while preserving order + issue_keys = list(dict.fromkeys(issue_keys)) + + # Fetch details for all issues + issues = [] + for key in issue_keys: + try: + issue_data = client.get_jira_issue( + key, field_filter=["Summary", "Status", + "Resolution", "Issue Type"] + ) + issue_type = issue_data.get("Issue Type", {}) + type_name = issue_type.get("name", "") if issue_type else "" + + # Filter by issue type if specified + if issue_type_filter and type_name != issue_type_filter: + continue + + status = issue_data.get("Status", {}) + resolution = issue_data.get("Resolution", {}) + issues.append( + { + "key": key, + "summary": issue_data.get("Summary", "N/A"), + "status": status.get("name", "N/A") if status else "N/A", + "resolution": resolution.get("name") if resolution else None, + "type": type_name, + } + ) + except Exception as e: + logger.warning("Failed to fetch %s: %s", key, e) + + return issues + + +def get_linked_cves(client: JiraTool, task_key: str) -> list[dict]: + """Get all CVE Vulnerabilities linked to a task.""" + return get_linked_issues(client, task_key, issue_type_filter="Vulnerability") + + +def update_or_transition_cve( + client: JiraTool, + cve_key: str, + 
comment: str = None, + status: str = None, + resolution: str = None, + release_note_text: str = None, + release_note_type: str = None, + release_note_status: str = None, + patch_url: str = None, + patch_title: str = None, + no_transition: bool = False, + dry_run: bool = False, +) -> bool: + """ + Update fields and/or transition a CVE to a new status. + + Args: + client: Jira client. + cve_key: The CVE issue key. + comment: Comment to add. Optional. + status: Target status (e.g., "Closed", "ON_QA"). Required unless + no_transition. + resolution: Resolution to use (e.g., "Done", "Fixed"). Optional. + release_note_text: Release note text to set. Optional. + release_note_type: Release note type to set. Optional. + release_note_status: Release note status to set. Optional. + patch_url: URL to external patch (e.g., GitHub PR). Optional. + patch_title: Title for the patch link. Optional. + no_transition: If True, only update fields without transitioning. + dry_run: If True, don't make changes. + + Returns: + True if successful, False otherwise. 
+ """ + if dry_run: + if release_note_text: + logger.info( + " [DRY RUN] Would set Release Note Text: %s", release_note_text + ) + if release_note_type: + logger.info( + " [DRY RUN] Would set Release Note Type: %s", release_note_type + ) + if release_note_status: + logger.info( + " [DRY RUN] Would set Release Note Status: %s", release_note_status + ) + if patch_url: + logger.info( + " [DRY RUN] Would add patch link: %s", patch_url + ) + if comment: + logger.info(" [DRY RUN] Would add comment: %s", comment) + if not no_transition and status: + # Resolution only applies when transitioning to Closed + effective_resolution = resolution if status.upper() == "CLOSED" else None + res_str = ( + f" with resolution '{resolution}'" + if effective_resolution else "" + ) + logger.info( + " [DRY RUN] Would transition %s to %s%s", cve_key, status, res_str + ) + return True + + try: + # Set release note fields + release_fields = {} + if release_note_text: + release_fields["Release Note Text"] = release_note_text + if release_note_type: + release_fields["Release Note Type"] = release_note_type + if release_note_status: + release_fields["Release Note Status"] = release_note_status + + if release_fields: + client.update_issue(cve_key, release_fields) + logger.info(" Updated release note fields on %s", cve_key) + + # Add patch link if provided + if patch_url: + client.add_remote_link(cve_key, patch_url, patch_title) + logger.info(" Added patch link to %s", cve_key) + + # Add comment if provided + if comment: + client.add_issue_comment(cve_key, comment) + logger.info(" Added comment to %s", cve_key) + + # Transition to target status (unless no_transition) + if not no_transition and status: + # Resolution only applies when transitioning to Closed + effective_resolution = resolution if status.upper() == "CLOSED" else None + client.transition_issue_status( + cve_key, status, effective_resolution) + res_str = ( + f" with resolution '{resolution}'" + if effective_resolution else "" + ) + 
def setup_logging(verbose: bool = False):
    """Configure root logging at INFO, or DEBUG when *verbose* is True."""
    level = logging.DEBUG if verbose else logging.INFO
    logging.basicConfig(
        level=level,
        format="%(asctime)s - %(levelname)s - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )


def main():
    """Entry point: update and/or transition the CVEs linked to a task.

    Parses CLI arguments, looks up the Vulnerabilities (or Bugs, with
    --bugs-only) linked to the given task, updates/transitions the open
    ones, and optionally transitions the parent task. Exits non-zero when
    any update fails.
    """
    parser = argparse.ArgumentParser(
        description="Close all CVE Vulnerabilities linked to a task"
    )
    parser.add_argument(
        "task_key", help="The task issue key (e.g., OSASINFRA-12345)")
    parser.add_argument(
        "--comment",
        "-c",
        default=None,
        # BUGFIX: was help=f"Comment to add (default when closing)" — an
        # f-string with no placeholders, and misleading: there is no
        # default comment (the default is None / no comment).
        help="Comment to add to each issue (optional)",
    )
    parser.add_argument(
        "--status",
        "-s",
        default=DEFAULT_STATUS,
        help=f"Target status (default: '{DEFAULT_STATUS}')",
    )
    parser.add_argument(
        "--resolution",
        "-r",
        default=DEFAULT_RESOLUTION,
        help=f"Resolution to use (default: '{DEFAULT_RESOLUTION}')",
    )
    parser.add_argument(
        "--release-note-text",
        help="Release note text to set on CVEs",
    )
    parser.add_argument(
        "--release-note-type",
        nargs="?",
        const="CVE - Common Vulnerabilities and Exposures",
        help="Release note type (default: 'CVE - Common Vulnerabilities and Exposures' if flag used without value)",
    )
    parser.add_argument(
        "--release-note-status",
        nargs="?",
        const="Proposed",
        help="Release note status (default: 'Proposed' if flag used without value)",
    )
    parser.add_argument(
        "--patch-url",
        help="URL to external patch (e.g., GitHub PR/commit)",
    )
    parser.add_argument(
        "--patch-title",
        help="Title for the patch link (default: URL)",
    )
    parser.add_argument(
        "--task-status",
        help="Transition parent task to this status (e.g., POST, Closed)",
    )
    parser.add_argument(
        "--task-resolution",
        default="Done",
        help="Resolution for parent task when closing (default: Done)",
    )
    parser.add_argument(
        "--close-task",
        action="store_true",
        help="Shortcut for --task-status Closed --task-resolution Done",
    )
    parser.add_argument(
        "--no-transition",
        action="store_true",
        help="Only update fields (release notes, patch link) without transitioning",
    )
    parser.add_argument(
        "--bugs-only",
        action="store_true",
        help="Only process Bug issues (not Vulnerabilities)",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Show what would be done without making changes",
    )
    parser.add_argument(
        "-v", "--verbose", action="store_true", help="Enable verbose logging"
    )
    args = parser.parse_args()

    setup_logging(args.verbose)

    client = JiraTool()

    # Determine issue type filter
    if args.bugs_only:
        issue_type_filter = "Bug"
        issue_type_label = "Bugs"
    else:
        issue_type_filter = "Vulnerability"
        issue_type_label = "CVEs"

    logger.info("Finding %s linked to %s...", issue_type_label, args.task_key)
    issues = get_linked_issues(client, args.task_key, issue_type_filter)

    if not issues:
        logger.info("No %s found linked to %s",
                    issue_type_label, args.task_key)
        return

    # Filter to only open issues
    open_cves = [c for c in issues if c["status"] != "Closed"]
    closed_cves = [c for c in issues if c["status"] == "Closed"]

    logger.info(
        "Found %d %s linked to %s (%d open, %d already closed)",
        len(issues),
        issue_type_label,
        args.task_key,
        len(open_cves),
        len(closed_cves),
    )

    if closed_cves:
        logger.info("")
        logger.info("Already closed:")
        for cve in closed_cves:
            logger.info(
                " %s [%s] - %s", cve["key"], cve["resolution"] or "N/A", cve["summary"]
            )

    if not open_cves:
        logger.info("No open %s to process", issue_type_label)
        return

    logger.info("")
    if args.no_transition:
        logger.info("%s to update:", issue_type_label)
    else:
        logger.info("%s to transition:", issue_type_label)
    for cve in open_cves:
        logger.info(" %s [%s] - %s", cve["key"],
                    cve["status"], cve["summary"])

    logger.info("")
    if args.no_transition:
        if args.dry_run:
            logger.info("[DRY RUN] Would update %d %s",
                        len(open_cves), issue_type_label)
        else:
            logger.info("Updating %d %s...", len(open_cves), issue_type_label)
    else:
        if args.dry_run:
            logger.info(
                "[DRY RUN] Would transition %d %s to %s:",
                len(open_cves), issue_type_label, args.status
            )
            logger.info(" Resolution: %s", args.resolution)
            if args.comment:
                logger.info(" Comment: %s", args.comment)
        else:
            logger.info(
                "Transitioning %d %s to %s...",
                len(open_cves), issue_type_label, args.status
            )

    success = 0
    failed = 0

    for cve in open_cves:
        if update_or_transition_cve(
            client,
            cve["key"],
            comment=args.comment,
            status=args.status,
            resolution=args.resolution,
            release_note_text=args.release_note_text,
            release_note_type=args.release_note_type,
            release_note_status=args.release_note_status,
            patch_url=args.patch_url,
            patch_title=args.patch_title,
            no_transition=args.no_transition,
            dry_run=args.dry_run,
        ):
            success += 1
        else:
            failed += 1

    logger.info("")
    if args.no_transition:
        if args.dry_run:
            logger.info("[DRY RUN] Would have updated %d %s",
                        success, issue_type_label)
        else:
            logger.info("Done: %d updated, %d failed", success, failed)
    else:
        if args.dry_run:
            logger.info(
                "[DRY RUN] Would have transitioned %d %s", success, issue_type_label
            )
        else:
            logger.info("Done: %d transitioned, %d failed", success, failed)

    # Transition the parent task if requested
    task_status = args.task_status
    if args.close_task and not task_status:
        task_status = "Closed"

    if task_status:
        logger.info("")
        # Resolution only applies when closing
        task_resolution = (
            args.task_resolution if task_status.upper() == "CLOSED" else None
        )
        res_str = f" (resolution={task_resolution})" if task_resolution else ""

        if args.dry_run:
            if args.patch_url:
                logger.info(
                    "[DRY RUN] Would add patch link to task %s: %s",
                    args.task_key,
                    args.patch_url,
                )
            logger.info(
                "[DRY RUN] Would transition task %s to %s%s",
                args.task_key,
                task_status,
                res_str,
            )
        else:
            try:
                # Add patch link to task if provided
                if args.patch_url:
                    client.add_remote_link(
                        args.task_key, args.patch_url, args.patch_title
                    )
                    logger.info("Added patch link to task %s", args.task_key)

                client.transition_issue_status(
                    args.task_key, task_status, task_resolution
                )
                logger.info(
                    "Transitioned task %s to %s%s",
                    args.task_key, task_status, res_str
                )
            except Exception as e:
                logger.error(
                    "Failed to transition task %s: %s", args.task_key, e
                )
                failed += 1

    if failed > 0:
        sys.exit(1)
if __name__ == "__main__":
    main()


#!/usr/bin/env python3
"""Remove the processed label from CVEs for configured components."""

import argparse
import logging
from pathlib import Path

import yaml

from lib.jira_client import JiraTool

logger = logging.getLogger(__name__)

# Default config file location (same directory as script)
DEFAULT_CONFIG_PATH = Path(__file__).parent / "config.yaml"


def load_config(config_path: Path) -> dict:
    """Load and validate the YAML configuration.

    Args:
        config_path: Path to the YAML config file.

    Returns:
        The parsed configuration dict.

    Raises:
        FileNotFoundError: If the config file does not exist.
        ValueError: If a required config key is missing.
    """
    if not config_path.exists():
        raise FileNotFoundError(f"Config file not found: {config_path}")

    with open(config_path, "r") as f:
        config = yaml.safe_load(f)

    # Both keys are mandatory for the processing scripts to work.
    for key in ("cve_processed_label", "repo_to_component"):
        if key not in config:
            raise ValueError(f"Missing required config key: {key}")

    return config
def read_issues_from_file(file_path: Path) -> list[str]:
    """Read issue keys from a file.

    One key per line; blank lines and '#' comment lines are skipped, and a
    single line may carry several comma-separated keys.

    Args:
        file_path: Path to the file of issue keys.

    Returns:
        The issue keys, in file order.
    """
    keys: list[str] = []
    with open(file_path, "r") as f:
        for raw in f:
            raw = raw.strip()
            # Skip empty lines and comments
            if not raw or raw.startswith("#"):
                continue
            # Handle comma-separated values on the same line
            keys.extend(
                part.strip() for part in raw.split(",") if part.strip()
            )
    return keys


# NOTE: annotations are stringified ("JiraTool", "str | None") so the
# helpers do not require the name to be resolved at definition time; the
# bare `cve_filter: str = None` was also an implicit Optional (PEP 484).
def query_processed_cves(
    client: "JiraTool",
    downstream_components: list[str],
    cve_processed_label: str,
    cve_filter: "str | None" = None,
) -> list[str]:
    """Query Jira for all CVEs carrying the processed label.

    Args:
        client: Jira client.
        downstream_components: Component names to match against the
            "Downstream Component Name" field.
        cve_processed_label: Label marking already-processed CVEs.
        cve_filter: Optional CVE ID to restrict the query to.

    Returns:
        The matching issue keys.
    """
    component_conditions = " OR ".join(
        f'"Downstream Component Name" ~ "{c}"' for c in downstream_components
    )

    jql = (
        f"project = OCPBUGS AND type = Vulnerability "
        f"AND ({component_conditions}) "
        f'AND labels = "{cve_processed_label}"'
    )

    if cve_filter:
        jql += f' AND "CVE ID" ~ "{cve_filter}"'

    logger.info("Querying for processed CVEs...")
    logger.debug("JQL: %s", jql)

    issues = client.search_issues(jql, fields=["key"], max_results=1000)
    return [issue.key for issue in issues]


def remove_label(
    client: "JiraTool", issue_key: str, label: str, dry_run: bool = False
) -> bool:
    """Remove *label* from *issue_key*; best effort, returns success."""
    if dry_run:
        logger.info(" [DRY RUN] Would remove label from %s", issue_key)
        return True

    try:
        client.remove_label(issue_key, label)
        logger.info(" Removed label from %s", issue_key)
        return True
    except Exception as e:
        logger.error(" Failed to remove label from %s: %s", issue_key, e)
        return False


# Link types/descriptions to remove when --remove-links is specified.
# We check the type name, the inward description, and the outward
# description, since servers vary in how they report link types.
LINK_TYPES_TO_REMOVE = {
    "Dependency",          # type name (may vary)
    "Duplicate",           # type name
    "Blocks",              # type name
    "depends on",          # outward description
    "is depended on by",   # inward description
    "duplicates",          # outward description
    "is duplicated by",    # inward description
    "blocks",              # outward description
    "is blocked by",       # inward description
}
def should_remove_link(link: dict) -> bool:
    """Return True when the link matches a removable type or description."""
    candidates = (
        link["type"],
        link.get("inward_desc", ""),
        link.get("outward_desc", ""),
    )
    return any(c in LINK_TYPES_TO_REMOVE for c in candidates)


def remove_links(
    client: "JiraTool", issue_key: str, dry_run: bool = False
) -> int:
    """
    Remove dependency and duplicate links from an issue.

    Returns the number of links removed (or that would be removed when
    dry_run is set).
    """
    try:
        links = client.get_issue_links(issue_key)
    except Exception as e:
        logger.error(" Failed to get links for %s: %s", issue_key, e)
        return 0

    removed = 0
    for link in links:
        if not should_remove_link(link):
            continue
        if dry_run:
            logger.info(
                " [DRY RUN] Would remove link: %s %s %s",
                issue_key,
                link["type"],
                link["issue_key"],
            )
            removed += 1
            continue
        try:
            client.delete_issue_link(link["id"])
        except Exception as e:
            logger.error(
                " Failed to remove link %s: %s", link["id"], e
            )
            continue
        logger.info(
            " Removed link: %s %s %s",
            issue_key,
            link["type"],
            link["issue_key"],
        )
        removed += 1

    return removed


def transition_to_assigned(
    client: "JiraTool", issue_key: str, dry_run: bool = False
) -> bool:
    """Move an issue back to ASSIGNED; returns True on success."""
    if dry_run:
        logger.info(" [DRY RUN] Would transition %s to ASSIGNED", issue_key)
        return True

    try:
        client.transition_issue_status(issue_key, "ASSIGNED")
    except Exception as e:
        logger.error(" Failed to transition %s: %s", issue_key, e)
        return False
    logger.info(" Transitioned %s to ASSIGNED", issue_key)
    return True


def setup_logging(verbose: bool = False):
    """Configure root logging at INFO, or DEBUG when *verbose* is True."""
    logging.basicConfig(
        level=logging.DEBUG if verbose else logging.INFO,
        format="%(asctime)s - %(levelname)s - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )


def main():
    """Entry point: strip the processed label (and optionally links/status)
    from the selected CVE issues."""
    parser = argparse.ArgumentParser(
        description="Remove processed label from CVEs for configured components"
    )
    parser.add_argument(
        "--config",
        type=Path,
        default=DEFAULT_CONFIG_PATH,
        help="Path to config file (default: config.yaml)",
    )
    parser.add_argument(
        "--cve",
        dest="cve_filter",
        metavar="CVE-ID",
        help="Process only a specific CVE (e.g., CVE-2024-1234)",
    )
    parser.add_argument(
        "-f",
        "--file",
        type=Path,
        dest="issues_file",
        help="File containing issue keys (one per line)",
    )
    parser.add_argument(
        "--remove-links",
        action="store_true",
        help="Also remove dependency and duplicate links from issues",
    )
    parser.add_argument(
        "--reassign",
        action="store_true",
        help="Transition issues back to ASSIGNED status",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Show what would be done without making changes",
    )
    parser.add_argument(
        "-v", "--verbose", action="store_true", help="Enable verbose logging"
    )
    args = parser.parse_args()

    setup_logging(args.verbose)

    cfg = load_config(args.config)
    cve_processed_label = cfg["cve_processed_label"]
    downstream_components = list(cfg["repo_to_component"].keys())

    logger.info("Loaded config from %s", args.config)
    logger.info("Label to remove: %s", cve_processed_label)

    client = JiraTool()

    # Issue keys come either from an explicit file or from a Jira query.
    if args.issues_file:
        if not args.issues_file.exists():
            logger.error("File not found: %s", args.issues_file)
            return
        issues = read_issues_from_file(args.issues_file)
        logger.info("Read %d issues from %s", len(issues), args.issues_file)
    else:
        logger.info("Components: %s", downstream_components)
        if args.cve_filter:
            logger.info("Filtering for CVE: %s", args.cve_filter)
        issues = query_processed_cves(
            client, downstream_components, cve_processed_label, args.cve_filter
        )

    if not issues:
        logger.info("No issues found")
        return

    logger.info("Found %d issues to process", len(issues))

    if args.dry_run:
        logger.info("[DRY RUN] Would process:")
        for issue in issues:
            logger.info(" %s", issue)

    label_success = 0
    label_failed = 0
    links_removed = 0
    transition_success = 0
    transition_failed = 0

    for issue in issues:
        logger.info("Processing %s:", issue)

        # Always strip the processed label.
        if remove_label(client, issue, cve_processed_label, args.dry_run):
            label_success += 1
        else:
            label_failed += 1

        # Optionally strip dependency/duplicate links.
        if args.remove_links:
            links_removed += remove_links(client, issue, args.dry_run)

        # Optionally move the issue back to ASSIGNED.
        if args.reassign:
            if transition_to_assigned(client, issue, args.dry_run):
                transition_success += 1
            else:
                transition_failed += 1

    logger.info("")
    if args.dry_run:
        logger.info("[DRY RUN] Summary:")
        logger.info(" Labels to remove: %d", label_success)
        if args.remove_links:
            logger.info(" Links to remove: %d", links_removed)
        if args.reassign:
            logger.info(" Issues to reassign: %d", transition_success)
    else:
        logger.info("Done:")
        logger.info(" Labels removed: %d succeeded, %d failed",
                    label_success, label_failed)
        if args.remove_links:
            logger.info(" Links removed: %d", links_removed)
        if args.reassign:
            logger.info(
                " Transitions: %d succeeded, %d failed",
                transition_success, transition_failed
            )


if __name__ == "__main__":
    main()