Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 40 additions & 0 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,9 +47,39 @@ def inject_global_vars():
}

def get_slack_webhook_url() -> str:
    """Look up the Slack incoming-webhook URL configured via the environment.

    Reads ``SLACK_WEBHOOK_URL`` and strips any surrounding whitespace. When
    the variable is unset, an empty string comes back — callers interpret
    that as "Slack notifications disabled" and should return early instead
    of attempting a POST to an empty URL.
    """
    raw_value = os.environ.get('SLACK_WEBHOOK_URL', '')
    return raw_value.strip()

def build_share_url(result_id: str, req) -> str:
"""Build a shareable URL for a scan result using the best available origin signal.

Tries three sources in descending priority to construct the base URL:

1. **Referer header** — preserves the exact scheme, host, and path prefix of
the page that triggered the request. Handles sub-path deployments and
reverse proxies correctly because it reflects what the browser actually saw.
2. **Origin header** — provides scheme + host without a path. Used when Referer
is absent (e.g. cross-origin POSTs with ``Referrer-Policy: no-referrer``).
3. **``req.host_url``** — Flask's own derived host URL as a last resort.

If all three are absent, or ``req`` is ``None`` (e.g. in offline/test contexts),
returns ``result_id`` bare — no base URL can be inferred.

The query string appended is always ``?scan_id=<result_id>``.

Args:
result_id: Unique identifier of the scan result to share.
req: Flask ``Request`` object, or ``None`` in testing / offline contexts.

Returns:
Full URL ending with ``?scan_id=<result_id>``, or just ``result_id``
when no origin can be determined.
"""
referer = req.headers.get('Referer') if req else None
if referer:
parsed = urlparse(referer)
Expand All @@ -68,6 +98,16 @@ def build_share_url(result_id: str, req) -> str:
return result_id

def send_slack_notification(message: str) -> None:
"""Post a plain-text message to Slack via the configured incoming webhook.

Silently no-ops when ``SLACK_WEBHOOK_URL`` is unset or empty — callers do
not need to guard against a missing webhook. HTTP errors (4xx/5xx) and
network exceptions are logged to stdout but do **not** propagate; a failed
Slack notification is never allowed to abort a scan in progress.

Args:
message: Plain text to send as the Slack message body.
"""
webhook_url = get_slack_webhook_url()
if not webhook_url:
return
Expand Down
14 changes: 10 additions & 4 deletions scanner/docker_scout_scanner.py
Original file line number Diff line number Diff line change
Expand Up @@ -278,8 +278,9 @@ def run_docker_scout_scan(directory_path: str) -> Tuple[List[Dict[str, Any]], Li
k8s_files = find_kubernetes_files(directory_path)

if not compose_files and not k8s_files:
print("[i] No Docker Compose or Kubernetes files found — container scan skipped.")
return findings, extra_recommendations, False

print(f"Found {len(compose_files)} Docker Compose file(s) and {len(k8s_files)} Kubernetes file(s) to scan")

# Collect ALL images from ALL files first
Expand Down Expand Up @@ -524,13 +525,15 @@ def parse_docker_scout_output(scout_data: Dict[str, Any], image: str, compose_fi

# Group by CVE ID to avoid duplicates
vuln_map = {}

negligible_count = 0

for vuln in vulnerabilities:
cve_id = vuln.get('id', vuln.get('cve', 'UNKNOWN'))
severity = vuln.get('severity', 'Unknown')

# Skip Negligible severity vulnerabilities
if severity.lower() == 'negligible':
negligible_count += 1
continue

# Docker Scout packages affected by this CVE
Expand Down Expand Up @@ -567,7 +570,10 @@ def parse_docker_scout_output(scout_data: Dict[str, Any], image: str, compose_fi
data['count']
)
findings.append(finding)


if negligible_count > 0:
print(f"[i] Filtered {negligible_count} Negligible-severity CVE(s) from Docker Scout results (use --verbose to include them).")

except Exception as e:
print(f"Error parsing Docker Scout output: {e}")
import traceback
Expand Down
13 changes: 9 additions & 4 deletions scanner/grype_scanner.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,17 +156,19 @@ def parse_grype_output(grype_data: Dict[str, Any], image: str, compose_file: str

# Group by vulnerability ID to avoid duplicates
vuln_map = {}

negligible_count = 0

for match in matches:
vuln = match.get('vulnerability', {})
artifact = match.get('artifact', {})

vuln_id = vuln.get('id', 'UNKNOWN')
severity = vuln.get('severity', 'Unknown')
description = vuln.get('description', '')

# Skip Negligible severity vulnerabilities
if severity == 'Negligible':
negligible_count += 1
continue

# Store highest severity for each vuln
Expand Down Expand Up @@ -196,7 +198,10 @@ def parse_grype_output(grype_data: Dict[str, Any], image: str, compose_file: str
data['count']
)
findings.append(finding)


if negligible_count > 0:
print(f"[i] Filtered {negligible_count} Negligible-severity CVE(s) from Grype results (use --verbose to include them).")

except Exception as e:
print(f"Error parsing Grype output: {e}")
import traceback
Expand Down
7 changes: 6 additions & 1 deletion scanner/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,15 +288,20 @@ def scan_directory_level(directory, file_paths, rules):
# Read all files into a dictionary to keep track of content per file
file_contents = {}
all_content = ""

skipped_files = 0

for filepath in file_paths:
try:
with open(filepath, 'r', encoding='utf-8') as f:
content = f.read()
file_contents[filepath] = content
all_content += content + "\n"
except Exception as e:
skipped_files += 1
continue

if skipped_files > 0:
print(f"[!] Warning: {skipped_files} file(s) could not be read and were skipped — scan results may be incomplete.")

# Only run InverseRegexRules at directory level
for rule in rules:
Expand Down