fix: Address high-severity bandit issues

This commit is contained in:
DSS
2025-12-11 07:13:06 -03:00
parent bcb4475744
commit 5b2a328dd1
167 changed files with 7051 additions and 7168 deletions


@@ -11,13 +11,13 @@ Validators:
 5. Audit logging
 """
-import sys
-import os
 import json
-import subprocess
-from pathlib import Path
-from datetime import datetime
+import os
+import re
+import subprocess
+import sys
+from datetime import datetime
+from pathlib import Path
 # Configuration
 DSS_ROOT = Path("/home/overbits/dss")
@@ -32,11 +32,13 @@ IMMUTABLE_FILES = [
 AUDIT_LOG = DSS_ROOT / ".dss/logs/git-hooks.jsonl"
 TEMP_DIR = DSS_ROOT / ".dss/temp"
 class Colors:
-    RED = '\033[0;31m'
-    GREEN = '\033[0;32m'
-    YELLOW = '\033[1;33m'
-    NC = '\033[0m' # No Color
+    RED = "\033[0;31m"
+    GREEN = "\033[0;32m"
+    YELLOW = "\033[1;33m"
+    NC = "\033[0m" # No Color
 def log_audit(validator, status, details):
     """Log hook events to audit trail"""
@@ -53,16 +55,18 @@ def log_audit(validator, status, details):
     with open(AUDIT_LOG, "a") as f:
         f.write(json.dumps(log_entry) + "\n")
 def get_staged_files():
     """Get list of staged files"""
     result = subprocess.run(
         ["git", "diff", "--cached", "--name-only", "--diff-filter=ACM"],
         capture_output=True,
         text=True,
-        cwd=DSS_ROOT
+        cwd=DSS_ROOT,
     )
     return [Path(f) for f in result.stdout.strip().split("\n") if f]
 def check_immutable_files(staged_files):
     """Validate that immutable files are not modified"""
     from fnmatch import fnmatch
@@ -77,7 +81,7 @@ def check_immutable_files(staged_files):
["git", "ls-tree", "--name-only", "HEAD", str(file_path)],
capture_output=True,
text=True,
cwd=DSS_ROOT
cwd=DSS_ROOT,
)
if result.stdout.strip(): # File exists in HEAD
violations.append(str(file_path))
@@ -93,27 +97,30 @@ def check_immutable_files(staged_files):
             commit_msg = commit_msg_file.read_text()
             if "[IMMUTABLE-UPDATE]" in commit_msg:
                 bypass = True
-                log_audit("immutable_files", "bypass", {
-                    "files": violations,
-                    "commit_message": commit_msg.split("\n")[0],
-                    "method": "commit_message"
-                })
+                log_audit(
+                    "immutable_files",
+                    "bypass",
+                    {
+                        "files": violations,
+                        "commit_message": commit_msg.split("\n")[0],
+                        "method": "commit_message",
+                    },
+                )
         if bypass:
-            log_audit("immutable_files", "bypass", {
-                "files": violations,
-                "method": "environment_variable"
-            })
+            log_audit(
+                "immutable_files", "bypass", {"files": violations, "method": "environment_variable"}
+            )
         if not bypass:
             print(f"{Colors.RED}✗ IMMUTABLE FILE VIOLATION{Colors.NC}")
-            print(f"\nThe following protected files cannot be modified:")
+            print("\nThe following protected files cannot be modified:")
             for v in violations:
                 print(f" - {v}")
-            print(f"\nTo update immutable files:")
-            print(f" 1. Use commit message: [IMMUTABLE-UPDATE] Reason for change")
-            print(f" 2. Include justification in commit body")
-            print(f"\nProtected files:")
+            print("\nTo update immutable files:")
+            print(" 1. Use commit message: [IMMUTABLE-UPDATE] Reason for change")
+            print(" 2. Include justification in commit body")
+            print("\nProtected files:")
             for pattern in IMMUTABLE_FILES:
                 print(f" - {pattern}")
@@ -123,6 +130,7 @@ def check_immutable_files(staged_files):
log_audit("immutable_files", "passed", {"files_checked": len(staged_files)})
return True
def check_temp_folder(staged_files):
"""Validate that temp files are only in .dss/temp/"""
violations = []
@@ -151,11 +159,11 @@ def check_temp_folder(staged_files):
     if violations:
         print(f"{Colors.RED}✗ TEMP FOLDER VIOLATION{Colors.NC}")
-        print(f"\nTemp files must be created in .dss/temp/ only:")
+        print("\nTemp files must be created in .dss/temp/ only:")
         for v in violations:
             print(f" - {v}")
-        print(f"\nAll temporary files MUST go in: .dss/temp/[session-id]/")
-        print(f"Use the get_temp_dir() helper function.")
+        print("\nAll temporary files MUST go in: .dss/temp/[session-id]/")
+        print("Use the get_temp_dir() helper function.")
         log_audit("temp_folder", "rejected", {"files": violations})
         return False
@@ -163,6 +171,7 @@ def check_temp_folder(staged_files):
log_audit("temp_folder", "passed", {"files_checked": len(staged_files)})
return True
def check_schemas(staged_files):
"""Validate JSON and YAML schemas"""
violations = []
@@ -178,20 +187,18 @@ def check_schemas(staged_files):
         elif file_path.suffix in [".yaml", ".yml"]:
             try:
                 import yaml
                 with open(full_path) as f:
                     yaml.safe_load(f)
             except ImportError:
                 # YAML not available, skip validation
                 continue
             except Exception as e:
-                violations.append({
-                    "file": str(file_path),
-                    "error": str(e)
-                })
+                violations.append({"file": str(file_path), "error": str(e)})
     if violations:
         print(f"{Colors.RED}✗ SCHEMA VALIDATION FAILED{Colors.NC}")
-        print(f"\nInvalid JSON/YAML files:")
+        print("\nInvalid JSON/YAML files:")
         for v in violations:
             print(f" - {v['file']}")
             print(f" Error: {v['error']}")
@@ -202,6 +209,7 @@ def check_schemas(staged_files):
log_audit("schema_validation", "passed", {"files_checked": len(staged_files)})
return True
def check_documentation(staged_files):
"""Check that new implementations have documentation"""
violations = []
@@ -228,20 +236,24 @@ def check_documentation(staged_files):
             missing_func_docs = re.findall(func_pattern, content)
             if missing_class_docs:
-                warnings.append({
-                    "file": str(file_path),
-                    "type": "class",
-                    "items": missing_class_docs[:5] # Limit to first 5
-                })
+                warnings.append(
+                    {
+                        "file": str(file_path),
+                        "type": "class",
+                        "items": missing_class_docs[:5], # Limit to first 5
+                    }
+                )
             if missing_func_docs:
-                warnings.append({
-                    "file": str(file_path),
-                    "type": "function",
-                    "items": missing_func_docs[:5] # Limit to first 5
-                })
+                warnings.append(
+                    {
+                        "file": str(file_path),
+                        "type": "function",
+                        "items": missing_func_docs[:5], # Limit to first 5
+                    }
+                )
-        except Exception as e:
+        except Exception:
             continue
     # Check if significant code changes have knowledge updates
@@ -251,15 +263,19 @@ def check_documentation(staged_files):
     # If many code files changed but no knowledge updates, warn
     if len(code_files_changed) > 5 and len(knowledge_files_changed) == 0:
-        warnings.append({
-            "file": "general",
-            "type": "knowledge",
-            "items": [f"Changed {len(code_files_changed)} code files but no .knowledge/ updates"]
-        })
+        warnings.append(
+            {
+                "file": "general",
+                "type": "knowledge",
+                "items": [
+                    f"Changed {len(code_files_changed)} code files but no .knowledge/ updates"
+                ],
+            }
+        )
     if warnings:
         print(f"{Colors.YELLOW}⚠ DOCUMENTATION WARNING{Colors.NC}")
-        print(f"\nMissing documentation found (non-blocking):")
+        print("\nMissing documentation found (non-blocking):")
         for w in warnings:
             if w["type"] == "class":
                 print(f" - {w['file']}: Classes without docstrings: {', '.join(w['items'])}")
@@ -267,8 +283,8 @@ def check_documentation(staged_files):
print(f" - {w['file']}: Functions without docstrings: {', '.join(w['items'])}")
elif w["type"] == "knowledge":
print(f" - {w['items'][0]}")
print(f"\n Tip: Add docstrings to new classes/functions")
print(f" Tip: Update .knowledge/ files when adding major features\n")
print("\n Tip: Add docstrings to new classes/functions")
print(" Tip: Update .knowledge/ files when adding major features\n")
log_audit("documentation", "warning", {"warnings": warnings})
else:
@@ -297,18 +313,16 @@ def check_terminology(staged_files):
             for old_term, new_term in deprecated_terms.items():
                 if re.search(rf"\b{old_term}\b", content, re.IGNORECASE):
-                    warnings.append({
-                        "file": str(file_path),
-                        "term": old_term,
-                        "suggested": new_term
-                    })
+                    warnings.append(
+                        {"file": str(file_path), "term": old_term, "suggested": new_term}
+                    )
         except:
             # Skip binary or unreadable files
             continue
     if warnings:
         print(f"{Colors.YELLOW}⚠ TERMINOLOGY WARNING{Colors.NC}")
-        print(f"\nDeprecated terminology found (non-blocking):")
+        print("\nDeprecated terminology found (non-blocking):")
         for w in warnings:
             print(f" - {w['file']}: '{w['term']}' → use '{w['suggested']}'")
         print()
@@ -320,6 +334,7 @@ def check_terminology(staged_files):
     # Always return True (warnings only)
     return True
 def main():
     """Run all validators"""
     print(f"{Colors.GREEN}Running DSS pre-commit validations...{Colors.NC}\n")
@@ -356,9 +371,10 @@ def main():
         return 0
     else:
         print(f"\n{Colors.RED}✗ Pre-commit validation failed{Colors.NC}")
-        print(f"Fix the issues above and try again.\n")
+        print("Fix the issues above and try again.\n")
         log_audit("pre_commit", "failed", {"files": len(staged_files)})
         return 1
 if __name__ == "__main__":
     sys.exit(main())