Use-case testing with Caldera and Splunk
The task:
Execute various attack techniques and commands on a test system, detect the corresponding activity in the SIEM (Splunk), and determine whether the detection use cases trigger as expected.
The (current) solution:
A virtual lab environment with:
- Windows domain controller
- Windows 11 "victim node"
- Ubuntu 22.04 running Caldera as a Docker container
Automated testing with Caldera
For every adversary profile available in Caldera, an operation is started periodically against the test systems (see the sample cron entry after the script below).
run-all-adversaries.sh
#!/bin/bash
#===============================================
# /opt/caldera-automation/run-all-adversaries.sh
# Runs ALL adversaries automatically
#===============================================

# CONFIGURATION
CALDERA_API="http://localhost:8888/api/v2"
API_KEY="5884d7fe832b1dff4e49a[removed]2e385cff8a643a7e8b7a"
GROUP="red"
PLANNER="atomic"
LOG_DIR="/var/log/caldera-automation"
LOG_FILE="$LOG_DIR/all-adversaries-$(date +%Y%m%d).log"

# Setup
mkdir -p "$LOG_DIR"

log() {
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" | tee -a "$LOG_FILE"
}
#===============================================
# CHECK WHETHER AGENTS ARE ONLINE
#===============================================
check_agents() {
    log "Checking for online agents..."
    AGENTS=$(curl -s "$CALDERA_API/agents" -H "KEY: $API_KEY")
    AGENT_COUNT=$(echo "$AGENTS" | jq '[.[] | select(.trusted==true)] | length')
    if [ "$AGENT_COUNT" -lt 1 ]; then
        log "⚠️ No trusted agents online! Skipping run."
        return 1
    fi
    log "✓ Found $AGENT_COUNT trusted agent(s)"
    return 0
}
#===============================================
# FETCH ALL ADVERSARIES
#===============================================
get_adversaries() {
    # Output one "adversary_id|name" pair per line
    curl -s "$CALDERA_API/adversaries" -H "KEY: $API_KEY" | \
        jq -r '.[] | .adversary_id + "|" + .name'
}
#===============================================
# START AN OPERATION FOR ONE ADVERSARY
#===============================================
start_operation() {
    local ADV_ID="$1"
    local ADV_NAME="$2"
    local OP_NAME="Auto-${ADV_NAME}-$(date +%H%M)"
    # Log to stderr so that the caller's command substitution only captures the operation ID
    log " Starting: $ADV_NAME ($ADV_ID)" >&2
    RESPONSE=$(curl -s -X POST "$CALDERA_API/operations" \
        -H "KEY: $API_KEY" \
        -H "Content-Type: application/json" \
        -d '{
            "name": "'"$OP_NAME"'",
            "adversary": {"adversary_id": "'"$ADV_ID"'"},
            "group": "'"$GROUP"'",
            "planner": {"id": "'"$PLANNER"'"},
            "auto_close": true,
            "state": "running",
            "jitter": "2/8"
        }' 2>/dev/null)
    OP_ID=$(echo "$RESPONSE" | jq -r '.id // empty')
    if [ -n "$OP_ID" ] && [ "$OP_ID" != "null" ]; then
        log " ✓ Operation started: $OP_ID" >&2
        echo "$OP_ID"
    else
        log " ✗ Failed to start operation" >&2
        log " Response: $RESPONSE" >&2
        echo ""
    fi
}
#===============================================
# WAIT FOR OPERATION COMPLETION
#===============================================
wait_for_operation() {
    local OP_ID="$1"
    local MAX_WAIT=1800 # max. 30 minutes
    local ELAPSED=0
    while [ $ELAPSED -lt $MAX_WAIT ]; do
        STATE=$(curl -s "$CALDERA_API/operations/$OP_ID" -H "KEY: $API_KEY" | jq -r '.state')
        if [ "$STATE" == "finished" ] || [ "$STATE" == "out_of_time" ]; then
            return 0
        fi
        sleep 15
        ELAPSED=$((ELAPSED + 15))
    done
    log " ⚠️ Operation $OP_ID timeout"
    return 1
}
#===============================================
# MAIN
#===============================================
main() {
    log "=========================================="
    log "🚀 ALL ADVERSARIES TEST RUN"
    log "=========================================="

    # Check agents
    if ! check_agents; then
        exit 1
    fi

    # Fetch all adversaries
    log "Fetching adversaries..."
    ADVERSARIES=$(get_adversaries)
    ADV_COUNT=$(echo "$ADVERSARIES" | wc -l)
    log "Found $ADV_COUNT adversaries"

    # Arrays for operation IDs and adversary names
    declare -a OP_IDS
    declare -a ADV_NAMES

    # Start all operations
    log ""
    log "Starting operations..."
    while IFS='|' read -r ADV_ID ADV_NAME; do
        if [ -n "$ADV_ID" ]; then
            OP_ID=$(start_operation "$ADV_ID" "$ADV_NAME")
            if [ -n "$OP_ID" ]; then
                OP_IDS+=("$OP_ID")
                ADV_NAMES+=("$ADV_NAME")
            fi
            sleep 2 # Short pause between starts
        fi
    done <<< "$ADVERSARIES"
    log ""
    log "Started ${#OP_IDS[@]} operations"

    # Wait for completion
    log ""
    log "Waiting for operations to complete..."
    COMPLETED=0
    FAILED=0
    for i in "${!OP_IDS[@]}"; do
        OP_ID="${OP_IDS[$i]}"
        ADV_NAME="${ADV_NAMES[$i]}"
        log " Waiting for: $ADV_NAME ($OP_ID)"
        if wait_for_operation "$OP_ID"; then
            log " ✓ Completed"
            ((COMPLETED++))
        else
            log " ✗ Failed/Timeout"
            ((FAILED++))
        fi
    done

    # Sync to Splunk
    log ""
    log "Syncing to Splunk..."
    if [ -f "/opt/caldera-splunk/send-to-splunk.sh" ]; then
        /opt/caldera-splunk/send-to-splunk.sh >> "$LOG_FILE" 2>&1
        log "✓ Splunk sync completed"
    else
        log "⚠️ Splunk sync script not found"
    fi

    # Summary
    log ""
    log "=========================================="
    log "📊 SUMMARY"
    log "=========================================="
    log "Adversaries found: $ADV_COUNT"
    log "Operations started: ${#OP_IDS[@]}"
    log "Completed: $COMPLETED"
    log "Failed/Timeout: $FAILED"
    log "=========================================="
    log ""
}

# Run
main "$@"
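For the periodic execution, the script can be triggered by cron on the Caldera host. A possible entry is sketched below; the schedule, the cron file name and the log path are assumptions and not part of the documented setup:

# /etc/cron.d/caldera-automation - example only, adjust schedule and paths
# Run all adversaries every night at 02:00
0 2 * * * root /opt/caldera-automation/run-all-adversaries.sh >> /var/log/caldera-automation/cron.log 2>&1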
Sending Caldera logs to Splunk
To make Caldera's activity available for analysis in Splunk as well, a separate script sends the relevant data to the Splunk HTTP Event Collector (HEC). It is currently set up to run periodically via crontab.
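A possible crontab entry for this is shown below; the 30-minute interval and the install path are assumptions and have to be adapted to the actual environment:

# crontab -e on the Caldera host - example only
*/30 * * * * /opt/caldera-splunk/send-to-splunk-enhanced.sh >> /var/log/caldera-splunk/cron.log 2>&1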
send-to-splunk-enhanced.sh
#!/bin/bash
#===============================================
# Caldera → Splunk Integration Script
#===============================================
CALDERA_API="http://localhost:8888/api/v2"
API_KEY="5884d7fe832b1dff4e49a2[removed]7d352e385cff8a643a7e8b7a"
SPLUNK_HEC="http://10.[removed]:8088/services/collector"
SPLUNK_TOKEN="22352b46-[removed]-f37a326fc3d9"
SPLUNK_INDEX="caldera"
LOG_FILE="/var/log/caldera-splunk/sync.log"
TEMP_DIR="/tmp/caldera-splunk-$$"
mkdir -p "$TEMP_DIR"
mkdir -p "$(dirname "$LOG_FILE")"
log() {
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" | tee -a "$LOG_FILE"
}

cleanup() {
    rm -rf "$TEMP_DIR"
}
trap cleanup EXIT
send_to_splunk() {
    local data="$1"
    local sourcetype="$2"
    local temp_file="$TEMP_DIR/payload.json"
    local response
    cat > "$temp_file" << EOF
{
  "sourcetype": "$sourcetype",
  "index": "$SPLUNK_INDEX",
  "host": "caldera",
  "event": $data
}
EOF
    response=$(curl -s -k -X POST "$SPLUNK_HEC" \
        -H "Authorization: Splunk $SPLUNK_TOKEN" \
        -H "Content-Type: application/json" \
        -d @"$temp_file" 2>&1)
    rm -f "$temp_file"
    # Succeed only if the HEC acknowledged the event ("code":0); the grep must be
    # the last command so its exit status becomes the function's return value
    echo "$response" | grep -q '"code":0'
}
#===============================================
# SMART DECODE: detects whether the input is already decoded
#===============================================
smart_decode() {
    local input="$1"
    local decoded
    # Empty or null? Skip
    if [ -z "$input" ] || [ "$input" = "null" ]; then
        echo ""
        return
    fi
    # Check whether it looks like Base64 (only A-Za-z0-9+/= and length divisible by 4)
    if echo "$input" | grep -qE '^[A-Za-z0-9+/]+=*$' && [ $((${#input} % 4)) -eq 0 ]; then
        # Try to decode
        decoded=$(echo "$input" | base64 -d 2>/dev/null)
        # Check whether the decoded result is plausible (printable characters only)
        if echo "$decoded" | grep -qP '^[\x20-\x7E\s]+$' 2>/dev/null; then
            echo "$decoded"
        else
            # Decoding yields binary data - keep the original
            echo "$input"
        fi
    else
        # Already plaintext
        echo "$input"
    fi
}
#===============================================
# EXPORT COMMANDS WITH IMPROVED DECODING
#===============================================
export_commands_enhanced() {
    log "Exporting commands with smart decoding..."
    local ops_file="$TEMP_DIR/operations.json"
    curl -s "$CALDERA_API/operations" -H "KEY: $API_KEY" > "$ops_file"
    local count=0
    # Read via process substitution (not a pipeline) so that $count survives the loops
    while read -r operation; do
        OP_ID=$(echo "$operation" | jq -r '.id')
        OP_NAME=$(echo "$operation" | jq -r '.name')
        while read -r command; do
            if [ -n "$command" ] && [ "$command" != "null" ]; then
                # Extract fields
                PAW=$(echo "$command" | jq -r '.paw // "unknown"')
                HOST=$(echo "$command" | jq -r '.host // "unknown"')
                PID=$(echo "$command" | jq -r '.pid // "0"')
                COLLECT=$(echo "$command" | jq -r '.collect // ""')
                FINISH=$(echo "$command" | jq -r '.finish // ""')
                STATUS=$(echo "$command" | jq -r '.status // -1')
                # Extract the command (prefer plaintext_command)
                COMMAND_RAW=$(echo "$command" | jq -r '.plaintext_command // .command // ""')
                # Smart decode
                DECODED=$(smart_decode "$COMMAND_RAW")
                # Strip special characters for better searching
                CLEANED=$(echo "$DECODED" | tr -cd '[:print:][:space:]' | sed 's/[^a-zA-Z0-9 ._-]/ /g')
                # Extract keywords for matching (first 3 words)
                KEYWORDS=$(echo "$CLEANED" | awk '{print $1, $2, $3}' | tr '[:upper:]' '[:lower:]')
                # Ability details
                ABILITY_NAME=$(echo "$command" | jq -r '.ability.name // "unknown"')
                TECHNIQUE_ID=$(echo "$command" | jq -r '.ability.technique_id // "unknown"')
                TACTIC=$(echo "$command" | jq -r '.ability.tactic // "unknown"')
                # Build correlation key (for time-based correlation)
                CORR_KEY="${HOST}_${PID}_${COLLECT}"
                # Enriched event with additional correlation fields
                # (jq -Rs keeps multi-line commands as a single JSON string)
                cat > "$TEMP_DIR/single_cmd.json" << EOF
{
  "operation_id": "$OP_ID",
  "operation_name": "$OP_NAME",
  "paw": "$PAW",
  "host": "$HOST",
  "pid": "$PID",
  "collect_time": "$COLLECT",
  "finish_time": "$FINISH",
  "status": $STATUS,
  "ability_name": "$ABILITY_NAME",
  "technique_id": "$TECHNIQUE_ID",
  "tactic": "$TACTIC",
  "command_raw": $(printf '%s' "$COMMAND_RAW" | jq -Rs .),
  "command_decoded": $(printf '%s' "$DECODED" | jq -Rs .),
  "command_cleaned": $(printf '%s' "$CLEANED" | jq -Rs .),
  "command_keywords": $(printf '%s' "$KEYWORDS" | jq -Rs .),
  "correlation_key": "$CORR_KEY",
  "correlation_time_epoch": $(date -d "$COLLECT" +%s 2>/dev/null || echo "0")
}
EOF
                if send_to_splunk "$(cat "$TEMP_DIR/single_cmd.json")" "caldera:command:enriched"; then
                    ((count++))
                fi
            fi
        done < <(echo "$operation" | jq -c '.chain[]?' 2>/dev/null)
    done < <(jq -c '.[]' "$ops_file")
    log " ✓ Exported $count commands"
}
#===============================================
# EXPORT AGENTS
#===============================================
export_agents() {
    log "Exporting agents..."
    curl -s "$CALDERA_API/agents" -H "KEY: $API_KEY" | jq -c '.[]' | while read -r agent; do
        echo "$agent" > "$TEMP_DIR/agent.json"
        send_to_splunk "$(cat "$TEMP_DIR/agent.json")" "caldera:agent" > /dev/null
    done
    log " ✓ Agents exported"
}
#===============================================
# MAIN
#===============================================
log "=========================================="
log "Caldera → Splunk Sync"
log "=========================================="
export_agents
export_commands_enhanced
log "=========================================="
log "Sync completed"
log "=========================================="
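To verify the pipeline end to end, both the HEC endpoint and the indexed events can be checked by hand. The following sketch is illustrative only: the Splunk host, the HEC token, the admin credentials, the management port 8089 and the one-hour time window are placeholders/assumptions and must be adapted to the actual environment.

# 1) HEC smoke test: send a single test event into the caldera index
curl -s -k "http://<splunk-host>:8088/services/collector" \
    -H "Authorization: Splunk <hec-token>" \
    -H "Content-Type: application/json" \
    -d '{"index": "caldera", "sourcetype": "caldera:test", "event": "hec smoke test"}'

# 2) Check via the Splunk REST search API whether enriched Caldera commands arrived in the last hour
curl -s -k -u admin:<password> "https://<splunk-host>:8089/services/search/jobs/export" \
    --data-urlencode search='search index=caldera sourcetype="caldera:command:enriched" earliest=-1h | stats count by sourcetype' \
    -d output_mode=json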
