Compare commits


4 Commits

SHA1 Message Date
ecf3909a4e Added Nuke flag 2025-10-29 14:50:08 +00:00
4e30587768 copes with single and multi jobs 2025-10-29 11:14:46 +00:00
a87f09d8d9 logs 2025-10-29 10:17:11 +00:00
fa20c5484e checkpoint 2025-10-24 14:49:14 +01:00

backupmult.ps1

@@ -1,72 +1,30 @@
param(
[Parameter(Mandatory=$true)]
[string]$SqlInstance,
[Parameter(Mandatory=$false)]
[string]$Directories,
[Parameter(Mandatory=$true)]
[string]$MvName,
[Parameter(Mandatory=$false)]
[int]$Jobs = 2,
[Parameter(Mandatory=$false)]
[int]$LogRetentionDays = 30,
[Parameter(Mandatory=$false)]
[switch]$Nuke
)
#
# backupmult.ps1 - Parallel database backup script using Ola Hallengren's DatabaseBackup procedure
#
# Starts multiple concurrent backup jobs; each job automatically shares the database load
# via DatabasesInParallel = 'Y' when Jobs > 1.
#
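# Example invocation (hypothetical server and Managed Volume names):
#   .\backupmult.ps1 -SqlInstance 'SQLCLU01\PROD1' -MvName 'mv-prod1-backups' -Jobs 2 -LogRetentionDays 30
# Add -Nuke to delete the contents of the Managed Volume channel paths before the backup starts.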
# Import SQL Server PowerShell module
try {
if (Get-Module -ListAvailable -Name SqlServer) {
Import-Module SqlServer -ErrorAction Stop
Write-Host "INFO: SqlServer PowerShell module loaded successfully."
}
elseif (Get-Module -ListAvailable -Name SQLPS) {
Import-Module SQLPS -ErrorAction Stop
Write-Host "INFO: SQLPS PowerShell module loaded successfully."
}
else {
throw "No SQL Server PowerShell module found"
}
if (-not (Get-Command Invoke-Sqlcmd -ErrorAction SilentlyContinue)) {
throw "Invoke-Sqlcmd command not available"
}
}
catch {
Write-Host "ERROR: Failed to import SQL Server PowerShell module. Please install it using: Install-Module -Name SqlServer -AllowClobber"
Write-Host "ERROR: $($_.Exception.Message)"
exit 1
}
$instanceName = $SqlInstance.Split('\')[1]
# Use provided directories or default to comma-separated multi-directory setup
if ($Directories) {
$directoryParam = $Directories
Write-Host "INFO: Using provided directories: $directoryParam"
} else {
$directoryParam = "C:\Rubrik\$instanceName\Dir1, C:\Rubrik\$instanceName\Dir2, C:\Rubrik\$instanceName\Dir3, C:\Rubrik\$instanceName\Dir4"
Write-Host "INFO: Using default multi-directory setup: $directoryParam"
}
# TODO: See if there is way to query QueueDatabase during backup to monitor progress
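# A rough, untested sketch of that idea (assumes the Ola Hallengren queue tables live in the
# same database as the DatabaseBackup procedure; verify object names against your install):
#   Invoke-Sqlcmd -ServerInstance $SqlInstance -Query "SELECT * FROM dbo.QueueDatabase"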
$fullBackupDay = 'Thursday'
$fullBackupOverdueDays = 7
$logFile = "C:\Rubrik\backup-multi-$instanceName.log"
# Validate job count
if ($Jobs -lt 1 -or $Jobs -gt 8) {
Write-Host "ERROR: Jobs parameter must be between 1 and 8. Provided: $Jobs"
exit 1
}
Write-Host "INFO: Starting $Jobs parallel backup jobs"
$today = (Get-Date).Date
$SAFile = "C:\Rubrik\scripts\rbksql.xml"
function Write-Log($message, $jobId = "") {
$timestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
@@ -94,56 +52,367 @@ function Write-Log($message, $jobId = "") {
Write-Host $logEntry
}
# Parse instance name from SQL instance parameter
$instanceParts = $SqlInstance -split '\\'
if ($instanceParts.Length -eq 2) {
$instanceName = $instanceParts[1]
} elseif ($instanceParts.Length -eq 1) {
$instanceName = $instanceParts[0]
} else {
$instanceName = $SqlInstance.Replace('\', '_').Replace('/', '_')
}
if ([string]::IsNullOrEmpty($instanceName)) {
Write-Host "ERROR: Could not determine instance name from SqlInstance: '$SqlInstance'"
exit 1
}
# Sanitize and trim the instance name for safe filenames
$instanceName = $instanceName.Trim()
$invalidChars = [IO.Path]::GetInvalidFileNameChars()
foreach ($c in $invalidChars) {
$escaped = [regex]::Escape($c)
$instanceName = $instanceName -replace $escaped, '_'
}
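# e.g. a hypothetical $SqlInstance of 'SQLCLU01\PROD1' yields $instanceName 'PROD1'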
$timestamp = Get-Date -Format "yyyyMMdd_HHmmss"
$logDir = "C:\Rubrik\logs"
# Ensure log directory exists before building/using log file
if (-not (Test-Path $logDir)) {
try {
New-Item -ItemType Directory -Path $logDir -Force | Out-Null
} catch {
Write-Host "ERROR: Could not create log directory $logDir : $($_.Exception.Message)"
exit 1
}
}
$logFileName = "backup_{0}_{1}.log" -f $instanceName, $timestamp
$logFile = Join-Path $logDir $logFileName
Write-Log "DEBUG: SqlInstance='$SqlInstance', instanceName='$instanceName', logFile='$logFile'"
# Function to clean up old log files
function Remove-OldLogs {
param([int]$retentionDays)
# Ensure the log directory exists before scanning for old log files
$logDir = "C:\Rubrik\logs"
if (-not (Test-Path $logDir)) {
try {
New-Item -ItemType Directory -Path $logDir -Force | Out-Null
Write-Log "INFO: Created log directory: $logDir"
} catch {
Write-Log "ERROR: Failed to create log directory $logDir. $($_.Exception.Message)"
return
}
}
$cutoffDate = (Get-Date).AddDays(-$retentionDays)
Write-Log "INFO: Cleaning up log files older than $retentionDays days (before $($cutoffDate.ToString('yyyy-MM-dd')))"
$oldLogs = Get-ChildItem -Path $logDir -Filter "*.log" | Where-Object { $_.LastWriteTime -lt $cutoffDate }
$deletedCount = 0
foreach ($oldLog in $oldLogs) {
try {
Remove-Item $oldLog.FullName -Force
$deletedCount++
} catch {
Write-Log "WARNING: Failed to delete old log file $($oldLog.Name): $($_.Exception.Message)"
}
}
Write-Log "INFO: Cleaned up $deletedCount old log files"
}
# Clean up old logs before starting
Remove-OldLogs -retentionDays $LogRetentionDays
# Import SQL Server PowerShell module
try {
if (Get-Module -ListAvailable -Name SqlServer) {
Import-Module SqlServer -ErrorAction Stop
Write-Log "INFO: SqlServer PowerShell module loaded successfully."
}
elseif (Get-Module -ListAvailable -Name SQLPS) {
Import-Module SQLPS -ErrorAction Stop
Write-Log "INFO: SQLPS PowerShell module loaded successfully."
}
else {
throw "No SQL Server PowerShell module found"
}
if (-not (Get-Command Invoke-Sqlcmd -ErrorAction SilentlyContinue)) {
throw "Invoke-Sqlcmd command not available"
}
}
catch {
Write-Log "ERROR: Failed to import SQL Server PowerShell module. Please install it using: Install-Module -Name SqlServer -AllowClobber"
Write-Log "ERROR: $($_.Exception.Message)"
exit 1
}
# Import Rubrik Security Cloud module
try {
Import-Module RubrikSecurityCloud -ErrorAction Stop
Write-Log "INFO: RubrikSecurityCloud module loaded successfully."
} catch {
Write-Log "ERROR: Failed to import RubrikSecurityCloud module. $($_.Exception.Message)"
exit 1
}
$localNode = $env:COMPUTERNAME
$clusterInstance = Get-ClusterResource | Where-Object { $_.ResourceType -eq "SQL Server" -and $_.Name -eq "SQL Server ($instanceName)" }
if ($clusterInstance) {
$ownerNode = $clusterInstance.OwnerNode
if ($ownerNode -ne $localNode) {
Write-Log "SQL instance '$SqlInstance' is not running on local node '$localNode'. Updating the MV."
Connect-Rsc -ServiceAccountFile $SAFile
Write-Log "Connected to Rubrik Security Cloud."
$newHost = Get-RscHost -Name $ownerNode -OsType WINDOWS
$query = New-RscQuery -GqlQuery slaManagedVolumes -AddField Nodes.HostDetail, Nodes.SmbShare, Nodes.ClientConfig, Nodes.ClientConfig.BackupScript, Nodes.ClientConfig.PreBackupScript
$query.var.filter = @(Get-RscType -Name Filter)
$query.var.filter[0].field = "NAME_EXACT_MATCH"
$query.var.filter[0].Texts = $mvName
$mvResult = $query.Invoke()
if (-not $mvResult.nodes -or $mvResult.nodes.Count -eq 0) {
Write-Log "ERROR: Managed Volume '$mvName' not found. This may be due to insufficient permissions or the volume not existing."
Disconnect-Rsc
exit 1
}
$mvDetail = $mvResult.nodes[0]
Write-Log "Found Managed Volume: $($mvDetail.Name) (ID: $($mvDetail.Id), Status: $($mvDetail.hostDetail.Status), HostDetail Name: $($mvDetail.hostDetail.Name))"
$query = New-RscMutation -GqlMutation updateManagedVolume
$query.Var.input = Get-RscType -Name UpdateManagedVolumeInput
$query.Var.input.update = Get-RscType -Name ManagedVolumeUpdateInput
$query.Var.input.update.config = Get-RscType -Name ManagedVolumePatchConfigInput
$query.Var.input.update.slaClientConfig = Get-RscType -Name ManagedVolumePatchSlaClientConfigInput
$query.Var.input.Id = $mvDetail.Id
$query.Var.input.update.Name = $mvName
$query.Var.input.update.config.SmbDomainName = $mvDetail.SmbShare.DomainName
$query.Var.input.update.config.SmbValidIps = $newHost.Name
$query.Var.input.update.config.SmbValidUsers = $mvDetail.SmbShare.ValidUsers + $mvDetail.SmbShare.ActiveDirectoryGroups
$query.Var.input.update.slaClientConfig.clientHostId = $newHost.Id
$query.Var.input.update.slaClientConfig.channelHostMountPaths = $mvDetail.ClientConfig.ChannelHostMountPaths
$query.Var.input.update.slaClientConfig.backupScriptCommand = $mvDetail.ClientConfig.BackupScript.ScriptCommand
# Only set pre-backup script fields if a pre-backup script was configured
if ($mvDetail.ClientConfig.PreBackupScript.ScriptCommand) {
$query.Var.input.update.slaClientConfig.preBackupScriptCommand = $mvDetail.ClientConfig.PreBackupScript.ScriptCommand
$query.Var.input.update.slaClientConfig.preBackupScriptTimeout = $mvDetail.ClientConfig.PreBackupScript.Timeout
$query.Var.input.update.slaClientConfig.shouldCancelBackupOnPreBackupScriptFailure = $mvDetail.ClientConfig.ShouldCancelBackupOnPreBackupScriptFailure
$query.Var.input.update.slaClientConfig.shouldDisablePreBackupScript = $false
} else {
$query.Var.input.update.slaClientConfig.shouldDisablePreBackupScript = $true
}
$query.Var.input.update.slaClientConfig.shouldDisablePostBackupScriptOnBackupFailure = $true
$query.Var.input.update.slaClientConfig.shouldDisablePostBackupScriptOnBackupSuccess = $true
$query.gqlRequest().Variables
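# NOTE: $dryrun is not declared in this script's param block; unless it is set elsewhere
# (e.g. as a global variable), the update below always runs.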
if (-not $dryrun) {
$query.Invoke()
} else {
Write-Log "Dry run mode: Managed Volume update not invoked."
}
# Now must exit 1 to stop the backup continuing on the wrong node
Disconnect-Rsc
exit 1
} else {
Write-Log "SQL instance '$SqlInstance' is running on local node '$localNode'. No action needed."
}
} else {
Write-Log "ERROR: SQL instance '$SqlInstance' not found in cluster resources."
exit 1
}
# Connect to Rubrik and retrieve managed volume paths
try {
Connect-Rsc -ServiceAccountFile $SAFile
Write-Log "INFO: Connected to Rubrik Security Cloud."
$query = New-RscQuery -GqlQuery slaManagedVolumes -AddField Nodes.HostDetail, Nodes.SmbShare, Nodes.ClientConfig, Nodes.ClientConfig.BackupScript, Nodes.ClientConfig.PreBackupScript
$query.var.filter = @(Get-RscType -Name Filter)
$query.var.filter[0].field = "NAME_EXACT_MATCH"
$query.var.filter[0].Texts = $MvName
$mvDetail = $query.Invoke()
if (-not $mvDetail.nodes -or $mvDetail.nodes.Count -eq 0) {
Write-Log "ERROR: Managed Volume '$MvName' not found. This may be due to insufficient permissions or the volume not existing."
exit 1
}
$paths = $mvDetail.nodes[0].ClientConfig.ChannelHostMountPaths
Write-Log "INFO: Retrieved paths: $($paths -join ', ')"
} catch {
Write-Log "ERROR: Failed to retrieve paths from Rubrik. $($_.Exception.Message)"
exit 1
}
# If -Nuke is set, delete the contents of each retrieved path (but keep the folder itself).
if ($Nuke) {
Write-Log "INFO: -nuke flag set. Beginning recursive deletion of contents for retrieved paths."
foreach ($p in $paths) {
if (-not $p) { continue }
$pathToCheck = $p.Trim()
# Determine root to avoid deleting drive root like C:\
try { $root = [IO.Path]::GetPathRoot($pathToCheck) } catch { $root = $null }
if ([string]::IsNullOrEmpty($pathToCheck)) {
Write-Log "WARNING: Skipping empty path entry"
continue
}
if ($root -and ($pathToCheck.TrimEnd('\') -eq $root.TrimEnd('\'))) {
Write-Log "ERROR: Refusing to nuke root path '$pathToCheck'. Skipping."
continue
}
if (-not (Test-Path -LiteralPath $pathToCheck)) {
Write-Log "WARNING: Path '$pathToCheck' does not exist. Skipping."
continue
}
Write-Log "INFO: NUKING contents of '$pathToCheck' (deleting all files & subfolders inside)."
try {
# Enumerate children and delete each item so the folder itself remains
Get-ChildItem -LiteralPath $pathToCheck -Force -ErrorAction SilentlyContinue | ForEach-Object {
try {
Remove-Item -LiteralPath $_.FullName -Recurse -Force -ErrorAction Stop
Write-Log "INFO: Deleted: $($_.FullName)"
} catch {
Write-Log "WARNING: Failed to delete $($_.FullName): $($_.Exception.Message)"
}
}
} catch {
Write-Log "ERROR: Failed to enumerate or delete contents of '$pathToCheck': $($_.Exception.Message)"
}
}
Write-Log "INFO: -nuke operation complete. Continuing with backup flow."
}
$directoryParam = $paths -join ', '
# Validate job count
if ($Jobs -lt 1 -or $Jobs -gt 4) {
Write-Log "ERROR: Jobs parameter must be between 1 and 4. Provided: $Jobs"
exit 1
}
Write-Log "INFO: Starting $Jobs parallel backup jobs"
$today = (Get-Date).Date
function Get-BackupType($directoryParam) {
# Support multiple candidate directories. Scan them in deterministic order for existing flags.
$dirs = @()
if ($directoryParam) {
$dirs = $directoryParam -split ',' | ForEach-Object { $_.Trim() } | Where-Object { $_ -ne '' }
}
# Build lists of found flags (in candidate order)
$foundFull = @()
$foundDiff = @()
foreach ($d in $dirs) {
$full = Join-Path $d "last_full.flag"
$diff = Join-Path $d "last_diff.flag"
if (Test-Path $full) { $foundFull += $full }
if (Test-Path $diff) { $foundDiff += $diff }
}
# Determine if full backup is overdue using the first-found full flag (if any)
$isFullBackupOverdue = $false
if ($foundFull.Count -gt 0) {
$fullFlag = $foundFull[0]
try {
$lastFullDate = [DateTime]::ParseExact((Get-Content $fullFlag).Trim(), "yyyy-MM-dd", $null)
$daysSinceLastFull = ($today - $lastFullDate).Days
$isFullBackupOverdue = $daysSinceLastFull -gt $fullBackupOverdueDays
Write-Log "INFO: Last full backup was $daysSinceLastFull days ago. Overdue threshold: $fullBackupOverdueDays days."
}
catch {
Write-Log "INFO: Last full backup was $daysSinceLastFull days ago (from $fullFlag). Overdue threshold: $fullBackupOverdueDays days."
} catch {
$isFullBackupOverdue = $true
Write-Log "WARNING: Could not parse last full backup date. Treating as overdue."
Write-Log "WARNING: Could not parse last full backup date in $fullFlag. Treating as overdue."
}
} else {
$isFullBackupOverdue = $true
Write-Log "WARNING: No last full backup date found. Treating as overdue."
Write-Log "WARNING: No last full backup date found in any candidate directories. Treating as overdue."
}
# Helper to ensure directory exists
function Ensure-DirExists([string]$path) {
if (-not (Test-Path $path)) {
try { New-Item -ItemType Directory -Path $path -Force | Out-Null } catch { }
}
}
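# Note: failures inside Ensure-DirExists are swallowed; any real problem surfaces when the
# Set-Content calls below try to write the flag file.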
# Determine preferred write location: prefer existing related flag location, otherwise first candidate dir
$firstDir = $dirs[0]
# If it's a full backup day or overdue, plan for full backup
if ((Get-Date).DayOfWeek -eq $fullBackupDay -or $isFullBackupOverdue) {
# If a full flag exists, use its location; else use firstDir
$targetFullFlag = if ($foundFull.Count -gt 0) { $foundFull[0] } else { Join-Path $firstDir "last_full.flag" }
$targetDir = Split-Path $targetFullFlag -Parent
Ensure-DirExists $targetDir
$currentValue = $null
if (Test-Path $targetFullFlag) {
try { $currentValue = (Get-Content $targetFullFlag).Trim() } catch { $currentValue = $null }
}
if (-not $currentValue -or $currentValue -ne $today.ToString("yyyy-MM-dd")) {
try {
Set-Content -Path $targetFullFlag -Value $today.ToString("yyyy-MM-dd") -Encoding UTF8
Write-Log "INFO: Created/Updated full backup flag file: $targetFullFlag"
} catch {
Write-Log "ERROR: Failed to create/update full backup flag file: $targetFullFlag. $($_.Exception.Message)"
}
$reason = if ($isFullBackupOverdue) { "overdue" } else { "scheduled" }
return @{ Type = "FULL"; CleanupTime = 168; Reason = $reason }
} else {
return @{ Type = "LOG"; CleanupTime = 24; Reason = "full already taken today" }
}
}
# Otherwise, plan for differential
# Prefer an existing diff flag location if present; else prefer the existing full flag location (write diff alongside full); otherwise firstDir
if ($foundDiff.Count -gt 0) {
$targetDiffFlag = $foundDiff[0]
} elseif ($foundFull.Count -gt 0) {
$targetDiffFlag = Join-Path (Split-Path $foundFull[0] -Parent) "last_diff.flag"
} else {
$targetDiffFlag = Join-Path $firstDir "last_diff.flag"
}
$targetDir = Split-Path $targetDiffFlag -Parent
Ensure-DirExists $targetDir
$currentDiffValue = $null
if (Test-Path $targetDiffFlag) {
try { $currentDiffValue = (Get-Content $targetDiffFlag).Trim() } catch { $currentDiffValue = $null }
}
if (-not $currentDiffValue -or $currentDiffValue -ne $today.ToString("yyyy-MM-dd")) {
try {
Set-Content -Path $targetDiffFlag -Value $today.ToString("yyyy-MM-dd") -Encoding UTF8
Write-Log "INFO: Created/Updated diff backup flag file: $targetDiffFlag"
} catch {
Write-Log "ERROR: Failed to create/update diff backup flag file: $targetDiffFlag. $($_.Exception.Message)"
}
return @{ Type = "DIFF"; CleanupTime = 168; Reason = "differential scheduled" }
} else {
return @{ Type = "LOG"; CleanupTime = 24; Reason = "diff already taken today" }
}
}
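# Summary of the decision logic above:
#   full-backup day (Thursday) or full overdue, no full flag dated today -> FULL (CleanupTime 168)
#   full-backup day or full overdue, full flag already dated today       -> LOG  (CleanupTime 24)
#   any other day, no diff flag dated today                              -> DIFF (CleanupTime 168)
#   any other day, diff flag already dated today                         -> LOG  (CleanupTime 24)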
@@ -151,18 +420,27 @@ function Get-BackupType($directoryParam) {
$backupInfo = Get-BackupType $directoryParam
Write-Log "Selected $($backupInfo.Type) backup ($($backupInfo.Reason))"
# Build the Ola H query. Include DatabasesInParallel only when multiple jobs are used
# Build parameter lines so we can avoid leaving a trailing comma when omitting DatabasesInParallel
$paramLines = @(
"@Databases = 'ALL_DATABASES'",
"@Directory = '$directoryParam'",
"@BackupType = '$($backupInfo.Type)'",
"@Verify = 'N'",
"@CleanupTime = $($backupInfo.CleanupTime)",
"@CheckSum = 'Y'",
"@LogToTable = 'Y'"
)
# Only enable DatabasesInParallel when we run more than one job
if ($Jobs -gt 1) {
$paramLines += "@DatabasesInParallel = 'Y'"
}
# Join with commas and indentation to produce clean SQL parameter list
$params = $paramLines -join ",`n "
$query = "EXECUTE [dbo].[DatabaseBackup] `n $params"
Write-Log "SQL Query: $query"
@@ -185,7 +463,7 @@ function Start-BackupJob {
$jobLogFile = $BaseLogFile -replace '\.log$', "-job$JobId.log"
} else {
# Fallback log file path
$jobLogFile = "C:\Rubrik\backup-multi-job$JobId.log"
$jobLogFile = "C:\Rubrik\logs\backup-multi-job$JobId.log"
}
Write-Output "DEBUG: Job log file will be: '$jobLogFile'"
@@ -336,14 +614,14 @@ function Start-BackupJob {
}
# Start parallel backup jobs
Write-Log "Starting $Jobs parallel backup jobs using DatabasesInParallel feature"
Write-Log "Starting $Jobs parallel backup jobs"
[System.Collections.ArrayList]$jobList = @()
for ($i = 1; $i -le $Jobs; $i++) {
$job = Start-BackupJob -jobId $i -sqlInstance $SqlInstance -query $query -baseLogFile $logFile
$null = $jobList.Add($job)
Write-Log "Started backup job $i (Job ID: $($job.Id))"
Start-Sleep -Milliseconds 4000 # Delay to stagger job starts
}
# Monitor jobs and capture output
@@ -437,28 +715,6 @@ foreach ($job in $jobList) {
}
}
# Consolidate job logs into main log file
Write-Log "Consolidating job logs..."
for ($i = 1; $i -le $Jobs; $i++) {
$jobLogFile = $logFile -replace '\.log$', "-job$i.log"
Write-Log "Checking for job log file: $jobLogFile"
if (Test-Path $jobLogFile) {
try {
$jobContent = Get-Content $jobLogFile -ErrorAction Stop
Write-Log "Found $($jobContent.Count) lines in job $i log"
foreach ($line in $jobContent) {
Add-Content -Path $logFile -Value $line -Encoding UTF8
}
Remove-Item $jobLogFile -Force
Write-Log "Consolidated log from job $i"
} catch {
Write-Log "WARNING: Could not consolidate log from job $i : $($_.Exception.Message)"
}
} else {
Write-Log "WARNING: Job log file not found for job $i"
}
}
# Final status check using job output analysis
$failedJobIds = $jobResults.Keys | Where-Object { $jobResults[$_].Failed -eq $true }