Copes with single and multi-job runs
backupmult.ps1
@@ -6,22 +6,21 @@ param(
     [string]$MvName,
 
     [Parameter(Mandatory=$false)]
-    [int]$Jobs = 2
+    [int]$Jobs = 2,
+
+    [Parameter(Mandatory=$false)]
+    [int]$LogRetentionDays = 30
 )
 
-#
 # backupmult.ps1 - Parallel database backup script using Ola H
 #
 # Uses Ola H's built-in parallel processing by starting multiple concurrent backup jobs
-# Each job will automatically share the database load using DatabasesInParallel=Y
+# Each job will automatically share the database load using DatabasesInParallel=Y if Jobs>1
 
-# TODO: Log file management (don't just overwrite existing logs)
 # TODO: See if there is way to query QueueDatabase during backup to monitor progress
 
 $fullBackupDay = 'Thursday'
 $fullBackupOverdueDays = 7
-$instanceName = $SqlInstance.Split('\')[1]
-$logFile = "C:\Rubrik\logs\backup_$instanceName.log"
 $SAFile = "C:\Rubrik\scripts\rbksql.xml"
 
 function Write-Log($message, $jobId = "") {
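For orientation, a minimal usage sketch of the reworked parameter block. The server and managed-volume values are placeholders, -SqlInstance is assumed from its use in the script body (only $MvName, $Jobs and $LogRetentionDays are visible in this hunk), and any parameters declared above line 6 are omitted:

    # Single-job run; DatabasesInParallel stays off, logs kept for the default 30 days
    .\backupmult.ps1 -SqlInstance 'SERVER01\PROD' -MvName 'mv_prod_backups' -Jobs 1

    # Two concurrent jobs; log files under C:\Rubrik\logs older than 14 days are pruned
    .\backupmult.ps1 -SqlInstance 'SERVER01\PROD' -MvName 'mv_prod_backups' -Jobs 2 -LogRetentionDays 14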
@@ -50,6 +49,82 @@ function Write-Log($message, $jobId = "") {
     Write-Host $logEntry
 }
 
+# Parse instance name from SQL instance parameter
+$instanceParts = $SqlInstance -split '\\'
+if ($instanceParts.Length -eq 2) {
+    $instanceName = $instanceParts[1]
+} elseif ($instanceParts.Length -eq 1) {
+    $instanceName = $instanceParts[0]
+} else {
+    $instanceName = $SqlInstance.Replace('\\', '_').Replace('/', '_')
+}
+
+if ([string]::IsNullOrEmpty($instanceName)) {
+    Write-Host "ERROR: Could not determine instance name from SqlInstance: '$SqlInstance'"
+    exit 1
+}
+
+# Sanitize and trim the instance name for safe filenames
+$instanceName = $instanceName.Trim()
+$invalidChars = [IO.Path]::GetInvalidFileNameChars()
+foreach ($c in $invalidChars) {
+    $escaped = [regex]::Escape($c)
+    $instanceName = $instanceName -replace $escaped, '_'
+}
+
+$timestamp = Get-Date -Format "yyyyMMdd_HHmmss"
+$logDir = "C:\Rubrik\logs"
+# Ensure log directory exists before building/using log file
+if (-not (Test-Path $logDir)) {
+    try {
+        New-Item -ItemType Directory -Path $logDir -Force | Out-Null
+    } catch {
+        Write-Host "ERROR: Could not create log directory $logDir : $($_.Exception.Message)"
+        exit 1
+    }
+}
+
+$logFileName = "backup_{0}_{1}.log" -f $instanceName, $timestamp
+$logFile = Join-Path $logDir $logFileName
+
+Write-Log "DEBUG: SqlInstance='$SqlInstance', instanceName='$instanceName', logFile='$logFile'"
+
+# Function to clean up old log files
+function Remove-OldLogs {
+    param([int]$retentionDays)
+
+    $logDir = "C:\Rubrik\logs"
+    if (-not (Test-Path $logDir)) {
+        try {
+            New-Item -ItemType Directory -Path $logDir -Force | Out-Null
+            Write-Log "INFO: Created log directory: $logDir"
+        } catch {
+            Write-Log "ERROR: Failed to create log directory $logDir. $($_.Exception.Message)"
+            return
+        }
+    }
+
+    $cutoffDate = (Get-Date).AddDays(-$retentionDays)
+    Write-Log "INFO: Cleaning up log files older than $retentionDays days (before $($cutoffDate.ToString('yyyy-MM-dd')))"
+
+    $oldLogs = Get-ChildItem -Path $logDir -Filter "*.log" | Where-Object { $_.LastWriteTime -lt $cutoffDate }
+    $deletedCount = 0
+
+    foreach ($logFile in $oldLogs) {
+        try {
+            Remove-Item $logFile.FullName -Force
+            $deletedCount++
+        } catch {
+            Write-Log "WARNING: Failed to delete old log file $($logFile.Name): $($_.Exception.Message)"
+        }
+    }
+
+    Write-Log "INFO: Cleaned up $deletedCount old log files"
+}
+
+# Clean up old logs before starting
+Remove-OldLogs -retentionDays $LogRetentionDays
+
 # Import SQL Server PowerShell module
 try {
     if (Get-Module -ListAvailable -Name SqlServer) {
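To illustrate the log naming scheme this hunk introduces, here is a stand-alone sketch of the same parsing and sanitising steps applied to two example -SqlInstance values (the server names and the timestamp in the comment are invented):

    foreach ($SqlInstance in 'SERVER01\PROD', 'SERVER02') {
        $parts = $SqlInstance -split '\\'
        $name  = if ($parts.Length -eq 2) { $parts[1] } else { $parts[0] }
        foreach ($c in [IO.Path]::GetInvalidFileNameChars()) {
            $name = $name -replace [regex]::Escape($c), '_'
        }
        $stamp = Get-Date -Format "yyyyMMdd_HHmmss"
        # e.g. C:\Rubrik\logs\backup_PROD_20240516_021500.log and backup_SERVER02_20240516_021500.log
        Join-Path 'C:\Rubrik\logs' ("backup_{0}_{1}.log" -f $name, $stamp)
    }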
@@ -69,8 +144,8 @@ try {
     }
 }
 catch {
-    Write-Host "ERROR: Failed to import SQL Server PowerShell module. Please install it using: Install-Module -Name SqlServer -AllowClobber"
-    Write-Host "ERROR: $($_.Exception.Message)"
+    Write-Log "ERROR: Failed to import SQL Server PowerShell module. Please install it using: Install-Module -Name SqlServer -AllowClobber"
+    Write-Log "ERROR: $($_.Exception.Message)"
     exit 1
 }
 
@@ -141,7 +216,7 @@ if ($clusterInstance) {
 $query.gqlRequest().Variables
 
 if (-not $dryrun) {
-    $result = $query.Invoke()
+    $query.Invoke()
 } else {
     Write-Log "Dry run mode: Managed Volume update not invoked."
 }
@@ -184,12 +259,12 @@ try {
 $directoryParam = $paths -join ', '
 
 # Validate job count
-if ($Jobs -lt 1 -or $Jobs -gt 8) {
-    Write-Host "ERROR: Jobs parameter must be between 1 and 8. Provided: $Jobs"
+if ($Jobs -lt 1 -or $Jobs -gt 4) {
+    Write-Log "ERROR: Jobs parameter must be between 1 and 4. Provided: $Jobs"
     exit 1
 }
 
-Write-Host "INFO: Starting $Jobs parallel backup jobs"
+Write-Log "INFO: Starting $Jobs parallel backup jobs"
 
 $today = (Get-Date).Date
 
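For comparison, the same 1 to 4 bound can also be expressed declaratively with PowerShell's ValidateRange attribute, which rejects bad values before the script body runs. This is only an illustrative alternative, not what backupmult.ps1 does:

    param(
        [Parameter(Mandatory=$false)]
        [ValidateRange(1, 4)]
        [int]$Jobs = 2
    )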
@@ -262,18 +337,27 @@ function Get-BackupType($directoryParam) {
 $backupInfo = Get-BackupType $directoryParam
 Write-Log "Selected $($backupInfo.Type) backup ($($backupInfo.Reason))"
 
-# Build the Ola H query with DatabasesInParallel enabled
-$query = @"
-EXECUTE [dbo].[DatabaseBackup]
-@Databases = 'ALL_DATABASES',
-@Directory = '$directoryParam',
-@BackupType = '$($backupInfo.Type)',
-@Verify = 'N',
-@CleanupTime = $($backupInfo.CleanupTime),
-@CheckSum = 'Y',
-@LogToTable = 'Y',
-@DatabasesInParallel = 'Y'
-"@
+# Build the Ola H query. Include DatabasesInParallel only when multiple jobs are used
+# Build parameter lines so we can avoid leaving a trailing comma when omitting DatabasesInParallel
+$paramLines = @(
+    "@Databases = 'ALL_DATABASES'",
+    "@Directory = '$directoryParam'",
+    "@BackupType = '$($backupInfo.Type)'",
+    "@Verify = 'N'",
+    "@CleanupTime = $($backupInfo.CleanupTime)",
+    "@CheckSum = 'Y'",
+    "@LogToTable = 'Y'"
+)
+# Only enable DatabasesInParallel when we run more than one job
+if ($Jobs -gt 1) {
+    $paramLines += "@DatabasesInParallel = 'Y'"
+}
+
+# Join with commas and indentation to produce clean SQL parameter list
+$params = $paramLines -join ",`n    "
+
+$query = "EXECUTE [dbo].[DatabaseBackup] `n    $params"
+
 Write-Log "SQL Query: $query"
 
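The point of this rework is that the generated T-SQL stays syntactically valid in both modes: with a single job the parameter list ends at @LogToTable (no trailing comma), and only multi-job runs append @DatabasesInParallel. A small stand-alone sketch of the same join logic, using a deliberately abridged parameter list, makes the difference visible:

    foreach ($Jobs in 1, 2) {
        $paramLines = @("@Databases = 'ALL_DATABASES'", "@LogToTable = 'Y'")   # abridged list
        if ($Jobs -gt 1) { $paramLines += "@DatabasesInParallel = 'Y'" }
        "---- Jobs = $Jobs ----"
        "EXECUTE [dbo].[DatabaseBackup] `n    " + ($paramLines -join ",`n    ")
    }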
@@ -296,7 +380,7 @@ function Start-BackupJob {
     $jobLogFile = $BaseLogFile -replace '\.log$', "-job$JobId.log"
 } else {
     # Fallback log file path
-    $jobLogFile = "C:\Rubrik\backup-multi-job$JobId.log"
+    $jobLogFile = "C:\Rubrik\logs\backup-multi-job$JobId.log"
 }
 
 Write-Output "DEBUG: Job log file will be: '$jobLogFile'"
@@ -447,7 +531,7 @@ function Start-BackupJob {
 }
 
 # Start parallel backup jobs
-Write-Log "Starting $Jobs parallel backup jobs using DatabasesInParallel feature"
+Write-Log "Starting $Jobs parallel backup jobs"
 [System.Collections.ArrayList]$jobList = @()
 
 for ($i = 1; $i -le $Jobs; $i++) {
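The loop body is cut off in this hunk, but the general PowerShell pattern for fanning out $Jobs workers and waiting for them looks roughly like the sketch below. It is the generic Start-Job/Wait-Job idiom, not the script's exact implementation; the Invoke-Sqlcmd call, the target database, and the job names are assumptions:

    for ($i = 1; $i -le $Jobs; $i++) {
        $job = Start-Job -Name "backup-job$i" -ScriptBlock {
            param($instance, $sql)
            # Each worker runs the same DatabaseBackup call; with @DatabasesInParallel = 'Y'
            # the queue (QueueDatabase) shares the databases out between the workers.
            Invoke-Sqlcmd -ServerInstance $instance -Database 'master' -Query $sql
        } -ArgumentList $SqlInstance, $query
        [void]$jobList.Add($job)
    }
    $jobList | Wait-Job | Receive-Job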