Compare commits

...

21 Commits

Author SHA1 Message Date
00f3f3c92e Ready for final release 2025-11-25 11:31:51 +00:00
1758cbc5f8 Latest updated 2025-11-14 12:12:11 +00:00
fbb8726e2a Verification improvements 2025-10-30 13:01:02 +00:00
a7737ebe6f Added verify 2025-10-30 11:30:24 +00:00
8c203251ad first restore script 2025-10-30 11:00:36 +00:00
ecf3909a4e Added Nuke flag 2025-10-29 14:50:08 +00:00
4e30587768 copes with single and multi jobs 2025-10-29 11:14:46 +00:00
a87f09d8d9 logs 2025-10-29 10:17:11 +00:00
fa20c5484e checkpoint 2025-10-24 14:49:14 +01:00
57ec688996 Logging fixes 2025-10-22 17:41:14 +01:00
88bdab08d2 Error checking 2025-10-22 17:31:50 +01:00
df988abfd8 First parallel script 2025-10-22 15:19:48 +01:00
5a26b80bb6 Use SQL PS module 2025-10-22 13:19:32 +01:00
e576ee3954 WIP 2025-10-22 10:25:44 +01:00
907449914a Parameterise setActiveNode 2025-10-22 10:10:09 +01:00
14966f87a1 Pre scriupt wrapper 2025-10-09 10:38:37 +01:00
d964631532 Correct crash on PowerShell 5 2025-10-09 10:36:25 +01:00
9d0e2f7fde Parameterise instance name for backup script 2025-10-09 10:09:16 +01:00
1ac04e0b79 Tidy up and new file to create SA creds 2025-10-07 17:59:04 +01:00
4182f24f3d Tidy up 2025-09-24 15:01:42 +01:00
0b69876b40 New file to set Active Node 2025-09-12 17:34:30 +01:00
14 changed files with 2799 additions and 68 deletions

2
.gitignore vendored
View File

@@ -4,6 +4,8 @@
# Ignore backup files and flags
*.bak
*.flag
*.json
*.xml
# Ignore PowerShell/Batch temporary files
*.ps1~

237
README.md Normal file
View File

@@ -0,0 +1,237 @@
# backupmult.ps1 - Parallel SQL Server Database Backup Script
A PowerShell script that performs parallel database backups using Ola Hallengren's DatabaseBackup stored procedure with the `DatabasesInParallel` feature for optimal performance and load distribution.
## Features
- **Parallel Processing**: Utilizes Ola Hallengren's `DatabasesInParallel=Y` feature to automatically distribute database backup workloads across multiple concurrent jobs
- **Smart Backup Type Detection**: Automatically determines whether to run FULL, DIFFERENTIAL, or LOG backups based on schedule and overdue status
- **Multi-Directory Support**: Supports striped backups across multiple directories for improved I/O performance
- **Comprehensive Logging**: Thread-safe logging with job-specific log files that are consolidated into a main log
- **Error Handling**: Robust error detection and reporting with detailed SQL error information
- **Job Monitoring**: Real-time monitoring of backup job progress and completion status
- **Mount Point Validation**: Optional mount point checking to ensure backup destinations are accessible
## Requirements
### System Requirements
- **PowerShell**: Version 5.1 or higher
- **Operating System**: Windows Server 2016 or later (or Windows 10/11 for development/testing)
- **SQL Server**: SQL Server 2016 or later (Express, Standard, Enterprise, or Developer editions)
- **Permissions**: SQL Server sysadmin privileges or appropriate database backup permissions
### Software Dependencies
- **Ola Hallengren Maintenance Solution**: Must be installed on the target SQL Server instance
- Download from: https://ola.hallengren.com/
- Install the DatabaseBackup stored procedure
- **SQL Server PowerShell Module**: Either `SqlServer` or `SQLPS` module must be available
- Install with: `Install-Module -Name SqlServer -AllowClobber`
### Network Requirements
- **SQL Server Connectivity**: The script must be able to connect to the target SQL Server instance
- **Backup Destination Access**: Read/write access to all specified backup directories
- **Mount Points**: If using mount points, they must be accessible and have sufficient space
## Installation
1. **Download the Script**:
```powershell
# Place backupmult.ps1 in your preferred scripts directory
# Example: C:\Rubrik\Scripts\
```
2. **Install Ola Hallengren Maintenance Solution**:
```sql
-- Run on your SQL Server instance
-- Download from https://ola.hallengren.com/
-- Execute the installation script
```
3. **Install SQL Server PowerShell Module** (if not already installed):
```powershell
Install-Module -Name SqlServer -AllowClobber -Force
```
4. **Verify Permissions**:
- Ensure the account running the script has SQL Server sysadmin privileges
- Verify write access to backup directories and log file locations
## Usage
### Basic Syntax
```powershell
.\backupmult.ps1 -SqlInstance "ServerName\InstanceName" [parameters]
```
### Parameters
| Parameter | Type | Required | Default | Description |
|-----------|------|----------|---------|-------------|
| `SqlInstance` | String | Yes | - | SQL Server instance name (e.g., "SERVER01\SQL2019") |
| `Directories` | String | No | Auto-generated | Comma-separated list of backup directories |
| `Jobs` | Integer | No | 2 | Number of parallel backup jobs (1-8) |
| `Force` | Switch | No | False | Skip mount point accessibility checks |
### Examples
#### Basic Usage with Default Settings
```powershell
.\backupmult.ps1 -SqlInstance "PRODSQL01\SQL2019"
```
- Uses 2 parallel jobs
- Auto-generates 4 backup directories under `C:\Rubrik\SQL2019\`
- Performs appropriate backup type based on schedule
#### Custom Directories and Job Count
```powershell
.\backupmult.ps1 -SqlInstance "TESTSQL\SQL2022" -Directories "D:\Backups\Dir1, E:\Backups\Dir2, F:\Backups\Dir3" -Jobs 3
```
- Uses 3 parallel jobs
- Backs up to specified directories
- Striped backup across 3 volumes
#### Force Mode (Skip Mount Checks)
```powershell
.\backupmult.ps1 -SqlInstance "DEVSQL" -Force
```
- Skips mount point accessibility validation
- Useful for scenarios where mount points may not be immediately accessible
### Backup Type Logic
The script automatically determines the backup type based on:
1. **FULL Backup**:
- Scheduled: Every Thursday (configurable)
- Overdue: If more than 7 days since last full backup
- Flag file: `last_full.flag` in the first backup directory
2. **DIFFERENTIAL Backup**:
- Scheduled: Daily (except full backup days)
- Flag file: `last_diff.flag` in the first backup directory
3. **LOG Backup**:
- Default: When full or diff already completed for the day
- Continuous: For transaction log backups
## Output and Logging
### Console Output
- Clean, essential status messages
- Job progress and completion notifications
- Error messages and warnings
- Suppresses verbose Ola Hallengren messages for better readability
### Log Files
- **Main Log**: `C:\Rubrik\backup-multi-{InstanceName}.log`
- Consolidated log with all job outputs
- Timestamped entries with job identification
- **Job Logs**: `C:\Rubrik\backup-multi-{InstanceName}-job{N}.log`
- Individual job-specific logs
- Automatically consolidated into main log after completion
- Include detailed SQL messages and errors
### Log Format
```
2025-10-22 14:30:15 [JOB-1] Starting backup job
2025-10-22 14:30:15 [JOB-1] Executing backup command...
2025-10-22 14:30:16 [JOB-1] Backup completed successfully
```
## Error Handling
### SQL Connection Errors
- Detected and reported with detailed error information
- Includes procedure name and line number for SQL errors
- Jobs marked as "FAILED" with comprehensive error details
### Mount Point Issues
- Validates backup directory accessibility (unless `-Force` is used)
- Reports inaccessible mount points before starting backups
- Prevents failed backups due to storage issues
### Job Monitoring
- Real-time monitoring of PowerShell background jobs
- Automatic cleanup of completed jobs
- Timeout handling for unresponsive jobs
## Troubleshooting
### Common Issues
#### "No SQL Server PowerShell module found"
**Solution**: Install the SqlServer module
```powershell
Install-Module -Name SqlServer -AllowClobber -Force
```
#### "Ola Hallengren procedures not found"
**Solution**: Install Ola Hallengren Maintenance Solution on the SQL Server instance
```sql
-- Download and execute the installation script from https://ola.hallengren.com/
```
#### "Access denied" to backup directories
**Solution**: Ensure the service account has read/write permissions to all backup directories
#### Jobs fail with mount point errors
**Solution**: Use `-Force` parameter to skip mount checks, or ensure mount points are accessible
#### Log files not consolidating
**Solution**: Check permissions on log file directory and ensure no file locks exist
### Debug Mode
Enable debug output by modifying the script to show debug messages:
- Look for `Write-Output "DEBUG:"` lines in the job scriptblock
- Check job-specific log files for detailed error information
### Performance Tuning
- **Job Count**: Start with 2-4 jobs and monitor SQL Server performance
- **Directories**: Use separate physical disks/volumes for striped backups
- **Memory**: Ensure adequate memory for parallel operations
- **Network**: Consider network bandwidth for remote backup destinations
## Configuration
### Backup Schedule
- **Full Backups**: Thursdays (modify `$fullBackupDay` variable)
- **Overdue Threshold**: 7 days (modify `$fullBackupOverdueDays` variable)
- **Cleanup Time**: 168 hours (7 days) for full/diff, 24 hours for log
### Directory Structure
Default structure: `C:\Rubrik\{InstanceName}\Dir1, Dir2, Dir3, Dir4`
- Modify the default directory logic in the script for custom structures
- Ensure all directories exist and are writable
### Log File Location
Default: `C:\Rubrik\backup-multi-{InstanceName}.log`
- Modify `$logFile` variable for custom log locations
- Ensure log directory exists and is writable
## Security Considerations
- **SQL Permissions**: Requires sysadmin or appropriate backup permissions
- **File System Access**: Read/write access to backup directories and log locations
- **Service Account**: Use dedicated service account with minimal required permissions
- **Log Security**: Log files may contain sensitive database information
## Support and Maintenance
### Monitoring
- Regularly review log files for errors and warnings
- Monitor backup completion times and success rates
- Check disk space usage in backup directories
### Maintenance Tasks
- Clean up old backup files according to retention policies
- Archive and rotate log files periodically
- Update Ola Hallengren scripts when new versions are available
### Version History
- Track changes to the backup script
- Test updates in development environments before production deployment
## License
This script is provided as-is for database backup automation. Ensure compliance with your organization's backup and retention policies.

304
RestoreScript.ps1 Normal file
View File

@@ -0,0 +1,304 @@
param(
    # Root path of the live-mounted backup share to scan for .bak/.trn files.
    [Parameter(Mandatory=$true)]
    [string]$LiveMountRoot,

    # Optional database name used to filter catalog/verify output;
    # required for the "restore" action (enforced in the main script body).
    [Parameter(Mandatory=$false)]
    [string]$DatabaseName,

    # Operation to perform against the catalogued backups.
    [Parameter(Mandatory=$true)]
    [ValidateSet("catalog", "restore", "verify")]
    [string]$Action,

    # Target SQL Server instance. BUG FIX: this was declared Mandatory=$true
    # while also carrying a default value; PowerShell ignores defaults on
    # mandatory parameters and prompts the user anyway, so the default was
    # dead code. Declaring it optional lets the default actually apply and
    # remains backward compatible for callers that pass the value explicitly.
    [Parameter(Mandatory=$false)]
    [string]$SqlInstance = "sqlfcsql\TESTINST"
)
# Scan a directory tree for SQL Server backup files (.bak/.trn) and build a
# nested catalog keyed as: catalog[dbName][type][dateTimeKey] -> list of
# @{File; Stripe} entries. File names are expected to look like
# [prefix_]dbName_TYPE_date_time[_stripe]; anything that does not parse is
# skipped silently.
function Catalog-Backups {
    param([string]$Path)

    $catalog = @{}
    $candidates = Get-ChildItem -Path $Path -Recurse -File |
        Where-Object { $_.Extension -eq '.bak' -or $_.Extension -eq '.trn' }

    foreach ($candidate in $candidates) {
        $tokens = $candidate.BaseName -split '_'
        if ($tokens.Length -lt 4) { continue }

        # Locate the backup-type token (FULL/DIFF/LOG) within the name.
        $typePos = -1
        for ($idx = 0; $idx -lt $tokens.Length; $idx++) {
            if (@('FULL', 'DIFF', 'LOG') -contains $tokens[$idx]) {
                $typePos = $idx
                break
            }
        }
        # Need the type token plus at least date and time tokens after it.
        if ($typePos -eq -1 -or ($tokens.Length - $typePos) -lt 3) { continue }

        # tokens[0] is a prefix when the type appears later than position 1;
        # otherwise tokens[0] itself is the database name.
        $dbName = if ($typePos -eq 1) {
            $tokens[0]
        } else {
            $tokens[1..($typePos - 1)] -join '_'
        }

        $backupType = $tokens[$typePos]
        # Composite sort key: date string immediately followed by time string.
        $stamp = "$($tokens[$typePos + 1])$($tokens[$typePos + 2])"
        $stripe = if ($tokens.Length -gt $typePos + 3) { [int]$tokens[$typePos + 3] } else { 0 }

        if (-not $catalog.ContainsKey($dbName)) { $catalog[$dbName] = @{} }
        if (-not $catalog[$dbName].ContainsKey($backupType)) { $catalog[$dbName][$backupType] = @{} }
        if (-not $catalog[$dbName][$backupType].ContainsKey($stamp)) { $catalog[$dbName][$backupType][$stamp] = @() }
        $catalog[$dbName][$backupType][$stamp] += @{ File = $candidate; Stripe = $stripe }
    }

    return $catalog
}
# Print a human-readable listing of the backup catalog to the console:
# each database, its backup types, each backup set (newest first), and the
# stripe files of each set in stripe order.
function Report-Catalog {
    param([hashtable]$Catalog)

    Write-Host "Database Backups Catalog:"
    Write-Host "========================="
    foreach ($databaseName in $Catalog.Keys | Sort-Object) {
        Write-Host "Database: $databaseName"
        foreach ($backupType in $Catalog[$databaseName].Keys | Sort-Object) {
            Write-Host " Type: $backupType"
            foreach ($stamp in $Catalog[$databaseName][$backupType].Keys | Sort-Object -Descending) {
                Write-Host " Backup: $stamp"
                $orderedStripes = $Catalog[$databaseName][$backupType][$stamp] | Sort-Object { $_.Stripe }
                foreach ($entry in $orderedStripes) {
                    Write-Host " $($entry.File.FullName)"
                }
            }
        }
        Write-Host ""
    }
}
# Restore a database from the catalogued backups: the latest FULL, then the
# most recent DIFFERENTIAL taken after that FULL (if any), then the LOG chain,
# and finally bring the database online with RECOVERY.
function Restore-Database {
    param([string]$DbName, [hashtable]$Catalog, [string]$Instance)

    if (-not $Catalog.ContainsKey($DbName)) {
        Write-Error "Database $DbName not found in catalog"
        return
    }
    $dbCatalog = $Catalog[$DbName]

    # Find the latest FULL backup.
    if (-not $dbCatalog.ContainsKey('FULL')) {
        Write-Error "No FULL backup found for database $DbName"
        return
    }
    $latestFullKey = $dbCatalog['FULL'].Keys | Sort-Object -Descending | Select-Object -First 1
    $fullFiles = $dbCatalog['FULL'][$latestFullKey] | Sort-Object { $_.Stripe } | ForEach-Object { $_.File }

    # Restore FULL with NORECOVERY. NOTE(review): file paths are interpolated
    # into the T-SQL string; a path containing a single quote would break the
    # statement — acceptable here since paths come from the mounted share.
    $fileList = $fullFiles | ForEach-Object { "DISK = '$($_.FullName)'" }
    $restoreQuery = "RESTORE DATABASE [$DbName] FROM $($fileList -join ', ') WITH NORECOVERY"
    Write-Host "Restoring FULL backup for $DbName..."
    Invoke-Sqlcmd -ServerInstance $Instance -Query $restoreQuery -QueryTimeout 0 -ErrorAction Stop

    # Track how far the restore chain has advanced; LOG backups taken at or
    # before this key must be skipped or SQL Server rejects them as ending
    # too early (which, with -ErrorAction Stop, would abort the restore).
    $chainKey = $latestFullKey

    # BUG FIX: the original restored EVERY differential after the full and
    # then replayed logs filtered only against the FULL key. Each differential
    # is based on the full, so only the latest one is needed; and once a DIFF
    # is applied, logs that end before it fail to restore. Apply only the
    # latest DIFF, then advance the log filter past it.
    if ($dbCatalog.ContainsKey('DIFF')) {
        $latestDiffKey = $dbCatalog['DIFF'].Keys |
            Where-Object { $_ -gt $latestFullKey } |
            Sort-Object -Descending |
            Select-Object -First 1
        if ($latestDiffKey) {
            $diffFiles = $dbCatalog['DIFF'][$latestDiffKey] | Sort-Object { $_.Stripe } | ForEach-Object { $_.File }
            $fileList = $diffFiles | ForEach-Object { "DISK = '$($_.FullName)'" }
            $restoreQuery = "RESTORE DATABASE [$DbName] FROM $($fileList -join ', ') WITH NORECOVERY"
            Write-Host "Applying DIFF backup $latestDiffKey for $DbName..."
            Invoke-Sqlcmd -ServerInstance $Instance -Query $restoreQuery -QueryTimeout 0 -ErrorAction Stop
            $chainKey = $latestDiffKey
        }
    }

    # Apply LOG backups taken after the restored FULL/DIFF point, oldest first.
    if ($dbCatalog.ContainsKey('LOG')) {
        $logKeys = $dbCatalog['LOG'].Keys | Where-Object { $_ -gt $chainKey } | Sort-Object
        foreach ($key in $logKeys) {
            $logFiles = $dbCatalog['LOG'][$key] | Sort-Object { $_.Stripe } | ForEach-Object { $_.File }
            $fileList = $logFiles | ForEach-Object { "DISK = '$($_.FullName)'" }
            $restoreQuery = "RESTORE LOG [$DbName] FROM $($fileList -join ', ') WITH NORECOVERY"
            Write-Host "Applying LOG backup $key for $DbName..."
            Invoke-Sqlcmd -ServerInstance $Instance -Query $restoreQuery -QueryTimeout 0 -ErrorAction Stop
        }
    }

    # Final recovery brings the database online.
    $restoreQuery = "RESTORE DATABASE [$DbName] WITH RECOVERY"
    Write-Host "Finalizing restore for $DbName..."
    Invoke-Sqlcmd -ServerInstance $Instance -Query $restoreQuery -QueryTimeout 0 -ErrorAction Stop
    Write-Host "Restore completed for $DbName"
}
# Summarise verified backup headers for one database: FULL and DIFF entries
# with their LSN ranges, plus the LOG chain's point-in-time coverage and any
# LSN gaps detected between consecutive log backups.
function Print-BackupSummary {
    param([string]$DbName, [hashtable]$Headers)

    Write-Host "Backup Summary for database: $DbName"
    Write-Host "==================================="

    if ($Headers.ContainsKey('FULL')) {
        Write-Host "FULL Backups:"
        foreach ($entry in ($Headers['FULL'] | Sort-Object { $_.Key })) {
            $hdr = $entry.Header
            Write-Host " Date: $($hdr.BackupFinishDate) | LSN Range: $($hdr.FirstLSN) - $($hdr.LastLSN)"
        }
    }

    if ($Headers.ContainsKey('DIFF')) {
        Write-Host "DIFFERENTIAL Backups:"
        foreach ($entry in ($Headers['DIFF'] | Sort-Object { $_.Key })) {
            $hdr = $entry.Header
            Write-Host " Date: $($hdr.BackupFinishDate) | Base LSN: $($hdr.DifferentialBaseLSN) | LSN Range: $($hdr.FirstLSN) - $($hdr.LastLSN)"
        }
    }

    if ($Headers.ContainsKey('LOG')) {
        $chain = $Headers['LOG'] | Sort-Object { $_.Key }
        if ($chain.Count -gt 0) {
            $firstLog = $chain[0].Header
            $lastLog = $chain[-1].Header
            Write-Host "LOG Backups:"
            Write-Host " Point-in-Time Range: $($firstLog.BackupStartDate) to $($lastLog.BackupFinishDate)"
            Write-Host " LSN Range: $($firstLog.FirstLSN) - $($lastLog.LastLSN)"

            # A contiguous chain has each backup's FirstLSN equal to the
            # previous backup's LastLSN; anything else is reported as a gap.
            $gaps = @()
            for ($pos = 1; $pos -lt $chain.Count; $pos++) {
                $prevLast = $chain[$pos - 1].Header.LastLSN
                $currFirst = $chain[$pos].Header.FirstLSN
                if ($prevLast -ne $currFirst) {
                    $gaps += "Gap between $($chain[$pos - 1].Key) (LSN $($prevLast)) and $($chain[$pos].Key) (LSN $($currFirst))"
                }
            }
            if ($gaps.Count -gt 0) {
                Write-Host " *** MISSING RANGES ***"
                foreach ($gap in $gaps) {
                    Write-Host " $gap"
                }
            } else {
                Write-Host " No gaps detected in LOG sequence"
            }
        }
    }

    if (-not ($Headers.ContainsKey('FULL') -or $Headers.ContainsKey('DIFF') -or $Headers.ContainsKey('LOG'))) {
        Write-Host "No backup headers retrieved"
    }
}
# Function to verify backups
# For every catalogued backup set (all stripes passed together as one set),
# runs RESTORE VERIFYONLY on the target instance; on success, fetches the
# backup header via Get-BackupInfo, prints its key fields, and accumulates
# the headers so Print-BackupSummary can report per-database LSN coverage.
function Verify-Backups {
param([hashtable]$Catalog, [string]$Instance)
# NOTE: $catalog below resolves to the $Catalog parameter — PowerShell
# variable names are case-insensitive.
foreach ($db in $catalog.Keys | Sort-Object) {
Write-Host "Verifying backups for database: $db"
# Headers collected for this database, keyed by backup type (FULL/DIFF/LOG).
$headers = @{}
foreach ($type in $catalog[$db].Keys | Sort-Object) {
foreach ($key in $catalog[$db][$type].Keys | Sort-Object) {
# Stripes must be presented together, in stripe order, as one backup set.
$files = $catalog[$db][$type][$key] | Sort-Object { $_.Stripe } | ForEach-Object { $_.File }
$fileList = $files | ForEach-Object { "DISK = '$($_.FullName)'" }
$verifyQuery = "RESTORE VERIFYONLY FROM $($fileList -join ', ')"
Write-Host "Verifying $type backup $key for $db..."
$verified = $false
try {
Invoke-Sqlcmd -ServerInstance $Instance -Query $verifyQuery -QueryTimeout 0 -ErrorAction Stop
Write-Host "Verification successful for $type $key"
$verified = $true
} catch {
# A failed verification is reported but does not stop the sweep.
Write-Host "Verification failed for $type $key : $($_.Exception.Message)"
}
# Get backup header information only if verified
if ($verified) {
$header = Get-BackupInfo -Files $files -Instance $Instance
if ($header) {
Write-Host " Backup Details:"
Write-Host " Start Date: $($header.BackupStartDate)"
Write-Host " Finish Date: $($header.BackupFinishDate)"
Write-Host " First LSN: $($header.FirstLSN)"
Write-Host " Last LSN: $($header.LastLSN)"
if ($header.DatabaseBackupLSN) {
Write-Host " Database Backup LSN: $($header.DatabaseBackupLSN)"
}
if ($header.DifferentialBaseLSN) {
Write-Host " Differential Base LSN: $($header.DifferentialBaseLSN)"
}
# Collect headers for summary
if (-not $headers.ContainsKey($type)) { $headers[$type] = @() }
$headers[$type] += @{ Key = $key; Header = $header }
}
}
}
}
# Print summary
Print-BackupSummary -DbName $db -Headers $headers
Write-Host ""
}
}
# Fetch RESTORE HEADERONLY metadata for a (possibly striped) backup set.
# Returns whatever Invoke-Sqlcmd produces for the header query, or $null
# (after a warning) if the query fails.
function Get-BackupInfo {
    param([System.IO.FileInfo[]]$Files, [string]$Instance)

    $diskClauses = $Files | ForEach-Object { "DISK = '$($_.FullName)'" }
    $headerQuery = "RESTORE HEADERONLY FROM $($diskClauses -join ', ')"
    try {
        return (Invoke-Sqlcmd -ServerInstance $Instance -Query $headerQuery -QueryTimeout 0 -ErrorAction Stop)
    } catch {
        Write-Warning "Failed to get header for $($Files[0].Name): $($_.Exception.Message)"
        return $null
    }
}
# Main script: dispatch on the validated -Action value. The catalog is
# rebuilt from the live mount on every invocation.
if ($Action -eq "catalog") {
    $catalog = Catalog-Backups -Path $LiveMountRoot
    if ($DatabaseName) {
        # Show only the requested database; warn instead of silently printing
        # an empty catalog when the name matches nothing on disk.
        $filteredCatalog = @{}
        if ($catalog.ContainsKey($DatabaseName)) {
            $filteredCatalog[$DatabaseName] = $catalog[$DatabaseName]
        } else {
            Write-Warning "Database '$DatabaseName' not found in catalog under '$LiveMountRoot'"
        }
        Report-Catalog -Catalog $filteredCatalog
    } else {
        Report-Catalog -Catalog $catalog
    }
} elseif ($Action -eq "restore") {
    # Restore needs an explicit target database; fail before the (potentially
    # slow) recursive catalog scan.
    if (-not $DatabaseName) {
        Write-Error "DatabaseName is required for restore action"
        exit 1
    }
    $catalog = Catalog-Backups -Path $LiveMountRoot
    Restore-Database -DbName $DatabaseName -Catalog $catalog -Instance $SqlInstance
} elseif ($Action -eq "verify") {
    $catalog = Catalog-Backups -Path $LiveMountRoot
    if ($DatabaseName) {
        $filteredCatalog = @{}
        if ($catalog.ContainsKey($DatabaseName)) {
            $filteredCatalog[$DatabaseName] = $catalog[$DatabaseName]
        } else {
            # Previously an empty filter was verified silently, producing no
            # output at all; make the mismatch visible.
            Write-Warning "Database '$DatabaseName' not found in catalog under '$LiveMountRoot'"
        }
    } else {
        $filteredCatalog = $catalog
    }
    Verify-Backups -Catalog $filteredCatalog -Instance $SqlInstance
}

Binary file not shown.

View File

@@ -1,89 +1,742 @@
param(
[Parameter(Mandatory=$true)]
[string]$SqlInstance,
[Parameter(Mandatory=$true)]
[string]$MvName,
[Parameter(Mandatory=$false)]
[int]$Jobs = 1,
[Parameter(Mandatory=$false)]
[int]$LogRetentionDays = 30
,
[Parameter(Mandatory=$false)]
[switch]$Nuke
)
# backup.ps1 - Parallel database backup script using Ola H
#
# backup.ps1
#
# TODO: Update cleanup time based on backup type
# Uses Ola H's built-in parallel processing by starting multiple concurrent backup jobs
# Each job will automatically share the database load using DatabasesInParallel=Y if Jobs>1
# TODO: See if there is way to query QueueDatabase during backup to monitor progress
# TODO: Better trapping when RSC connection fails
$sqlInstance = "sqlfcsql\TESTINST"
#$directory = "H:\Backup"
$directory = "C:\Rubrik\mount"
$fullBackupDay = 'Tuesday'
$cleanupTime = 24
$checkCluster = $false
$fullBackupDay = 'Thursday'
$fullBackupOverdueDays = 7
$SAFile = "C:\Rubrik\scripts\rbksql.xml"
$logDir = "C:\Rubrik\logs"
$fullFlag = $directory + "\last_full.flag"
$diffFlag = $directory + "\last_diff.flag"
$today = (Get-Date).Date
$logFile = "C:\Rubrik\backup.log"
# Returns $true when the flag file at $flagPath exists and its trimmed content
# equals today's date formatted as yyyy-MM-dd; otherwise $false. Relies on the
# script-scope $today variable ((Get-Date).Date) being set before the call.
function FlagTakenToday($flagPath) {
if (Test-Path $flagPath) {
$flagDate = (Get-Content $flagPath | Out-String).Trim()
return ($flagDate -eq $today.ToString("yyyy-MM-dd"))
}
return $false
}
function Write-Log($message) {
function Write-Log($message, $jobId = "") {
$timestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
$logEntry = "$timestamp $message"
Add-Content -Path $logFile -Value $logEntry
$jobPrefix = if ($jobId) { "[JOB-$jobId] " } else { "" }
$logEntry = "$timestamp $jobPrefix$message"
# Use mutex for thread-safe logging to main log file
$mutex = $null
try {
$mutex = [System.Threading.Mutex]::new($false, "BackupLogMutex")
if ($mutex.WaitOne(5000)) { # 5 second timeout
Add-Content -Path $logFile -Value $logEntry -Encoding UTF8
} else {
Write-Warning "Could not acquire log mutex, writing to console only"
}
} catch {
Write-Warning "Logging error: $($_.Exception.Message)"
} finally {
if ($mutex) {
$mutex.ReleaseMutex()
$mutex.Dispose()
}
}
Write-Host $logEntry
}
if ($checkCluster) {
# Check if SQL instance is running locally
$localNode = $env:COMPUTERNAME
$instanceName = $sqlInstance.Split('\')[1]
$clusterInstance = Get-ClusterResource | Where-Object { $_.ResourceType -eq "SQL Server" -and $_.Name -eq "SQL Server ($instanceName)" }
if ($clusterInstance) {
$ownerNode = $clusterInstance.OwnerNode.Name
if ($ownerNode -ne $localNode) {
Write-Log "SQL instance '$sqlInstance' is not running on local node '$localNode'. Exiting script."
exit 1
} else {
Write-Log "SQL instance '$sqlInstance' is running on local node '$localNode'. Proceeding."
}
# Parse instance name from SQL instance parameter
$instanceParts = $SqlInstance -split '\\'
if ($instanceParts.Length -eq 2) {
# Format: HOSTNAME\INSTANCENAME or CLUSTERNAME\INSTANCENAME
$instanceName = $instanceParts[1]
# SqlInstance stays as provided (HOSTNAME\INSTANCENAME)
} elseif ($instanceParts.Length -eq 1) {
# Single value provided - could be hostname (default instance) or instance name on local host
$singleValue = $instanceParts[0]
# If it's "MSSQLSERVER" (default instance name), treat as default instance on local host
if ($singleValue -eq "MSSQLSERVER") {
$instanceName = "MSSQLSERVER"
$SqlInstance = $env:COMPUTERNAME # Connect to default instance (no instance name)
} else {
Write-Log "ERROR: SQL instance '$sqlInstance' not found in cluster resources."
# Assume it's an instance name on the local host
$instanceName = $singleValue
$SqlInstance = "$($env:COMPUTERNAME)\$singleValue"
}
} else {
$instanceName = $SqlInstance.Replace('\\', '_').Replace('/', '_')
}
if ([string]::IsNullOrEmpty($instanceName)) {
Write-Host "ERROR: Could not determine instance name from SqlInstance: '$SqlInstance'"
exit 1
}
# Sanitize and trim the instance name for safe filenames
$instanceName = $instanceName.Trim()
$invalidChars = [IO.Path]::GetInvalidFileNameChars()
foreach ($c in $invalidChars) {
$escaped = [regex]::Escape($c)
$instanceName = $instanceName -replace $escaped, '_'
}
$timestamp = Get-Date -Format "yyyyMMdd_HHmmss"
# Ensure log directory exists before building/using log file
if (-not (Test-Path $logDir)) {
try {
New-Item -ItemType Directory -Path $logDir -Force | Out-Null
} catch {
Write-Host "ERROR: Could not create log directory $logDir : $($_.Exception.Message)"
exit 1
}
} else {
Write-Log "INFO: Cluster check is disabled. Proceeding without verification."
}
if ((Get-Date).DayOfWeek -eq $fullBackupDay) {
if (-not (FlagTakenToday $fullFlag)) {
$backupType = "FULL"
Set-Content $fullFlag $today.ToString("yyyy-MM-dd")
Write-Log "Selected FULL backup. Flag updated."
$logFileName = "backup_{0}_{1}.log" -f $instanceName, $timestamp
$logFile = Join-Path $logDir $logFileName
Write-Log "DEBUG: SqlInstance='$SqlInstance', instanceName='$instanceName', logFile='$logFile'"
# Function to clean up old log files
# Deletes *.log files in the script-scope $logDir older than $retentionDays
# days, creating the directory first if needed. Progress and failures are
# reported through the script's Write-Log helper.
function Remove-OldLogs {
    param([int]$retentionDays)

    if (-not (Test-Path $logDir)) {
        try {
            New-Item -ItemType Directory -Path $logDir -Force | Out-Null
            Write-Log "INFO: Created log directory: $logDir"
        } catch {
            Write-Log "ERROR: Failed to create log directory $logDir. $($_.Exception.Message)"
            return
        }
    }

    $cutoffDate = (Get-Date).AddDays(-$retentionDays)
    Write-Log "INFO: Cleaning up log files older than $retentionDays days (before $($cutoffDate.ToString('yyyy-MM-dd')))"
    $oldLogs = Get-ChildItem -Path $logDir -Filter "*.log" | Where-Object { $_.LastWriteTime -lt $cutoffDate }
    $deletedCount = 0
    # BUG FIX: the loop variable was previously named $logFile, shadowing the
    # script-level $logFile (the active log path) within this function's scope.
    # Write-Log reads $logFile from its caller's scope chain, so calls made
    # during/after this loop could resolve it to a stale log being deleted.
    # Renamed to $staleLog to avoid the clash.
    foreach ($staleLog in $oldLogs) {
        try {
            Remove-Item $staleLog.FullName -Force
            $deletedCount++
        } catch {
            Write-Log "WARNING: Failed to delete old log file $($staleLog.Name): $($_.Exception.Message)"
        }
    }
    Write-Log "INFO: Cleaned up $deletedCount old log files"
}
# Clean up old logs before starting
Remove-OldLogs -retentionDays $LogRetentionDays
# Import SQL Server PowerShell module
try {
if (Get-Module -ListAvailable -Name SqlServer) {
Import-Module SqlServer -ErrorAction Stop
Write-Log "INFO: SqlServer PowerShell module loaded successfully."
}
elseif (Get-Module -ListAvailable -Name SQLPS) {
Import-Module SQLPS -ErrorAction Stop
Write-Log "INFO: SQLPS PowerShell module loaded successfully."
}
else {
throw "No SQL Server PowerShell module found"
}
if (-not (Get-Command Invoke-Sqlcmd -ErrorAction SilentlyContinue)) {
throw "Invoke-Sqlcmd command not available"
}
}
catch {
Write-Log "ERROR: Failed to import SQL Server PowerShell module. Please install it using: Install-Module -Name SqlServer -AllowClobber"
Write-Log "ERROR: $($_.Exception.Message)"
exit 1
}
# Import Rubrik Security Cloud module
try {
Import-Module RubrikSecurityCloud -ErrorAction Stop
Write-Log "INFO: RubrikSecurityCloud module loaded successfully."
} catch {
Write-Log "ERROR: Failed to import RubrikSecurityCloud module. $($_.Exception.Message)"
exit 1
}
$localNode = $env:COMPUTERNAME
$clusterInstance = Get-ClusterResource | Where-Object { $_.ResourceType -eq "SQL Server" -and $_.Name -eq "SQL Server ($instanceName)" }
if ($clusterInstance) {
$ownerNode = $clusterInstance.OwnerNode
if ($ownerNode -ne $localNode) {
Write-Log "SQL instance '$SqlInstance' is not running on local node '$localNode'. Updating the MV."
Connect-Rsc -ServiceAccountFile $SAFile
Write-Log "Connected to Rubrik Security Cloud."
$newHost = Get-RscHost -Name $ownerNode -OsType WINDOWS
$query = New-RscQuery -GqlQuery slaManagedVolumes -AddField Nodes.HostDetail, Nodes.SmbShare, Nodes.ClientConfig, Nodes.ClientConfig.BackupScript, Nodes.ClientConfig.PreBackupScript
$query.var.filter = @(Get-RscType -Name Filter)
$query.var.filter[0].field = "NAME_EXACT_MATCH"
$query.var.filter[0].Texts = $mvName
$mvResult = $query.Invoke()
if (-not $mvResult.nodes -or $mvResult.nodes.Count -eq 0) {
Write-Log "ERROR: Managed Volume '$mvName' not found. This may be due to insufficient permissions or the volume not existing."
Disconnect-Rsc
exit 1
}
$mvDetail = $mvResult.nodes[0]
Write-Log "Found Managed Volume: $($mvDetail.Name) (ID: $($mvDetail.Id), Status: $($mvDetail.hostDetail.Status), HostDetail Name: $($mvDetail.hostDetail.Name))"
$query = New-RscMutation -GqlMutation updateManagedVolume
$query.Var.input = Get-RscType -Name UpdateManagedVolumeInput
$query.Var.input.update = Get-RscType -Name ManagedVolumeUpdateInput
$query.Var.input.update.config = Get-RscType -Name ManagedVolumePatchConfigInput
$query.Var.input.update.slaClientConfig = Get-RscType -Name ManagedVolumePatchSlaClientConfigInput
$query.Var.input.Id = $mvDetail.Id
$query.Var.input.update.Name = $mvName
$query.Var.input.update.config.SmbDomainName = $mvDetail.SmbShare.DomainName
$query.Var.input.update.config.SmbValidIps = $newHost.Name
$query.Var.input.update.config.SmbValidUsers = $mvDetail.SmbShare.ValidUsers + $mvDetail.SmbShare.ActiveDirectoryGroups
$query.Var.input.update.slaClientConfig.clientHostId = $newHost.Id
$query.Var.input.update.slaClientConfig.channelHostMountPaths = $mvDetail.ClientConfig.ChannelHostMountPaths
$query.Var.input.update.slaClientConfig.backupScriptCommand = $mvDetail.ClientConfig.BackupScript.ScriptCommand
# Only set pre-backup script fields if a pre-backup script was configured
if ($mvDetail.ClientConfig.PreBackupScript.ScriptCommand) {
$query.Var.input.update.slaClientConfig.preBackupScriptCommand = $mvDetail.ClientConfig.PreBackupScript.ScriptCommand
$query.Var.input.update.slaClientConfig.preBackupScriptTimeout = $mvDetail.ClientConfig.PreBackupScript.Timeout
$query.Var.input.update.slaClientConfig.shouldCancelBackupOnPreBackupScriptFailure = $mvDetail.ClientConfig.ShouldCancelBackupOnPreBackupScriptFailure
$query.Var.input.update.slaClientConfig.shouldDisablePreBackupScript = $false
} else {
$query.Var.input.update.slaClientConfig.shouldDisablePreBackupScript = $true
}
$query.Var.input.update.slaClientConfig.shouldDisablePostBackupScriptOnBackupFailure = $true
$query.Var.input.update.slaClientConfig.shouldDisablePostBackupScriptOnBackupSuccess = $true
$query.gqlRequest().Variables
if (-not $dryrun) {
$query.Invoke()
} else {
Write-Log "Dry run mode: Managed Volume update not invoked."
}
# Now must exit 1 to stop the backup continuing on the wrong node
Disconnect-Rsc
exit 1
} else {
$backupType = "LOG"
Write-Log "FULL backup already taken today. Selected LOG backup."
Write-Log "SQL instance '$SqlInstance' is running on local node '$localNode'. No action needed."
}
} else {
if (-not (FlagTakenToday $diffFlag)) {
$backupType = "DIFF"
Set-Content $diffFlag $today.ToString("yyyy-MM-dd")
Write-Log "Selected DIFF backup. Flag updated."
Write-Log "ERROR: SQL instance '$SqlInstance' not found in cluster resources. Continuing assuming standalone instance."
}
# Connect to Rubrik and retrieve managed volume paths
try {
Connect-Rsc -ServiceAccountFile $SAFile
Write-Log "INFO: Connected to Rubrik Security Cloud."
$query = New-RscQuery -GqlQuery slaManagedVolumes -AddField Nodes.HostDetail, Nodes.SmbShare, Nodes.ClientConfig, Nodes.ClientConfig.BackupScript, Nodes.ClientConfig.PreBackupScript
$query.var.filter = @(Get-RscType -Name Filter)
$query.var.filter[0].field = "NAME_EXACT_MATCH"
$query.var.filter[0].Texts = $MvName
$mvDetail = $query.Invoke()
if (-not $mvDetail.nodes -or $mvDetail.nodes.Count -eq 0) {
Write-Log "ERROR: Managed Volume '$MvName' not found. This may be due to insufficient permissions or the volume not existing."
exit 1
}
$paths = $mvDetail.nodes[0].ClientConfig.ChannelHostMountPaths
Write-Log "INFO: Retrieved paths: $($paths -join ', ')"
} catch {
Write-Log "ERROR: Failed to retrieve paths from Rubrik. $($_.Exception.Message)"
exit 1
}
# If -Nuke is set, delete the contents of each retrieved path (but keep the folder itself).
if ($Nuke) {
Write-Log "INFO: -nuke flag set. Beginning recursive deletion of contents for retrieved paths."
foreach ($p in $paths) {
if (-not $p) { continue }
$pathToCheck = $p.Trim()
# Determine root to avoid deleting drive root like C:\
try { $root = [IO.Path]::GetPathRoot($pathToCheck) } catch { $root = $null }
if ([string]::IsNullOrEmpty($pathToCheck)) {
Write-Log "WARNING: Skipping empty path entry"
continue
}
if ($root -and ($pathToCheck.TrimEnd('\') -eq $root.TrimEnd('\'))) {
Write-Log "ERROR: Refusing to nuke root path '$pathToCheck'. Skipping."
continue
}
if (-not (Test-Path -LiteralPath $pathToCheck)) {
Write-Log "WARNING: Path '$pathToCheck' does not exist. Skipping."
continue
}
Write-Log "INFO: NUKING contents of '$pathToCheck' (deleting all files & subfolders inside)."
try {
# Enumerate children and delete each item so the folder itself remains
Get-ChildItem -LiteralPath $pathToCheck -Force -ErrorAction SilentlyContinue | ForEach-Object {
try {
Remove-Item -LiteralPath $_.FullName -Recurse -Force -ErrorAction Stop
Write-Log "INFO: Deleted: $($_.FullName)"
} catch {
Write-Log "WARNING: Failed to delete $($_.FullName): $($_.Exception.Message)"
}
}
} catch {
Write-Log "ERROR: Failed to enumerate or delete contents of '$pathToCheck': $($_.Exception.Message)"
}
}
Write-Log "INFO: -nuke operation complete. Continuing with backup flow."
}
$directoryParam = $paths -join ', '
# Validate job count
if ($Jobs -lt 1 -or $Jobs -gt 4) {
Write-Log "ERROR: Jobs parameter must be between 1 and 4. Provided: $Jobs"
exit 1
}
Write-Log "INFO: Starting $Jobs parallel backup jobs"
$today = (Get-Date).Date
function Get-BackupType($directoryParam) {
    # Decide which backup type to run today and return @{ Type; CleanupTime; Reason }.
    # Selection is driven by date-stamp "flag" files (last_full.flag / last_diff.flag)
    # searched across all candidate directories in $directoryParam (comma-separated):
    #   - FULL on $fullBackupDay, or when the last full is older than
    #     $fullBackupOverdueDays (or no full flag exists anywhere).
    #   - Otherwise DIFF, at most once per day.
    #   - LOG whenever today's FULL/DIFF has already been recorded.
    # Side effect: writes/updates the chosen flag file (note this happens BEFORE
    # the backup itself runs). Reads $today, $fullBackupDay, $fullBackupOverdueDays
    # from script scope.

    # Split the comma-separated directory list into a clean array. The @() wrap
    # matters: with a single directory the pipeline yields a scalar string, and
    # indexing a string ($dirs[0]) would return its first CHARACTER, not the path.
    $dirs = @()
    if ($directoryParam) {
        $dirs = @($directoryParam -split ',' | ForEach-Object { $_.Trim() } | Where-Object { $_ -ne '' })
    }
    if ($dirs.Count -eq 0) {
        # Defensive: with no candidate directory we can neither read nor write flags.
        Write-Log "WARNING: No candidate directories supplied to Get-BackupType. Defaulting to LOG backup."
        return @{ Type = "LOG"; CleanupTime = 24; Reason = "no candidate directories" }
    }

    # Build lists of found flags (in candidate order)
    $foundFull = @()
    $foundDiff = @()
    foreach ($d in $dirs) {
        $full = Join-Path $d "last_full.flag"
        $diff = Join-Path $d "last_diff.flag"
        if (Test-Path $full) { $foundFull += $full }
        if (Test-Path $diff) { $foundDiff += $diff }
    }

    # Determine if full backup is overdue using the first-found full flag (if any)
    $isFullBackupOverdue = $false
    if ($foundFull.Count -gt 0) {
        $fullFlag = $foundFull[0]
        try {
            $lastFullDate = [DateTime]::ParseExact((Get-Content $fullFlag).Trim(), "yyyy-MM-dd", $null)
            $daysSinceLastFull = ($today - $lastFullDate).Days
            $isFullBackupOverdue = $daysSinceLastFull -gt $fullBackupOverdueDays
            Write-Log "INFO: Last full backup was $daysSinceLastFull days ago (from $fullFlag). Overdue threshold: $fullBackupOverdueDays days."
        } catch {
            # Unparseable flag content: safest assumption is that a full is overdue.
            $isFullBackupOverdue = $true
            Write-Log "WARNING: Could not parse last full backup date in $fullFlag. Treating as overdue."
        }
    } else {
        # No full flag anywhere: treat a full backup as overdue.
        # (Removed stray copy-paste lines that set an unused $backupType and logged
        # a misleading "DIFF backup already taken today" message here.)
        $isFullBackupOverdue = $true
        Write-Log "WARNING: No last full backup date found in any candidate directories. Treating as overdue."
    }

    # Helper to ensure a flag file's parent directory exists before writing to it.
    function Ensure-DirExists([string]$path) {
        if (-not (Test-Path $path)) {
            try { New-Item -ItemType Directory -Path $path -Force | Out-Null } catch { }
        }
    }

    # Preferred write location: an existing related flag's directory, otherwise
    # the first candidate directory.
    $firstDir = $dirs[0]

    # FULL branch: scheduled day or overdue.
    if ((Get-Date).DayOfWeek -eq $fullBackupDay -or $isFullBackupOverdue) {
        # If a full flag exists, use its location; else use firstDir
        $targetFullFlag = if ($foundFull.Count -gt 0) { $foundFull[0] } else { Join-Path $firstDir "last_full.flag" }
        $targetDir = Split-Path $targetFullFlag -Parent
        Ensure-DirExists $targetDir
        $currentValue = $null
        if (Test-Path $targetFullFlag) {
            try { $currentValue = (Get-Content $targetFullFlag).Trim() } catch { $currentValue = $null }
        }
        if (-not $currentValue -or $currentValue -ne $today.ToString("yyyy-MM-dd")) {
            try {
                Set-Content -Path $targetFullFlag -Value $today.ToString("yyyy-MM-dd") -Encoding UTF8
                Write-Log "INFO: Created/Updated full backup flag file: $targetFullFlag"
            } catch {
                Write-Log "ERROR: Failed to create/update full backup flag file: $targetFullFlag. $($_.Exception.Message)"
            }
            $reason = if ($isFullBackupOverdue) { "overdue" } else { "scheduled" }
            return @{ Type = "FULL"; CleanupTime = 168; Reason = $reason }
        } else {
            # Today's full is already recorded; fall back to a transaction-log backup.
            return @{ Type = "LOG"; CleanupTime = 24; Reason = "full already taken today" }
        }
    }

    # DIFF branch: prefer an existing diff flag location if present; else prefer
    # the existing full flag location (write diff alongside full); otherwise firstDir.
    if ($foundDiff.Count -gt 0) {
        $targetDiffFlag = $foundDiff[0]
    } elseif ($foundFull.Count -gt 0) {
        $targetDiffFlag = Join-Path (Split-Path $foundFull[0] -Parent) "last_diff.flag"
    } else {
        $targetDiffFlag = Join-Path $firstDir "last_diff.flag"
    }
    $targetDir = Split-Path $targetDiffFlag -Parent
    Ensure-DirExists $targetDir
    $currentDiffValue = $null
    if (Test-Path $targetDiffFlag) {
        try { $currentDiffValue = (Get-Content $targetDiffFlag).Trim() } catch { $currentDiffValue = $null }
    }
    if (-not $currentDiffValue -or $currentDiffValue -ne $today.ToString("yyyy-MM-dd")) {
        try {
            Set-Content -Path $targetDiffFlag -Value $today.ToString("yyyy-MM-dd") -Encoding UTF8
            Write-Log "INFO: Created/Updated diff backup flag file: $targetDiffFlag"
        } catch {
            Write-Log "ERROR: Failed to create/update diff backup flag file: $targetDiffFlag. $($_.Exception.Message)"
        }
        return @{ Type = "DIFF"; CleanupTime = 168; Reason = "differential scheduled" }
    } else {
        # Today's diff is already recorded; fall back to a transaction-log backup.
        return @{ Type = "LOG"; CleanupTime = 24; Reason = "diff already taken today" }
    }
}
# NOTE(review): $directory, $backupType and $cleanupTime are not assigned anywhere
# in this parallel flow (they belong to backupSingle.ps1), so the $query built on
# the next line is malformed, and the sqlcmd call below executes that malformed
# command. $query is rebuilt correctly further down from $backupInfo. This looks
# like merge residue — confirm and remove the $query / Write-Log / sqlcmd lines,
# keeping only the Get-BackupType call and its log line.
$query = "EXECUTE [dbo].[DatabaseBackup] @Databases = 'ALL_DATABASES', @Directory = '$directory', @BackupType = '$backupType', @Verify = 'N', @CleanupTime = $cleanupTime, @CheckSum = 'Y', @LogToTable = 'Y'"
Write-Log "Executing backup type: $backupType"
# Determine backup type
$backupInfo = Get-BackupType $directoryParam
Write-Log "Selected $($backupInfo.Type) backup ($($backupInfo.Reason))"
# NOTE(review): $sqlInstance (lower-case) resolves to the $SqlInstance parameter
# only because PowerShell variable names are case-insensitive.
$sqlcmdOutput = & sqlcmd -S $sqlInstance -Q $query 2>&1
$sqlcmdExitCode = $LASTEXITCODE
# Build the Ola H query. Include DatabasesInParallel only when multiple jobs are used
# Build parameter lines so we can avoid leaving a trailing comma when omitting DatabasesInParallel
$paramLines = @(
"@Databases = 'ALL_DATABASES'",
"@Directory = '$directoryParam'",
"@BackupType = '$($backupInfo.Type)'",
"@Verify = 'N'",
"@CleanupTime = $($backupInfo.CleanupTime)",
"@CheckSum = 'Y'",
"@LogToTable = 'Y'"
)
# NOTE(review): the if/foreach nesting below is suspicious. As written, the
# DatabasesInParallel append and the query construction sit inside a loop over
# $sqlcmdOutput (residue of the old sqlcmd flow), so @DatabasesInParallel = 'Y'
# could be appended once per output line and the query rebuilt repeatedly.
# Confirm the intended structure (param append + query build should execute
# exactly once, unconditionally) before refactoring.
if ($sqlcmdExitCode -eq 0) {
foreach ($line in $sqlcmdOutput) {
Write-Log $line
# Only enable DatabasesInParallel when we run more than one job
if ($Jobs -gt 1) {
$paramLines += "@DatabasesInParallel = 'Y'"
}
# Join with commas and indentation to produce clean SQL parameter list
# (`n inside double quotes is a PowerShell newline escape)
$params = $paramLines -join ",`n    "
$query = "EXECUTE [dbo].[DatabaseBackup] `n    $params"
Write-Log "SQL Query: $query"
# Function to execute backup job with message capture
#######################################
# Starts one background backup job that executes the Ola Hallengren
# DatabaseBackup command via System.Data.SqlClient, capturing PRINT output.
# Arguments: $jobId       - 1-based job number, used for per-job log naming
#            $sqlInstance - SQL Server instance (SERVER\INSTANCE)
#            $query       - the EXECUTE [dbo].[DatabaseBackup] statement to run
#            $baseLogFile - main log path; job writes to <base>-jobN.log
# Returns:   the System.Management.Automation.Job object from Start-Job.
# The job signals its outcome by emitting "JOB-<n>: SUCCESS" / "JOB-<n>: FAILED"
# lines, which the monitoring code downstream scans for.
#######################################
function Start-BackupJob {
param(
[int]$jobId,
[string]$sqlInstance,
[string]$query,
[string]$baseLogFile
)
# This scriptblock runs in a separate PowerShell job process; it can only see
# the param() values and $using: references, not this script's variables.
$scriptBlock = {
param($JobId, $SqlInstance, $Query, $BaseLogFile)
# Debug the base log file parameter
Write-Output "DEBUG: BaseLogFile parameter = '$BaseLogFile'"
# Create job-specific log file path with fallback
if ($BaseLogFile -and $BaseLogFile.Trim() -ne "") {
$jobLogFile = $BaseLogFile -replace '\.log$', "-job$JobId.log"
} else {
# Fallback log file path using logDir
# NOTE(review): $using:logDir assumes a $logDir variable exists in the
# caller's scope when Start-Job runs — TODO confirm it is defined.
$jobLogFile = Join-Path $using:logDir "backup-multi-job$JobId.log"
}
Write-Output "DEBUG: Job log file will be: '$jobLogFile'"
# Job-local logger: appends to the per-job file and (unless suppressed)
# also emits to the job's output stream for Receive-Job.
function Write-JobLog($message, $suppressConsole = $false) {
$timestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
$logEntry = "$timestamp [JOB-$JobId] $message"
if ($jobLogFile -and $jobLogFile.Trim() -ne "") {
try {
Add-Content -Path $jobLogFile -Value $logEntry -Encoding UTF8
# Output to console for debugging (unless suppressed)
if (-not $suppressConsole) {
Write-Output "LOGGED TO $jobLogFile : $logEntry"
}
} catch {
Write-Output "LOG ERROR: $($_.Exception.Message) - File: $jobLogFile"
}
} else {
Write-Output "NO LOG FILE: jobLogFile is empty or null"
}
# Always output to console for job monitoring (unless suppressed)
if (-not $suppressConsole) {
Write-Output $logEntry
}
}
try {
Write-JobLog "Starting backup job"
# Create SQL connection with message capture
$connection = New-Object System.Data.SqlClient.SqlConnection
$connection.ConnectionString = "Server=$SqlInstance;Integrated Security=true;Connection Timeout=30"
$infoMessages = @()
# Event handler for informational messages (PRINT statements)
# NOTE(review): the handler uses $script:infoMessages — verify this binds to
# the $infoMessages initialized above inside the job's script scope; if not,
# the "Captured N messages" count below will stay 0.
$connection.add_InfoMessage({
param($sqlSender, $e)
$message = $e.Message
if ($message -and $message.Trim() -ne "") {
$script:infoMessages += $message
Write-JobLog "SQL INFO: $message" $true # Suppress console output for verbose messages
}
})
try {
Write-JobLog "Attempting to connect to SQL Server: $SqlInstance"
$connection.Open()
Write-JobLog "Connected to SQL Server successfully"
$command = New-Object System.Data.SqlClient.SqlCommand
$command.Connection = $connection
$command.CommandText = $Query
$command.CommandTimeout = 0 # No timeout for backup operations
Write-JobLog "Executing backup command..."
# Execute and capture any result sets
$reader = $command.ExecuteReader()
# Process any result sets
while ($reader.Read()) {
$rowData = @()
for ($i = 0; $i -lt $reader.FieldCount; $i++) {
$rowData += "$($reader.GetName($i)): $($reader.GetValue($i))"
}
if ($rowData.Count -gt 0) {
$resultLine = "SQL RESULT: $($rowData -join ', ')"
Write-JobLog $resultLine
Write-Output $resultLine # Also output for Receive-Job
}
}
$reader.Close()
$summaryMessage = "Backup completed successfully. Captured $($infoMessages.Count) messages."
Write-JobLog $summaryMessage
Write-Output $summaryMessage # Also output for Receive-Job
# Output all captured SQL messages for debugging (only to log file, not console)
Write-JobLog "=== SQL MESSAGES START ===" $true
foreach ($msg in $infoMessages) {
Write-JobLog "SQL: $msg" $true
}
Write-JobLog "=== SQL MESSAGES END ===" $true
# Don't return hashtable - just output success message
# Sentinel scanned by the final status check in the parent script.
Write-Output "JOB-${JobId}: SUCCESS"
}
finally {
# Always release the connection, whether the backup succeeded or threw.
if ($connection.State -eq [System.Data.ConnectionState]::Open) {
$connection.Close()
}
$connection.Dispose()
}
}
catch {
$errorMessage = "ERROR: Backup failed - $($_.Exception.Message)"
Write-JobLog $errorMessage
Write-Output $errorMessage # Also output for Receive-Job
# Check for specific connection errors
if ($_.Exception.Message -like "*server*not found*" -or
$_.Exception.Message -like "*network-related*" -or
$_.Exception.Message -like "*instance*" -or
$_.Exception.Message -like "*login*failed*") {
$connError = "ERROR: CONNECTION FAILURE - Check SQL Server instance name and connectivity"
Write-JobLog $connError
Write-Output $connError
}
# Log SQL Server specific errors
if ($_.Exception -is [System.Data.SqlClient.SqlException]) {
Write-JobLog "ERROR: SQL Server Error Details:"
Write-Output "ERROR: SQL Server Error Details:"
foreach ($sqlError in $_.Exception.Errors) {
$errorDetail = "ERROR: Severity: $($sqlError.Class), State: $($sqlError.State), Number: $($sqlError.Number)"
Write-JobLog $errorDetail
Write-Output $errorDetail
$errorMsg = "ERROR: Message: $($sqlError.Message)"
Write-JobLog $errorMsg
Write-Output $errorMsg
if ($sqlError.Procedure) {
$procError = "ERROR: Procedure: $($sqlError.Procedure), Line: $($sqlError.LineNumber)"
Write-JobLog $procError
Write-Output $procError
}
}
}
# Log full exception details for debugging
$fullError = "ERROR: Full Exception Type: $($_.Exception.GetType().Name)"
Write-JobLog $fullError
Write-Output $fullError
if ($_.Exception.InnerException) {
$innerError = "ERROR: Inner Exception: $($_.Exception.InnerException.Message)"
Write-JobLog $innerError
Write-Output $innerError
}
# Sentinel scanned by the final status check in the parent script.
Write-Output "JOB-${JobId}: FAILED"
}
}
# NOTE(review): the line below appears to be residue from the single-instance
# script — it logs "Backup execution completed" BEFORE the job is even started,
# and $backupType is not set in this flow. Candidate for removal.
Write-Log "$backupType Backup execution completed."
return Start-Job -ScriptBlock $scriptBlock -ArgumentList $jobId, $sqlInstance, $query, $baseLogFile
}
# Launch the requested number of parallel backup jobs, staggering their starts
# so they do not all hit the SQL instance at the same instant.
Write-Log "Starting $Jobs parallel backup jobs"
[System.Collections.ArrayList]$jobList = @()
foreach ($jobNumber in 1..$Jobs) {
    $startedJob = Start-BackupJob -jobId $jobNumber -sqlInstance $SqlInstance -query $query -baseLogFile $logFile
    $null = $jobList.Add($startedJob)
    Write-Log "Started backup job $jobNumber (Job ID: $($startedJob.Id))"
    # 4-second stagger between job launches.
    Start-Sleep -Milliseconds 4000
}
# Monitor jobs and capture output
# Polls every 5 seconds until every job has left the Running state. Each job's
# output is drained with -Keep so it can be re-read during the final status pass.
Write-Log "Monitoring $($jobList.Count) backup jobs..."
$allJobsCompleted = $false
[System.Collections.ArrayList]$completedJobs = @()
while (-not $allJobsCompleted) {
Start-Sleep -Seconds 5
foreach ($job in $jobList) {
if ($job.Id -notin $completedJobs) {
# Check if job is no longer running
if ($job.State -eq "Completed" -or $job.State -eq "Failed" -or $job.State -eq "Stopped") {
$null = $completedJobs.Add($job.Id)
# Get all job output
$jobOutput = Receive-Job -Job $job -Keep # Use -Keep to preserve output
if ($job.State -eq "Completed") {
Write-Log "Job $($job.Id) completed successfully"
# Log all job output to main log
if ($jobOutput) {
Write-Log "=== Job $($job.Id) Output ==="
foreach ($line in $jobOutput) {
Write-Log "$line"
}
Write-Log "=== End Job $($job.Id) Output ==="
}
} else {
Write-Log "ERROR: Job $($job.Id) failed with state: $($job.State)"
if ($jobOutput) {
Write-Log "=== Job $($job.Id) Error Output ==="
foreach ($line in $jobOutput) {
Write-Log "ERROR: $line"
}
Write-Log "=== End Job $($job.Id) Error Output ==="
}
}
}
}
}
$allJobsCompleted = $completedJobs.Count -eq $jobList.Count
# Progress update
$runningCount = ($jobList | Where-Object { $_.State -eq "Running" }).Count
if ($runningCount -gt 0) {
Write-Log "Progress: $($completedJobs.Count)/$($jobList.Count) jobs completed, $runningCount still running..."
}
}
Write-Log "All backup jobs completed"
# Snapshot each job's final state and scan its preserved output for the
# "JOB-n: FAILED" sentinel before the job objects are removed.
$jobResults = @{}
foreach ($completedJob in $jobList) {
    $output = Receive-Job -Job $completedJob -Keep -ErrorAction SilentlyContinue
    $failureSeen = $false
    if ($output) {
        # Any line matching the failure sentinel marks the whole job as failed.
        $failureSeen = [bool]($output | Where-Object { $_ -like "*JOB-*: FAILED" } | Select-Object -First 1)
    }
    $jobResults[$completedJob.Id] = @{
        State  = $completedJob.State
        Failed = $failureSeen
    }
}
# Tear down every background job, force-stopping any that is somehow still running.
Write-Log "Cleaning up completed jobs..."
foreach ($backupJob in $jobList) {
    try {
        if ($backupJob.State -eq "Running") {
            Write-Log "WARNING: Job $($backupJob.Id) still running, stopping it..."
            Stop-Job -Job $backupJob -Force
            # Give the job a moment to transition state before removal.
            Start-Sleep -Seconds 2
        }
        Remove-Job -Job $backupJob -Force -ErrorAction SilentlyContinue
        Write-Log "Cleaned up job $($backupJob.Id)"
    } catch {
        Write-Log "WARNING: Could not clean up job $($backupJob.Id): $($_.Exception.Message)"
    }
}
# Final status check using job output analysis: a job counts as failed when its
# output contained the "JOB-n: FAILED" sentinel emitted by the job scriptblock.
# @() guards .Count when Where-Object yields $null or a single scalar key.
$failedJobIds = @($jobResults.Keys | Where-Object { $jobResults[$_].Failed -eq $true })
if ($failedJobIds.Count -gt 0) {
    Write-Log "ERROR: $($failedJobIds.Count) out of $($jobResults.Count) backup jobs failed"
    foreach ($jobId in $failedJobIds) {
        Write-Log "ERROR: Job ID $jobId failed"
    }
    Write-Log "CRITICAL: Backup operation failed - check errors above"
    exit 1
}
# (Removed a leftover 'else' branch that logged a bogus sqlcmd failure message
# referencing $sqlcmdExitCode on the SUCCESS path — residue of the old
# single-process sqlcmd flow.)
Write-Log "SUCCESS: All $($jobResults.Count) backup jobs completed successfully"
}

271
backupSingle.ps1 Normal file
View File

@@ -0,0 +1,271 @@
param(
# SQL Server instance to back up, e.g. SERVER\INSTANCE.
[Parameter(Mandatory=$true)]
[string]$SqlInstance,
# Backup target directory; defaults to C:\Rubrik\<instanceName> when omitted.
[Parameter(Mandatory=$false)]
[string]$Directory,
# Skip the symbolic-link safety check on the target directory.
[Parameter(Mandatory=$false)]
[switch]$Force
)
# backup.ps1
#
# TODO: Parallelize backups for multiple DBs in the instance
# Import a SQL Server PowerShell module: prefer the modern SqlServer module,
# fall back to legacy SQLPS, and exit if neither is available or Invoke-Sqlcmd
# is not exported.
try {
    $moduleLoaded = $false
    if (Get-Module -ListAvailable -Name SqlServer) {
        Import-Module SqlServer -ErrorAction Stop
        Write-Host "INFO: SqlServer PowerShell module loaded successfully."
        $moduleLoaded = $true
    } elseif (Get-Module -ListAvailable -Name SQLPS) {
        Import-Module SQLPS -ErrorAction Stop
        Write-Host "INFO: SQLPS PowerShell module loaded successfully."
        $moduleLoaded = $true
    }
    if (-not $moduleLoaded) {
        throw "No SQL Server PowerShell module found"
    }
    # Sanity check: the module must actually expose Invoke-Sqlcmd.
    if (-not (Get-Command Invoke-Sqlcmd -ErrorAction SilentlyContinue)) {
        throw "Invoke-Sqlcmd command not available"
    }
}
catch {
    Write-Host "ERROR: Failed to import SQL Server PowerShell module. Please install it using: Install-Module -Name SqlServer -AllowClobber"
    Write-Host "ERROR: $($_.Exception.Message)"
    exit 1
}
# Derive the instance name from SERVER\INSTANCE. For a default (unnamed)
# instance there is no backslash: the original indexed [1] unconditionally,
# yielding $null and paths like "C:\Rubrik\" — fall back to the server portion.
$instanceParts = $SqlInstance.Split('\')
$instanceName = if ($instanceParts.Count -gt 1) { $instanceParts[1] } else { $instanceParts[0] }
# Use provided directory parameter or default to instance-based path
if ($Directory) {
    $directory = $Directory
    Write-Host "INFO: Using provided directory: $directory"
} else {
    $directory = "C:\Rubrik\$instanceName"
    Write-Host "INFO: Using default directory: $directory"
}
# Scheduling policy.
$fullBackupDay = 'Thursday'
$fullBackupOverdueDays = 7 # Force full backup if last full backup is older than this many days
$checkCluster = $false
#$logFile = "C:\Rubrik\backup-$instanceName.log"
$logFile = "H:\Backup\backup-$instanceName.log"
# Date-stamp flag files used to choose between FULL, DIFF and LOG backups.
$fullFlag = $directory + "\last_full.flag"
$diffFlag = $directory + "\last_diff.flag"
$today = (Get-Date).Date
# Returns $true when the flag file exists and records today's date (yyyy-MM-dd).
function FlagTakenToday($flagPath) {
    if (-not (Test-Path $flagPath)) {
        return $false
    }
    $recordedDate = (Get-Content $flagPath | Out-String).Trim()
    return $recordedDate -eq $today.ToString("yyyy-MM-dd")
}
# Parses the yyyy-MM-dd date recorded in the full-backup flag file.
# Returns $null when the file is missing or its content cannot be parsed.
function GetLastFullBackupDate($flagPath) {
    if (-not (Test-Path $flagPath)) {
        return $null
    }
    $recorded = (Get-Content $flagPath | Out-String).Trim()
    try {
        return [DateTime]::ParseExact($recorded, "yyyy-MM-dd", $null)
    }
    catch {
        Write-Log "WARNING: Could not parse last full backup date from flag file: $recorded"
        return $null
    }
}
# True when the last recorded full backup is more than $overdueDays days old,
# or when no valid full-backup date can be determined at all.
function IsFullBackupOverdue($flagPath, $overdueDays) {
    $lastFull = GetLastFullBackupDate $flagPath
    if ($null -eq $lastFull) {
        Write-Log "WARNING: No last full backup date found. Full backup is considered overdue."
        return $true
    }
    $age = ($today - $lastFull).Days
    Write-Log "INFO: Last full backup was $age days ago on $($lastFull.ToString('yyyy-MM-dd')). Overdue threshold: $overdueDays days."
    return ($age -gt $overdueDays)
}
# Appends a timestamped entry to $logFile and echoes the same line to the console.
function Write-Log($message) {
    $entry = "{0} {1}" -f (Get-Date -Format "yyyy-MM-dd HH:mm:ss"), $message
    Add-Content -Path $logFile -Value $entry
    Write-Host $entry
}
# Check if directory exists and is a symbolic link (unless -Force is specified)
# Safety gate: the backup target should be a reparse point (symlink/junction/
# mount point — i.e. a mounted Rubrik volume), never a plain local folder.
if (-not (Test-Path $directory)) {
Write-Log "ERROR: Directory '$directory' does not exist. Exiting script."
exit 1
}
if (-not $Force) {
$directoryInfo = Get-Item $directory
# ReparsePoint attribute covers symlinks, junctions and mount points alike.
if (-not ($directoryInfo.Attributes -band [System.IO.FileAttributes]::ReparsePoint)) {
Write-Log "ERROR: Directory '$directory' is not a symbolic link. Exiting script."
exit 1
}
Write-Log "INFO: Directory '$directory' exists and is a symbolic link. Target: $($directoryInfo.Target). Proceeding."
} else {
Write-Log "INFO: Force parameter specified. Skipping symbolic link check for directory '$directory'."
}
# Optional failover-cluster guard: only proceed when this node currently owns
# the SQL Server clustered resource ($checkCluster is $false by default).
if ($checkCluster) {
# Check if SQL instance is running locally
$localNode = $env:COMPUTERNAME
$clusterInstance = Get-ClusterResource | Where-Object { $_.ResourceType -eq "SQL Server" -and $_.Name -eq "SQL Server ($instanceName)" }
if ($clusterInstance) {
$ownerNode = $clusterInstance.OwnerNode.Name
if ($ownerNode -ne $localNode) {
Write-Log "SQL instance '$SqlInstance' is not running on local node '$localNode'. Exiting script."
exit 1
} else {
Write-Log "SQL instance '$SqlInstance' is running on local node '$localNode'. Proceeding."
}
} else {
Write-Log "ERROR: SQL instance '$SqlInstance' not found in cluster resources."
exit 1
}
} else {
Write-Log "INFO: Cluster check is disabled. Proceeding without verification."
}
# Check if full backup is overdue regardless of the day
# Decision table:
#   scheduled day & no full today  -> FULL (168h cleanup)
#   overdue       & no full today  -> FULL (168h cleanup)
#   other day     & no diff today  -> DIFF (168h cleanup)
#   otherwise                      -> LOG  (24h cleanup)
# The chosen flag file is stamped with today's date BEFORE the backup runs.
$isFullBackupOverdue = IsFullBackupOverdue $fullFlag $fullBackupOverdueDays
if ((Get-Date).DayOfWeek -eq $fullBackupDay) {
if (-not (FlagTakenToday $fullFlag)) {
$backupType = "FULL"
$cleanupTime = 168
Set-Content $fullFlag $today.ToString("yyyy-MM-dd")
Write-Log "Selected FULL backup (scheduled day). Flag updated."
} else {
$backupType = "LOG"
$cleanupTime = 24
Write-Log "FULL backup already taken today. Selected LOG backup."
}
} elseif ($isFullBackupOverdue) {
if (-not (FlagTakenToday $fullFlag)) {
$backupType = "FULL"
$cleanupTime = 168
Set-Content $fullFlag $today.ToString("yyyy-MM-dd")
Write-Log "Selected FULL backup (overdue - forcing full backup). Flag updated."
} else {
$backupType = "LOG"
$cleanupTime = 24
Write-Log "FULL backup already taken today (was overdue). Selected LOG backup."
}
} else {
if (-not (FlagTakenToday $diffFlag)) {
$backupType = "DIFF"
$cleanupTime = 168
Set-Content $diffFlag $today.ToString("yyyy-MM-dd")
Write-Log "Selected DIFF backup. Flag updated."
} else {
$backupType = "LOG"
$cleanupTime = 24
Write-Log "DIFF backup already taken today. Selected LOG backup."
}
}
# Ola Hallengren DatabaseBackup invocation; @CleanupTime is in hours.
$query = "EXECUTE [dbo].[DatabaseBackup] @Databases = 'ALL_DATABASES', @Directory = '$directory', @BackupType = '$backupType', @Verify = 'N', @CleanupTime = $cleanupTime, @CheckSum = 'Y', @LogToTable = 'Y'"
Write-Log "Executing backup type: $backupType"
Write-Log "SQL Query: $query"
# Execute the backup over a raw SqlClient connection so that PRINT output from
# the Ola Hallengren procedure can be captured via the InfoMessage event
# (Invoke-Sqlcmd would not surface these messages the same way).
try {
# Execute the backup using PowerShell SQL module with better error handling
# Capture verbose output from Ola H scripts
$infoMessages = @()
# Create event handlers to capture SQL Server messages
$connection = New-Object System.Data.SqlClient.SqlConnection
$connection.ConnectionString = "Server=$SqlInstance;Integrated Security=true;Connection Timeout=30"
# Event handler for informational messages (PRINT statements)
# The $script: prefix targets script scope, where $infoMessages was initialized above.
$connection.add_InfoMessage({
param($sqlSender, $e)
$message = $e.Message
if ($message -and $message.Trim() -ne "") {
$script:infoMessages += $message
Write-Log "SQL INFO: $message"
}
})
try {
$connection.Open()
$command = New-Object System.Data.SqlClient.SqlCommand
$command.Connection = $connection
$command.CommandText = $query
$command.CommandTimeout = 0 # No timeout for backup operations
Write-Log "Executing SQL command with message capture..."
# Execute and capture any result sets
$reader = $command.ExecuteReader()
# Process any result sets
while ($reader.Read()) {
$rowData = @()
for ($i = 0; $i -lt $reader.FieldCount; $i++) {
$rowData += "$($reader.GetName($i)): $($reader.GetValue($i))"
}
if ($rowData.Count -gt 0) {
Write-Log "SQL RESULT: $($rowData -join ', ')"
}
}
$reader.Close()
}
finally {
# Always release the connection, success or failure.
if ($connection.State -eq [System.Data.ConnectionState]::Open) {
$connection.Close()
}
$connection.Dispose()
}
Write-Log "$backupType Backup execution completed successfully."
Write-Log "Total informational messages captured: $($infoMessages.Count)"
}
catch {
Write-Log "ERROR: Backup execution failed with exception: $($_.Exception.Message)"
# Log additional SQL Server error details if available
if ($_.Exception.InnerException) {
Write-Log "ERROR: Inner Exception: $($_.Exception.InnerException.Message)"
}
# Check for SQL Server specific errors
if ($_.Exception -is [System.Data.SqlClient.SqlException]) {
Write-Log "ERROR: SQL Server Error Details:"
foreach ($sqlError in $_.Exception.Errors) {
Write-Log "ERROR: Severity: $($sqlError.Class), State: $($sqlError.State), Number: $($sqlError.Number)"
Write-Log "ERROR: Message: $($sqlError.Message)"
if ($sqlError.Procedure) {
Write-Log "ERROR: Procedure: $($sqlError.Procedure), Line: $($sqlError.LineNumber)"
}
}
}
# Clean up connection if it exists
# (Defensive: the inner finally normally closed it already.)
if ($connection -and $connection.State -eq [System.Data.ConnectionState]::Open) {
$connection.Close()
$connection.Dispose()
}
exit 1
}

301
createSAcreds.ps1 Normal file
View File

@@ -0,0 +1,301 @@
<#
.SYNOPSIS
Create a one-shot scheduled task that runs as a gMSA or service account to create a Rubrik service-account file.
.DESCRIPTION
- Creates a temporary PowerShell script that calls Set-RscServiceAccountFile with given parameters.
- Registers a scheduled task whose principal is either a gMSA or regular service account.
- Starts the task, waits for completion, checks LastTaskResult, then optionally cleans up.
.PARAMETER Domain
The AD domain (e.g. AD). If already providing fully-qualified account, set to empty string.
.PARAMETER AccountName
The name of the service account. For gMSA, do not include trailing $. For regular accounts, use the username.
.PARAMETER AccountType
Type of account: 'gMSA' or 'ServiceAccount'. Default: 'gMSA'
.PARAMETER Password
Password for regular service accounts. Not used for gMSA accounts. Can be SecureString or plain text.
.PARAMETER SaJsonPath
Full local path to the sa.json file that RubrikSecurityCloud module will use.
.PARAMETER OutputXmlPath
Full local path to the output xml service account file (sa-rbksql.xml).
.PARAMETER TaskName
(Optional) Scheduled task name. Default: CreateRubrikSAFile-<timestamp>
.PARAMETER KeepArtifacts
If $true, keep the temporary script and task after completion. Default $false = cleanup.
.EXAMPLE
# Using gMSA
.\createSAcreds.ps1 -Domain AD -AccountName rubrikgmsa -AccountType gMSA -SaJsonPath C:\temp\sa.json -OutputXmlPath C:\temp\sa-rbksql.xml
.EXAMPLE
# Using regular service account with password prompt
.\createSAcreds.ps1 -Domain AD -AccountName rbksql -AccountType ServiceAccount -Password (Read-Host -AsSecureString -Prompt "Enter SA password") -SaJsonPath C:\Rubrik\scripts\sa.json -OutputXmlPath C:\Rubrik\scripts\sa-real.xml
.EXAMPLE
# Using regular service account with plain text password
.\createSAcreds.ps1 -Domain AD -AccountName rbksql -AccountType ServiceAccount -Password "MyPassword123" -SaJsonPath C:\Rubrik\scripts\sa.json -OutputXmlPath C:\Rubrik\scripts\sa-real.xml
#>
param(
# AD domain prefix for the account; empty string means use the bare account name.
[string]$Domain,
[Parameter(Mandatory=$true)][string]$AccountName,
[ValidateSet('gMSA', 'ServiceAccount')][string]$AccountType = 'gMSA',
# SecureString or plain text; only used (and prompted for) when AccountType is ServiceAccount.
[object]$Password,
[Parameter(Mandatory=$true)][string]$SaJsonPath,
[Parameter(Mandatory=$true)][string]$OutputXmlPath,
[string]$TaskName = "CreateRubrikSAFile-$((Get-Date).ToString('yyyyMMdd-HHmmss'))",
# Keep the temporary script and scheduled task after completion (for debugging).
[switch]$KeepArtifacts
)
try {
# ---- Parameter validation ----
if ($AccountType -eq 'ServiceAccount' -and -not $Password) {
# Prompt for password if not provided for service accounts
Write-Host "Password required for service account '$AccountName'"
$Password = Read-Host -AsSecureString -Prompt "Enter password for $AccountName"
}
if ($AccountType -eq 'gMSA' -and $Password) {
Write-Warning "Password parameter ignored for gMSA accounts"
}
# ---- Basic validation ----
if (-not (Test-Path -Path $SaJsonPath)) { throw "SA JSON not found at: $SaJsonPath" }
# Unique working directory under %TEMP% for the generated one-shot script and its log.
$tempDir = Join-Path -Path $env:TEMP -ChildPath "CreateRubrikSAFile_$([guid]::NewGuid().ToString().Substring(0,8))"
New-Item -Path $tempDir -ItemType Directory -Force | Out-Null
$oneShotScript = Join-Path $tempDir "Create-SA-File.ps1"
$logFile = Join-Path $tempDir "Create-SA-File.log"
# ---- Create the one-shot script that will run under the service account ----
# NOTE(review): '-replace' is a regex operation, so the pattern '\\' matches ONE
# backslash and the substitution '\\' inserts TWO — these lines DOUBLE every
# backslash (C:\x becomes C:\\x). Windows path APIs generally tolerate doubled
# separators, so this is likely benign, but it is almost certainly not what was
# intended — confirm and simplify (PowerShell strings need no backslash escaping).
$escapedSaJsonPath = $SaJsonPath -replace '\\', '\\'
$escapedOutputXmlPath = $OutputXmlPath -replace '\\', '\\'
$escapedLogFile = $logFile -replace '\\', '\\'
# Expanding here-string: $escaped* variables and backtick-escaped `$ sequences
# control what is baked in at generation time vs. evaluated when the one-shot
# script runs under the task's account. The generated script exits with distinct
# codes (2-5) so failures can be diagnosed from the task's LastTaskResult.
$oneShotContent = @"
# One-shot script created by create-and-run-one-shot-via-gMSA.ps1
# Runs RubrikSecurityCloud command to create service-account file
# Start transcript for detailed logging
Start-Transcript -Path "$escapedLogFile" -Append
Write-Output "Script started at: `$(Get-Date)"
Write-Output "Running as user: `$([System.Security.Principal.WindowsIdentity]::GetCurrent().Name)"
Write-Output "PowerShell version: `$(`$PSVersionTable.PSVersion)"
Try {
Write-Output "Attempting to import RubrikSecurityCloud module..."
Import-Module RubrikSecurityCloud -ErrorAction Stop
Write-Output "Successfully imported RubrikSecurityCloud module"
} Catch {
Write-Error "Failed to import RubrikSecurityCloud module: `$(`$_.Exception.Message)"
Write-Error "Full exception: `$(`$_.Exception | Format-List * | Out-String)"
Stop-Transcript
Exit 2
}
Try {
Write-Output "Checking input file: $escapedSaJsonPath"
# Ensure the input file exists
if (-not (Test-Path -Path "$escapedSaJsonPath")) {
Write-Error "Input SA JSON not found: $escapedSaJsonPath"
Stop-Transcript
Exit 3
}
Write-Output "Input file found, size: `$((Get-Item "$escapedSaJsonPath").Length) bytes"
Write-Output "Calling Set-RscServiceAccountFile..."
Write-Output "  Input: $escapedSaJsonPath"
Write-Output "  Output: $escapedOutputXmlPath"
Set-RscServiceAccountFile "$escapedSaJsonPath" -OutputFilePath "$escapedOutputXmlPath" -Verbose
Write-Output "Set-RscServiceAccountFile completed"
if (Test-Path -Path "$escapedOutputXmlPath") {
Write-Output "Service account XML created successfully: $escapedOutputXmlPath"
Write-Output "Output file size: `$((Get-Item "$escapedOutputXmlPath").Length) bytes"
Stop-Transcript
Exit 0
} else {
Write-Error "Set-RscServiceAccountFile completed but output file not found: $escapedOutputXmlPath"
Write-Error "Checking parent directory: `$(Split-Path "$escapedOutputXmlPath")"
if (Test-Path (Split-Path "$escapedOutputXmlPath")) {
Write-Output "Parent directory exists, listing contents:"
Get-ChildItem (Split-Path "$escapedOutputXmlPath") | ForEach-Object { Write-Output "  `$(`$_.Name)" }
} else {
Write-Error "Parent directory does not exist: `$(Split-Path "$escapedOutputXmlPath")"
}
Stop-Transcript
Exit 4
}
} Catch {
Write-Error "Error creating RBK service-account file: `$(`$_.Exception.Message)"
Write-Error "Full exception: `$(`$_.Exception | Format-List * | Out-String)"
Write-Error "Stack trace: `$(`$_.ScriptStackTrace)"
Stop-Transcript
Exit 5
}
"@
Set-Content -Path $oneShotScript -Value $oneShotContent -Encoding UTF8
# Make sure executable by scheduled task
# NOTE(review): this grants read/write to BUILTIN\Administrators only; the task
# principal (gMSA or service account) also needs read access to the script —
# confirm it inherits sufficient ACLs from %TEMP%'s parent.
try {
icacls $oneShotScript /grant "BUILTIN\Administrators:(R,W)" | Out-Null
} catch {
Write-Warning "Could not set permissions on script file: $($_.Exception.Message)"
}
# ---- Build Scheduled Task objects ----
# Construct the UserId based on account type
# gMSA account names carry a trailing '$' (backtick-escaped so PowerShell does
# not treat it as a variable sigil).
if ($AccountType -eq 'gMSA') {
if ([string]::IsNullOrWhiteSpace($Domain)) {
$userId = "$AccountName`$"
} else {
$userId = "$Domain\$AccountName`$"
}
$logonType = 'Password' # For gMSA, use Password logon type
} else {
if ([string]::IsNullOrWhiteSpace($Domain)) {
$userId = $AccountName
} else {
$userId = "$Domain\$AccountName"
}
$logonType = 'Password' # For regular service accounts, use Password logon type
}
# Action: run PowerShell to execute the one-shot script with output redirection
$psExePath = Join-Path $env:WINDIR 'System32\WindowsPowerShell\v1.0\powershell.exe'
$psArgs = "-NoProfile -NonInteractive -ExecutionPolicy Bypass -File `"$oneShotScript`""
$action = New-ScheduledTaskAction -Execute $psExePath -Argument $psArgs
# Trigger: once, a short time in the future (1 minute from now)
$startTime = (Get-Date).AddMinutes(1)
$trigger = New-ScheduledTaskTrigger -Once -At $startTime
# Principal: service account or gMSA
$principal = New-ScheduledTaskPrincipal -UserId $userId -LogonType $logonType -RunLevel Highest
# Settings: one-shot, don't persist run as logged on user UI
$settings = New-ScheduledTaskSettingsSet -AllowStartIfOnBatteries -DontStopIfGoingOnBatteries -StartWhenAvailable -ExecutionTimeLimit (New-TimeSpan -Hours 1)
$task = New-ScheduledTask -Action $action -Principal $principal -Trigger $trigger -Settings $settings
# ---- Register the scheduled task ----
if ($AccountType -eq 'gMSA') {
# For gMSA, register without password (AD will handle authentication)
Register-ScheduledTask -TaskName $TaskName -InputObject $task -Force
Write-Host "Registered scheduled task '$TaskName' to run as gMSA $userId at $startTime."
} else {
# For regular service accounts, register with password
if ($Password -is [SecureString]) {
$securePassword = $Password
} else {
$securePassword = ConvertTo-SecureString $Password -AsPlainText -Force
}
# NOTE(review): the BSTR allocated by SecureStringToBSTR is never freed
# (ZeroFreeBSTR), and the plain-text password lives in $plainPassword for the
# rest of the run — consider zeroing/freeing after registration.
$plainPassword = [Runtime.InteropServices.Marshal]::PtrToStringAuto([Runtime.InteropServices.Marshal]::SecureStringToBSTR($securePassword))
Register-ScheduledTask -TaskName $TaskName -InputObject $task -User $userId -Password $plainPassword -Force
Write-Host "Registered scheduled task '$TaskName' to run as service account $userId at $startTime."
}
# Optionally start immediately (Start-ScheduledTask will ignore trigger time and try to run it)
Start-ScheduledTask -TaskName $TaskName
Write-Host "Started task. Waiting for completion..."
# ---- Wait for completion and inspect result ----
# Polls the task every $pollInterval seconds until it leaves the running state
# (LastTaskResult 267009 = SCHED_S_TASK_RUNNING) or $maxWaitSeconds elapses.
$maxWaitSeconds = 600
$pollInterval = 3
$elapsed = 0
$lastResult = $null
# NOTE(review): $taskCompleted is set below but never read afterwards — candidate for removal.
$taskCompleted = $false
while ($true) {
Start-Sleep -Seconds $pollInterval
$elapsed += $pollInterval
try {
$info = Get-ScheduledTaskInfo -TaskName $TaskName -ErrorAction SilentlyContinue
if ($null -eq $info) {
Write-Warning "Could not query task info yet."
} else {
# LastTaskResult returns Win32 error code; 0 = success
$lastResult = $info.LastTaskResult
$state = $info.State
Write-Host "Task state: '$state'; LastResult: $lastResult"
# Task is complete if:
# 1. State is Ready/Disabled/Unknown AND we have a valid LastResult
# 2. OR if LastResult changed from 267009 (SCHED_S_TASK_RUNNING) to something else
if (($state -eq 'Ready' -or $state -eq 'Disabled' -or $state -eq 'Unknown' -or [string]::IsNullOrEmpty($state)) -and
($null -ne $lastResult -and $lastResult -ne 267009)) {
$taskCompleted = $true
break
}
if ($state -eq 'Running') {
Write-Host "Task still running..."
}
}
} catch {
Write-Warning "Error querying task status: $($_.Exception.Message)"
}
if ($elapsed -ge $maxWaitSeconds) {
throw "Timed out waiting for scheduled task to finish (waited $maxWaitSeconds seconds)."
}
}
Write-Host "Task completed after $elapsed seconds."
# ---- Check exit status and output ----
# LastTaskResult 0 means the one-shot script exited 0; any other value maps to
# the exit codes baked into the generated script (2-5) or a Win32 error.
if ($lastResult -eq 0) {
Write-Host "Task finished successfully (LastTaskResult=0)."
if (Test-Path -Path $OutputXmlPath) {
Write-Host "Found output XML: $OutputXmlPath"
} else {
Write-Warning "Task indicated success but output file not found at $OutputXmlPath"
}
} else {
Write-Host "Scheduled task finished with non-zero LastTaskResult: $lastResult"
# Display log file contents for troubleshooting
# (the one-shot script transcribes into $logFile in the temp dir).
if (Test-Path -Path $logFile) {
Write-Host "`n--- Log file contents ($logFile) ---"
Get-Content -Path $logFile | ForEach-Object { Write-Host $_ }
Write-Host "--- End of log file ---`n"
} else {
Write-Warning "Log file not found at: $logFile"
}
throw "Scheduled task finished with non-zero LastTaskResult: $lastResult. Check Event Viewer > Applications and Services Logs > Microsoft > Windows > TaskScheduler for details, or review the log output above."
}
# ---- Cleanup ----
if (-not $KeepArtifacts) {
Write-Host "Cleaning up task and temporary files..."
try { Unregister-ScheduledTask -TaskName $TaskName -Confirm:$false -ErrorAction SilentlyContinue } catch {}
try { Remove-Item -Path $tempDir -Recurse -Force -ErrorAction SilentlyContinue } catch {}
Write-Host "Cleanup complete."
} else {
Write-Host "Kept task '$TaskName' and temporary script at: $oneShotScript"
}
} catch {
# Best-effort cleanup on any failure path, then rethrow for the caller.
Write-Error "ERROR: $($_.Exception.Message)"
if (-not $KeepArtifacts -and $TaskName) {
try { Unregister-ScheduledTask -TaskName $TaskName -Confirm:$false -ErrorAction SilentlyContinue } catch {}
}
if (-not $KeepArtifacts -and $tempDir -and (Test-Path $tempDir)) {
try { Remove-Item -Path $tempDir -Recurse -Force -ErrorAction SilentlyContinue } catch {}
}
throw
}

View File

@@ -1,2 +0,0 @@
@echo off
"C:\Program Files\PowerShell\7\pwsh.exe" -ExecutionPolicy Bypass -File "%~dp0claimInstance.ps1"

2
prescript.cmd Normal file
View File

@@ -0,0 +1,2 @@
:: prescript.cmd - pre-backup wrapper: forwards all arguments to setActiveNode.ps1
:: so the SLA Managed Volume is repointed before the backup script runs.
@echo off
powershell -ExecutionPolicy Bypass -File "%~dp0setActiveNode.ps1" %*

90
release_v0/backup.txt Normal file
View File

@@ -0,0 +1,90 @@
#
# backup.ps1
#
# TODO: Update cleanup time based on backup type
# SQL Server instance to back up (CLUSTER\INSTANCE form)
$sqlInstance = "sqlfcsql\TESTINST"
#$directory = "H:\Backup"
# Backup target directory (Rubrik MV mount point)
$directory = "C:\Rubrik\mount"
# Weekday on which a FULL backup is taken
$fullBackupDay = 'Thursday'
# When $true, verify this node owns the clustered instance before backing up
$checkCluster = $false
$logFile = "C:\Rubrik\backup.log"
# Flag files record the date of the last FULL/DIFF so each runs once per day
$fullFlag = $directory + "\last_full.flag"
$diffFlag = $directory + "\last_diff.flag"
$today = (Get-Date).Date
# Returns $true when the supplied flag file exists and records today's date
# (format yyyy-MM-dd, compared against the script-level $today).
function FlagTakenToday($flagPath) {
    if (-not (Test-Path $flagPath)) {
        return $false
    }
    $recorded = (Get-Content $flagPath | Out-String).Trim()
    return ($recorded -eq $today.ToString("yyyy-MM-dd"))
}
# Appends a timestamped entry to the script-level $logFile and echoes it
# to the console.
function Write-Log($message) {
    $entry = "{0} {1}" -f (Get-Date -Format "yyyy-MM-dd HH:mm:ss"), $message
    Add-Content -Path $logFile -Value $entry
    Write-Host $entry
}
# Gate: optionally verify this node currently owns the clustered SQL instance
# before backing up ($checkCluster is $false by default in this release).
if ($checkCluster) {
# Check if SQL instance is running locally
$localNode = $env:COMPUTERNAME
# Instance portion after the backslash, e.g. TESTINST from sqlfcsql\TESTINST
$instanceName = $sqlInstance.Split('\')[1]
$clusterInstance = Get-ClusterResource | Where-Object { $_.ResourceType -eq "SQL Server" -and $_.Name -eq "SQL Server ($instanceName)" }
if ($clusterInstance) {
$ownerNode = $clusterInstance.OwnerNode.Name
if ($ownerNode -ne $localNode) {
Write-Log "SQL instance '$sqlInstance' is not running on local node '$localNode'. Exiting script."
exit 1
} else {
Write-Log "SQL instance '$sqlInstance' is running on local node '$localNode'. Proceeding."
}
} else {
# Instance not registered as a cluster resource at all - treat as fatal
Write-Log "ERROR: SQL instance '$sqlInstance' not found in cluster resources."
exit 1
}
} else {
Write-Log "INFO: Cluster check is disabled. Proceeding without verification."
}
# Decide the backup type for this run:
#   FULL on the configured weekday (once per day, tracked via flag file),
#   DIFF on other days (once per day), LOG for every subsequent run.
# $cleanupTime (hours) feeds DatabaseBackup's @CleanupTime parameter.
if ((Get-Date).DayOfWeek -eq $fullBackupDay) {
    if (-not (FlagTakenToday $fullFlag)) {
        $backupType = "FULL"
        $cleanupTime = 168
        Set-Content $fullFlag $today.ToString("yyyy-MM-dd")
        Write-Log "Selected FULL backup. Flag updated."
    } else {
        $backupType = "LOG"
        # BUGFIX: $cleanupTime was previously left unset on this path, producing
        # an invalid "@CleanupTime = ," clause in the generated SQL. Use the
        # same 24h retention as the other LOG branch.
        $cleanupTime = 24
        Write-Log "FULL backup already taken today. Selected LOG backup."
    }
} else {
    if (-not (FlagTakenToday $diffFlag)) {
        $backupType = "DIFF"
        $cleanupTime = 168
        Set-Content $diffFlag $today.ToString("yyyy-MM-dd")
        Write-Log "Selected DIFF backup. Flag updated."
    } else {
        $backupType = "LOG"
        $cleanupTime = 24
        Write-Log "DIFF backup already taken today. Selected LOG backup."
    }
}
# Build and run Ola Hallengren's DatabaseBackup for the selected type.
$query = "EXECUTE [dbo].[DatabaseBackup] @Databases = 'ALL_DATABASES', @Directory = '$directory', @BackupType = '$backupType', @Verify = 'N', @CleanupTime = $cleanupTime, @CheckSum = 'Y', @LogToTable = 'Y'"
Write-Log "Executing backup type: $backupType"
# -b makes sqlcmd exit non-zero when the batch raises a SQL error, so T-SQL
# failures inside DatabaseBackup are detected, not just client-side errors.
$sqlcmdOutput = & sqlcmd -S $sqlInstance -b -Q $query 2>&1
$sqlcmdExitCode = $LASTEXITCODE
if ($sqlcmdExitCode -eq 0) {
    foreach ($line in $sqlcmdOutput) {
        Write-Log $line
    }
    Write-Log "$backupType Backup execution completed."
} else {
    Write-Log "ERROR: Backup execution failed. Exit code: $sqlcmdExitCode. Output: $sqlcmdOutput"
    # BUGFIX: propagate the failure to the caller - previously the script
    # exited 0 here, so the invoking wrapper reported success.
    exit 1
}

3
release_v0/start.txt Normal file
View File

@@ -0,0 +1,3 @@
:: filepath: c:\Rubrik\Scripts\start.cmd
:: Wrapper invoked by the MV backup schedule; forwards all args to backup.ps1
@echo off
powershell -ExecutionPolicy Bypass -File "%~dp0backup.ps1" %*

742
release_v1/backup.ps1 Normal file
View File

@@ -0,0 +1,742 @@
# Command-line parameters for backup.ps1
param(
# SQL Server instance to back up, e.g. "CLUSTER\INSTANCE" or a bare instance name
[Parameter(Mandatory=$true)]
[string]$SqlInstance,
# Name of the Rubrik SLA Managed Volume providing the backup target paths
[Parameter(Mandatory=$true)]
[string]$MvName,
# Number of concurrent backup jobs (validated later to 1-4); >1 enables DatabasesInParallel
[Parameter(Mandatory=$false)]
[int]$Jobs = 1,
# Days to keep files in the local script log directory
[Parameter(Mandatory=$false)]
[int]$LogRetentionDays = 30
,
# When set, delete the CONTENTS of each retrieved MV path before backing up
[Parameter(Mandatory=$false)]
[switch]$Nuke
)
# backup.ps1 - Parallel database backup script using Ola H
#
# Uses Ola H's built-in parallel processing by starting multiple concurrent backup jobs
# Each job will automatically share the database load using DatabasesInParallel=Y if Jobs>1
# TODO: See if there is way to query QueueDatabase during backup to monitor progress
# TODO: Better trapping when RSC connection fails
# Weekday on which a FULL backup is scheduled
$fullBackupDay = 'Thursday'
# Force a FULL backup if the last one is more than this many days old
$fullBackupOverdueDays = 7
# Rubrik Security Cloud service-account credential file (absolute path)
$SAFile = "C:\Rubrik\scripts\rbksql.xml"
# Directory for this script's own log files
$logDir = "C:\Rubrik\logs"
# Thread-safe logger: writes a timestamped (optionally job-prefixed) entry to
# the shared $logFile, serialised across processes via a named mutex, and
# always echoes the entry to the console.
#   $message - text to log
#   $jobId   - optional job identifier, rendered as a "[JOB-<id>] " prefix
function Write-Log($message, $jobId = "") {
    $timestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
    $jobPrefix = if ($jobId) { "[JOB-$jobId] " } else { "" }
    $logEntry = "$timestamp $jobPrefix$message"
    # Use mutex for thread-safe logging to main log file
    $mutex = $null
    $acquired = $false
    try {
        $mutex = [System.Threading.Mutex]::new($false, "BackupLogMutex")
        $acquired = $mutex.WaitOne(5000) # 5 second timeout
        if ($acquired) {
            Add-Content -Path $logFile -Value $logEntry -Encoding UTF8
        } else {
            Write-Warning "Could not acquire log mutex, writing to console only"
        }
    } catch {
        Write-Warning "Logging error: $($_.Exception.Message)"
    } finally {
        if ($mutex) {
            # BUGFIX: only release when the wait actually succeeded - calling
            # ReleaseMutex() without ownership throws ApplicationException,
            # which the old code did on every WaitOne timeout.
            if ($acquired) {
                try { $mutex.ReleaseMutex() } catch { }
            }
            $mutex.Dispose()
        }
    }
    Write-Host $logEntry
}
# Parse instance name from SQL instance parameter
$instanceParts = $SqlInstance -split '\\'
if ($instanceParts.Length -eq 2) {
# Format: HOSTNAME\INSTANCENAME or CLUSTERNAME\INSTANCENAME
$instanceName = $instanceParts[1]
# SqlInstance stays as provided (HOSTNAME\INSTANCENAME)
} elseif ($instanceParts.Length -eq 1) {
# Single value provided - could be hostname (default instance) or instance name on local host
$singleValue = $instanceParts[0]
# If it's "MSSQLSERVER" (default instance name), treat as default instance on local host
if ($singleValue -eq "MSSQLSERVER") {
$instanceName = "MSSQLSERVER"
$SqlInstance = $env:COMPUTERNAME # Connect to default instance (no instance name)
} else {
# Assume it's an instance name on the local host
$instanceName = $singleValue
$SqlInstance = "$($env:COMPUTERNAME)\$singleValue"
}
} else {
# More than one backslash: fall back to a sanitised copy of the whole string.
# NOTE(review): .Replace('\\', '_') replaces a literal two-backslash sequence,
# not single '\'; single backslashes survive here and are only caught by the
# invalid-char loop below - confirm this is the intended behaviour.
$instanceName = $SqlInstance.Replace('\\', '_').Replace('/', '_')
}
if ([string]::IsNullOrEmpty($instanceName)) {
Write-Host "ERROR: Could not determine instance name from SqlInstance: '$SqlInstance'"
exit 1
}
# Sanitize and trim the instance name for safe filenames
$instanceName = $instanceName.Trim()
$invalidChars = [IO.Path]::GetInvalidFileNameChars()
foreach ($c in $invalidChars) {
$escaped = [regex]::Escape($c)
$instanceName = $instanceName -replace $escaped, '_'
}
$timestamp = Get-Date -Format "yyyyMMdd_HHmmss"
# Ensure log directory exists before building/using log file
if (-not (Test-Path $logDir)) {
try {
New-Item -ItemType Directory -Path $logDir -Force | Out-Null
} catch {
Write-Host "ERROR: Could not create log directory $logDir : $($_.Exception.Message)"
exit 1
}
}
# Per-run log file: backup_<instance>_<timestamp>.log in $logDir
$logFileName = "backup_{0}_{1}.log" -f $instanceName, $timestamp
$logFile = Join-Path $logDir $logFileName
Write-Log "DEBUG: SqlInstance='$SqlInstance', instanceName='$instanceName', logFile='$logFile'"
# Function to clean up old log files
#######################################
# Deletes *.log files in $logDir older than the retention window.
# Globals:   $logDir (read); $logFile (written indirectly via Write-Log)
# Arguments: $retentionDays - age threshold in days
#######################################
function Remove-OldLogs {
    param([int]$retentionDays)
    if (-not (Test-Path $logDir)) {
        try {
            New-Item -ItemType Directory -Path $logDir -Force | Out-Null
            Write-Log "INFO: Created log directory: $logDir"
        } catch {
            Write-Log "ERROR: Failed to create log directory $logDir. $($_.Exception.Message)"
            return
        }
    }
    $cutoffDate = (Get-Date).AddDays(-$retentionDays)
    Write-Log "INFO: Cleaning up log files older than $retentionDays days (before $($cutoffDate.ToString('yyyy-MM-dd')))"
    $oldLogs = Get-ChildItem -Path $logDir -Filter "*.log" | Where-Object { $_.LastWriteTime -lt $cutoffDate }
    $deletedCount = 0
    # BUGFIX: the loop variable was previously named $logFile, shadowing the
    # script-level $logFile that Write-Log resolves via PowerShell's dynamic
    # scoping - warnings raised inside this loop were appended to the very
    # file being deleted instead of the main log.
    foreach ($oldLog in $oldLogs) {
        try {
            Remove-Item $oldLog.FullName -Force
            $deletedCount++
        } catch {
            Write-Log "WARNING: Failed to delete old log file $($oldLog.Name): $($_.Exception.Message)"
        }
    }
    Write-Log "INFO: Cleaned up $deletedCount old log files"
}
# Clean up old logs before starting
Remove-OldLogs -retentionDays $LogRetentionDays
# Import SQL Server PowerShell module: prefer the modern SqlServer module and
# fall back to the legacy SQLPS module if it is the only one available.
try {
    $sqlModule = @('SqlServer', 'SQLPS') |
        Where-Object { Get-Module -ListAvailable -Name $_ } |
        Select-Object -First 1
    if (-not $sqlModule) {
        throw "No SQL Server PowerShell module found"
    }
    Import-Module $sqlModule -ErrorAction Stop
    Write-Log "INFO: $sqlModule PowerShell module loaded successfully."
    if (-not (Get-Command Invoke-Sqlcmd -ErrorAction SilentlyContinue)) {
        throw "Invoke-Sqlcmd command not available"
    }
} catch {
    Write-Log "ERROR: Failed to import SQL Server PowerShell module. Please install it using: Install-Module -Name SqlServer -AllowClobber"
    Write-Log "ERROR: $($_.Exception.Message)"
    exit 1
}
# Import Rubrik Security Cloud module
try {
    Import-Module RubrikSecurityCloud -ErrorAction Stop
    Write-Log "INFO: RubrikSecurityCloud module loaded successfully."
} catch {
    Write-Log "ERROR: Failed to import RubrikSecurityCloud module. $($_.Exception.Message)"
    exit 1
}
# ---------------------------------------------------------------------------
# Cluster ownership check. If the SQL FCI is currently owned by another node,
# repoint the SLA Managed Volume at that node via RSC and exit non-zero so the
# backup does not run on the wrong host.
# ---------------------------------------------------------------------------
$localNode = $env:COMPUTERNAME
$clusterInstance = Get-ClusterResource | Where-Object { $_.ResourceType -eq "SQL Server" -and $_.Name -eq "SQL Server ($instanceName)" }
if ($clusterInstance) {
    # BUGFIX: compare node *names* - OwnerNode is a ClusterNode object and the
    # object-vs-string comparison was unreliable (the v0 script used .Name).
    $ownerNode = $clusterInstance.OwnerNode.Name
    if ($ownerNode -ne $localNode) {
        Write-Log "SQL instance '$SqlInstance' is not running on local node '$localNode'. Updating the MV."
        Connect-Rsc -ServiceAccountFile $SAFile
        Write-Log "Connected to Rubrik Security Cloud."
        $newHost = Get-RscHost -Name $ownerNode -OsType WINDOWS
        # Look up the Managed Volume by exact name, pulling the fields we must
        # carry over into the update mutation.
        $query = New-RscQuery -GqlQuery slaManagedVolumes -AddField Nodes.HostDetail, Nodes.SmbShare, Nodes.ClientConfig, Nodes.ClientConfig.BackupScript, Nodes.ClientConfig.PreBackupScript
        $query.var.filter = @(Get-RscType -Name Filter)
        $query.var.filter[0].field = "NAME_EXACT_MATCH"
        $query.var.filter[0].Texts = $mvName
        $mvResult = $query.Invoke()
        if (-not $mvResult.nodes -or $mvResult.nodes.Count -eq 0) {
            Write-Log "ERROR: Managed Volume '$mvName' not found. This may be due to insufficient permissions or the volume not existing."
            Disconnect-Rsc
            exit 1
        }
        $mvDetail = $mvResult.nodes[0]
        Write-Log "Found Managed Volume: $($mvDetail.Name) (ID: $($mvDetail.Id), Status: $($mvDetail.hostDetail.Status), HostDetail Name: $($mvDetail.hostDetail.Name))"
        # Build the updateManagedVolume mutation: keep the existing SMB and
        # client-script configuration while switching the client host (and the
        # SMB valid IPs) to the current owner node.
        $query = New-RscMutation -GqlMutation updateManagedVolume
        $query.Var.input = Get-RscType -Name UpdateManagedVolumeInput
        $query.Var.input.update = Get-RscType -Name ManagedVolumeUpdateInput
        $query.Var.input.update.config = Get-RscType -Name ManagedVolumePatchConfigInput
        $query.Var.input.update.slaClientConfig = Get-RscType -Name ManagedVolumePatchSlaClientConfigInput
        $query.Var.input.Id = $mvDetail.Id
        $query.Var.input.update.Name = $mvName
        $query.Var.input.update.config.SmbDomainName = $mvDetail.SmbShare.DomainName
        $query.Var.input.update.config.SmbValidIps = $newHost.Name
        $query.Var.input.update.config.SmbValidUsers = $mvDetail.SmbShare.ValidUsers + $mvDetail.SmbShare.ActiveDirectoryGroups
        $query.Var.input.update.slaClientConfig.clientHostId = $newHost.Id
        $query.Var.input.update.slaClientConfig.channelHostMountPaths = $mvDetail.ClientConfig.ChannelHostMountPaths
        $query.Var.input.update.slaClientConfig.backupScriptCommand = $mvDetail.ClientConfig.BackupScript.ScriptCommand
        # Only set pre-backup script fields if a pre-backup script was configured
        if ($mvDetail.ClientConfig.PreBackupScript.ScriptCommand) {
            $query.Var.input.update.slaClientConfig.preBackupScriptCommand = $mvDetail.ClientConfig.PreBackupScript.ScriptCommand
            $query.Var.input.update.slaClientConfig.preBackupScriptTimeout = $mvDetail.ClientConfig.PreBackupScript.Timeout
            $query.Var.input.update.slaClientConfig.shouldCancelBackupOnPreBackupScriptFailure = $mvDetail.ClientConfig.ShouldCancelBackupOnPreBackupScriptFailure
            $query.Var.input.update.slaClientConfig.shouldDisablePreBackupScript = $false
        } else {
            $query.Var.input.update.slaClientConfig.shouldDisablePreBackupScript = $true
        }
        $query.Var.input.update.slaClientConfig.shouldDisablePostBackupScriptOnBackupFailure = $true
        $query.Var.input.update.slaClientConfig.shouldDisablePostBackupScriptOnBackupSuccess = $true
        # Echo the mutation variables into the transcript for troubleshooting
        $query.gqlRequest().Variables
        # BUGFIX: this script has no -dryrun parameter, so the previous
        # "if (-not $dryrun)" guard referenced an undefined variable and
        # always invoked anyway; perform the update directly.
        $query.Invoke()
        # Now must exit 1 to stop the backup continuing on the wrong node
        Disconnect-Rsc
        exit 1
    } else {
        Write-Log "SQL instance '$SqlInstance' is running on local node '$localNode'. No action needed."
    }
} else {
    Write-Log "ERROR: SQL instance '$SqlInstance' not found in cluster resources. Continuing assuming standalone instance."
}
# Connect to Rubrik and retrieve managed volume paths
try {
    Connect-Rsc -ServiceAccountFile $SAFile
    Write-Log "INFO: Connected to Rubrik Security Cloud."
    # Look up the Managed Volume by exact name to obtain its channel mount paths
    $query = New-RscQuery -GqlQuery slaManagedVolumes -AddField Nodes.HostDetail, Nodes.SmbShare, Nodes.ClientConfig, Nodes.ClientConfig.BackupScript, Nodes.ClientConfig.PreBackupScript
    $query.var.filter = @(Get-RscType -Name Filter)
    $query.var.filter[0].field = "NAME_EXACT_MATCH"
    $query.var.filter[0].Texts = $MvName
    $mvDetail = $query.Invoke()
    if (-not $mvDetail.nodes -or $mvDetail.nodes.Count -eq 0) {
        Write-Log "ERROR: Managed Volume '$MvName' not found. This may be due to insufficient permissions or the volume not existing."
        # BUGFIX: close the RSC session before exiting - the failover branch
        # above disconnects on this error path, but this branch leaked the
        # session.
        Disconnect-Rsc
        exit 1
    }
    # The MV's channel host mount paths become the backup target directories
    $paths = $mvDetail.nodes[0].ClientConfig.ChannelHostMountPaths
    Write-Log "INFO: Retrieved paths: $($paths -join ', ')"
} catch {
    Write-Log "ERROR: Failed to retrieve paths from Rubrik. $($_.Exception.Message)"
    exit 1
}
# If -Nuke is set, delete the contents of each retrieved path (but keep the folder itself).
# DESTRUCTIVE: every file and subfolder inside each MV mount path is removed.
# Safety rails: empty entries are skipped and drive roots (e.g. C:\) refused.
if ($Nuke) {
Write-Log "INFO: -nuke flag set. Beginning recursive deletion of contents for retrieved paths."
foreach ($p in $paths) {
if (-not $p) { continue }
$pathToCheck = $p.Trim()
# Determine root to avoid deleting drive root like C:\
try { $root = [IO.Path]::GetPathRoot($pathToCheck) } catch { $root = $null }
if ([string]::IsNullOrEmpty($pathToCheck)) {
Write-Log "WARNING: Skipping empty path entry"
continue
}
# Refuse to operate directly on a drive root (trailing-backslash tolerant)
if ($root -and ($pathToCheck.TrimEnd('\') -eq $root.TrimEnd('\'))) {
Write-Log "ERROR: Refusing to nuke root path '$pathToCheck'. Skipping."
continue
}
if (-not (Test-Path -LiteralPath $pathToCheck)) {
Write-Log "WARNING: Path '$pathToCheck' does not exist. Skipping."
continue
}
Write-Log "INFO: NUKING contents of '$pathToCheck' (deleting all files & subfolders inside)."
try {
# Enumerate children and delete each item so the folder itself remains
Get-ChildItem -LiteralPath $pathToCheck -Force -ErrorAction SilentlyContinue | ForEach-Object {
try {
Remove-Item -LiteralPath $_.FullName -Recurse -Force -ErrorAction Stop
Write-Log "INFO: Deleted: $($_.FullName)"
} catch {
# Per-item failures are logged but do not abort the remaining deletions
Write-Log "WARNING: Failed to delete $($_.FullName): $($_.Exception.Message)"
}
}
} catch {
Write-Log "ERROR: Failed to enumerate or delete contents of '$pathToCheck': $($_.Exception.Message)"
}
}
Write-Log "INFO: -nuke operation complete. Continuing with backup flow."
}
# DatabaseBackup's @Directory parameter accepts a comma-separated list of targets
$directoryParam = $paths -join ', '
# Validate job count (only 1-4 concurrent jobs are supported)
if ($Jobs -notin 1..4) {
    Write-Log "ERROR: Jobs parameter must be between 1 and 4. Provided: $Jobs"
    exit 1
}
Write-Log "INFO: Starting $Jobs parallel backup jobs"
$today = (Get-Date).Date
#######################################
# Decide which backup type to run (FULL / DIFF / LOG) plus the matching
# @CleanupTime (hours). "Already taken today" state is tracked via flag files
# (last_full.flag / last_diff.flag) in one of the candidate directories.
# Arguments: $directoryParam - comma-separated list of backup directories
# Returns:   hashtable @{ Type; CleanupTime; Reason }
# NOTE(review): the flag file is written *before* the backup actually runs, so
# a failed backup still marks the day as done - confirm this is intentional.
#######################################
function Get-BackupType($directoryParam) {
    # Support multiple candidate directories. Scan them in deterministic order for existing flags.
    # BUGFIX: force array context - with a single directory the pipeline used
    # to unwrap to a plain string, so $dirs[0] below returned the first
    # *character* of the path instead of the path.
    $dirs = @()
    if ($directoryParam) {
        $dirs = @($directoryParam -split ',' | ForEach-Object { $_.Trim() } | Where-Object { $_ -ne '' })
    }
    # BUGFIX: fail fast on an empty list instead of crashing later in Join-Path
    # with a confusing binding error.
    if ($dirs.Count -eq 0) {
        Write-Log "ERROR: Get-BackupType called with no candidate backup directories."
        throw "Get-BackupType requires at least one backup directory"
    }
    # Build lists of found flags (in candidate order)
    $foundFull = @()
    $foundDiff = @()
    foreach ($d in $dirs) {
        $full = Join-Path $d "last_full.flag"
        $diff = Join-Path $d "last_diff.flag"
        if (Test-Path $full) { $foundFull += $full }
        if (Test-Path $diff) { $foundDiff += $diff }
    }
    # Determine if full backup is overdue using the first-found full flag (if any)
    $isFullBackupOverdue = $false
    if ($foundFull.Count -gt 0) {
        $fullFlag = $foundFull[0]
        try {
            $lastFullDate = [DateTime]::ParseExact((Get-Content $fullFlag).Trim(), "yyyy-MM-dd", $null)
            $daysSinceLastFull = ($today - $lastFullDate).Days
            $isFullBackupOverdue = $daysSinceLastFull -gt $fullBackupOverdueDays
            Write-Log "INFO: Last full backup was $daysSinceLastFull days ago (from $fullFlag). Overdue threshold: $fullBackupOverdueDays days."
        } catch {
            # Unparseable flag contents: err on the side of taking a FULL
            $isFullBackupOverdue = $true
            Write-Log "WARNING: Could not parse last full backup date in $fullFlag. Treating as overdue."
        }
    } else {
        $isFullBackupOverdue = $true
        Write-Log "WARNING: No last full backup date found in any candidate directories. Treating as overdue."
    }
    # Helper to ensure directory exists
    function Ensure-DirExists([string]$path) {
        if (-not (Test-Path $path)) {
            try { New-Item -ItemType Directory -Path $path -Force | Out-Null } catch { }
        }
    }
    # Determine preferred write location: prefer existing related flag location, otherwise first candidate dir
    $firstDir = $dirs[0]
    # If it's a full backup day or overdue, plan for full backup
    if ((Get-Date).DayOfWeek -eq $fullBackupDay -or $isFullBackupOverdue) {
        # If a full flag exists, use its location; else use firstDir
        $targetFullFlag = if ($foundFull.Count -gt 0) { $foundFull[0] } else { Join-Path $firstDir "last_full.flag" }
        $targetDir = Split-Path $targetFullFlag -Parent
        Ensure-DirExists $targetDir
        $currentValue = $null
        if (Test-Path $targetFullFlag) {
            try { $currentValue = (Get-Content $targetFullFlag).Trim() } catch { $currentValue = $null }
        }
        if (-not $currentValue -or $currentValue -ne $today.ToString("yyyy-MM-dd")) {
            try {
                Set-Content -Path $targetFullFlag -Value $today.ToString("yyyy-MM-dd") -Encoding UTF8
                Write-Log "INFO: Created/Updated full backup flag file: $targetFullFlag"
            } catch {
                Write-Log "ERROR: Failed to create/update full backup flag file: $targetFullFlag. $($_.Exception.Message)"
            }
            $reason = if ($isFullBackupOverdue) { "overdue" } else { "scheduled" }
            return @{ Type = "FULL"; CleanupTime = 168; Reason = $reason }
        } else {
            return @{ Type = "LOG"; CleanupTime = 24; Reason = "full already taken today" }
        }
    }
    # Otherwise, plan for differential
    # Prefer an existing diff flag location if present; else prefer the existing full flag location (write diff alongside full); otherwise firstDir
    if ($foundDiff.Count -gt 0) {
        $targetDiffFlag = $foundDiff[0]
    } elseif ($foundFull.Count -gt 0) {
        $targetDiffFlag = Join-Path (Split-Path $foundFull[0] -Parent) "last_diff.flag"
    } else {
        $targetDiffFlag = Join-Path $firstDir "last_diff.flag"
    }
    $targetDir = Split-Path $targetDiffFlag -Parent
    Ensure-DirExists $targetDir
    $currentDiffValue = $null
    if (Test-Path $targetDiffFlag) {
        try { $currentDiffValue = (Get-Content $targetDiffFlag).Trim() } catch { $currentDiffValue = $null }
    }
    if (-not $currentDiffValue -or $currentDiffValue -ne $today.ToString("yyyy-MM-dd")) {
        try {
            Set-Content -Path $targetDiffFlag -Value $today.ToString("yyyy-MM-dd") -Encoding UTF8
            Write-Log "INFO: Created/Updated diff backup flag file: $targetDiffFlag"
        } catch {
            Write-Log "ERROR: Failed to create/update diff backup flag file: $targetDiffFlag. $($_.Exception.Message)"
        }
        return @{ Type = "DIFF"; CleanupTime = 168; Reason = "differential scheduled" }
    } else {
        return @{ Type = "LOG"; CleanupTime = 24; Reason = "diff already taken today" }
    }
}
# Determine backup type
$backupInfo = Get-BackupType $directoryParam
Write-Log "Selected $($backupInfo.Type) backup ($($backupInfo.Reason))"
# Build the Ola H query. Include DatabasesInParallel only when multiple jobs are used
# Build parameter lines so we can avoid leaving a trailing comma when omitting DatabasesInParallel
$paramLines = @(
"@Databases = 'ALL_DATABASES'",
"@Directory = '$directoryParam'",
"@BackupType = '$($backupInfo.Type)'",
"@Verify = 'N'",
"@CleanupTime = $($backupInfo.CleanupTime)",
"@CheckSum = 'Y'",
"@LogToTable = 'Y'"
)
# Only enable DatabasesInParallel when we run more than one job
# (with it, concurrent executions share the work via Ola H's QueueDatabase)
if ($Jobs -gt 1) {
$paramLines += "@DatabasesInParallel = 'Y'"
}
# Join with commas and indentation to produce clean SQL parameter list
$params = $paramLines -join ",`n  "
$query = "EXECUTE [dbo].[DatabaseBackup] `n  $params"
Write-Log "SQL Query: $query"
# Function to execute backup job with message capture
#######################################
# Launches one PowerShell background job that connects to $sqlInstance with
# integrated security, executes $query via ExecuteReader, and writes SQL
# PRINT messages / result rows to a per-job log file derived from
# $baseLogFile ("...-job<N>.log").
# Arguments: $jobId, $sqlInstance, $query, $baseLogFile
# Returns:   the Job object from Start-Job (monitored by the caller)
#######################################
function Start-BackupJob {
param(
[int]$jobId,
[string]$sqlInstance,
[string]$query,
[string]$baseLogFile
)
$scriptBlock = {
param($JobId, $SqlInstance, $Query, $BaseLogFile)
# Debug the base log file parameter
Write-Output "DEBUG: BaseLogFile parameter = '$BaseLogFile'"
# Create job-specific log file path with fallback
if ($BaseLogFile -and $BaseLogFile.Trim() -ne "") {
$jobLogFile = $BaseLogFile -replace '\.log$', "-job$JobId.log"
} else {
# Fallback log file path using logDir
# NOTE(review): $using:logDir is captured from the parent session when the
# job starts - verify this resolves inside the Start-Job scriptblock as
# intended on the target PowerShell version.
$jobLogFile = Join-Path $using:logDir "backup-multi-job$JobId.log"
}
Write-Output "DEBUG: Job log file will be: '$jobLogFile'"
# Per-job logger: appends to $jobLogFile and (unless suppressed) emits the
# entry on the job's output stream so the parent can Receive-Job it.
function Write-JobLog($message, $suppressConsole = $false) {
$timestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
$logEntry = "$timestamp [JOB-$JobId] $message"
if ($jobLogFile -and $jobLogFile.Trim() -ne "") {
try {
Add-Content -Path $jobLogFile -Value $logEntry -Encoding UTF8
# Output to console for debugging (unless suppressed)
if (-not $suppressConsole) {
Write-Output "LOGGED TO $jobLogFile : $logEntry"
}
} catch {
Write-Output "LOG ERROR: $($_.Exception.Message) - File: $jobLogFile"
}
} else {
Write-Output "NO LOG FILE: jobLogFile is empty or null"
}
# Always output to console for job monitoring (unless suppressed)
if (-not $suppressConsole) {
Write-Output $logEntry
}
}
try {
Write-JobLog "Starting backup job"
# Create SQL connection with message capture
$connection = New-Object System.Data.SqlClient.SqlConnection
$connection.ConnectionString = "Server=$SqlInstance;Integrated Security=true;Connection Timeout=30"
$infoMessages = @()
# Event handler for informational messages (PRINT statements)
# NOTE(review): the handler appends to $script:infoMessages while this
# scriptblock declared a plain $infoMessages above - verify both resolve to
# the same variable inside a background job, otherwise the summary count
# below will always read 0.
$connection.add_InfoMessage({
param($sqlSender, $e)
$message = $e.Message
if ($message -and $message.Trim() -ne "") {
$script:infoMessages += $message
Write-JobLog "SQL INFO: $message" $true # Suppress console output for verbose messages
}
})
try {
Write-JobLog "Attempting to connect to SQL Server: $SqlInstance"
$connection.Open()
Write-JobLog "Connected to SQL Server successfully"
$command = New-Object System.Data.SqlClient.SqlCommand
$command.Connection = $connection
$command.CommandText = $Query
$command.CommandTimeout = 0 # No timeout for backup operations
Write-JobLog "Executing backup command..."
# Execute and capture any result sets
$reader = $command.ExecuteReader()
# Process any result sets
while ($reader.Read()) {
$rowData = @()
for ($i = 0; $i -lt $reader.FieldCount; $i++) {
$rowData += "$($reader.GetName($i)): $($reader.GetValue($i))"
}
if ($rowData.Count -gt 0) {
$resultLine = "SQL RESULT: $($rowData -join ', ')"
Write-JobLog $resultLine
Write-Output $resultLine # Also output for Receive-Job
}
}
$reader.Close()
$summaryMessage = "Backup completed successfully. Captured $($infoMessages.Count) messages."
Write-JobLog $summaryMessage
Write-Output $summaryMessage # Also output for Receive-Job
# Output all captured SQL messages for debugging (only to log file, not console)
Write-JobLog "=== SQL MESSAGES START ===" $true
foreach ($msg in $infoMessages) {
Write-JobLog "SQL: $msg" $true
}
Write-JobLog "=== SQL MESSAGES END ===" $true
# Don't return hashtable - just output success message
# (the parent scans Receive-Job output for this SUCCESS/FAILED sentinel)
Write-Output "JOB-${JobId}: SUCCESS"
}
finally {
if ($connection.State -eq [System.Data.ConnectionState]::Open) {
$connection.Close()
}
$connection.Dispose()
}
}
catch {
$errorMessage = "ERROR: Backup failed - $($_.Exception.Message)"
Write-JobLog $errorMessage
Write-Output $errorMessage # Also output for Receive-Job
# Check for specific connection errors
if ($_.Exception.Message -like "*server*not found*" -or
$_.Exception.Message -like "*network-related*" -or
$_.Exception.Message -like "*instance*" -or
$_.Exception.Message -like "*login*failed*") {
$connError = "ERROR: CONNECTION FAILURE - Check SQL Server instance name and connectivity"
Write-JobLog $connError
Write-Output $connError
}
# Log SQL Server specific errors
if ($_.Exception -is [System.Data.SqlClient.SqlException]) {
Write-JobLog "ERROR: SQL Server Error Details:"
Write-Output "ERROR: SQL Server Error Details:"
foreach ($sqlError in $_.Exception.Errors) {
$errorDetail = "ERROR: Severity: $($sqlError.Class), State: $($sqlError.State), Number: $($sqlError.Number)"
Write-JobLog $errorDetail
Write-Output $errorDetail
$errorMsg = "ERROR: Message: $($sqlError.Message)"
Write-JobLog $errorMsg
Write-Output $errorMsg
if ($sqlError.Procedure) {
$procError = "ERROR: Procedure: $($sqlError.Procedure), Line: $($sqlError.LineNumber)"
Write-JobLog $procError
Write-Output $procError
}
}
}
# Log full exception details for debugging
$fullError = "ERROR: Full Exception Type: $($_.Exception.GetType().Name)"
Write-JobLog $fullError
Write-Output $fullError
if ($_.Exception.InnerException) {
$innerError = "ERROR: Inner Exception: $($_.Exception.InnerException.Message)"
Write-JobLog $innerError
Write-Output $innerError
}
Write-Output "JOB-${JobId}: FAILED"
}
}
return Start-Job -ScriptBlock $scriptBlock -ArgumentList $jobId, $sqlInstance, $query, $baseLogFile
}
# Launch the requested number of parallel backup jobs with a staggered start.
Write-Log "Starting $Jobs parallel backup jobs"
[System.Collections.ArrayList]$jobList = @()
foreach ($jobNumber in 1..$Jobs) {
    $job = Start-BackupJob -jobId $jobNumber -sqlInstance $SqlInstance -query $query -baseLogFile $logFile
    $null = $jobList.Add($job)
    Write-Log "Started backup job $jobNumber (Job ID: $($job.Id))"
    Start-Sleep -Milliseconds 4000 # Delay to stagger job starts
}
# Monitor jobs and capture output
# Polls every 5s; each job that reaches a terminal state has its output
# drained (with -Keep, so it can be re-read later) into the main log.
Write-Log "Monitoring $($jobList.Count) backup jobs..."
$allJobsCompleted = $false
[System.Collections.ArrayList]$completedJobs = @()
while (-not $allJobsCompleted) {
Start-Sleep -Seconds 5
foreach ($job in $jobList) {
if ($job.Id -notin $completedJobs) {
# Check if job is no longer running
if ($job.State -eq "Completed" -or $job.State -eq "Failed" -or $job.State -eq "Stopped") {
$null = $completedJobs.Add($job.Id)
# Get all job output
$jobOutput = Receive-Job -Job $job -Keep # Use -Keep to preserve output
if ($job.State -eq "Completed") {
Write-Log "Job $($job.Id) completed successfully"
# Log all job output to main log
if ($jobOutput) {
Write-Log "=== Job $($job.Id) Output ==="
foreach ($line in $jobOutput) {
Write-Log "$line"
}
Write-Log "=== End Job $($job.Id) Output ==="
}
} else {
Write-Log "ERROR: Job $($job.Id) failed with state: $($job.State)"
if ($jobOutput) {
Write-Log "=== Job $($job.Id) Error Output ==="
foreach ($line in $jobOutput) {
Write-Log "ERROR: $line"
}
Write-Log "=== End Job $($job.Id) Error Output ==="
}
}
}
}
}
$allJobsCompleted = $completedJobs.Count -eq $jobList.Count
# Progress update
$runningCount = ($jobList | Where-Object { $_.State -eq "Running" }).Count
if ($runningCount -gt 0) {
Write-Log "Progress: $($completedJobs.Count)/$($jobList.Count) jobs completed, $runningCount still running..."
}
}
Write-Log "All backup jobs completed"
# Snapshot each job's state and scan its output for the FAILED sentinel before
# the job objects are removed (Receive-Job -Keep preserves the stream).
$jobResults = @{}
foreach ($job in $jobList) {
    $output = Receive-Job -Job $job -Keep -ErrorAction SilentlyContinue
    $failed = [bool]($output | Where-Object { $_ -like "*JOB-*: FAILED" } | Select-Object -First 1)
    $jobResults[$job.Id] = @{
        State  = $job.State
        Failed = $failed
    }
}
# Stop any stragglers and remove all background job objects from the session.
Write-Log "Cleaning up completed jobs..."
foreach ($backupJob in $jobList) {
    try {
        if ($backupJob.State -eq "Running") {
            Write-Log "WARNING: Job $($backupJob.Id) still running, stopping it..."
            Stop-Job -Job $backupJob -Force
            Start-Sleep -Seconds 2
        }
        Remove-Job -Job $backupJob -Force -ErrorAction SilentlyContinue
        Write-Log "Cleaned up job $($backupJob.Id)"
    } catch {
        Write-Log "WARNING: Could not clean up job $($backupJob.Id): $($_.Exception.Message)"
    }
}
# Final status check: fail the script if any job reported the FAILED sentinel.
$failedJobIds = @($jobResults.Keys | Where-Object { $jobResults[$_].Failed })
if ($failedJobIds.Count -eq 0) {
    Write-Log "SUCCESS: All $($jobResults.Count) backup jobs completed successfully"
} else {
    Write-Log "ERROR: $($failedJobIds.Count) out of $($jobResults.Count) backup jobs failed"
    foreach ($jobId in $failedJobIds) {
        Write-Log "ERROR: Job ID $jobId failed"
    }
    Write-Log "CRITICAL: Backup operation failed - check errors above"
    exit 1
}

128
setActiveNode.ps1 Normal file
View File

@@ -0,0 +1,128 @@
##########################################################################
#
# Update an SLA MV to point to the correct host
# Created by Rubrik PS for ZF, September 2025
#
# Must be run with a Global service account.
#
# Requires RubrikSecurityCloud module to be installed and working with
# a Global Service Account with the following rights (TBC)
#
# Create the service account file with:
# Set-RscServiceAccountFile sa.json -OutputFilePath sa-rbksql.xml
#
# Example invocation
# .\setActiveNode.ps1 -SqlInstance "sqlfcsql\TESTINST" -mvName "JP-ZF-SQL"
#
# v0.1 Initial Release
#
##########################################################################
param (
[Parameter(Mandatory=$True,
HelpMessage="Instance to claim")]
[string]$SqlInstance,
[Parameter(Mandatory=$True,
HelpMessage="Managed Volume name")]
[string]$mvName,
[Parameter(Mandatory=$False,
HelpMessage="Do not change the MV")]
[switch]$dryrun
)
# SA File must be an absolute path
$SAFile = "C:\Rubrik\scripts\rbksql.xml"
# Log file for this script's own output
$logFile = "C:\Rubrik\scripts\setActiveNode.log"
# When $true, only act if the clustered instance is NOT owned by this node
$checkCluster = $true
###########################
# Script begins
###########################
# Fail fast: make every cmdlet error terminating
$ErrorActionPreference = 'Stop'
# Appends a timestamped entry to $logFile and mirrors it to the console.
function Write-Log($message) {
    $entry = "{0} {1}" -f (Get-Date -Format "yyyy-MM-dd HH:mm:ss"), $message
    Add-Content -Path $logFile -Value $entry
    Write-Host $entry
}
Import-Module RubrikSecurityCloud
# ---------------------------------------------------------------------------
# If this node does not own the clustered SQL instance, repoint the SLA
# Managed Volume at the owning node and exit 1 so the backup does not proceed
# on the wrong host.
# ---------------------------------------------------------------------------
if ($checkCluster) {
    # Check if SQL instance is running locally
    $localNode = $env:COMPUTERNAME
    $instanceName = $SqlInstance.Split('\')[1]
    $clusterInstance = Get-ClusterResource | Where-Object { $_.ResourceType -eq "SQL Server" -and $_.Name -eq "SQL Server ($instanceName)" }
    if ($clusterInstance) {
        # BUGFIX: compare node *names* - OwnerNode is a ClusterNode object and
        # an object-vs-string comparison is unreliable.
        $ownerNode = $clusterInstance.OwnerNode.Name
        if ($ownerNode -ne $localNode) {
            Write-Log "SQL instance '$SqlInstance' is not running on local node '$localNode'. Updating the MV."
            Connect-Rsc -ServiceAccountFile $SAFile
            Write-Log "Connected to Rubrik Security Cloud."
            $newHost = Get-RscHost -Name $ownerNode -OsType WINDOWS
            $query = New-RscQuery -GqlQuery slaManagedVolumes -AddField Nodes.HostDetail, Nodes.SmbShare, Nodes.ClientConfig, Nodes.ClientConfig.BackupScript, Nodes.ClientConfig.PreBackupScript
            $query.var.filter = @(Get-RscType -Name Filter)
            $query.var.filter[0].field = "NAME_EXACT_MATCH"
            $query.var.filter[0].Texts = $mvName
            # BUGFIX: guard against an empty result before indexing nodes[0]
            # (matches the check later added to backup.ps1).
            $mvResult = $query.Invoke()
            if (-not $mvResult.nodes -or $mvResult.nodes.Count -eq 0) {
                Write-Log "ERROR: Managed Volume '$mvName' not found. This may be due to insufficient permissions or the volume not existing."
                Disconnect-Rsc
                exit 1
            }
            $mvDetail = $mvResult.nodes[0]
            Write-Log "Found Managed Volume: $($mvDetail.Name) (ID: $($mvDetail.Id), Status: $($mvDetail.hostDetail.Status), HostDetail Name: $($mvDetail.hostDetail.Name))"
            # Build the updateManagedVolume mutation, carrying over existing SMB
            # and client-script settings while switching the client host.
            $query = New-RscMutation -GqlMutation updateManagedVolume
            $query.Var.input = Get-RscType -Name UpdateManagedVolumeInput
            $query.Var.input.update = Get-RscType -Name ManagedVolumeUpdateInput
            $query.Var.input.update.config = Get-RscType -Name ManagedVolumePatchConfigInput
            $query.Var.input.update.slaClientConfig = Get-RscType -Name ManagedVolumePatchSlaClientConfigInput
            $query.Var.input.Id = $mvDetail.Id
            $query.Var.input.update.Name = $mvName
            $query.Var.input.update.config.SmbDomainName = $mvDetail.SmbShare.DomainName
            $query.Var.input.update.config.SmbValidIps = $newHost.Name
            $query.Var.input.update.config.SmbValidUsers = $mvDetail.SmbShare.ValidUsers + $mvDetail.SmbShare.ActiveDirectoryGroups
            $query.Var.input.update.slaClientConfig.clientHostId = $newHost.Id
            $query.Var.input.update.slaClientConfig.channelHostMountPaths = $mvDetail.ClientConfig.ChannelHostMountPaths
            $query.Var.input.update.slaClientConfig.backupScriptCommand = $mvDetail.ClientConfig.BackupScript.ScriptCommand
            # Only set pre-backup script fields if a pre-backup script was configured
            if ($mvDetail.ClientConfig.PreBackupScript.ScriptCommand) {
                $query.Var.input.update.slaClientConfig.preBackupScriptCommand = $mvDetail.ClientConfig.PreBackupScript.ScriptCommand
                $query.Var.input.update.slaClientConfig.preBackupScriptTimeout = $mvDetail.ClientConfig.PreBackupScript.Timeout
                $query.Var.input.update.slaClientConfig.shouldCancelBackupOnPreBackupScriptFailure = $mvDetail.ClientConfig.ShouldCancelBackupOnPreBackupScriptFailure
                $query.Var.input.update.slaClientConfig.shouldDisablePreBackupScript = $false
            } else {
                $query.Var.input.update.slaClientConfig.shouldDisablePreBackupScript = $true
            }
            $query.Var.input.update.slaClientConfig.shouldDisablePostBackupScriptOnBackupFailure = $true
            $query.Var.input.update.slaClientConfig.shouldDisablePostBackupScriptOnBackupSuccess = $true
            # Echo the mutation variables into the transcript for troubleshooting
            $query.gqlRequest().Variables
            if (-not $dryrun) {
                $result = $query.Invoke()
            } else {
                Write-Log "Dry run mode: Managed Volume update not invoked."
            }
            # Now must exit 1 to stop the backup continuing on the wrong node
            Disconnect-Rsc
            exit 1
        } else {
            Write-Log "SQL instance '$SqlInstance' is running on local node '$localNode'. No action needed."
        }
    } else {
        Write-Log "ERROR: SQL instance '$SqlInstance' not found in cluster resources."
        exit 1
    }
} else {
    Write-Log "INFO: Cluster check is disabled. Proceeding without verification."
}

View File

@@ -1,3 +1,3 @@
:: filepath: c:\Rubrik\Scripts\start.cmd
@echo off
powershell -ExecutionPolicy Bypass -File "%~dp0backup.ps1"
powershell -ExecutionPolicy Bypass -File "%~dp0backup.ps1" %*