<#
.SYNOPSIS
This script automates copying files from a source path (a network share) to a destination path (the local file system).
.DESCRIPTION
The script monitors the specified folders for new files using the .NET FileSystemWatcher class and copies them to a destination path.
It includes a catch-up mechanism to process files that were created while the script was not running.
The script supports logging and can be run with a switch that copies files without deleting them from the source.
.PARAMETER tempDrive
The letter of the temporary drive to be used for mapping the network path.
.PARAMETER sourcePath
The UNC path of the source directory from where files will be copied.
.PARAMETER destinationPath
The local file system path where the files will be copied to.
.PARAMETER logFilePath
The path to the log file where all operations will be logged.
.PARAMETER runLockFilePath
The path to the lock file that prevents multiple instances of the script from running; deleting it also acts as an "emergency stop".
.PARAMETER credentialPath
The path to the XML file containing the credentials as a PSCredential object for accessing the network path.
.PARAMETER foldersToMonitor
An array of folder names within the source path that should be monitored for new files.
.PARAMETER copyonly
A switch parameter. If set, files will be copied but not deleted from the source after copying.
.PARAMETER catchup
A switch parameter. If set, the script will perform the catch-up process to copy existing files when it starts.
.EXAMPLE
.\FileCopyScript.ps1 -tempDrive "Z" -sourcePath "\\server\share" -destinationPath "C:\destination" -logFilePath "C:\logs\copy.log" -runLockFilePath "C:\run.lock" -credentialPath "C:\creds.xml" -foldersToMonitor "Folder1", "Folder2" -copyonly -catchup
This example runs the script with specified parameters, copying files from "\\server\share" to "C:\destination" without deleting the source files and performing the catch-up process.
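.EXAMPLE
.\FileCopyScript.ps1 -catchup
A minimal sketch that relies on the default parameter values defined below: it performs the catch-up pass and then keeps watching the default folders; source files are deleted after copying because -copyonly is not set.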
.NOTES
Version: 2.1
Author: SAL
Creation Date: 31.05.2024
Purpose/Change: Added catch-up switch parameter
#>
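# Note on the credential file: -credentialPath expects a PSCredential saved with Export-Clixml.
# A minimal sketch for creating it (the path matches the default below; adjust as needed):
#   Get-Credential | Export-Clixml -Path "C:\foobar\creds.xml"
# Export-Clixml protects the password with DPAPI, so the file can only be decrypted by the same
# user account on the same machine that created it; create it as the account that runs this script.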
param (
[ValidateNotNullOrEmpty()]
[string]$tempDrive = "T",
[string]$sourcePath = "\\1.2.3.4\thud\xyzzy",
[string]$destinationPath = "D:\thud\xyzzy",
[string]$logFilePath = "D:\thud\xyzzy\LogFile.csv",
[string]$runLockFilePath = "D:\thud\xyzzy\RunLockFile.lock",
[string]$credentialPath = "C:\foobar\creds.xml",
[string[]]$foldersToMonitor = @('FOO','BAR','BAZ','QUX','QUUX','CORGE'),
[switch]$copyonly,
[switch]$catchup
)
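# The default values above are environment-specific placeholders; in practice they are overridden
# on the command line as shown in the .EXAMPLE sections of the help block.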
# Determine the culture-specific list separator
$listSeparator = (Get-Culture).TextInfo.ListSeparator
# Initialize an in-memory batch for logs
$script:logBatch = @()
function Write-Log {
param (
[string]$message,
[string]$separator = $listSeparator
)
$logEntry = "$(Get-Date -Format 'MM/dd/yyyy HH:mm:ss')$separator$message"
# Buffer the entry; Flush-LogBatch (driven by the timer below and the cleanup code) writes it to the log file
$script:logBatch += $logEntry
Write-Host "Log Entry Added: $logEntry"
}
function Flush-LogBatch {
if ($script:logBatch.Count -gt 0) {
$script:logBatch | Out-File -FilePath $logFilePath -Append -Encoding UTF8
Write-Host "Log Batch Flushed to File"
} else {
Write-Host "No Log Entries to Flush"
}
$script:logBatch = @()
}
# Create a timer that periodically flushes the buffered log entries to disk
$timer = New-Object System.Timers.Timer
$timer.Interval = 60000 # Fire every 60 seconds
$timer.AutoReset = $true # Restart automatically after each interval
$timer.Enabled = $true
# Register the handler for the timer's Elapsed event
Register-ObjectEvent -InputObject $timer -EventName Elapsed -Action { Flush-LogBatch } | Out-Null
# Define the maximum number of concurrent jobs
$maxConcurrentJobs = 5
# Function to start a new job if under the limit
function Start-JobIfUnderLimit {
param (
[scriptblock]$scriptBlock,
[object[]]$argumentList
)
# Check and remove any completed or failed jobs before starting a new one
Cleanup-Jobs
# Get the current number of running jobs
$runningJobs = Get-Job | Where-Object { $_.State -eq 'Running' }
if ($runningJobs.Count -lt $maxConcurrentJobs) {
try {
# Start a new job
$job = Start-Job -ScriptBlock $scriptBlock -ArgumentList $argumentList
Write-Log "Job $($job.Id) started."
} catch {
Write-Log "Failed to start job: $($_.Exception.Message)"
}
} else {
Write-Log "Maximum job limit reached. Waiting for jobs to complete."
# Wait for any job to complete and then try starting the job again
Wait-Job -Job $runningJobs -Any | Out-Null
Start-Sleep -Seconds 2 # Short pause to allow job cleanup
Start-JobIfUnderLimit -scriptBlock $scriptBlock -argumentList $argumentList
}
}
function Cleanup-Jobs {
# Get all jobs that are completed or failed
$completedOrFailedJobs = Get-Job | Where-Object { $_.State -eq 'Completed' -or $_.State -eq 'Failed' }
# Remove the completed or failed jobs
foreach ($job in $completedOrFailedJobs) {
Remove-Job -Job $job -Force
Write-Log "Removed job $($job.Id) which was in state $($job.State)."
}
}
# Create the run-lock file to indicate the script is running
if (-not (Test-Path -Path $runLockFilePath)) {
New-Item -Path $runLockFilePath -ItemType File
Write-Log "Run-lock file created."
} else {
Write-Log "Run-lock file already exists. Another instance may be running."
exit
}
# Import credentials
$credential = Import-Clixml -Path $credentialPath
# Map the network drive
$networkDrive = $tempDrive
$networkPath = $sourcePath
# Check if the drive is already in use and remove it
if (Test-Path "${networkDrive}:") {
Write-Log "Drive $networkDrive is already in use. Attempting to remove it."
Remove-PSDrive -Name $networkDrive -Force
}
# Now map the network drive
try {
New-PSDrive -Name $networkDrive -PSProvider FileSystem -Root $networkPath -Persist -Credential $credential -ErrorAction Stop | Out-Null
Write-Log "Drive $networkDrive mapped successfully."
} catch {
Write-Log "Failed to map drive $($networkDrive): $_"
# Remove the run-lock file so the next run is not blocked, then abort
Remove-Item -Path $runLockFilePath -Force
exit
}
# From here on, access the source through the mapped drive letter
$sourcePath = "${networkDrive}:\"
# Process existing files in the monitored folders if catchup is enabled
if ($catchup) {
foreach ($folder in $foldersToMonitor) {
$folderPath = Join-Path -Path $sourcePath -ChildPath $folder
$filesToProcess = Get-ChildItem -Path $folderPath -File -Recurse
foreach ($file in $filesToProcess) {
# Calculate the relative path
$relativePath = $file.FullName.Substring($sourcePath.Length)
# Construct the destination file path using the relative path
$destinationFile = Join-Path -Path $destinationPath -ChildPath $relativePath
# Ensure the destination directory exists
$destinationDir = Split-Path -Path $destinationFile -Parent
if (-not (Test-Path -Path $destinationDir)) {
New-Item -ItemType Directory -Path $destinationDir -Force
}
# Copy (and delete if necessary) each file
Copy-Item -Path $file.FullName -Destination $destinationFile -Force
Write-Log "Catch-up - Processed file: $($file.Name)"
if (-not $copyonly) {
Remove-Item -Path $file.FullName -Force
Write-Log "Catch-up - Removed source file: $($file.Name)"
}
}
}
}
# Set up one file watcher per monitored folder and register its Created event
$watchers = @()
foreach ($folder in $foldersToMonitor) {
$watcher = New-Object System.IO.FileSystemWatcher
$watcher.Path = Join-Path -Path $sourcePath -ChildPath $folder
$watcher.Filter = "*.*"
$watcher.IncludeSubdirectories = $false # Only watch the folder itself, not its subdirectories
$watcher.EnableRaisingEvents = $true # Required for the watcher to raise Created events
$watchers += $watcher
Register-ObjectEvent -InputObject $watcher -EventName Created -Action {
$eventArgs = $Event.SourceEventArgs
Start-JobIfUnderLimit -scriptBlock {
param($eventArgs, $sourcePath, $destinationPath, $copyonly, $listSeparator, $networkDrive, $logFilePath)
# Redefine Write-Log function within the job
function Write-Log {
param (
[string]$message,
[string]$separator = $listSeparator
)
$logEntry = "$(Get-Date -Format 'MM/dd/yyyy HH:mm:ss')$separator$message"
Add-Content -Path $logFilePath -Value $logEntry -Encoding UTF8
Write-Host "Log Entry Added: $logEntry"
}
$path = $eventArgs.FullPath
$name = $eventArgs.Name
Write-Log "Job script block execution started for file: $name"
$destinationFile = $path -replace ("^${networkDrive}:", $destinationPath)
Write-Log "Source Path: $path"
Write-Log "Destination File: $destinationFile"
try {
Write-Log "Preparing to copy file: $name"
$destinationDir = Split-Path -Path $destinationFile -Parent
if (-not (Test-Path -Path $destinationDir)) {
New-Item -ItemType Directory -Path $destinationDir -Force
Write-Log "Destination directory $destinationDir created."
}
Start-Sleep -Seconds 2
if (-not (Test-Path $destinationPath)) {
Write-Log "Destination path $destinationPath does not exist or is not accessible."
return
}
Copy-Item -Path $path -Destination $destinationFile -Force
Write-Log "Copy operation successful for file: $name"
if (-not $copyonly) {
Remove-Item -Path $path -Force
Write-Log "Source file $name removed from $path"
}
} catch {
Write-Log "An error occurred while copying file: $name - Error: $_"
}
Write-Log "Job script block execution completed for file: $name"
} -argumentList $eventArgs, $sourcePath, $destinationPath, $copyonly, $listSeparator, $networkDrive, $logFilePath
}
}
Write-Log "File watcher started."
# Loop to check for the run-lock file and clean up jobs
do {
# Cleanup completed or failed jobs
Cleanup-Jobs
Start-Sleep -Seconds 10
} while (Test-Path -Path $runLockFilePath)
# Cleanup process
function Cleanup {
Write-Log "Run-lock file deleted or cleanup initiated. Stopping the watchers."
Get-Job | Remove-Job -Force # Stop all jobs
Get-EventSubscriber | Unregister-Event # Detach the watcher and timer event handlers
foreach ($watcher in $watchers) {
$watcher.EnableRaisingEvents = $false
$watcher.Dispose()
}
Write-Log "File watchers stopped and resources freed."
$timer.Stop()
$timer.Dispose()
Write-Log "Timer stopped; flushing remaining log entries."
Flush-LogBatch
Remove-PSDrive -Name $networkDrive -Force
Write-Log "Network drive $networkDrive disconnected."
}
# Call the cleanup function at the end of the script or when the run-lock file is missing
try {
Cleanup
}
catch {
Write-Log "An error occurred during cleanup: $_"
}
finally {
# Ensure the drive is released and buffered logs are flushed even if Cleanup failed
if (Get-PSDrive -Name $networkDrive -ErrorAction SilentlyContinue) {
Remove-PSDrive -Name $networkDrive -Force
Write-Log "Network drive $networkDrive forcefully disconnected in finally block."
}
Write-Log "Finally block reached; flushing any remaining log entries."
Flush-LogBatch
}
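# To stop the script gracefully, delete the run-lock file (see -runLockFilePath): the monitoring loop
# above then exits and Cleanup releases the jobs, event subscriptions, watchers, timer and mapped drive.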