Looking to copy files to an external drive. I want to copy only the files whose closed date (the DateClosed column in the metadata) falls inside the range I provide. Here is my script. Not sure why it's failing. Can someone point out the probably simple thing I am missing?
######################## Start Variables ########################
######################## Dave's Script ########################
$destination = "D:\Extract"
$webUrl = "https://test"
$listUrl = "https://test/sites/Area7/conversion%20test"
##############################################################
$startdate = [datetime]'9/20/2017 12:00:00'
$enddate = [datetime]'9/22/2017 12:00:01'
Add-PSSnapin Microsoft.SharePoint.PowerShell -ErrorAction SilentlyContinue
$web = Get-SPWeb -Identity $webUrl
$list = $web.GetList($listUrl)
function ProcessFolder
{
param($folderUrl)
$folder = $web.GetFolder($folderUrl)
foreach ($file in $folder.Files)
{
#Read the closed date from the file's associated list item
[datetime]$datevar = $file.Item["DateClosed"]
if($datevar -gt $startdate -and $datevar -lt $enddate)
{
#Ensure destination directory
$destinationfolder = $destination + "/" + $folder.Url
if (!(Test-Path -path $destinationfolder))
{
$dest = New-Item $destinationfolder -type directory
}
#Download file
$binary = $file.OpenBinary()
$stream = New-Object System.IO.FileStream(($destinationfolder + "/" + $file.Name), [System.IO.FileMode]::Create)
$writer = New-Object System.IO.BinaryWriter($stream)
$writer.write($binary)
$writer.Close()
}
}
}
#Download root files
ProcessFolder($list.RootFolder.Url)
#Download files in folders
foreach ($folder in $list.Folders) {
ProcessFolder($folder.Url)
}
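In case the date comparison itself is the issue, here is a minimal sanity-check sketch I can run in the same session before copying; it assumes DateClosed is the internal name of the column on this list and just prints each item's value so I can confirm the values fall in my range:
# Sanity check: print each list item's name and its DateClosed value
# (assumes "DateClosed" is the internal column name)
foreach ($item in $list.Items)
{
$closed = $item["DateClosed"]
Write-Host ($item.Name + " - " + $closed)
}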