Hi PerlMonks, I run this script as a cron job every minute and it seems to work fine, but every so often files are copied across that already exist on the backup server and shouldn't be copied again. This is killing my bandwidth and I can't work out what's causing it. One thing I've noticed is that if I run the script from the command prompt and Ctrl-C it, files which are already there are sometimes copied across. Any ideas?
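The only theory I've come up with myself is that two runs might overlap, since the scp can easily take longer than a minute when there's a lot to copy. I haven't tried it yet, but a lock-file guard along these lines is what I had in mind (the lock path is just made up):

use Fcntl qw(:flock);

# made-up lock path -- any file the script can write to would do
open(my $lock, ">", "/usr/scripts/backup.lock") or die "can't open lock file: $!";
flock($lock, LOCK_EX | LOCK_NB)
    or exit;    # a previous run is still going, so bail out quietly

Anyway, here's the script as it stands: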
use strict;
use warnings;
use File::Find;
use File::Basename;

my @backupFiles = ();
my %chompedList = ();
my $list = "/usr/scripts/blist.txt";
unlink($list) if -e $list;
system("ssh user\@backupserver ls -c /usr/local/apache/htdocs >$list")
+;
open(LIST, "$list") or die "$!";
@backupFiles = <LIST>;
foreach $file (@backupFiles){chomp $file; $chompedList{$file} = $file;
+}
find(\&dofile, </usr/myfiles/*.htm>);
sub dofile {
    my ($name, $path, $suffix) = fileparse($File::Find::name);

    # Only copy if the filename isn't already in the remote listing
    if (not exists $chompedList{"$name$suffix"}) {
        system("scp -C \"$File::Find::name\" user\@backupserver:/usr/local/apache/htdocs");
    }
} # end dofile
close(LIST);   # filehandle names are case-sensitive; close(list) was closing nothing
unlink($list);
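While posting this I also noticed that I never check whether the ssh listing succeeds. If it failed or was cut short, $list would be empty or partial and everything would look like it was missing from the server. Would something like this be the right fix (just a sketch, same names as above)?

# system() returns 0 on success; anything else means the ssh/ls failed
my $rc = system("ssh user\@backupserver ls -c /usr/local/apache/htdocs >$list");
die "remote listing failed: $?" if $rc != 0;

# an empty listing almost certainly means trouble, not an empty server
die "listing came back empty, not risking a full re-copy" if -z $list;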