I'm trying to figure out why a script is writing its output twice: the first time in proper order, the second time in reverse order. It should only be writing it once.
#!/usr/bin/perl
use warnings;
use strict;
use Fcntl ':mode';
use File::Find;
no warnings 'File::Find';
no warnings 'uninitialized';
my $dir = "/var/log/tivoli/";
my $mtab = "/etc/mtab";
my $permFile = "world_writable_files.txt";
my $tmpFile = "world_writable_files.tmp";
my $exclude = "/usr/local/etc/world_writable_excludes.txt";
my $mask = S_IWUSR | S_IWGRP | S_IWOTH;
my (%excludes, %devNums);
my $errHeader;
# Compile a list of mountpoints that need to be scanned
my @mounts;
open MT, "<${mtab}" or die "Cannot open ${mtab}, $!";
# We only want the local mountpoints
while (<MT>) {
    if ($_ =~ /ext[34]/) {
        chomp;
        my @line = split;
        push(@mounts, $line[1]);
        my @stats = stat($line[1]);
        $devNums{$stats[0]} = undef;
    }
}
close MT;
# Build a hash from /usr/local/etc/world_writable_excludes.txt
if ((! -e $exclude) || (-z $exclude)) {
$errHeader = <<HEADER;
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!                                                  !!
!!  /usr/local/etc/world_writable_excludes.txt is   !!
!!  missing or empty. This report includes every    !!
!!  world-writable file, including those which      !!
!!  are expected and should be excluded.            !!
!!                                                  !!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
HEADER
} else {
    open XCLD, "<${exclude}" or die "Cannot open ${exclude}, $!\n";
    while (<XCLD>) {
        chomp;
        $excludes{$_} = 1;
    }
}
sub wanted {
    my @dirStats = stat($File::Find::name);

    # Is it excluded from the report...
    return if exists $excludes{$File::Find::name};

    # ...is it the Tivoli installation directory or a special directory...
    if ($File::Find::name =~ m{^/sys|^/proc|^/dev|^/opt/IBM/ITM}) {
        $File::Find::prune = 1;
        return;
    }

    # ...a regular file, ...
    return unless -f;

    # ...local, ...
    return unless (exists $devNums{$dirStats[0]});

    # ...and world-writable?
    return unless ($dirStats[2] & $mask) == $mask;

    # If so, add the file to the list of world-writable files
    print(WWFILE "$File::Find::name\n");
}
# Create the output file path if it doesn't already exist.
mkdir($dir) or die "Cannot execute mkdir on ${dir}, $!" unless (-d $dir);
# Create our filehandle for writing our findings
open WWFILE, ">${dir}${tmpFile}" or die "Cannot open ${dir}${tmpFile}, $!";
print(WWFILE "${errHeader}") if ($errHeader);
find(\&wanted, @mounts);
close WWFILE;
# If no world-writable files have been found, ${tmpFile} should be zero-size;
# delete it so Tivoli won't alert.
if (-z "${dir}${tmpFile}") {
    unlink "${dir}${tmpFile}";
} else {
    rename("${dir}${tmpFile}", "${dir}${permFile}") or die "Cannot rename file ${dir}${tmpFile}, $!";
}
Example output:
# cat world_writable_files.txt
/var/opt/ds_agent/am/diagnostic_1.log
/home/User1/report.pl.20130220
/home/User1/report.pl.20130220
/var/opt/ds_agent/am/diagnostic_1.log
Each file is written only once by the script, so I am wondering if the filesystem is being scanned twice, once in each direction. I don't see where that would be happening, but I don't know.
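To verify that wanted() really does visit each path twice, one check I may try is a guard just before the print (just a rough sketch; %seen is a name I made up for this):

# Near the other "my" declarations at the top of the script:
my %seen;

# Then at the top of wanted(), before any of the other checks:
if ($seen{$File::Find::name}++) {
    warn "Duplicate visit: $File::Find::name\n";
    return;
}

If that warns, at least I'd know the duplication happens inside find() rather than somewhere downstream.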
I've printed out the @mounts array to ensure it only contains one entry for each mountpoint. Additionally, I've tried finddepth() to see if it would make a difference. It didn't.
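One thing I haven't ruled out is overlap between the mountpoints themselves: as far as I can tell, File::Find happily descends across filesystem boundaries, so if @mounts held, say, both / and /home, everything under /home would be walked twice, and those files would still pass the %devNums test each time. A quick sanity check along those lines (again, just a sketch):

# Flag any mountpoint that sits inside another entry of @mounts;
# find() would traverse such a subtree once per entry.
for my $outer (@mounts) {
    my $prefix = ($outer eq '/') ? '/' : "${outer}/";
    for my $inner (@mounts) {
        next if $inner eq $outer;
        print "NOTE: ${inner} lies under ${outer}\n"
            if index($inner, $prefix) == 0;
    }
}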
Any thoughts on this conundrum?