
in reply to File::Find question

Consider using the preprocess and postprocess callback parameters to filter out and report files and directories you can't read. File::Find is recursive, so it's important to catch errors not only for the initial directory, but also for any files and directories beneath it.

Here's an example that shows how you might do this:

#!/usr/bin/perl
use warnings;
use strict;
use File::Find;
use File::Spec::Functions;

my $directory   = 'foodir';
my $size        = 0;
my @unreadables = ();
my @errors      = ();

sub wanted {
    # test: handling of @errors
    #local $_ = $_;
    #if (/foo\.txt/) {
    #    $_ = "nonexistentfile";
    #}
    if (stat) {
        -f _ and $size += -s _;
    }
    else {
        push @errors, catfile($File::Find::dir, $_) . " !$!";
    }
}

# filter unreadable directories
# from the list of files
# returned by readdir() before
# processing the directory:
#
sub preprocess {
    my @readables;
    for (@_) {
        if (!-r) {
            push @unreadables, catfile($File::Find::dir, $_) . " !not readable";
        }
        else {
            push @readables, $_;
        }
    }
    return @readables;
}

# post-directory processing:
#
sub postprocess {
    unless (@unreadables or @errors) {
        return;
    }
    print "$_\n" for (@unreadables, @errors);
    @unreadables = @errors = ();
}

find(
    {
        wanted      => \&wanted,
        preprocess  => \&preprocess,
        postprocess => \&postprocess,
    },
    $directory
);

print "$directory size: $size\n";
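
If you want to see the preprocess branch fire, one way is to set up a small test tree with an unreadable subdirectory before running the script above. A minimal sketch, assuming hypothetical names (foodir/locked, foodir/sub/foo.txt) just for illustration:

#!/usr/bin/perl
use warnings;
use strict;
use File::Path qw(make_path);

# build a test tree; the names here are made up for the example
make_path('foodir/locked', 'foodir/sub');

open my $fh, '>', 'foodir/sub/foo.txt' or die "open: $!";
print $fh "some data\n";
close $fh;

# strip all permissions so preprocess() should report foodir/locked
chmod 0000, 'foodir/locked' or die "chmod: $!";

Note that if you run the scripts as root the -r test won't trigger, since root can read the directory regardless of its mode.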