use strict;
use warnings;
use File::Slurp;
use File::Copy qw(move);    # provides move(); was missing, so move() previously died at runtime
use Parallel::ForkManager;

# Read up to $MAX_FILES "*.txt" entries from an already-open directory
# handle.  Returns an array ref of bare filenames (no directory prefix),
# or nothing (undef / empty list) once the directory is exhausted.
sub read_next_batch_of_filenames {
    my ($DH, $MAX_FILES) = @_;

    my @files;
    while (my $fn = readdir $DH) {
        next if $fn !~ m/\.txt\z/;    # also filters out '.' and '..'
        push @files, $fn;
        last if scalar(@files) >= $MAX_FILES;
    }

    if (@files) {
        return \@files;
    }
    else {
        return;    # bare return: undef in scalar context, () in list context
    }
}

# Move each file in @$files (full or cwd-relative paths) into
# "$outDir/<channel>", where the channel id is read from an
# "A|CHNL_ID|<digits>" record inside the file itself.
sub move_files {
    my ($outDir, $files) = @_;

    foreach my $f (@$files) {
        my $data = read_file($f);
        my ($channel) = $data =~ /A\|CHNL_ID\|(\d+)/i;

        # Without this check an undef $channel warns and silently misfiles
        # the file directly into "$outDir/".
        die "No CHNL_ID record found in '$f'\n" if !defined $channel;

        move($f, "$outDir/$channel")
            or die "Failed to move '$f' to '$outDir/$channel' ($!)\n";
    }
}

# Scan $dir for *.txt files and move them, in parallel batches, into
# per-channel subdirectories of $outDir.  Both arguments are optional and
# default to the original placeholders, so zero-argument callers still work.
sub parallelized_move {
    my ($dir, $outDir) = @_;
    $dir    //= 'FIXME';
    $outDir //= 'FIXME';

    my $MAX_PROCESSES     = 4;       # tweak this to find the best number
    my $FILES_PER_PROCESS = 1000;    # process in batches of 1000, to limit forking

    my $pm = Parallel::ForkManager->new($MAX_PROCESSES);

    opendir my $DH, $dir
        or die "Failed to open '$dir' for reading ($!)\n";

    DATA_LOOP:
    while (my $files = read_next_batch_of_filenames($DH, $FILES_PER_PROCESS)) {

        # readdir yields bare names; qualify them so the child works no
        # matter what its cwd is.
        my @paths = map { "$dir/$_" } @$files;

        # Forks and returns the pid for the child:
        my $pid = $pm->start and next DATA_LOOP;
        move_files($outDir, \@paths);
        $pm->finish;    # Terminates the child process
    }

    closedir $DH
        or die "Failed to close directory handle for '$dir' ($!)\n";

    # Without this the parent returns while children may still be moving
    # files (the original script exited without waiting).
    $pm->wait_all_children;
}