#!/usr/bin/perl
# Nightly backup script: walk a directory tree, collect files with
# selected extensions (skipping some subtrees), and pack them into a
# date-stamped gzip-compressed tar archive.
use warnings;
use strict;
use File::Find;
use Archive::Tar;
# Flush prints immediately so the progress messages appear before the
# (potentially slow) find/archive steps complete.
$| = 1;
# Root of the tree to search for files to back up.
our $directoriesToSearch = '/internal/';
# Directory that receives the finished archives.
our $backupLocation = '/ipbackups/';
# Build a date-stamped archive name.  localtime() returns year-1900
# and a 0-based month, so both need adjusting.
my ($day,$month,$year) = (localtime(time))[3,4,5];
$year += 1900;
$month++;
# Archive filename
our $backupFilename = "backup_$month-$day-$year.tar.gz";
# Files selected by actOnFiles(), in discovery order.
our @filesToBackup;
# Subtrees that must never be archived.
our @directoriesToSkip = qw{
/internal/scripts/chatlog
/internal/scripts/schedule
};
# Only files with these extensions are backed up.
our @extensionsToBackup = qw{cgi pl dat txt log};
# Let the user know what we're doing and begin finding files.
print "Find files for backup...";
find(\&actOnFiles,($directoriesToSearch));
# Notify the user that the find process is done and that we're
# creating the archive. Then, create the archive.
print "Done!\nCreating archive $backupFilename...";
# Second argument (9) requests gzip compression at maximum level.
# create_archive() returns false on failure, so check it instead of
# silently printing "Done!" over a missing/partial archive.
Archive::Tar->create_archive("$backupLocation$backupFilename", 9,
    @filesToBackup)
    or die "Failed to create archive $backupLocation$backupFilename: "
         . Archive::Tar->error() . "\n";
# Notify the user and exit.
print "Done!\n";
# File::Find callback.  Decides whether the current file ($_, with
# $File::Find::dir and $File::Find::name set by File::Find) belongs in
# the backup set, and pushes its full path onto @filesToBackup if so.
# (The empty prototype "()" was removed: prototypes change parsing, not
# validation, and are ignored anyway when called via \&actOnFiles.)
sub actOnFiles
{
    # Skip anything under an excluded subtree.  Anchor the match at the
    # start of the directory and quote metacharacters with \Q...\E so a
    # skip path is compared literally, not as an unanchored regex that
    # could match anywhere (or misfire on regex metacharacters).
    foreach my $skip (@directoriesToSkip) {
        return if ($File::Find::dir =~ m/^\Q$skip\E/);
    }
    # Keep only files with an approved extension.  The dot must be
    # escaped: the original unescaped "." matched any character, so an
    # extensionless name such as "catalog" wrongly matched "(log)$".
    # The alternation is non-capturing since the capture is unused.
    my $extensions = join('|', @extensionsToBackup);
    return unless ($_ =~ m/\.(?:$extensions)$/i);
    # Fail-safe check to make sure a directory hasn't slipped through
    # (e.g. a directory named "foo.log").
    return if (-d $File::Find::name);
    # The file survived every filter; record its full path.
    push(@filesToBackup, $File::Find::name);
}