use Data::Dumper;
use Fcntl ':flock';   #needed so LOCK_EX is defined for flock below

#Next 2 lines make sure only one HTTP request deals with the database at once
open LOCKFILE, ">>lock.lck" or die "Could not open the lock file!";
flock LOCKFILE, LOCK_EX or die "Could not flock the lock file!";

#database.dat holds the physical hash
#a 'do' executes the file (rebuilds your time hash)
do "database.dat";

#Get the current date values
@date = localtime();

#loop through the %userTimes hash to find old entries
foreach $user (keys(%userTimes)) {
    $last_hour = $userTimes{$user}{'hour'};
    $last_min  = $userTimes{$user}{'min'};
    $cur_hour  = $date[2];
    $cur_min   = $date[1];

    $expire = 0;

    #Yes, expire if 3 minutes old
    $expire = 1 if ($last_hour == $cur_hour && ($cur_min - $last_min) >= 3);

    #there are some exceptions to the 3 minute deal when the hour rolls over.
    #the next lines cover this!
    if ($last_hour != $cur_hour) {
        $expire = 1 if $last_hour > $cur_hour;
        $expire = 1 if $last_min <= 57 && $cur_min >= 0;
        $expire = 1 if $last_min == 58 && $cur_min >= 1;
        $expire = 1 if $last_min == 59 && $cur_min >= 2;
    }

    if ($expire == 1) {
        delete $userTimes{$user};   #yep, delete them!
    }
}

#Output the hash to the database file thingy
$Data::Dumper::Purity = 1;   #Need this
$Data::Dumper::Indent = 0;   #saves some disk space
open FILE, ">database.dat" or die "Could not open database.dat!";
print FILE Data::Dumper->Dump([\%userTimes], ['*userTimes']);
close FILE;
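For reference, the Dump call writes database.dat as plain Perl that the 'do' at the top can re-execute to rebuild %userTimes. With Indent set to 0 the whole hash lands on one line; assuming two hypothetical users named alice and bob, the file would look roughly like this:

%userTimes = ('alice' => {'hour' => 14,'min' => 32},'bob' => {'hour' => 14,'min' => 30});   #example values only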