http://qs321.pair.com?node_id=11113866

Probably reinventing the wheel, but...

(NB: You may need to cpan install DBD::SQLite first.)

use strict;
use warnings;
use DBI;
use DBD::SQLite;

=pod

This program reads a file named places.sqlite which is found somewhere
under your folder named Mozilla/Firefox/Profiles.
You pass the pathname of this file as a command-line argument.
This program outputs html.

=cut

my $dbfile = shift;
$dbfile or die "Usage: $0 <path>/places.sqlite\n";
-r $dbfile or die "Unreadable $dbfile\n";
$dbfile =~ /\bplaces\.sqlite$/ or die "File should be places.sqlite\n";

my $dbh = DBI->connect("dbi:SQLite:dbname=$dbfile","","")
    or die "Error opening db $dbfile\n";

my $bookmarks = $dbh->selectall_hashref("select * from moz_bookmarks",'id');
my $places    = $dbh->selectall_hashref("select * from moz_places",   'id');

# construct the tree:
my $root;
for my $b ( values %$bookmarks )
{
    if ( $b->{parent} )
    {
        my $p = $bookmarks->{ $b->{parent} };
        push @{ $p->{children} }, $b;
    }
    else # yep, there's exactly one.
    {
        $root = $b;
    }
}

# produce the html:
local($,,$\) = ("\t","\n");

sub walk; # because it recurses.
sub walk
{
    my $depth  = shift;
    my $n      = shift;
    my $indent = "\t" x $depth;

    if ( $n->{type} == 2 ) # folder
    {
        print $indent . ($depth ? "<li>" : '') . "<h4>$n->{title}</h4>";
        if ( $n->{children} )
        {
            print $indent . "<ol>";
            for my $c ( sort { $a->{position} <=> $b->{position} } @{ $n->{children} } )
            {
                walk($depth+1,$c);
            }
            print $indent . "</ol>";
        }
        $depth and print $indent . "</li>";
    }
    else # leaf bookmark
    {
        my $link = $n->{title};
        if ( $n->{fk} and $places->{$n->{fk}} and $places->{$n->{fk}}{url} )
        {
            my $url = $places->{$n->{fk}}{url};
            $link =~ /\S/ or $link = $url;
            $link = qq(<a href="$url">$link</a>);
        }
        else
        {
            $link =~ /\S/ or $link = "$n->{type}:$n->{id}";
        }
        print $indent . "<li>$link</li>";
    }
}

walk(0,$root);
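If you don't know where your places.sqlite lives, a rough sketch like the one below (core modules only; the profile locations are just the usual per-OS defaults and are assumptions, not part of the script above) will print every candidate it can find:

use strict;
use warnings;
use File::Find;

# Typical Firefox profile roots -- adjust for your system.
my @roots = grep { -d } (
    ( $ENV{HOME}    ? ( "$ENV{HOME}/.mozilla/firefox",                             # Linux
                        "$ENV{HOME}/Library/Application Support/Firefox/Profiles"  # macOS
                      ) : () ),
    ( $ENV{APPDATA} ? "$ENV{APPDATA}/Mozilla/Firefox/Profiles" : () ),             # Windows
);

die "No Firefox profile directory found in the usual places\n" unless @roots;

# Print the full path of every places.sqlite under those roots.
find( sub { print "$File::Find::name\n" if $_ eq 'places.sqlite' }, @roots );

Feed whichever path it prints to the exporter above.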

Re: Export (extract) Mozilla Firefox Bookmarks
by jimhenry (Acolyte) on Aug 18, 2020 at 21:28 UTC

    Thanks for the nifty tool. I used this as the basis for a script to dump or analyze Firefox history. It has worked fine so far except that one time, after doing some reports, I started Firefox and got an error about the bookmarks and history being unavailable because the database was locked. I'm not sure why the script didn't unlock the places.sqlite database on closing, but restarting Firefox fixed the problem, and since then I've run the history analysis script and started Firefox again without error messages.

#! /usr/bin/perl
use strict;
use warnings;
use DBI;
use DBD::SQLite;
use feature 'switch';
no warnings 'experimental::smartmatch';

=pod

Firefox history analyzer -- print all domains visited and the number of
times visited, or a dump of all history URLs in chronological order.
Based on the firefox bookmarks exporter by jdporter,
https://www.perlmonks.org/?node_id=11113866

=cut

sub usage {
    print <<USAGE;
Usage: $0 [path-to-Firefox-profile/places.sqlite] [command]
Command is one of:
  h -- list all history by date order.
  d -- list domains and visit counts, sorted by most often visited.
Firefox must be closed for this to work, or you'll get "Database locked" errors.
USAGE
}

sub unique_domains;
sub list_visit_dates;

my $dbfile = shift;
$dbfile or usage, exit;
-r $dbfile or die "Unreadable $dbfile\n";
$dbfile =~ /\bplaces\.sqlite$/ or die "File should be places.sqlite\n";

my $dbh = DBI->connect("dbi:SQLite:dbname=$dbfile","","")
    or die "Error opening db $dbfile\n";

my $history = $dbh->selectall_hashref( q(
    SELECT moz_historyvisits.id, visit_date, url
    FROM moz_places, moz_historyvisits
    WHERE moz_places.id = moz_historyvisits.place_id
), 'id' );

given ( $ARGV[0] ) {
    when ( undef ) { usage; }
    when ( 'h' )   { list_visit_dates; }
    when ( 'd' )   { unique_domains; }
    default        { usage; }
}

sub unique_domains {
    my %domains;
    for my $k ( keys %$history ) {
        if ( $history->{$k}{url} =~ m! \w+:// ([^/]+) !x ) {
            $domains{ $1 }++;
        }
    }
    for my $d ( reverse sort { $domains{$a} <=> $domains{$b} || $a cmp $b } keys %domains ) {
        printf "%4d\t%s\n", $domains{$d}, $d;
    }
}

# https://support.mozilla.org/en-US/questions/972178 indicates that
# visit_date is a million times the Unix epoch date (with potentially
# microsecond accuracy on some machines?)
sub list_visit_dates {
    for my $k ( sort {
                    my $c = $history->{$a}{visit_date} // 0;
                    my $d = $history->{$b}{visit_date} // 0;
                    $c <=> $d
                        || $history->{$a}{url} cmp $history->{$b}{url}  # tie-break on url,
                        || $a <=> $b                                    # then on visit id
                } keys %$history )
    {
        my $t = $history->{$k}{visit_date} // 0;
        next unless $t;
        $t /= 1_000_000;
        printf "%20s %s\n", scalar localtime($t), $history->{$k}{url};
    }
}
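    One way to sidestep the lock entirely is to never touch the live file: copy places.sqlite somewhere temporary and point the script at the copy. A rough sketch of that idea (core modules only; none of this is from the script above):

use strict;
use warnings;
use DBI;
use File::Copy qw(copy);
use File::Temp qw(tempdir);

my $dbfile = shift or die "Usage: $0 <path>/places.sqlite\n";

# Work on a throwaway copy so the live database is never opened by us.
# (If Firefox is running in WAL mode, the newest visits may still sit in
# the places.sqlite-wal sidecar file and won't be in a bare copy.)
my $tmpdir = tempdir( CLEANUP => 1 );
my $copy   = "$tmpdir/places.sqlite";
copy( $dbfile, $copy ) or die "Copy failed: $!\n";

my $dbh = DBI->connect( "dbi:SQLite:dbname=$copy", "", "", { RaiseError => 1 } );

# Quick sanity check that the copy is usable.
my ($visits) = $dbh->selectrow_array("SELECT count(*) FROM moz_historyvisits");
print "history visits in copy: $visits\n";

    The copy takes a moment for a large history, but the original file, and whatever locks Firefox holds on it, are left alone.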