X-Git-Url: https://git.deb.at/?p=deb%2Fpackages.git;a=blobdiff_plain;f=lib%2FPackages%2FSearch.pm;h=12e00c3ef2339c6c16c9b2b15fa3b12c067e252e;hp=c6a521ad65385d2cf11daa6f9ca95e8f00f3eb40;hb=0c1a44893f94f98deac8435e6ab235228880087f;hpb=a38d98d27880c8fc0f4d98646e318ecc1d39884f

diff --git a/lib/Packages/Search.pm b/lib/Packages/Search.pm
index c6a521a..12e00c3 100644
--- a/lib/Packages/Search.pm
+++ b/lib/Packages/Search.pm
@@ -1,8 +1,8 @@
 #
 # Packages::Search
 #
-# Copyright (C) 2004-2006 Frank Lichtenheld
-#
+# Copyright (C) 2004-2007 Frank Lichtenheld
+#
 # The code is based on the old search_packages.pl script that
 # was:
 #
@@ -23,7 +23,7 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 #
 
 =head1 NAME
@@ -46,6 +46,8 @@ use warnings;
 use POSIX;
 use HTML::Entities;
 use DB_File;
+use Lingua::Stem v0.82;
+use Search::Xapian qw(:ops);
 
 use Deb::Versions;
 use Packages::CGI;
@@ -55,7 +57,8 @@ our @ISA = qw( Exporter );
 our @EXPORT_OK = qw( read_entry read_entry_all read_entry_simple
                      read_src_entry read_src_entry_all find_binaries
-                     do_names_search do_fulltext_search
+                     do_names_search do_fulltext_search do_xapian_search
+                     find_similar
                      );
 our %EXPORT_TAGS = ( all => [ @EXPORT_OK ] );
@@ -71,16 +74,16 @@ sub read_entry_all {
     while (my ($suite, $provides) = each %virt) {
         next if $suite eq '-';
         if ($opts->{h_suites}{$suite}) {
-            push @$results, [ $key, "-", $suite, 'virtual', 'v', 'v', 'v', 'v',
+            push @$results, [ $key, "-", $suite, 'virtual', 'v', 'v', 'v', 'v', 'v',
                               $provides];
         } else {
-            push @$non_results, [ $key, "-", $suite, 'virtual', 'v', 'v', 'v', 'v',
+            push @$non_results, [ $key, "-", $suite, 'virtual', 'v', 'v', 'v', 'v', 'v',
                                   $provides];
         }
     }
-    foreach (split /\000/o, $result) {
-        my @data = split ( /\s/o, $_, 8 );
+    foreach (split(/\000/o, $result||'')) {
+        my @data = split ( /\s/o, $_, 9 );
         debug( "Considering entry ".join( ':', @data), 2) if DEBUG;
         if ($opts->{h_suites}{$data[1]}
             && ($opts->{h_archs}{$data[2]} || $data[2] eq 'all')
@@ -100,9 +103,9 @@ sub read_entry {
 
 #FIXME: make configurable
 my %fallback_suites = (
-                       'stable-backports' => 'stable',
-                       'stable-volatile' => 'stable',
-                       experimental => 'unstable' );
+                       'etch-backports' => 'etch',
+                       'etch-volatile' => 'etch',
+                       experimental => 'sid' );
 
 sub read_entry_simple {
     my ($hash, $key, $archives, $suite) = @_;
@@ -118,7 +121,7 @@ sub read_entry_simple {
     # with correctly, but it's adequate enough for now
     return [ $virt{$suite} ] unless defined $result;
     foreach (split /\000/o, $result) {
-        my @data = split ( /\s/o, $_, 8 );
+        my @data = split ( /\s/o, $_, 9 );
         debug( "use entry: @data", 2 ) if DEBUG && $data[1] eq $suite;
         return [ $virt{$suite}, @data ] if $data[1] eq $suite;
     }
@@ -159,14 +162,22 @@ sub do_names_search {
     my $first_keyword = lc shift @$keywords;
     @$keywords = map { lc $_ } @$keywords;
-
+
     my ($key, $prefixes) = ($first_keyword, '');
-    my %pkgs;
+    my (%pkgs, %pkgs_min);
     $postfixes->seq( $key, $prefixes, R_CURSOR );
     while (index($key, $first_keyword) >= 0) {
-        if ($prefixes =~ /^\001(\d+)/o) {
-            debug( "$key has too many hits", 2 ) if DEBUG;
-            $too_many_hits += $1;
+        if ($prefixes =~ /^(\^)?\001(\d+)/o) {
+            debug("$key has too many hits", 2 ) if DEBUG;
+            $too_many_hits += $2;
+            if ($1) { # use the empty prefix
+                foreach my $k (@$keywords) {
+                    next unless $key =~ /\Q$k\E/;
+                }
+                debug("add key $key", 2) if DEBUG;
key $key", 2) if DEBUG; + $pkgs{$key}++; + $pkgs_min{$key}++; + } } else { PREFIX: foreach (split /\000/o, $prefixes) { @@ -175,72 +186,115 @@ sub do_names_search { foreach my $k (@$keywords) { next PREFIX unless $word =~ /\Q$k\E/; } - debug( "add word $word", 2) if DEBUG; + debug("add word $word", 2) if DEBUG; $pkgs{$word}++; + $pkgs_min{$word}++ if $_ eq ''; } } last if $postfixes->seq( $key, $prefixes, R_NEXT ) != 0; - last if $too_many_hits or keys %pkgs >= 100; + last if keys %pkgs_min >= 100; } - - my $no_results = keys %pkgs; - if ($too_many_hits || ($no_results >= 100)) { - $too_many_hits += $no_results; - %pkgs = ( $first_keyword => 1 ) unless @$keywords; + + my $nr = keys %pkgs; + my $min_nr = keys %pkgs_min; + debug("nr=$nr min_nr=$min_nr too_many_hits=$too_many_hits", 1) if DEBUG; + if ($nr >= 100) { + $too_many_hits += $nr - $min_nr + 1; + %pkgs = %pkgs_min; } foreach my $pkg (sort keys %pkgs) { &$read_entry( $packages, $pkg, $results, $non_results, $opts ); } } -sub do_fulltext_search { - my ($keywords, $file, $did2pkg, $packages, $read_entry, $opts, + +sub do_xapian_search { + my ($keywords, $dbpath, $did2pkg, $packages, $read_entry, $opts, $results, $non_results) = @_; # NOTE: this needs to correspond with parse-packages! my @tmp; foreach my $keyword (@$keywords) { - $keyword =~ tr [A-Z] [a-z]; - if ($opts->{exact}) { - $keyword = " $keyword "; - } - $keyword =~ s/[(),.-]+//og; - $keyword =~ s;[^a-z0-9_/+]+; ;og; + $keyword =~ s;[^\w/+]+; ;og; push @tmp, $keyword; } - my $first_keyword = shift @tmp; - @$keywords = @tmp; - - my $numres = 0; - my %tmp_results; - # fgrep is seriously faster than using perl - open DESC, '-|', 'fgrep', '-n', '--', $first_keyword, $file - or die "couldn't open $file: $!"; - LINE: - while () { - foreach my $k (@$keywords) { - next LINE unless /\Q$k\E/; - } - /^(\d+)/; - my $nr = $1; - debug( "Matched line $_", 2) if DEBUG; - my $result = $did2pkg->{$nr}; + my $stemmer = Lingua::Stem->new(); + my @stemmed_keywords = grep { length($_) } @{$stemmer->stem( @tmp )}; + + my $db = Search::Xapian::Database->new( $dbpath ); + my $enq = $db->enquire( OP_OR, @$keywords, @stemmed_keywords ); + debug( "Xapian Query was: ".$enq->get_query()->get_description(), 1) if DEBUG; + my @matches = $enq->matches(0, 999); + + my (@order, %tmp_results); + foreach my $match ( @matches ) { + my $id = $match->get_docid(); + my $result = $did2pkg->{$id}; + foreach (split /\000/o, $result) { my @data = split /\s/, $_, 3; -# debug ("Considering $data[0], arch = $data[2]", 3) if DEBUG; + debug ("Considering $data[0], arch = $data[2], relevance=".$match->get_percent(), 3) if DEBUG; # next unless $data[2] eq 'all' || $opts->{h_archs}{$data[2]}; # debug ("Ok", 3) if DEBUG; - $numres++ unless $tmp_results{$data[0]}++; + unless ($tmp_results{$data[0]}++) { + push @order, $data[0]; + } } - last if $numres > 100; + last if @order > 100; } - close DESC; - $too_many_hits++ if $numres > 100; + undef $db; + $too_many_hits++ if @order > 100; - my @results; - foreach my $pkg (keys %tmp_results) { + debug ("ORDER: @order", 2) if DEBUG; + foreach my $pkg (@order) { &$read_entry( $packages, $pkg, $results, $non_results, $opts ); } - } +} + +sub find_similar { + my ($pkg, $dbpath, $did2pkg) = @_; + + my $db = Search::Xapian::Database->new( $dbpath ); + my $enq = $db->enquire( "P$pkg" ); + debug( "Xapian Query was: ".$enq->get_query()->get_description(), 1) if DEBUG; + my $first_match = ($enq->matches(0,1))[0]->get_document(); + + my @terms; + my $term_it = $first_match->termlist_begin(); + my 
+
+    for (; $term_it ne $term_end; $term_it++) {
+        debug( "TERM: ".$term_it->get_termname(), 3);
+        push @terms, $term_it->get_termname();
+    }
+
+    my $rel_enq = $db->enquire( OP_OR, @terms );
+    debug( "Xapian Query was: ".$rel_enq->get_query()->get_description(), 1) if DEBUG;
+    my @rel_pkg = $rel_enq->matches(2,20);
+
+# use Data::Dumper;
+# debug(Dumper(\@rel_pkg),1);
+
+    my (@order, %tmp_results);
+    foreach my $match ( @rel_pkg ) {
+        my $id = $match->get_docid();
+        my $result = $did2pkg->{$id};
+
+        foreach (split /\000/o, $result) {
+            my @data = split /\s/, $_, 3;
+            debug ("Considering $data[0], arch = $data[2], relevance=".$match->get_percent(), 3) if DEBUG;
+            next if $data[0] eq $pkg;
+            unless ($tmp_results{$data[0]}++) {
+                push @order, $data[0];
+            }
+        }
+    }
+    undef $db;
+
+    debug ("ORDER: @order", 2) if DEBUG;
+    my $last = 10;
+    $last = $#order if $#order < $last;
+    return @order[0..$last];
+}
 
 sub find_binaries {
     my ($pkg, $archive, $suite, $src2bin) = @_;
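
As a quick, hedged illustration of the new code path (not part of the commit): the sketch below mirrors the query construction in do_xapian_search, OR-ing the normalised keywords with their Lingua::Stem stems against a Search::Xapian index. The index path is an assumption, and resolving the returned docids back to package names would still need the did2pkg database written by parse-packages, which is omitted here.

    #!/usr/bin/perl
    # Minimal sketch: the index path is an assumption, and docid-to-package
    # resolution via did2pkg (built by parse-packages) is left out.
    use strict;
    use warnings;
    use Lingua::Stem;
    use Search::Xapian qw(:ops);

    my @keywords = map { lc } @ARGV;       # e.g. "web" "server"
    s;[^\w/+]+; ;og for @keywords;         # same normalisation as the patch
    my $stemmer = Lingua::Stem->new();
    my @stemmed = grep { length($_) } @{ $stemmer->stem(@keywords) };

    my $db  = Search::Xapian::Database->new('/path/to/xapian/index');  # assumed path
    my $enq = $db->enquire( OP_OR, @keywords, @stemmed );
    print "Query: ", $enq->get_query()->get_description(), "\n";

    foreach my $match ( $enq->matches(0, 20) ) {
        # each match carries the docid (the key into did2pkg) and a relevance score
        printf "docid %d  relevance %d%%\n", $match->get_docid(), $match->get_percent();
    }

find_similar follows the same pattern, except that it seeds the OR query with the terms of the document indexed under "P$pkg" and skips the package itself when collecting results.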