X-Git-Url: http://dxcluster.org/gitweb/gitweb.cgi?a=blobdiff_plain;f=perl%2FSpot.pm;h=0761f07d0522f865ad10d4c950df4d12033f7f02;hb=dd01a8824f3896015e031cf301181760a6496bbd;hp=ebc5d92c18279db5e0d7ecdf7ae9405567f05872;hpb=8be46ac1786265a7ba6ee91b31141ecd017ecb49;p=spider.git

diff --git a/perl/Spot.pm b/perl/Spot.pm
index ebc5d92c..0761f07d 100644
--- a/perl/Spot.pm
+++ b/perl/Spot.pm
@@ -34,27 +34,36 @@ $duplth = 20;			# the length of text to use in the deduping
 $dupage = 1*3600;		# the length of time to hold spot dups
 $maxcalllth = 12;		# the max length of call to take into account for dupes
 $filterdef = bless ([
-	      # tag, sort, field, priv, special parser
-	      ['freq', 'r', 0, 0, \&decodefreq],
-	      ['on', 'r', 0, 0, \&decodefreq],
-	      ['call', 'c', 1],
-	      ['info', 't', 3],
-	      ['by', 'c', 4],
-	      ['call_dxcc', 'nc', 5],
-	      ['by_dxcc', 'nc', 6],
-	      ['origin', 'c', 7, 9],
-	      ['call_itu', 'ni', 8],
-	      ['call_zone', 'nz', 9],
-	      ['by_itu', 'ni', 10],
-	      ['by_zone', 'nz', 11],
-	      ['call_state', 'ns', 12],
-	      ['by_state', 'ns', 13],
-	      ['channel', 'c', 14],
-
-	      ], 'Filter::Cmd');
+	      # tag, sort, field, priv, special parser
+	      ['freq', 'r', 0, 0, \&decodefreq],
+	      ['on', 'r', 0, 0, \&decodefreq],
+	      ['call', 'c', 1],
+	      ['info', 't', 3],
+	      ['by', 'c', 4],
+	      ['call_dxcc', 'nc', 5],
+	      ['by_dxcc', 'nc', 6],
+	      ['origin', 'c', 7, 9],
+	      ['call_itu', 'ni', 8],
+	      ['call_zone', 'nz', 9],
+	      ['by_itu', 'ni', 10],
+	      ['by_zone', 'nz', 11],
+	      ['call_state', 'ns', 12],
+	      ['by_state', 'ns', 13],
+	      ['ip', 'c', 14],
+#	      ['channel', 'c', 15],
+#	      ['rbn', 'a', 4, 0, \&filterrbnspot],
+	      ], 'Filter::Cmd');
 $totalspots = $hfspots = $vhfspots = 0;
 $use_db_for_search = 0;
 
+our $usetac = 0;
+our $readback;
+
+if ($usetac) {
+	$readback = `which tac`;
+	chomp $readback;
+}
+
 # create a Spot Object
 sub new
 {
@@ -94,6 +103,13 @@ sub decodefreq
 	return (0, join(',', @out));
 }
 
+# filter setup for rbn spot so return the regex to detect it
+sub filterrbnspot
+{
+	my $dxchan = shift;
+	return ('-#$');
+}
+
 sub init
 {
 	mkdir "$dirprefix", 0777 if !-e "$dirprefix";
@@ -206,7 +222,7 @@ sub add
 	}
 	if ($_[3] =~ /(?:QSL|VIA)/i) {
 		my $q = QSL::get($_[1]) || new QSL $_[1];
-		$q->update($_[3], $_[2], $_[4]);
+		$q->update($_[3], $_[2], $_[4]) if $q;
 	}
 }
 
@@ -224,7 +240,7 @@ sub add
 #      $f5 = spotted dxcc country
 #      $f6 = spotter dxcc country
 #      $f7 = origin
-#
+#      $f8 = ip address
 #
 # In addition you can specify a range of days, this means that it will start searching
 # from days less than today to days less than today
@@ -291,31 +307,50 @@ sub search
 			my \@s = split /\\^/;
 			$checkfilter;
 			push \@spots, \\\@s;
+			shift \@spots if \@spots > $to + 2;
 		}
 		my \$c;
 		my \$ref;
-		for (\$c = \$#spots; \$c >= 0; \$c--) {
-			\$ref = \$spots[\$c];
-			if ($expr) {
-				\$count++;
-				next if \$count < \$from;	# wait until from
-				push(\@out, \$ref);
-				last if \$count >= \$to;	# stop after to
-			}
-		}
+		if (\$readback) {
+			foreach \$ref (\@spots) {
+				if ($expr) {
+					\$count++;
+					next if \$count < $from;	# wait until from
+					push(\@out, \$ref);
+					last if \$count >= $to;	# stop after to
+				}
+			}
+		} else {
+			for (\$c = \$#spots; \$c >= 0; \$c--) {
+				\$ref = \$spots[\$c];
+				if ($expr) {
+					\$count++;
+					next if \$count < $from;	# wait until from
+					push(\@out, \$ref);
+					last if \$count >= $to;	# stop after to
+				}
+			}
+		}
 	);
+
+	dbg("Spot eval: $eval") if isdbg('searcheval');
 
-
-	$fp->close;					# close any open files
-
+	my $fh;
+	my $now = $fromdate;
 	for ($i = $count = 0; $i < $maxdays; ++$i) {	# look thru $maxdays worth of files only
-		my $now = $fromdate->sub($i);	# but you can pick which $maxdays worth
-		last if $now->cmp($todate) <= 0;
-
-		my @spots = ();
-		my $fh = $fp->open($now); # get the next file
+		my @spots;
+		last if $now->cmp($todate) <= 0;
+
+		if ($readback) {
+			my $fn = $fp->fn($now->sub($i));
+			dbg("search using tac fn: $fn $i") if isdbg('search');
+			$fh = IO::File->new("$readback $fn |");
+		} else {
+			$fh = $fp->open($now->sub($i)); # get the next file
+			dbg("search fn: $fp->{fn} $i") if isdbg('search');
+		}
 		if ($fh) {
 			my $in;
 			eval $eval;			# do the search on this file
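
The core of this change is the optional tac readback path in sub search: when $usetac is set and GNU tac is on the path, each day's spot file is opened through a pipe (IO::File->new("$readback $fn |")) so records arrive newest first, and the generated search eval keeps only a bounded buffer (shift \@spots if \@spots > $to + 2) and scans it forwards, instead of slurping the whole file and walking it backwards. Below is a minimal, self-contained Perl sketch of that reverse-read idea only; the file name, record layout and match limit are illustrative assumptions and the patch's exact buffering is deliberately simplified.

#!/usr/bin/perl
#
# Sketch (not part of the patch): read a day's spot file newest-first via a
# GNU tac pipe and stop as soon as enough records have been collected;
# without tac, fall back to buffering the file and walking it in reverse,
# which is the shape of the patch's else-branch.

use strict;
use warnings;
use IO::File;

my $readback = `which tac`;        # empty string if tac is not installed
chomp $readback;

my $fn = '/tmp/spots_sample.dat';  # hypothetical spot file, one '^'-separated record per line
my $to = 10;                       # stop after this many records

my @out;

if ($readback) {
	# tac delivers the file back to front, so the newest spots come first
	my $fh = IO::File->new("$readback $fn |") or die "cannot run tac on $fn: $!";
	while (<$fh>) {
		chomp;
		my @s = split /\^/;        # a real search would apply its filter expression here
		push @out, \@s;
		last if @out >= $to;       # newest-first order makes this early exit safe
	}
	$fh->close;
} else {
	# no tac: read the whole file forwards, then walk the buffer backwards
	my $fh = IO::File->new($fn) or die "cannot open $fn: $!";
	my @spots;
	while (<$fh>) {
		chomp;
		push @spots, [split /\^/];
	}
	$fh->close;
	for (my $c = $#spots; $c >= 0 && @out < $to; $c--) {
		push @out, $spots[$c];     # newest first, same order as the tac path
	}
}

printf "collected %d record(s)\n", scalar @out;

Reading newest-first is what makes the early last correct; when tac is not available the file still has to be read in full and traversed in reverse, which is why the patch keeps the original backwards for loop as the fallback.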