X-Git-Url: http://dxcluster.org/gitweb/gitweb.cgi?a=blobdiff_plain;f=perl%2FGeomag.pm;h=1d3462dc408cf35dc33b5dcb9d0a2a0fdd41b77c;hb=f155969d600561b9ef151a7ce2494a0c89aed033;hp=d78ff2cd3f007fd67cd7b5233ec3db33551ff699;hpb=0542c1871c9563b7577a4b9a1282fd5d2c203047;p=spider.git

diff --git a/perl/Geomag.pm b/perl/Geomag.pm
index d78ff2cd..1d3462dc 100644
--- a/perl/Geomag.pm
+++ b/perl/Geomag.pm
@@ -15,10 +15,12 @@ use DXUtil;
 use DXLog;
 use Julian;
 use IO::File;
-use Carp;
+use DXDebug;
 
 use strict;
-use vars qw($date $sfi $k $a $r $forecast @allowed @denied $fp $node $from);
+use vars qw($date $sfi $k $a $r $forecast @allowed @denied $fp $node $from
+            $dirprefix $param
+            %dup $duplth $dupage);
 
 $fp = 0;                # the DXLog fcb
 $date = 0;              # the unix time of the WWV (notional)
@@ -31,13 +33,16 @@ $node = "";             # originating node
 $from = "";             # who this came from
 @allowed = ();          # if present only these callsigns are regarded as valid WWV updators
 @denied = ();           # if present ignore any wwv from these callsigns
-my $dirprefix = "$main::data/wwv";
-my $param = "$dirprefix/param";
+%dup = ();              # the spot duplicates hash
+$duplth = 20;           # the length of text to use in the deduping
+$dupage = 12*3600;      # the length of time to hold spot dups
+
+$dirprefix = "$main::data/wwv";
+$param = "$dirprefix/param";
 
 sub init
 {
     $fp = DXLog::new('wwv', 'dat', 'm');
-    mkdir $dirprefix, 0777 if !-e $dirprefix;   # now unnecessary DXLog will create it
     do "$param" if -e "$param";
     confess $@ if $@;
 }
@@ -73,8 +78,12 @@ sub update
 
 #       my $trydate = cltounix($mydate, sprintf("%02d18Z", $mytime));
         if ($mydate >= $date) {
+            if ($myr) {
+                $r = 0 + $myr;
+            } else {
+                $r = 0 unless abs ($mysfi - $sfi) > 3;
+            }
             $sfi = 0 + $mysfi;
-            $r = 0 + $myr unless !$r && $myk == $k;
             $k = 0 + $myk;
             $a = 0 + $mya;
             $forecast = $myforecast;
@@ -234,5 +243,42 @@ sub readfile
     }
     return @in;
 }
+
+# enter the spot for dup checking and return true if it is already a dup
+sub dup
+{
+    my ($d, $sfi, $k, $a, $text) = @_;
+
+    # dump if too old
+    return 2 if $d < $main::systime - $dupage;
+
+    $d /= 60;                            # to the nearest minute
+#   chomp $text;
+#   $text = substr($text, 0, $duplth) if length $text > $duplth;
+    my $dupkey = "$d|$sfi|$k|$a";
+    return 1 if exists $dup{$dupkey};
+    $dup{$dupkey} = $d * 60;             # in seconds (to the nearest minute)
+    return 0;
+}
+
+# called every hour and cleans out the dup cache
+sub process
+{
+    my $cutoff = $main::systime - $dupage;
+    while (my ($key, $val) = each %dup) {
+        delete $dup{$key} if $val < $cutoff;
+    }
+}
+
+sub listdups
+{
+    my @out;
+    for (sort { $dup{$a} <=> $dup{$b} } keys %dup) {
+        my $val = $dup{$_};
+        push @out, "$_ = $val (" . cldatetime($val) . ")";
+    }
+    return @out;
+}
 1;
 __END__;
+
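
[Note, not part of the blobdiff above: a minimal Perl usage sketch of the new dedup routines. It assumes the package name is Geomag (per the file path) and that $main::systime is kept current by the cluster main loop; handle_wwv and the hourly hook shown here are hypothetical callers, not code from spider.git.]

# Hypothetical caller -- illustration only.
# An incoming WWV broadcast is dropped when dup() reports that the same
# date/SFI/K/A combination was seen within the last $dupage (12 hours).
use Geomag;

sub handle_wwv
{
    my ($d, $sfi, $k, $a, $text) = @_;      # $d is a unix time
    return if Geomag::dup($d, $sfi, $k, $a, $text);
    # ... otherwise store and rebroadcast the data as usual ...
}

# From an hourly housekeeping hook: expire stale cache entries, then
# list what is still held (e.g. behind a "show"-style command).
Geomag::process();
print "$_\n" for Geomag::listdups();

[Design note: dup() keys the cache on the minute-rounded time plus SFI/K/A rather than on the spot text (the $text handling is commented out), so two nodes relaying the same WWV data with slightly different comments still collapse to a single cache entry.]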