#! /bin/perl --    # -*-Perl-*-
# 
# inflow-collect - Flush & rotate batchfile produced by news server.
#                  Extract article count & volume data per sending site and
#                  per hierarchy. Update tab spaced datafile.
#
# newsfeeds entry required to provide batchfile:
#
#       INFLOW:*:Tf,Wsgbtp:
#     
# batchfile format: remote_site group size arrival_time posting_time
# ex:               news.inter.net misc.test 502 835740023 835730650
#
# History: The inflow-package is based on the ideas of counter.pl, a perl
#          script written by Juan Garcia, Rediris, 1995-96.
#
# 960806 V1.0 released
# 960826 V1.0.1 fixed resolv.conf parsing (Gerhard Winkler)
# 960919 V1.1.0 stores inflow-collect command line options in result files
#               for later use (Juan Garcia)
#               removes bug in collecting data for different -A and -L options 
# 961215 V1.1.1 optional hourly summaries added (Felix Kugler)
# 961216 V1.1.2b2 collects now monthly data too (Juan Garcia)
# 970113 V1.2   some defaults & paths slightly modified
# 970129 V1.2.1 easier portability: made system commands configurable  
# 970421 V1.2.2 update hour files even if INFLOW channel couldn't be flushed
# 970430 V1.2.3 hour file names rounded per default; configurable
# 971018 V1.2.4 improved search path handling
# 971028 V1.2.5 added external config file
# 971108 V1.3.0 article delay stats support
# 971112 V1.5.0 version number aligned
# 971119 V1.5.1 minor bug fixed in error message
# 980109 V1.5.2 minor bug fixed in article delay handling
#
$Copy      = "(c) 1997 Felix.Kugler\@switch.ch";

$RELDATE = "Fri Jan  9 14:36:24 MET 1998";
$RELEASE = "V1.5.2";
# config section -------------------------------------------------
#
$INFLOWCONF = "/home/news/config/inflow.conf"; # ext. local configs (optional)
#
# the following settings may be modified by an external config file
# (loaded below via &modify_config / require)
#
$BATCHDIR  = "/var/news/out.going";     # where INN puts data
$CHANNEL   = "INFLOW";
$SUMDIR    = "/home/news/inflow";       # summaries from previous periods
$SUMFILE   = "inflow.sum";
#
$HOURDIR   = "/home/news/inflow/hour";  # dir for hourly summaries
$ROUNDTIME = 1;                         # round down hour file names to nearest
                                        # 10 minutes (ex: ..-1401 -> ..1400) 
                                        # to get coherent data sequences
#
$CTLINND   = "/opt/inn/bin/ctlinnd";
$TIMEOUT   = 180;		# ctlinnd timeout
$MAXAGE    = 20;		# minutes after midnight after which to cycle
				# result file; sounds complicated, but this 
                                # allows to process logs normally right after
                                # midnight and cycle the result file next time
#
$STATSCRIPT = "inflow-stat -wVFp"; # script to start if -s option is given
#
# external programs & config files
@ADDTOPATH = ( '/opt/local/bin','/opt/gnu/bin' );
#
$CP        = "cp";
$MV        = "mv";
$GREP      = "grep";

# ---- end config section ----------------------------------------------------

require "getopts.pl";
require "ctime.pl";

($path,$0) = ($0 =~ /^(.*)\/([^\/]+)$/);                # strip path...
                                                        # $path is reused later
                                                        # to start $STATSCRIPT

# parse command line switches into the $opt_* globals (see $usage below)
&Getopts('cdf:hpsvA:D');

# init some variables
$defaulttype = $opt_c ? "perday" : "continuous";  # continuous, perday, permonth

# default per-hierarchy options: resolve alt.* one level deep unless -A given
$opt_A=1 unless ($opt_A);
$options="-A$opt_A";	

&modify_config;                 # pull in the optional external config file
&update_PATH;                   # extend PATH so cron-started runs find tools
&gethostandfqdn;                # sets $hostname / $fqdn for file headers

# usage/help text (printed by -h); built only after &modify_config so that
# $inflowconf and $inflowconfinfo are already filled in
$usage="$0      -  $Copy

release:  $RELEASE  of $RELDATE

usage:    $0 [-cdDhps][-A<level>][-f<configfile>]

Collects and preprocesses inbound news traffic on a News server.
$0 flushes channel $CHANNEL and resets the batchfile 
$BATCHDIR/$CHANNEL. 

Traffic data extracted from batchfiles is used to update the preprocessed 
data stored in $SUMDIR/$SUMFILE.

Options:  -h:         this help
          -c:         cycle resultfile after midnight i.e. reset resultfile
                      and label and save a copy of yesterday's results on
                      $SUMDIR
          -f<configfile>: load external configuration file 
                          (default: $inflowconf)
          -p:         prepare non-cumulative data files for plots which can
                      be postprocessed offline; files saved on
                      $HOURDIR
          -s:         process statistics when terminated i.e. start inflow-stat
          -A<level>:  resolve 'alt'-groups down to <level> hierarchy
                      levels. Default is to collect info about toplevel
                      alt-hierarchies only.

debug:   -d:          print debug info to STDERR
         -D:          developement mode: preserve batchfile, all created files
                      are stamped with a trailing \"D\".

external config file: $inflowconfinfo

The News server has to be configured to write a batchfile containing
site, group, size, and time information for every received article..
With INN, this can easily be achieved with a newsfeeds line

       $CHANNEL:*:Tf,Wsgbtp:

which writes a datafile with format

       remote_site group size arrival_time posting_time

$0 processes these data and combines them with older summaries. 
The result_file format is 

       datatype rsite|group articles volume delaysum

It is common to configure inflow to produce hourly snapshots. A typical way
to achieve this is to start this script every hour by cron with a line like

0 * * * * $path/$0 -cs

\n";

if ($opt_h) { print "$usage"; exit 0; }


$timenowstr = &MakeTimeStr;	# time of processing newlogs
$timenow = time; 

# write a run banner to STDERR so cron mail / redirected logs show context
$dstr = `date`;
warn "log from $0 on $fqdn at $dstr\n";
warn "timenow=$timenow\n";
warn "timenowstr=$timenowstr\n";

if ($opt_D) {			# Development mode: work on a copy of the
				# last period's data, don't touch innd
    unless (-f "$BATCHDIR/$CHANNEL.done") { 
	die "unable to locate data file of last period: $!\n";
    }
    system("$CP $BATCHDIR/$CHANNEL.done $BATCHDIR/$CHANNEL.doneD\n");
    unless ($? == 0) {		# in case we could not copy INFLOW data
	die "unable to copy data file of last period: $!\n";
    }
    $rawbatchfile = "$BATCHDIR/$CHANNEL.doneD";
    $SUMFILE .= "D";            # stamp all created files with a trailing "D"
} else {			# normal operation mode
    unless (-e "$BATCHDIR/$CHANNEL") { 
	# batchfile missing: flush once so innd (re)creates it, then wait
	system("$CTLINND -t $TIMEOUT flush $CHANNEL>/dev/null 2>&1");
	sleep 2;
    }
    # rotate the batchfile away, then flush so innd reopens a fresh one
    system("$MV $BATCHDIR/$CHANNEL $BATCHDIR/$CHANNEL.done\n");
    system("$CTLINND -t $TIMEOUT flush $CHANNEL>/dev/null 2>&1");
    unless ($? == 0) {		# in case we could not flush INFLOW channel
	# move back the batchfile to avoid data loss
	system("$MV $BATCHDIR/$CHANNEL.done $BATCHDIR/$CHANNEL\n");
        # don't forget to clear hourdata
	$unabletoflush = 1;
	&writehourdata;
	# try again later...
	die "unable to flush $CHANNEL - keeping data at $timenowstr\n" ;
    }
    $rawbatchfile = "$BATCHDIR/$CHANNEL.done";
}

if (-f "$rawbatchfile") {          # new data ready to process
    &readoldresults($defaulttype,"$SUMDIR/$SUMFILE");
    &readnewdata;
    &writeresults($defaulttype,"$SUMDIR/$SUMFILE");
    # $update_monfile is set by readoldresults when the daily file was cycled
    &updmonthresults if ($update_monfile == 1);

    # start making statistics; exec replaces this process, the warn only
    # runs if the exec itself failed
    if ($opt_s && $STATSCRIPT ne "") { 
	exec("$path/$STATSCRIPT"); 
	warn "failed to execute $path/$STATSCRIPT\n";
    }
}
else { warn "no data file $rawbatchfile\n"; }


# updmonthresults
# ----------------------------------------------------------------------
# accumulate monthly results (file type 'permonth')
#
# Merges yesterday's cycled daily summary ($SUMFILE.yesterday) into the
# running month file ($SUMFILE.yymm).  Invoked from the main flow only
# after readoldresults cycled the daily file ($update_monfile == 1), so
# it resets the shared accumulators before reusing them.
#
sub updmonthresults {
    # name the month file after "yesterday" (now - 1 day) so the run just
    # after midnight still lands in the month that is closing
    $MONFILE  = $SUMFILE . "." . &MakeShortTimeStr(time - 86400);
    warn "updating monthly summaries in $MONFILE...\n" if $opt_d;

    # reset vars (the same globals were just used for the daily update)
    %arts=%bytes=%delay=();             # per node
    %artsph=%bytesph=%delayph=();       # per hierarchy
    %artspah=%bytespah=%delaypah=();    # per alt-hierarchy
    $totalarts=$totalbytes=$totaldelay=$_tmp=0;
    $lastperiod=$firstart=0;

    # read current month's results if available
    if (-e "$SUMDIR/$MONFILE") {
	warn "read current month's results from $MONFILE...\n" if $opt_d;
	&readoldresults('permonth',"$SUMDIR/$MONFILE");
	$firstartinmonth=$firstart if ($firstart > 0);  # preserve $firstart
    }

#    # last period to 1 day if previous month file doesn't exist
#    $lastperiod =  $timenow+24*3600 - $timenow unless ($lastperiod > 0);

    # now append yesterday's results (may overwrite $firstart, see below)
    warn "append yesterday's results from $SUMFILE.yesterday...\n" if $opt_d;
    &readoldresults('permonth',"$SUMDIR/$SUMFILE.yesterday");
    $firstart = $firstartinmonth if ($firstartinmonth > 0); # restore firstart

    # write new monthly data
    warn "write updated month's results back to $MONFILE...\n" if $opt_d;
    &writeresults('permonth',"$SUMDIR/$MONFILE");
}


# readoldresults
# ----------------------------------------------------------------------
# read existing resultfile (usually invoked several times per run)
# resultfile format: datatype rsite|group articles volume [delaysum]
# ex:   pn      surfnet.nl      5231    38371159   [468422]
#       ph      sfnet   24      42601   [463764]
#
# args:    $type - 'perday', 'continuous' or 'permonth'; only 'perday'
#                  runs the midnight cycling logic below
#          $FILE - result file whose header and data are accumulated
#                  into the global counters/hashes
# side effects: adds to %arts/%bytes/%delay (plus per-hierarchy and
#          per-alt-hierarchy variants) and the total counters; may
#          rename the daily result file to *.yesterday and set
#          $update_monfile when -c is given and a new day has begun.
# note:    $options is local()ized here, so the "options:" header value
#          only influences $opt_A; it does not leak back to the caller.
#
sub readoldresults {
    local($type,$FILE) = @_;
    local($par,$val,$val2);
    local($oldtimenow,$options,$oldperiod,$oldtotalarts,$oldtotalbytes);
    local($oldtotaldelay,$oldnegdelayart,$delay_valid);

    unless (open(OLD,"$FILE")) {
	warn "missing $FILE\n";         # not fatal: first run has no old file
	return;
    }
    if ($opt_d) {
	warn "reading old results from $FILE...\n";
	warn "  avgdelay_valid is \"$avgdelay_valid\"\n";
    }
    $firstart = $lastart = 0;

    while(<OLD>) {		#  read header ("# key: value" lines)
	chop;
	# first non-comment line: rewind so the data loop below re-reads
	# from the top (header lines are harmless there, their first field
	# "#" matches none of the datatypes)
	unless (/^\#/) { seek(OLD,0,0); last; }
	next if (/^\#\s*$/);
	($par,$val,$val2)  = /^\# ([^:]+): +(\S+) *(.*)$/;
	warn "par=$par    val=$val\n" if $opt_d;
	if ($par eq 'host') { $oldhost = $val; }
	elsif ($par eq 'firstart') { $firstart = $val; }
	elsif ($par eq 'lastart') { $lastart = $val; }
	elsif ($par eq 'timenow') { $oldtimenow = $val; }
	elsif ($par eq 'period') { $oldperiod = $val; }
	elsif ($par eq 'options') { $options = "$val" . " $val2"; }
	elsif ($par eq 'totalarts') { $oldtotalarts = $val; }
	elsif ($par eq 'totalbytes') { $oldtotalbytes = $val; }
	elsif ($par eq 'totaldelay') { $oldtotaldelay = $val; }
	elsif ($par eq 'negdelayart') { $oldnegdelayart = $val; }
	elsif ($par eq 'delay_valid') { $delay_valid = $val; }
    }

    # Only accept  compatible results (same options)
    # Options can only be overridden by making changes in sum_file headers,
    # but take care to interpret the results with caution !
    # (adopt the -A level stored in the file so old and new data match)
    $options =~ /-A(\d+)/ && ($opt_A = $1);

    # turn off delay stats if delay data unavailable
    if ($delay_valid eq "no") {       
	$avgdelay_valid = "no";
	warn "setting avgdelay_valid to \"$avgdelay_valid\" while reading $FILE\n" 
	    if $opt_d;
    }

    # sum total arts & bytes
    $totalarts += $oldtotalarts;
    $totalbytes += $oldtotalbytes;
    $totaldelay += $oldtotaldelay;
    $negdelayart += $oldnegdelayart;

    if ("$FILE" eq "$SUMDIR/$SUMFILE.yesterday" ) {
        # extract last day's info for monthly result file
    	$lastarts=$oldtotalarts;
	$lastbytes=$oldtotalbytes;
	$lastdelay=$oldtotaldelay;
	$lastnegdelayart=$oldnegdelayart;
	$lastperiod = $oldperiod;
    } elsif ("$FILE" eq "$SUMDIR/$MONFILE" ) {
	# do nothing special ($MONFILE is set by updmonthresults)
    } else  {
	# this is a regular hourly update 
	$lastperiod = $timenow - $oldtimenow;
	$startday = (localtime($firstart))[3];
	$thisweekday = (localtime($timenow))[6];
	$thisday = (localtime($timenow))[3];
	$thishour = (localtime($timenow))[2];
	$thisminute = (localtime($timenow))[1];
    }	
    if ($opt_d) {
	warn "readoldresults: type=$type firstart=$firstart lastart=$lastart\n";
	warn "readoldresults: should be 0: p_arts=$p_arts  p_bytes=$p_bytes\n";
	warn "readoldresults: lastperiod=$lastperiod thisday=$thisday\n";
	warn "readoldresults: thishour=$thishour thisminute=$thisminute\n"; 
	warn "readoldresults: thisweekday=$thisweekday\n"; 
    }

    # cycle resultfile if a new day has begun and $type eq 'perday'
    # ($MAXAGE lets a run shortly after midnight still finish the old day)
    if  ($type eq 'perday') {  # for monthly results don't run this block
        if (( $opt_c && ($startday ne $thisday) && 
	     ($thishour*60 + $thisminute>$MAXAGE)) ) { 
	    close(OLD);

	    # update $MONFILE after cycling resultfiles
	    $update_monfile = 1;

	    unless (rename("$SUMDIR/$SUMFILE", "$SUMDIR/$SUMFILE.yesterday")) {
		warn "could not rename $SUMFILE to $SUMFILE.yesterday: $!\n";
	    }
	    # start the new day's accumulation from scratch
	    $firstart=$lastart=$totalarts=$totalbytes=0;
	    $totaldelay=$negdelayart=0;
	    return;
        } 
    }

    while (<OLD>) {		# read summary data into the accumulators
	($par,$rsite,$cnt,$vol,$delay) = split;
	if ($par eq 'pn') { 
	    $arts{$rsite} += $cnt; 
	    $bytes{$rsite} += $vol; 
	    $delay{$rsite} += $delay;
	}
	elsif ($par eq 'ph') { 
	    $artsph{$rsite} += $cnt; 
	    $bytesph{$rsite} += $vol;
	    $delayph{$rsite} += $delay;
	}
	elsif ($par eq 'pah') { 
	    $artspah{$rsite} += $cnt; 
	    $bytespah{$rsite} += $vol;
	}
    }
    close(OLD);
}


# readnewdata
# ----------------------------------------------------------------------
# extract new data from logfile
# logfile format: rsite group size arrivaltime [postingtime]
# ex: news.belnet.be fr.rec.moto 798 878774403 [878771322]
#
# Reads $rawbatchfile line by line and updates both the cumulative
# accumulators (%arts/%bytes/%delay etc., $totalarts, ...) and the
# last-period ones (%p_arts/%p_bytes, $lastarts, $lastbytes).  With -p,
# dumps an hourly snapshot via &writehourdata at the end.
#
sub readnewdata {
    open(NEW,"$rawbatchfile") || 
	die "cannot open $rawbatchfile: $!\n";

    $p_firstart = $p_lastart = 0;  # first & last article in last period
    %p_arts = %p_bytes = ();       # articles & bytes in last period, per rsite
    $lastarts = $lastbytes = 0;    # articles & bytes in last period

    while (<NEW>) {
	chop;
	@newsgroup=();
	$hier='';
	$subhier='';
	$arr = $post = $size = 0; 
	($rsite,$group,$size,$arr,$post) = split(/ /);
	if ($post) {
	    $data_withpost++;              # check if posting time available
	} else {
	    $data_withoutpost++;
	}
        $lastart=$p_lastart=$arr;          # lines arrive in order, so the
        $firstart=$arr unless $firstart;   # last one read wins
        $p_firstart=$arr unless $p_firstart;
        $lastperiod='' unless $lastperiod; # NOTE(review): '' (not 0) keeps the
                                           # header line empty - confirm intent
	$delay = $post>0 ? $arr - $post : 0 ;
	$orig_delay = $delay;
	if ($delay<0) {
	    $negdelayart++;	
            # and now a very dirty hack to avoid displaying negative delays, 
	    # based on the assumption that most errors are due to incorrectly
	    # configured timezones on posting machines
	    local($hoursadded) = 0;
	    while ($delay < 0) {
		$delay += 3600;
		$hoursadded++;
		if ($hoursadded>12) {
		    warn "extremly wrong posting time detected, delay=$orig_delay,\n";
		    warn "  from $rsite in $group\n";
		    $delay=0;
		    last;
		}
	    }
#	    warn "neg_delay ${orig_delay}s from $rsite in $group: corrected +${hoursadded}h, new delay=${delay}s\n" if $opt_d;
	    warn "neg_delay ${orig_delay}s from $rsite in $group: corrected +${hoursadded}h, new delay=${delay}s\n";
	}
        @newsgroup=split(/\./,$group);
        $hier=$newsgroup[0];               # toplevel hierarchy (ex: "comp")

        $bytes{$rsite} += $size;                # total summaries
        $delay{$rsite} += $delay;
        $arts{$rsite}++;
        $bytesph{$hier} += $size;
        $delayph{$hier} += $delay;
        $artsph{$hier}++;
	$totalarts++;                           
	$totalbytes += $size;
	$totaldelay += $delay;                  # delay [s]

	$lastarts++;                            # last period summaries
	$lastbytes += $size;
        $p_bytes{$rsite} += $size;
        $p_arts{$rsite}++;

        if ($opt_A>1 && $hier eq "alt") {	# resolve alt subhierarchies
	    # ex: -A2 turns alt.binaries.pictures into key "alt.binaries"
	    $subhier=join('.',splice(@newsgroup,0,$opt_A)); 
	    $bytespah{$subhier}+=$size;
	    $delaypah{$subhier}+=$delay;
	    $artspah{$subhier}++;
	}
    }
    close(NEW);

    # delay stats are only trustworthy if every line carried a posting time
    if ($data_withoutpost == 0) {
	$avgdelay_valid = "yes";
	warn "setting avgdelay_valid to \"$avgdelay_valid\" while reading $rawbatchfile\n" if $opt_d;
    }

    if ($opt_d) {
	warn "readnewdata: firstart=$firstart lastart=$lastart\n";
	warn "readnewdata: p_firstart=$p_firstart p_lastart=$p_lastart\n";
	warn "readnewdata: lastarts=$lastarts lastbytes=$lastbytes\n";
	warn "readnewdata: totalarts=$totalarts totalbytes=$totalbytes\n"; 
	warn "readnewdata: totaldelay=$totaldelay avgdelay_valid=$avgdelay_valid\n";
	warn "readnewdata: data_withpost=$data_withpost data_withoutpost=$data_withoutpost\n";
    }
    if ($opt_p) {		             # prepare hourly data for plots 
	&writehourdata;
    }
}


# writehourdata
# ----------------------------------------------------------------------
# write a non-cumulative snapshot of the last period's per-site data
# (option -p) for offline plotting; the file name encodes weekday and
# time-of-day so a week's worth of files forms a self-overwriting ring
#
# globals: $thisweekday/$thishour/$thisminute come from readoldresults;
#          NOTE(review): on the $unabletoflush path this sub runs before
#          readoldresults, leaving them empty in the file name - confirm.
#
sub writehourdata {
    # round minutes down so successive file names form a regular sequence
    if ($ROUNDTIME == 1) { $thisminute = 10 * int($thisminute/10); }
    $hourfile = sprintf("%s.%01d-%02d%02d",
	        "$HOURDIR/$SUMFILE", $thisweekday, $thishour, $thisminute);

    $start = &MakeTimeStr($p_firstart);
    $end = &MakeTimeStr($p_lastart);

    # write new summary 
    open(RES,">$hourfile") || die "cannot open $hourfile: $!\n";

    # convert $lastperiod (in seconds) to a more readable form
    $_lastperiod = &MakeTimeOnlyStr("$lastperiod") unless $unabletoflush;

    # print header
    print RES "\# file:     $hourfile\n";
    print RES "\# filetype: plotdata\n";
    print RES "\# host:     $fqdn\n";
    print RES "\# timenow:  $timenow\t$timenowstr\n"; # time_t yymmdd.hh:mm
    print RES "\#\n";
    # when the channel could not be flushed there is no data for this
    # period: mark the file accordingly and stop after the header
    if ($unabletoflush) {
	print RES "\# status:   nodata\n";
	close(RES);
	return;
    }
    print RES "\# lastperiod:  $lastperiod\t$_lastperiod\n"; # sec
    print RES "\# p_firstart:  $p_firstart\t$start\n";  # time_t yymmdd.hh:mm
    print RES "\# p_lastart:   $p_lastart\t$end\n";     # time_t yymmdd.hh:mm
    print RES "\# lastarts:    $lastarts\n";          # articles in last period
    print RES "\# lastbytes:   $lastbytes\n";         # bytes in last period
    print RES "\#\n";

    # print data
    foreach $rsite (keys(%p_arts)) {                     # per node
	print RES "pn\t$rsite\t$p_arts{$rsite}\t$p_bytes{$rsite}\n";
    }
    close(RES);
}


# writeresults
# ----------------------------------------------------------------------
# write back an updated cumulative result file
#
# args:    $type - result file type ('perday', 'continuous' or 'permonth')
#          $FILE - full path of the result file to (re)write
# globals: reads the accumulators (%arts/%bytes/%delay and the per-
#          hierarchy variants) plus the header counters maintained by
#          readoldresults/readnewdata
#
sub writeresults {
    local($type,$FILE) = @_;
    $start = &MakeTimeStr($firstart);
    $end = &MakeTimeStr($lastart);

    # make backup of regular files
    # (fix: 'continuous' was misspelled 'continous' here, so files of the
    # default continuous type were never backed up; also use the
    # configurable $CP instead of a hardcoded "cp")
    if (($type eq 'perday' || $type eq 'continuous') && -e "$FILE") {	
	system("$CP $FILE $FILE.old");
    }
    # write new summary 
    open(RES,">$FILE") || die "cannot open $FILE: $!\n";

    # convert $lastperiod (in seconds) to a more readable form
    $_lastperiod = &MakeTimeOnlyStr("$lastperiod");
    # compute total time period (UNIX time + human readable format)
    $totalperiod = $lastart - $firstart;
    $_totalperiod = &MakeTimeOnlyStr("$totalperiod");

    # get average delay; guard against division by zero when no article
    # at all has been counted yet (empty batchfile on a fresh setup)
    $avgdelay = $totalarts > 0
	? sprintf("%16.2f", $totaldelay/$totalarts)
	: sprintf("%16.2f", 0);
    unless($options) {
	$options = "-A $opt_A" if ($opt_A);
    }


    # print header
    print RES "\# file:     $FILE\n";
    print RES "\# filetype: $type\n";
    print RES "\# options:  $options\n";              # options
    print RES "\# host:     $fqdn\n";
    print RES "\# timenow:  $timenow\t$timenowstr\n"; # time_t yymmdd.hh:mm
    print RES "\#\n";
    print RES "\# firstart: $firstart\t$start\n";     # time_t yymmdd.hh:mm
    print RES "\# lastart:  $lastart\t$end\n";        # time_t yymmdd.hh:mm
    print RES "\# period:   $totalperiod\t$_totalperiod\n"; # sec d+hh:mm:ss
    print RES "\# totalarts:   $totalarts\n";     # art sum 
    print RES "\# totalbytes:  $totalbytes\n";    # byte sum
    print RES "\# delay_valid: $avgdelay_valid\n";
    print RES "\# totaldelay:  $totaldelay\n";    # cumulative delay [s]
    print RES "\# avgdelay:    $avgdelay\n";      # avg delay so far
    print RES "\# negdelayart: $negdelayart\n";   # arts with delay <0 
    print RES "\#\n";
    print RES "\# lastperiod:  $lastperiod\t$_lastperiod\n"; # sec
    print RES "\# lastarts:    $lastarts\n";      # articles in last period
    print RES "\# lastbytes:   $lastbytes\n";     # bytes in last period
    if ($type eq 'permonth') {
	print RES "\# lastdelay:   $lastdelay\n"; # delaysum of last day
	print RES "\# lastnegdelayart: $lastnegdelayart\n"; 
    }
    print RES "\#\n";

    # print data
    foreach $rsite (keys(%arts)) {                     # per node
	print RES "pn\t$rsite\t$arts{$rsite}\t$bytes{$rsite}\t$delay{$rsite}\n";
    }
    foreach $hier (keys(%artsph)) {                    # per hierarchy
	print RES "ph\t$hier\t$artsph{$hier}\t$bytesph{$hier}\t$delayph{$hier}\n";
    }
    if ($opt_A>1) {                    # alt.* hierarchy data 
	foreach $subhier (keys(%artspah)) {
	    print RES "pah\t$subhier\t$artspah{$subhier}\t$bytespah{$subhier}\t$delaypah{$subhier}\n";
	}
    }
    close(RES);
}


# MakeDateStr
# ----------------------------------------------------------------------
# make a date string yymmdd corresponding to the actual time.
# optional arg is number of days to subtract from current time
#
sub MakeDateStr {
    local($minusdays) = @_;
    local($tstr);
    local(@tarr) = localtime(time-$minusdays*86400);
    # localtime returns years since 1900; reduce mod 100 so the string
    # stays 6 digits from 2000 on (was a Y2K bug: 2000 printed "100mmdd")
    $tstr = sprintf ("%02d%02d%02d", $tarr[5] % 100, $tarr[4]+1, $tarr[3]);
}
 

# MakeTimeStr
# ----------------------------------------------------------------------
# make a time string yymmdd.hh:mm from current time (no args given) or 
# from UNIX time (long int) passed as argument
#
sub MakeTimeStr {
    local($arg) = @_;
    local($tstr);
    if ($arg == 0) { $arg = time; }
    local(@tarr) = localtime($arg);
    # localtime returns years since 1900; reduce mod 100 so the string
    # stays in yymmdd form from 2000 on (was a Y2K bug)
    $tstr = sprintf ("%02d%02d%02d.%02d:%02d", 
		     $tarr[5] % 100, $tarr[4]+1, $tarr[3],$tarr[2], $tarr[1]);
}


# MakeTimeOnlyStr
# ----------------------------------------------------------------------
# convert seconds to time string d+hh:mm:ss 
#
# The old version took the day count from gmtime's day-of-month field,
# which silently wrapped around for periods longer than 31 days (monthly
# result files can exceed that); compute it arithmetically instead.
# Output is identical for periods below one month.
#
sub MakeTimeOnlyStr {
    local($arg) = @_;
    local($tstr,$days,$rest);
    return '' if ($arg == 0);
    $days = int($arg / 86400);
    $rest = $arg % 86400;
    $tstr = sprintf ("%d+%02d:%02d:%02d", 
		     $days, int($rest/3600), int(($rest%3600)/60), $rest % 60);
}

# MakeShortTimeStr
# ----------------------------------------------------------------------
# make a time string yymm from current time (no args given) or 
# from UNIX time (long int) passed as argument
#
sub MakeShortTimeStr {
    local($arg) = @_;
    local($tstr);
    if ($arg == 0) { $arg = time; }
    local(@tarr) = localtime($arg);
    # localtime returns years since 1900; reduce mod 100 so the string
    # stays 4 digits from 2000 on (was a Y2K bug: 2000 printed "100mm")
    $tstr = sprintf ("%02d%02d",$tarr[5] % 100, $tarr[4]+1);
}


# gethostandfqdn
#---------------------------------------------------------------------- 
# determine $hostname and $fqdn (fully qualified domain name) for use
# in result-file headers
sub gethostandfqdn {
    chop($str = `uname -n`);
    unless ($str =~ /\./) {
	# uname gave a plain hostname: append the domain from resolv.conf
	$hostname = $str;
	$str = `$GREP domain /etc/resolv.conf`;
	$str =~ /domain\s*(\S+)$/;
	$fqdn = $hostname . "." . $1;
	return;
    }
    # uname already returned a fully qualified name: split off the host part
    $fqdn = $str;
    ($hostname) = ($str =~ /^([^.]+)\./);
}


# update_PATH
# -----------------------------------------------------------------------------
# enhance PATH to support scripts run from cron: append @ADDTOPATH and
# drop duplicate entries while keeping first-seen order
#
sub update_PATH {
    warn "OLD PATH=$ENV{'PATH'}\n" if $opt_d;
    @ENVPATH = split(/:/,$ENV{'PATH'});
    push(@ENVPATH,@ADDTOPATH);

    # keep only the first occurrence of each directory
    # (%envpathseen stays a global, as before, so repeated calls behave
    # identically to the original implementation)
    local(@keep) = ();
    foreach $dir (@ENVPATH) {
	unless ($envpathseen{$dir}) {
	    $envpathseen{$dir} = 1;
	    push(@keep, $dir);
	}
    }
    $ENV{'PATH'} = join(":", @keep);
    warn "NEW PATH=$ENV{'PATH'}\n" if $opt_d;
}


# modify_config
# -----------------------------------------------------------------------------
# load the optional external configuration file ($INFLOWCONF, overridable
# with -f); sets $inflowconf and the $inflowconfinfo text used in $usage
#
sub modify_config {
    local($p,$n);
    $inflowconf = $opt_f ? $opt_f : $INFLOWCONF;
    # split off the directory part (if any) and make it require-able
    if ($inflowconf =~ /^(.*)\/([^\/]+)$/) {
	($p, $n) = ($1, $2);
	push(@INC,$p);
    } else {
	$n = $inflowconf;
    }
    warn "p=$p  n=$n  inflowconf=$inflowconf\n" if $opt_d;
    unless (-e $inflowconf) {
	warn "didn't find any local configs $inflowconf on INC=@INC\n" if $opt_d;
	$inflowconfinfo = "- none -";
	return;
    }
    warn "loading local configs from $inflowconf...\n" if $opt_d;
    require $n;
    $inflowconfinfo = "$CFRELEASE - $inflowconf";
}

