# Volcan $Date: 2004/02/16 15:14:06 $
# MSnet: polls remote seismic stations by FTP, registers their event files
# in a Seisan REA database, fetches the waveforms, and (per-station policy)
# deletes old files on the remote stations.

my $version = q($Revision: 1.14 $);

##--------------------- SECTION ZERO ----------------------------------------------------
=pod
Set all the defaults for options not specified by user.  These are at the top
so whoever wants to alter them for their convenience can find them quickly.
=cut

my $pathsep = '\\';   # platform path separator; switched to '/' for Unix below
my $ini_file = &path_join( 'C:', 'seismo','MSNet','MSnet.ini');   # default "global" .ini

my $wantUnixCompat = 1;   # set to 0 to skip the OS detection block below
my $ostype = undef;       # 'Unix' or 'Windows' once detected
my $homedir = undef;      # user's home directory; used to look for a per-user .ini

# Detect the operating system from the environment and adjust the path
# separator and default .ini location accordingly.
if ($wantUnixCompat) {
  # Note to Angel: This code is harmless if $wantUnixCompat is set to 0.
  # It is also harmless on normally configured Windows systems.
  # It will prove valuable to those trying to run on Unix systems.
  my $ost = $ENV{'OSTYPE'};
  if ($ost) {
    if ($ost =~ m/Unix/i || $ost =~ m/Linux/i) {
      $ostype = 'Unix';
      $homedir = $ENV{'HOME'};
      # Prefer a per-user ~/.msnetrc over the Windows-style default path.
      if (-d $homedir) { $ini_file = &path_join($homedir, '.msnetrc'); }
      else { $ini_file = undef; } # No C: on Unix systems.
      $pathsep = '/'; # Don't want no backslashes either.
    }
  }
  else {
    # OSTYPE was unset: fall back to the Windows "OS" environment variable.
    # An unset "OS" is treated as Windows as well.
    $ost = $ENV{'OS'};
    if ((! $ost) || $ost =~ m/windows/i) {
      $ostype = 'Windows';
      $homedir = $ENV{'HOMEDRIVE'}.$ENV{'HOMEPATH'};
      if (-d $homedir) {
	my $inf = &path_join($homedir, 'MSnet.ini');
	# A readable per-user MSnet.ini in the home directory wins over the default.
	if (-f $inf && -r $inf) { $ini_file = $inf; }
      }
    }
  }
  # CONSIDER: Maybe some Mac users will want to use this?
}
my $verbose = undef;   # undef/0 quiet, 1 verbose, 2 very verbose (set from VERBOSE in the .ini)

##--------------------- SECTION ONE ----------------------------------------------------

=pod
In this first section I declare some variables and use some modules.  I also test to see
if a command line argument was given; if it was not given and I could not find the
default "global" ini file I send an error message to the console and exit the program.
=cut

my $usage = "usage: perl MSnetxx.pl path_to_ini/your_ini.your_extension";

use strict;
no strict "vars";        # legacy: the script relies on package globals throughout
use Seisan::Tools;       # sei_epoch / nor_epoch / event_sorter / s-file helpers
use Net::FTP;            # BUGFIX: was "use net::ftp" -- module names are case
                         # sensitive on Unix filesystems, where that fails to load
use DateTime::Precise;
use File::Path;

my $logit_cnt = 0;       # number of log lines written so far (maintained by logit())
my $log1 = 0;            # count of FTP transfer attempts in SECTION FIFTEEN
my $log2 = 0;            # count of new s-files created
my $log3 = 0;            # count of events appended to existing s-files
my %MTTNE = ();          # per-station MIN_TIME_TO_NEXT_EVENT, keyed by station name
my $ftptimeout = 120;    # seconds before a Net::FTP operation is abandoned

if ($ARGV[0]) {   # a command-line argument overrides the default .ini path
    $ini_file = shift;
}
# else: keep the default chosen in SECTION ZERO.

# Treat -? /? -h --h /help and similar combinations as a plea for help.
# This precludes using '/help' or '/h' as a .ini file name.
if ($ini_file =~ m/^(\/|-|--)(\?|h|help)/i) {
  print STDOUT $usage;
  exit 1;
}

# The .ini must exist as a plain, readable file before we go any further.
unless (-f $ini_file && -r $ini_file) {
    print STDERR <<"_";        # to be printed if .ini not readable as a file

The global parameter .ini file, $ini_file ,
either could not be found or is unreadable.
Please make sure that you either wrote it correctly or that it
is in the directory you specified (or accepted as the default.)

Also, please make sure you have all the right directories made.

Correct $usage

_

exit 1;  # This will put you back on the command prompt; printing the above message
}

##--------------------- SECTION TWO ----------------------------------------------------

=pod
In this little section I call the DateTime module to create a new time object.  This is
an array that has year, mon, day etc.  I then make $today_julian, which is the present
epoch time.
=cut

$timearray = DateTime::Precise->new(); #$timearray is the new time object (package global, per "no strict vars")
$today_julian = int($timearray->unix_seconds_since_epoch); # unix epoch time right now, whole seconds

##--------------------- SECTION THREE --------------------------------------------------

=pod
These next few lines will get the network wide parameters,
things that are common to the entry program.  Anything can be placed in the
file I call $ini_file it will all be placed in an array and can be used as
a variable

The subroutine &make_hash makes a hash of the msnet.ini

Key is before the comma and Value is after the comma in the file.
so that $global{LOGNAME} = the name of the file to write logs to.

=cut

# Read the .ini file and convert lines resembling:
#   TAG,some value
# to a hash of values indexed by the TAG's.


%global = &make_hash(&read_file_as_lines($ini_file, 'initialization', 1));

if ($global{VERBOSE} =~ m/YES/i) {
    # Show run-time warnings on the console.
    # BUGFIX: this used to be "use warnings", but a pragma executes at
    # compile time regardless of the surrounding "if", and is lexically
    # scoped to its own block -- so it never did anything useful here.
    # $^W enables run-time warnings globally, which is what was intended.
    $^W = 1;
    $verbose = 1;
}
elsif ($global{VERBOSE} =~ m/VERY/i) {
    # As above, plus a lot more logging below.
    $^W = 1;
    $verbose = 2;
}

if ($verbose == 2) {
  # Echo the whole .ini into the log at top verbosity.
  map {&logit($_)} &read_file_as_lines($ini_file, '.ini for logging', 0);
  &logit ("\n");
}
##--------------------- SECTION FOUR ---------------------------------------------------
=pod
In this section, as I did above, I go to the directory given in msnet.ini as
STATION_HOME and make an array of all files which end with extension $global{STATION_EXTENSION}.
=cut

&make_dirname(\$global{STATION_HOME});

if (! opendir STATDIR, $global{STATION_HOME}) {
    my $problem = "$global{STATION_HOME} is not a readable directory.\n";
    print STDERR $problem, "Please provide it or name one in the .ini file.\n";
    &logit($problem);
    exit 2;
}


# Collect every file name ending in the configured station extension.
# \Q...\E quotes the extension so its "." is matched literally.
@stationarray = grep { m"\Q$global{STATION_EXTENSION}\E$" } readdir STATDIR;
closedir STATDIR;   # BUGFIX: was close(), which does not release a dirhandle

if (@stationarray == 0) {
    my $problem = "No stations to check found in $global{STATION_HOME}\n";
    # BUGFIX: the original terminated this print with "," so &logit's return
    # value became a print argument; now a proper statement of its own.
    print STDERR $problem, "Please check your MSnet directories.\n";
    &logit ($problem);
    exit 2;
}
elsif ($verbose) { print STDOUT " Preparing to check ".@stationarray." stations\n"; }

my $log_entry = " MSnet ".$version." started, will check ".@stationarray." stations";
&logit ($log_entry);

# Strip trailing extensions.  (eg. BRU2.stations => BRU2)
map { s/\.[^\\\/\.]*$// } @stationarray;

# TODO fix things so that any part of the station name of 3 to 5 characters will work
# ie MOR.station, BRU2.station and berge.station should be ok
#  xx.station or xxxxxx.station should not be valid
# it would also be nice to be able to put comments in the .station files anything to eol
# after a "#"
# it would also be nice to be able to use the UNIX standard of TAG = value, spaces optional

##--------------------- SECTION FIVE ----------------------------------------------------

=pod

In the first part of this section I use @stationarray created above; it contains the
name of each station.  Then I first get the "station" parameters for each one and
build a hash as I did above, which allows me to use the variables for each file.

Then in the "stationsloop", using each member of the @stationarray, I get the
"station" parameters for each one and build a hash as I did above, which allows
me to use the variables for each different station.

I then use Net::FTP to go to each station and fetch the directory listing of events, not the
events themselves.

At the end of SECTION FIVE I have an array called @FTPed_events_file_names which
has all the event file names from all of the stations that responded and had events.

=cut

# SECTION FIVE main loop: visit every station by FTP and collect the remote
# event-file names into @FTPed_events_file_names.  Only directory listings
# are transferred here -- the waveform files themselves come later.
stationsloop: foreach $sa(@stationarray) {

    %station = (); # Ensure no leakage from one station to another.
    # open the ".station" file for each station and read the parameters
    my $sapath = &path_join($global{STATION_HOME}, $sa.$global{STATION_EXTENSION});
    my @slines = &read_file_as_lines($sapath, 'station descriptor', 0);
    if (@slines < 3) {
      # Too few lines to hold the parameters the FTP calls below need.
      my $trouble = "Cannot use station $sapath\n";
      print STDERR $trouble, "Skipping that station.\n";
      &logit($trouble);
      next;
    }
  
    if ($verbose == 2) {
      # At top verbosity, echo the whole station descriptor into the log.
      &logit("\n");
      map {&logit ($_) } @slines;
      &logit("\n");
    }
    
    %station = &make_hash(@slines);
    
    $station{STATION_NAME} = $sa;

    # Build a hash which has the MTTNE for each station, used later.
    $MTTNE{$station{STATION_NAME}} = $station{MTTNE};

##--------------------- SECTION FIVE A ---------------------------------------------------

# This is the guts of SECTION FIVE I use Net::FTP to visit each station.

    $rr = $sa; # needed because the variable $sa is only valid within the loop

    eval {  # this eval wrap (new book pg. 705) is to be able to trap run time errors that would
            # otherwise be fatal during the FTP functions
	
	if ($verbose > 1) {

        # Very verbose: have Net::FTP echo the protocol conversation.
        $ftp = Net::FTP->new($station{IP},
                            Timeout => $ftptimeout,
                            Debug => 1);
        }else {

        $ftp = Net::FTP->new($station{IP},
                            Timeout => $ftptimeout,
                            Debug => 0);
        }
	
        if (!$ftp) {
            my $problem = " No initial connection to ".$sa." at IP ".$station{IP};
            &logit ($problem);
            next stationsloop;
        }

        $logon = $ftp->login($station{LOGIN},$station{PASSWORD});
        if (!$logon) {
            my $problem = " Failed initial logon to ".$sa." at IP ".$station{IP};
            &logit ($problem);
            $ftp->quit();
            next stationsloop;
        }
	if ($verbose) {
	  my $notice = " Pulling a directory via FTP from $station{IP} (as $station{LOGIN})";
	  print STDOUT $notice,"\n";
	}
      my $dir = $ftp->cwd($station{REMOTE_WAVE_FORM_DIR});
        if (!$dir) {
	  my $problem = " Did not find ".$station{REMOTE_WAVE_FORM_DIR}." at "
	    .$sa.", FTP aborted";
            &logit ($problem);
            $ftp->quit();
            next stationsloop;
        }
	
        @station_dir = $ftp->dir;

# this bit of code is used in debugging to see what remote directory is being read
	if ($verbose == 2) {
	    my $problem = " Below is the directory of $station{IP}";
	    # NOTE(review): logit() uses only its FIRST argument, so only the
	    # leading "\n" is actually logged by the next call -- confirm intent.
	    &logit ("\n",$problem,"\n");
	    map { &logit ($_); } @station_dir;
	}
	
        if (@station_dir == 0) {
            my $problem
	      = " Connected but failed to get directory from ".$sa." at ".$station{IP};
            &logit ($problem);
            $ftp->quit();
            next stationsloop;
        }

        # Count listing lines that carry this station's Seisan event stamp.
        # NOTE(review): the "." before $sa is an unescaped regex dot, so it
        # matches any character, not just the literal dot of "S.<station>".
        $temp1 = grep m/-\d{4}-\d{2}S.$sa/,@station_dir;
        if ($temp1 == 0 && $dir == 1) {
            my $problem = " Connected to ".$sa." and got a directory but there were no events";
            &logit ($problem);
        }

        $ftp->quit();

        $temp1 = $dir = undef;

    ##--------------------- SECTION FIVE B ---------------------------------------------------

                    # the next few lines take file names returned by the ftp, now in the
                    # array @station_dir, and add to the new array only file names that
                    # match the station we are working with, $sa.  basically removes any
                    # non event file
    }; # end of the Eval
    if ($@) {
        # A timeout or other fatal Net::FTP error was trapped by the eval.
        my $notice = " 1st $@ at $rr";
        &logit ($notice);
    }
    
    #print STDOUT " This is station dir ". @station_dir;

    # Keep only the listing lines that match this station's event pattern.
    foreach (@station_dir) {
        if ($_ =~ m/-\d{4}-\d{2}S.$sa/) {
            push(@FTPed_events_file_names, $_);
        } 
    }
} # end of station loop


if (@FTPed_events_file_names == 0) { # if array has no elements
    my $notice = " No events were fetched from anywhere, so I quit :( ";
    &logit ($notice);
    exit 0;
}
else {
    my $notice = " I have fetched ".@FTPed_events_file_names." events names for processing.";
    &logit ($notice);
}
                # the next few lines clear any information other than the file name,
                # and what we have at this point is an array @FTPed_events_file_names
                # that has the names of the files
                # of the stations we called by ftp in the foreach loop that starts with
                # foreach $sa(@stationarray)

# Drop any listing line that does not carry the Seisan date/"S." stamp.
@FTPed_events_file_names = grep $_ =~ m/-\d{4}-\d{2}S./, @FTPed_events_file_names;

# Trim each raw "dir" listing line down to a bare file name: skip everything
# up to 18 characters before the "S." marker (Seisan names look like
# "2004-02-16-1200-00S.XXX" -- the date part is exactly 18 characters) and
# keep at most the next 31 characters, with trailing spaces stripped by "A".
map {
    my $s = "x".(index($_,"S.") - 18);
    $_ = unpack ("$s A31", $_)
} @FTPed_events_file_names;

##--------------------- SECTION SIX ----------------------------------------------------
=pod
In this section I check each filename to see if it is already in the REA_BASE, and
remove it from the work list if it is.

I make an array called @stations_to_get which will be used at the end as the
array of stations to revisit to fetch and delete files from.

Lastly I make an array called @remote_events_to_delete which will be altered
throughout this program and will be the list of remote files to delete.

=cut
if ($global{PROCESSING_DAYS} > 29) {$global{PROCESSING_DAYS} = 30;} #limit to 30 days

# Keep only events that are not already present in the local EVENT_STORE
# (-e returns 1 when the file exists, so "ne 1" keeps the missing ones).
@FTPed_events_file_names = grep -e ($global{EVENT_STORE}.$_) ne "1", @FTPed_events_file_names;

# Extract the station name from each event file's extension
# (e.g. "...S.BRU_003" -> ".BRU" -> "BRU").
map {
  (my $statvar) = $_ =~ m/(\.[a-zA-Z0-9]+)/;
  ($statvar) = ($statvar) =~ m/([a-zA-Z0-9]+)/;
  push(@stations_to_get, $statvar)
} @FTPed_events_file_names;

@stations_to_get = &un_dupe (@stations_to_get);

# Skip station names whose last character is "_".
# BUGFIX: the original used "if (chop == "_")", which compared numerically
# (0 == 0, so it was always true) and destructively chopped the last
# character off every element through $_'s aliasing.  @stations_to_get is
# rebuilt from scratch in SECTION FIFTEEN before its next real use, so this
# non-destructive test preserves observable behavior.
foreach (@stations_to_get) {
    if (substr($_, -1) eq "_") { next };
    
}
=pod
NOTE(review): this "=pod" does not close until the "=cut" at the end of the
SECTION SEVEN header below, so the DECLARE_EVENT early-exit check that
follows is POD text and never compiled.  Confirm this was intentional.
if (@stations_to_get < $global{DECLARE_EVENT}) {
    my $notice = " Not enough station's data retieved, so no point and I quit :( ";
    &logit ($notice);
    exit(1);
}

##--------------------- SECTION SEVEN ---------------------------------------------------

=pod
In this section I take the array @FTPed_events_file_names and create a hash
which has for it's KEY the event file names and for it's value the epoch time
of that event

I also then sort @FTPed_events_file_names by epoch time
=cut

# Record each fetched event's epoch time, keyed by its Seisan file name.
foreach my $ev (@FTPed_events_file_names) {
    $epoch_times_of_FTPed{$ev} = Seisan::Tools::sei_epoch($ev);
}

# Remember the full fetched list: remote deletion decisions use it later.
@remote_events_to_delete = @FTPed_events_file_names;

# Next we drop file names older than PROCESSING_DAYS.
@FTPed_events_file_names = grep {
    ($today_julian - $epoch_times_of_FTPed{$_}) < ($global{PROCESSING_DAYS} * 86400)
} @FTPed_events_file_names;

# Sort the survivors chronologically via the Seisan helper.
@FTPed_events_file_names = Seisan::Tools::event_sorter(@FTPed_events_file_names);
# What we now have: %epoch_times_of_FTPed maps Seisan event name -> epoch
# time, and @FTPed_events_file_names holds only recent events, in time order.

##--------------------- SECTION EIGHT --------------------------------------------------

=pod
At the end of this section what I have is an array of s_files that are younger
than $global{PROCESSING_DAYS}.  I also limit the request to 30 days.

I also have a hash which has the epoch times of the files in the @s_file_array
it is called %s_file_epoch

=cut

# Convert a time object to a directory name per seisan convention
# and root location given in the .ini file.
# Takes a DateTime::Precise object (year at [0], month at [1]) and returns
# "<REA_BASE><sep>YYYY<sep>MM<sep>", with a trailing separator.
sub time_to_dirname {
  my $timeobj = shift;
  my $rootdir = $global{REA_BASE};
  my $year = $timeobj->[0];            # from DateTime::Precise
  my $mon = $timeobj->[1];
  &make_dirname(\$rootdir);
  # seisan names have a leading 0 in the month numbers.
  # BUGFIX: use the platform separator ($pathsep) instead of a hard-coded
  # "/", for consistency with path_join() and the rest of the file.
  return sprintf("%s%s%04d%s%02d%s", $rootdir, $pathsep, $year, $pathsep, $mon, $pathsep);
}

      # In the next few lines I go get the "S" file names

# Return the list of s-file names (anything containing ".S20") found in the
# given directory.  A missing or unreadable directory is not fatal: it is
# noted in the log and an empty list is returned.
sub sift_sfdir {
  my $sfdir = shift;
  my @files = ();
  if (opendir my $sfdh, $sfdir) {
    # Only s-files carry the ".S20xx" century stamp; "." and ".." never match.
    @files = grep {$_ =~ /\.S20/} readdir $sfdh;
    closedir $sfdh;   # BUGFIX: was close(), which does not release a dirhandle
  }
  else {
    # This is not necessarily an error.  But it is notable.  The consequence
    # is to return an empty list of matching filenames.
    my $notice = "Seisan file structure not found for $sfdir\n";
    if ($verbose) { print STDOUT $notice; }
    &logit($notice);
  }
  return @files;
}

# Get whatever appears for this month.
@s_file_array = &sift_sfdir(&time_to_dirname($timearray));

if ($timearray->[2] <= $global{PROCESSING_DAYS}) {
    # Day-of-month ([2]) falls inside the look-back window, so part of the
    # window is in the previous month: gather that month's directory too.
    $timearray->dec_day($global{PROCESSING_DAYS}); # DateTime::Precise subroutine to go back days
    push (@s_file_array, &sift_sfdir(&time_to_dirname($timearray)));
}

# Epoch time of every s-file, keyed by its name (via the Seisan helper).
map {$s_file_epoch{$_} = Seisan::Tools::nor_epoch($_)} @s_file_array;

# Keep only s-files younger than PROCESSING_DAYS.
@s_file_array = grep {
  $s_file_epoch{$_} > $today_julian-((86400)*$global{PROCESSING_DAYS})
} @s_file_array;
                    # What we have now is the %s_file_epoch hash whose key
                    # is the "S" file name and value is the epoch time.
                    # I also have @s_file_array which is made up of "s" file
                    # names that only go back the right number of days

##--------------------- SECTION ---------------------------------------------------

=pod
        # To summarize what we have at this point:

        # all the event file names needed are in array @FTPed_events_file_names
        # all the events' epoch times are in the hash %epoch_times_of_FTPed
        # all the s-files we might add to are in @s_file_array
        # all the epoch times of the members of @s_file_array are in %s_file_epoch
        # the time interval is held in $global{PROCESSING_DAYS}
=cut

# For every fetched event, look for an existing s-file whose time is within
# ARRAY_PROP_TIME seconds; when one is found, append the event to it and
# queue the event's waveform for download in SECTION FIFTEEN.
foreach $e (@FTPed_events_file_names) {
    (my $statvar) = $e =~ m/(\.[a-zA-Z0-9]+)/;  # I should be able to do this and the line below in one step
    ($statvar) = ($statvar) =~ m/([a-zA-Z0-9]+)/; # extracting the station name from the file name
    $min_time_key = $statvar;
    foreach $s (@s_file_array) {
        if (abs($epoch_times_of_FTPed{$e} - $s_file_epoch{$s}) < $global{ARRAY_PROP_TIME}) {
	  
	    # TODO here I need to see if there is an event in the S-file and if there is then
	    # check the time against MTTNE and if not check to see if it is within ARRAY_PROP_TIME
	    
#            if (abs($epoch_times_of_FTPed{$e} - $s_file_epoch{$s}) > $MTTNE{$min_time_key}) { #this is the suspect
                push (@get_these_files, $e);
		my $reabase = $global{REA_BASE};
		&make_dirname(\$reabase);
		# substr($s,13,4) and substr($s,17,2) pull the YYYY and MM
		# fields out of the s-file name to locate its REA/YYYY/MM
		# directory -- assumes the standard Seisan s-file name layout.
		my $file_to_open = &path_join($reabase, substr($s,13,4), substr($s,17,2), $s);
                my $add_notice = Seisan::Tools::add_to_s_file($file_to_open, $e);

                if ($add_notice =~ m/added/) {
                    $log3 = $log3 + 1;
                    &logit($add_notice);
                }
                else { # if "attemped" is returned then remove the file from the "get" array.
                    # NOTE(review): unpack("x17 A29", ...) assumes the notice
                    # embeds the event file name starting at column 18 --
                    # confirm against Seisan::Tools::add_to_s_file's output.
                    if ( -e &path_join($reabase, unpack("x17 A29", $add_notice))) {
                        pop @get_these_files;
                    }
                    &logit($add_notice);
                }
#            } # this is the end of the suspect
        }
    }
}
$min_time_key = undef;
##--------------------- SECTION ELEVEN ---------------------------------------------------

# Next I need to subtract the get_these_files from FTPed_events_file_names and remove
# events older than $global{FTP_INTERVAL} that were not in s-files

# I need to think about what happens if many stations were skipped once and
# between the left-over stations they can declare an event?

# Remove from @FTPed_events_file_names every event that is already queued in
# @get_these_files; the survivors are the candidates for brand-new s-files.
my %already_queued = map { $_ => 1 } @get_these_files;
@temp = grep { !$already_queued{$_} } @FTPed_events_file_names;

@FTPed_events_file_names = @temp;
undef @temp;

if (@FTPed_events_file_names == 0) {
    my $notice = " Done after \"add to s_file\" section. ";
    &logit ($notice);
}

##--------------------- SECTION TWELVE ---------------------------------------------------

# now we have to check all the files in @FTPed_events_file_names to see if they can be seisei

# Pair-scan the remaining events: collect every event that lies within
# ARRAY_PROP_TIME seconds of some OTHER event (both members of each close
# pair are recorded; duplicates are removed immediately afterwards).
@events_that_are_close = ();
foreach my $ev_a (@FTPed_events_file_names) {
    foreach my $ev_b (@FTPed_events_file_names) {
        next if $ev_a eq $ev_b;
        if (abs($epoch_times_of_FTPed{$ev_a} - $epoch_times_of_FTPed{$ev_b}) < $global{ARRAY_PROP_TIME}) {
            push (@events_that_are_close, $ev_a);
            push (@events_that_are_close, $ev_b);
        }
    }
}

@events_that_are_close = &un_dupe (@events_that_are_close);

##--------------------- SECTION THIRTEEN ---------------------------------------------------

            # events_that_are_close now is an array of event names that have been "seisei"
            # and are with ARRAY_PROP_TIME of each other
            # and are free of duplicates

            # in the next few lines I get all the first characters of the
            # extension and place them in an array; this is done so as to
            # build @stations_to_get so that
            # I only go to the stations I need to visit to get events



            # stations_to_get now has a list of the stations with no dupes

            #  What follows makes the "s" files
            #  from chirinet.group
            #  PROCESSING_DAYS,30
            #  ARRAY_PROP_TIME,30 is the seisei time
            #  @same3 = @stations_to_get;

# Walk the time-sorted close-event list pairwise.  Runs of events that are
# each within ARRAY_PROP_TIME of the next are gathered into @to_be_joined;
# when a run ends it is de-duplicated, thinned of same-station repeats closer
# than MTTNE, and -- if at least DECLARE_EVENT members remain -- a new s-file
# is written and the members are queued for download.
$samecnt = @events_that_are_close;  # stores the number of elements in
                                    # events_that_are_close to $samecnt
$cnt = 0;
my @to_be_joined;

loopxx:
while ($cnt != $samecnt) {

    # NOTE(review): on the last element $events_that_are_close[$cnt + 1] is
    # undef, so the abs() difference is the full epoch value, the test fails,
    # and the pending run is flushed -- the desired end-of-list behavior.
    if (abs($epoch_times_of_FTPed{$events_that_are_close[$cnt]} -
            $epoch_times_of_FTPed{$events_that_are_close[$cnt + 1]}) < $global{ARRAY_PROP_TIME}) {
        push (@to_be_joined,$events_that_are_close[$cnt]);
        push (@to_be_joined,$events_that_are_close[$cnt + 1]);
        ++$cnt;
        next loopxx;
    }

    if (@to_be_joined == 0) {
        ++$cnt;
        next loopxx;
    }

    @to_be_joined = &un_dupe (@to_be_joined);

    if (@to_be_joined < $global{DECLARE_EVENT}) {
        undef @to_be_joined;
        next loopxx;
    }

    ##--------------------- SECTION FOURTEEN ---------------------------------------------------

    ##### here I need to check to_be_joined against station dupes with the
    ##### pre-post (MIN_TIME_TO_NEXT_EVENT or shorter MTTNE) time and then
    ##### add those stations to the "get list"

    foreach $gg(@to_be_joined) {
        (my $statvar) = $gg =~ m/(\.[a-zA-Z0-9]+)/;
	($statvar) = ($statvar) =~ m/([a-zA-Z0-9]+)/;
	$min_time_key = $statvar;
        foreach $ggg(@to_be_joined) {
	    (my $statvar2) = $ggg =~ m/(\.[a-zA-Z0-9]+)/;
	    ($statvar2) = ($statvar2) =~ m/([a-zA-Z0-9]+)/;
	    
            if ($gg ne $ggg) {
                if ($statvar eq $statvar2) {
                    # Two events from the same station closer together than
                    # its MTTNE cannot be distinct; blank out the duplicate
                    # ($ggg aliases the array element, so this edits in place).
                    if (abs($epoch_times_of_FTPed{$gg} - $epoch_times_of_FTPed{$ggg})
                        < $MTTNE{$min_time_key}) {
                        $ggg = "";
                    }
                }
            }
        }
    }

    $min_time_key = undef;
    # BUGFIX: was  grep {$_ != ""}  -- a NUMERIC comparison on strings that
    # only worked because Seisan names numify to their leading year digits.
    # "ne" is the string comparison that was intended.
    @to_be_joined = grep {$_ ne ""} @to_be_joined;

    if (@to_be_joined < $global{DECLARE_EVENT}) {
        undef @to_be_joined;
        goto loopxx;
    }

    map {push(@get_these_files,$_)} @to_be_joined;

                # Now the array to_be_joined has files which are within ARRAY_PROP_TIME
                # of each other
                # foreach @to_be_joined I need to make an "S" file with each
                # member in it, the first member giving the
                # "S" file its name.

    ### here I call the Seisan::Tools::make_s_file and this needs to have passed to it
    ### an array with the seisan files which will be added to the S_file
    ### what we get back is an array with the lines of the s_file which can then be stored
    ### in the right place.

    undef @s_file_lines;
    @s_file_lines = Seisan::Tools::make_s_file(@to_be_joined);
    $sfilename = Seisan::Tools::sei2nor($to_be_joined[0]);

    # Place the new s-file under REA_BASE/YYYY/MM/ (the last six characters
    # of the Norwegian-format name are YYYYMM).
    my $sfpath = $global{REA_BASE};
    &make_dirname(\$sfpath);
    $sfpath = &path_join($sfpath, substr($sfilename,-6,4), substr($sfilename,-2));
    &ensure_isdir($sfpath) || die "Cannot put REA info";
    # TODO: Handle this error better.
    $sfpath = &path_join($sfpath, $sfilename);
    if (! -e $sfpath) {

      if (open(SFILE_PLACE, '>'.$sfpath)) {

        map { print SFILE_PLACE $_,"\n" } @s_file_lines;

        my $notice = " Made new s-file $sfilename ";
        &logit ($notice);
        map {
            $notice = "       With $_ ";
            &logit ($notice);
        } @to_be_joined;

        close SFILE_PLACE;

        ++$log2;
      }
      else {
	my $problem = "Could not create $sfpath\n";
	print STDERR $problem;
	&logit($problem);
      }
    }
    ++$cnt;
    undef @to_be_joined;
}
##--------------------- SECTION FIFTEEN ---------------------------------------------------
sleep $global{TIMEOUT}; # this is needed only on some systems if a station does not reset
            # the FTP server between when it is called for the directory and the file fetch
# CONSIDER: Make this an optional parameter in the .station file.  Take the longest
# found, or 0 if none necessary, as the idle time here.

# Rebuild @stations_to_get from scratch: only stations that own at least one
# file in @get_these_files need a second FTP visit.
undef @stations_to_get;
map {
  (my $statvar) = $_ =~ m/(\.[a-zA-Z0-9]+)/;    # first ".ext"-style chunk of the name
  ($statvar) = ($statvar) =~ m/([a-zA-Z0-9]+)/; # drop the leading dot, keep the alnum run
  push(@stations_to_get, $statvar)
} @get_these_files;

@stations_to_get = &un_dupe (@stations_to_get);

# SECTION FIFTEEN loop: revisit (by FTP) only the stations that own files in
# @get_these_files, transfer those event files into EVENT_STORE, and then
# delete remote files that are old enough per the station's DELETE_FILES
# policy (99 means "never delete").
stationsloop2: foreach $ggit(@stations_to_get) {
    %station = (); # Ensure no leakage from one station to another.
    # open the ".station" file for each station and read the parameters
    my $sapath = &path_join($global{STATION_HOME}, $ggit.$global{STATION_EXTENSION});
    my @slines = &read_file_as_lines($sapath, 'station descriptor', 0);
    if (@slines < 3) {
      my $trouble = "Cannot use station $sapath\n";
      print STDERR $trouble, "Skipping that station.\n";
      &logit($trouble);
      next;
    }
  
    if ($verbose == 2) {
      # At top verbosity, echo the whole station descriptor into the log.
      &logit("\n");
      map {&logit ($_) } @slines;
      &logit("\n");
    }
    
    %station = &make_hash(@slines);
  
    my $eventroot = $global{EVENT_STORE};
    &make_dirname(\$eventroot);
    &ensure_isdir($eventroot) || die "Cannot use or create $eventroot";
    # CONSIDER: Should we die here?

    eval { #this eval is needed to trap and use the error message from net::ftp
	
        $ftp = Net::FTP->new($station{IP},
                            Timeout => $ftptimeout,
			    # (Debug used to be hard-coded on here)
                            Debug => (($verbose)? 1 : 0));

   
        if (!$ftp) {
            my $problem = " No second connection to ".$ggit." at IP ".$station{IP};
            &logit ($problem);
            next stationsloop2;
        }
        $logon = $ftp->login($station{LOGIN},$station{PASSWORD});
	if (!$logon) {
	    my $problem = " Failed second logon to ".$ggit." at IP ".$station{IP};
            &logit ($problem);
            $ftp->quit();
            next stationsloop2;
        }
        $cwd = $ftp->cwd($station{REMOTE_WAVE_FORM_DIR});
        if (!$cwd) {
            my $problem = " No \"dir change 2\" at ".$ggit." ".$station{IP}.", FTP aborted";
            &logit ($problem);
            $ftp->quit();
            next stationsloop2;
        }

        $ftp->binary();   # event files are binary waveform data

        if ($verbose) {
	    $ftp->hash(\*STDOUT);   # progress hash marks on the console
        }
	
up5lines:
# TODO make a sub of this foreach that gets passed @get_these_files
        foreach $ss(@get_these_files) {
            $rr = $ss;   # remember current file for the error handler below
	    (my $statvar) = $ss =~ m/(\.[a-zA-Z0-9]+)/;
	    ($statvar) = ($statvar) =~ m/([a-zA-Z0-9]+)/;
            if ($ggit eq $statvar) {   # file belongs to the station being visited
	        my $eventstore = "$eventroot/$ss";
                if ( -e $eventstore) {
                    print STDOUT " Skipped $ss\n";
                    next up5lines;
                }

                my $xfr_result = $ftp->get($ss,$eventstore);

                if ($xfr_result) {
		  my $notice = " Transfered ".$ss;
		  &logit ($notice);
                }
		else {
		    if ($verbose) {
			print STDERR " Did not get $ss\n";
		    }
		    # Never delete a remote file we failed to fetch.
		    push (@do_not_delete_on_remote, $ss);
                    $problem = " Failed during transfer of ".$ss;
                    &logit ($problem);
                    if (-e $eventstore) {
                        unlink($eventstore);   # discard a partial transfer
                    }
                }
                # NOTE(review): counts transfer ATTEMPTS, not just successes.
                ++$log1;
            }
        }

        undef @temp;
        ##--------------------- SECTION ---------------------------------------------------
	#  THIS IS THE ONLY PLACE WHERE REMOTE FILES CAN BE DELETED
	#  IF DELETE_FILES IS EQUAL TO 99 THIS SECTION IS SKIPPED
	
        # this next "if" subtracts the do_not_delete_on_remote array from the remote_events_to_delete array
        if ($station{DELETE_FILES} != 99) {
            if ($station{DELETE_FILES} < 0) {$station{DELETE_FILES} = 0};
            loop: foreach $remove(@remote_events_to_delete) {
                foreach $dontremove(@do_not_delete_on_remote) {
                    if ($remove eq $dontremove) {
                        next loop;
                    }
                }
                push (@temp,$remove);
            }
            @remote_events_to_delete = @temp;
            undef @temp;
            # NOTE(review): m/$ggit/ is unanchored, so a station name that is
            # a substring of another station's name could match its files too.
            foreach $remove(@remote_events_to_delete) {
                if ($remove =~ m/$ggit/ && (($today_julian - $epoch_times_of_FTPed{$remove}) > (($station{DELETE_FILES})*86400)) ) {
                    $ftp->delete($remove);
                    my $notice = " Deleted ".$remove." for being older than $station{DELETE_FILES} days";
                    &logit ($notice);
                }
            }
        }
        $ftp->quit();
    };  # This is the end of the eval 
    if ($@) {  # This an error from a time out during net::ftp
        my $notice = " 2d $@ at $rr";
        &logit ($notice);
	unlink("$eventroot/$rr");   # drop the possibly partial local copy
    }
}

##--------------------- SECTION ---------------------------------------------------

# Record the names we failed to fetch so a human can chase them later; these
# files were also excluded from remote deletion above.
unless (@do_not_delete_on_remote == 0) {
    my $missedfname = &path_join($global{LOG_PATH}, 'missed.files');
    # BUGFIX: three-argument open with a lexical handle; ">>" also creates
    # the file when absent, so the old "-e" mode selection was unnecessary.
    if (open(my $missed_fh, '>>', $missedfname)) {
      map { print {$missed_fh} $_."\n"} @do_not_delete_on_remote;
      close($missed_fh);
      my $stumble = " Could not fetch ".@do_not_delete_on_remote." needed files. (see $missedfname)\n";
      &logit ($stumble);
    }
    else {
      my $problem = "Cannot write missing file list to $missedfname\n";
      print STDERR $problem;
      &logit($problem);
      print STDERR "The missing files are:\n", join("\n", @do_not_delete_on_remote), "\n";
    }
}

##--------------------- SECTION ---------------------------------------------------

# Final accounting.  The counters line is identical in both cases, so build
# it once; only the closing message depends on how chatty the run was.
my $final_count = $logit_cnt;   # logit() increments $logit_cnt; snapshot the
                                # entry value so the branch below matches the
                                # original pre-logging comparison.
my $notice = " I fetched ".$log1." files, made ".$log2." s_files and added to "
                    .$log3." s_files";
&logit ($notice);
if ($final_count < 3) {
    $notice = " :) WOW! I did it!, Exited properly with all task done! :)";
}
else {
    $notice = " Exited properly with ".($logit_cnt-3)." extra log entries. :)";
}
&logit ($notice);

##--------------------- SUBROUTINES ---------------------------------------------------
{
  # Closure state shared only by logit(): the resolved log-file path, and a
  # flag so we complain about an unwritable log at most once per run.
  my $logpath;
  my $griped;
  BEGIN { $logpath = undef; $griped = 0; }
  # Append one time-stamped line to the log file and echo it to STDOUT.
  # Only the FIRST argument is used as the message.  Increments $logit_cnt.
  sub logit {
    my $logmsg = shift;
    my $timearray = DateTime::Precise->new; # make a time object each time here
    $timestring = $timearray->asctime; # make the time into a nice string (NOTE(review): package global, no "my")
    if (! defined($logpath)) {
      # First call, or a previous open failed: resolve and open the log file.
      $logpath = &path_join($global{LOG_PATH}, $global{LOG});
      if (! open(LOGOUT, '>>'.$logpath)) {
	if (! $griped) {
	  print STDERR "Cannot open log file $logpath.  No log will be kept.\n";
	  print STDERR "Please check to make sure your \"log\" file directory is made\n";
	  ++$griped;
	}
	$logpath = undef;
      }
    }

    # If the open above failed this print fails too; $printok catches that.
    my $printok = print LOGOUT $timestring,$logmsg,"\n";
    print STDOUT $logmsg,"\n";
    if (! $printok && ! $griped) {
      print STDERR "Cannot append log file $logpath.  Is your disk getting full?\n";
      ++$griped;
    }
    # Smiley/frowny messages mark run milestones; give them a blank line.
    if ($logmsg =~ m/\:\)/ || $logmsg =~ m/\:\(/) {
      print LOGOUT "\n";
    }
    $logit_cnt++;
  }
  END { if (defined($logpath)) { close(LOGOUT); } }
}

# Turn "TAG,value" lines into a hash keyed by TAG.  The split is on the
# FIRST non-word character only, so the value part may itself contain
# commas or other punctuation.
sub make_hash {
    my %parsed = ();
    foreach my $line (@_) {
        my ($tag, $value) = split(/\W/, $line, 2);
        $parsed{$tag} = $value;
    }
    return %parsed;
}

# Remove duplicate entries from the argument list, preserving the order of
# first appearance.  Returns the de-duplicated list.
sub un_dupe {
    # BUGFIX: %saw is now lexical; the original used (and then undef'd) a
    # package global, which "no strict vars" silently allowed.
    my %saw;
    return grep { !$saw{$_}++ } @_;
}

# Eliminate any trailing path separator from what should be a directory name.
# The argument is passed by reference and modified in place.
sub make_dirname {
  my ($dirname_ref) = @_;
  # A single substitution: only one trailing separator (slash or backslash)
  # is removed, matching the historical behavior.
  $$dirname_ref =~ s{ [\\/] $ }{}x;
}

# Join the given path components with the platform separator ($pathsep).
# Any trailing separator on a component is dropped first, so joining never
# produces doubled separators.
# BUGFIX: operate on a COPY of the arguments.  The original aliased @_
# through map's $_ into make_dirname(), which edited the caller's variables
# in place and would die ("Modification of a read-only value") on any
# literal argument ending in a separator.
sub path_join {
  my @parts = @_;
  foreach my $part (@parts) {
    $part =~ s/[\\\/]$//;
  }
  return join($pathsep, @parts);
}

# Ensure argument is a directory.  If already is, or can be
# made so, return 1.  Otherwise complain and return undef;
sub ensure_isdir {
  my $dirpath = shift;
  if (! -d $dirpath) {
    if ($verbose) { print STDOUT "Making $dirpath\n"; }
    # BUGFIX: File::Path::mkpath() dies on failure, which made the
    # diagnostic below unreachable; trap that so we can return undef as
    # documented and let callers decide how to react.
    eval { File::Path::mkpath($dirpath) };
    if (! -d $dirpath) {
      my $problem = "Could not create directory $dirpath\n";
      print STDERR $problem;
      &logit($problem);
      return undef;
    }
  }
  return 1;
}

# Read file named by given file_name as a list of lines.
# If cannot, complain with message tailored per given file_role
# and, if given die_on_fail is true, exit with an error code.
# Comments ("#" to end of line), blank lines and ALL whitespace are
# stripped, so "TAG , some value" comes back as "TAG,somevalue".
# Return lines as a list, possibly empty if file not read.
# If file_role is undef, no complaint is emitted or logged.
sub read_file_as_lines {
  my ($file_name, $file_role, $die_on_fail) = @_;
  my @lines = ();
  if ($file_name) {
    # BUGFIX: three-argument open with a lexical handle; the old two-argument
    # bareword form would honor mode characters embedded in $file_name.
    if (!open (my $fin, '<', $file_name)) {
      if (defined($file_role)) {
	my $ouch =  "Cannot read $file_role: $file_name .\n";
	print STDERR $ouch;
	&logit ($ouch);
      }
      if ($die_on_fail) { exit 2; }
    }
    else {
      while ($_ = <$fin>) {
	chomp;  # no newline
	s/#.*//; # no comments
	s/^\s+//; # no leading white space
	s/\s+$//; # no trailing white space
	s/\s+//g; # remove ALL remaining (internal) whitespace
	if (length($_)) { push(@lines, $_); }
      }
      close($fin);
    }
  }
  return @lines;
}
