#! /bin/bash
#+
# NAME:
#	sync_ips_mirror
# PURPOSE:
# CALLING SEQUENCE:
#	sync_daily_mirror [$1 $2]
# OPTIONAL INPUTS:
#	$1	list of email addresses
#	$2	if equal to '-mirror' then the data from Nagoya are
#		mirrored even if there is no email alert.
# OUTPUTS:
#	$?	return status
#		0: run tomography
#		1: no need to run tomography, because
#		   no new email arrived, or
#		   no new daily files downloaded, or
#		   email confirmation failed, or
#		   merging of daily file with yearly files failed
#	File $TUB/daily_ips.txt stores output from the sorting program 'dailyips'.
# CALLS:
#	sync_ips_email, mirror, daily_ips, run_glevel
# RESTRICTIONS:
#	Needs the Perl script 'mirror'. Currently the version included
#	with SolarSoft is used.
# SIDE EFFECTS:
#	The content of all new daily IPS files are emailed to everyone
#	on the $1 list of email addresses.
# PROCEDURE:
#	First the script sync_ips_email is called to check whether a new email
#	has arrived from Nagoya indicating the availability of new IPS data.
#	If no new email has arrived, return status=1.
#
#	If a new email has arrived, then the new IPS data are downloaded from
#	stesun5.stelab.nagoya-u.ac.jp, and are stored in $NAGOYA/ipsrt
#	If no new data files are downloaded, return status=1.
#
#	Then the program $EXE/dailyips is called to add the new data to the
#	yearly data files (stored in $NAGOYA/daily).
#	If this integration fails, return status=1
#
#	Then the idl program run_glevel.pro is run to calculate
#	g-level values. These are added to the yearly files.
# MODIFICATION HISTORY: # OCT-2002, Paul Hick (UCSD/CASS) # Split off from sync_daily_ips # OCT-2003, Paul Hick (UCSD/CASS; pphick@ucsd.ed) # Replaced bash script sync_ips_email by Python script # with same name # APR-2011, John Clover (UCSD/CASS) # Updated STELab FTP address #- # === Initialization section ================= # IPS daily files have names VLIST_UCSD_timemarker # Directory where yearly IPS files are located (NOT the same as $local_dir) # Scratch file for storing file names for downloaded daily IPS files # File connected to standard ouput for daily_ips daily_ips=VLIST_UCSD_ yearly_ips=$NAGOYA/daily vlist_got=$TUB/$daily_ips.got vlist_tmp=$TUB/$daily_ips.tmp #output=$TUB/cron_dailyips.txt rm_vlist_got="rm -f $vlist_got" # Run the mirror program. # Check for lines starting with "Got VLIST_UCSD_". # Extract the file name and put in file $vlist_got after prefixing # the name of the local destination directory. local_dir=$NAGOYA/ipsrt/ remote_site=ftp.stelab.nagoya-u.ac.jp remote_dir=/pub/vlist/rt mirror.py job=nagoya_ipsrt local_dir=$local_dir remote_site=$remote_site remote_dir=$remote_dir recursive="false" > $vlist_tmp cat $vlist_tmp | grep "Got $daily_ips" | gawk --assign=here=$local_dir '{print here$2}' > $vlist_got rm -f $vlist_tmp # If $vlist_got is empty, then no new files were downloaded. # Otherwise it contains the names of files that were succesfully downloaded. if [ ! -s $vlist_got ]; then $rm_vlist_got echo $(date), no new files downloaded. exit 1 fi # We now know that new IPS data have been downloaded. echo $(date), new data downloaded. cat $vlist_got # At this point we know that new VLIST_UCSD files have been downloaded. year=`tail -n 1 /home/soft/tub/gvalue_rt1.dat|cut -c10-11` if [ $year -gt 0 ] then year=$(($((year)) + 2000)) else echo "Got bad year: $year, bailing" exit 1 fi # since gvalue.dat is overwritten every download, each velocity should be reintegrated every time... 
# so kludgey, sorry for file in `ls $local_dir$daily_ips$year*`; do extract_velocity.py $file $yearly_ips/nagoya.$year; done $rm_vlist_got exit 0