#! /usr/bin/python
#+
# NAME:
#	mag_noaa
# PURPOSE:
#	Downloads daily-updated photospheric magnetic maps from NOAA and
#	runs them through Xuepu's HCSS code to make a viable source surface map.
# CALLING SEQUENCE:
#	mag_noaa.py -ftp -sync -gzip -obs=obs
#	as cron job:
#	bash --login -c "mag_noaa.py"
# OPTIONAL INPUTS:
#	source		source of photospheric magnetic field maps.
#			This is either a directory containing wso*.dat maps,
#			or a text file with a list of fully-qualified names
#			of wso*.dat files.
#	destination	destination directory for source surface maps
#	-ftp		synchronizes files on NOAA ftp server with local files.
#			If -ftp is set then source=$DAT/map/wso_noaa and
#			destination=$DAT/map/wso_noaa/hcss are used, unless
#			source and/or destination are specified.
#	-sync		only processes photospheric maps that do not have
#			a matching source surface file in destination
#	-gzip		gzip the output files in destination
#	-obs=obs	observatory; must be one of wso, nso, mwo
#			default is wso (Wilcox Solar Observatory)
# CALLS:
#	mirror, xhcs
# PROCEDURE:
#	Currently magnetic source surface maps at 15 solar radii are made.
# MODIFICATION HISTORY:
#	APR-2003, Paul Hick (UCSD/CASS)
#	OCT-2003, Paul Hick (UCSD/CASS)
#	    Added mag_noaa_badfiles to exclude files stored in the text
#	    file 'badfiles_'+obs+'.txt' located in the destination directory.
#	NOV-2003, Paul Hick (UCSD/CASS)
#	    Added a check to detect when a photospheric map downloaded from
#	    NOAA overwrites an existing file. In this case the source surface
#	    file in the destination directory (produced from the overwritten
#	    file) is deleted to make sure that the new file is processed by
#	    Xuepu's program.
#	OCT-2005, Paul Hick (UCSD/CASS)
#	    Modified default for destination directory to subdirectory
#	    hcss in source directory.
#	JUN-2006, Paul Hick (UCSD/CASS)
#	    nso_ktpk are now downloaded from remote subdirectory 2006
#	FEB-2007, Paul Hick (UCSD/CASS; pphick@ucsd.edu)
#	    nso_ktpk are now downloaded from remote subdirectory 2007
#-

import sys, os


def mag_noaa_nameonly(lst):
    """Return the bare map names for *lst*.

    For each entry the directory, a trailing '.gz' and the data
    extension ('.dat' or '.fits') are stripped, e.g.
    'a/b/wso_2003.dat.gz' -> 'wso_2003'."""
    newlst = []
    for fname in lst:
        root, ext = os.path.splitext(fname)
        if ext != '.gz':                    # Only a .gz extension is peeled off first
            root = fname
        root = os.path.splitext(root)[0]    # Drop extension ('.dat' or '.fits')
        newlst.append(os.path.split(root)[1])
    return newlst


def mag_noaa_diff(obs_lst, lst, obs_ref, ref):
    """Return the entries of *lst* that have no counterpart in *ref*.

    Comparisons are made on file name only, after the .dat[.gz] or
    .fits[.gz] extension is removed. The observatory prefix *obs_lst*
    in each name is replaced by *obs_ref* before matching against the
    names in *ref*."""
    lst_nogz = mag_noaa_nameonly(lst)
    ref_nogz = set(mag_noaa_nameonly(ref))  # set: O(1) membership tests
    newlst = []
    for name, full in zip(lst_nogz, lst):
        # Swap the source prefix for the reference prefix before comparing
        mapped = obs_ref + name[name.find(obs_lst) + len(obs_lst):]
        if mapped not in ref_nogz:
            newlst.append(full)
    return newlst


def mag_noaa_obs(obs, lst):
    """Return the entries of *lst* whose base name starts with *obs*."""
    return [fname for fname in lst if os.path.split(fname)[1].startswith(obs)]


def mag_noaa_badfiles(destin, obs, ref):
    """Drop from *ref* every file listed in 'badfiles_<obs>.txt' in *destin*.

    If the badfiles list does not exist, *ref* is returned unchanged.
    Matching uses the bare map name (see mag_noaa_nameonly)."""
    badlist = os.path.join(destin, 'badfiles_' + obs + '.txt')
    if not os.path.exists(badlist):
        return ref
    with open(badlist, 'r') as fh:          # BUG FIX: handle was never closed
        bad_nogz = set(mag_noaa_nameonly(fh.read().split()))
    return [full for full, name in zip(ref, mag_noaa_nameonly(ref))
            if name not in bad_nogz]


if __name__ == '__main__':

    from tiny import argn, start, is_there, hide_env
    from mirror import mirror
    import tempfile

    tempfile.tempdir = os.environ['TUB']

    # 'say' is the script name, used as a prefix on all messages
    say = argn(sys.argv, 0)
    say = os.path.splitext(os.path.split(say)[1])[0] + ', '

    obs = start('-obs=', sys.argv)
    if obs == '':
        obs = 'wso_noaa'

    local_prefix = obs
    remote_prefix = obs

    if obs == 'wso_noaa':               # New Fits files from NOAA
        remote_prefix = 'wso'
    elif obs == 'wso_arge':             # Old .dat files from NOAA
        remote_prefix = 'wso'
        local_prefix = 'wso_noaa'       # Merge output with wso_noaa
    elif obs == 'nso_ktpk':
        remote_prefix = 'svsm_m11lr_B3_cr'
    elif obs == 'nso_noaa':
        remote_prefix = 'nso'
    elif obs == 'mwo_noaa':
        remote_prefix = 'mwo'

    ftp = is_there('-ftp', sys.argv)
    gzip = is_there('-gzip', sys.argv)
    sync = is_there('-sync', sys.argv) or ftp

    source = argn(sys.argv, 1)
    destin = argn(sys.argv, 2)

    if sync:
        if source == '':
            source = os.path.join(os.environ['DAT'], 'map', obs)
        if destin == '':
            destin = os.path.join(source, 'hcss')

        if ftp:
            # Mirror the data directory remote_dir locally in source.
            # This keeps remote and local directory identical.
            args = {
                'job'            : 'mag_noaa',
                'local_dir'      : source,
                'remote_site'    : '',
                'remote_dir'     : '',
                'remote_password': 'pphick@ucsd.edu',
                'recursive'      : 'false',
                'get_patt'       : '',
                'local_ignore'   : 'hcss',
            }

            if obs == 'wso_noaa':
                args['remote_site'] = 'helios.sec.noaa.gov'
                args['remote_dir'] = os.path.join('/pub', 'lmayer', 'WSA', 'synoptic', 'WSO', 'daily')
            elif obs == 'nso_ktpk':
                args['remote_site'] = 'solarch.tuc.noao.edu'
                args['remote_dir'] = os.path.join('/synoptic', 'level3', 'vsm', 'merged', 'carr-daily', '2007')
                args['get_patt'] = '^' + remote_prefix + '.*fts.gz$'

            new_list = mirror(args)
            dst_list = os.listdir(destin)

            # Occasionally we download a file that overwrites a previous
            # photospheric map. We test for this by looking for a source
            # surface file in destin with the same name. If this happens,
            # delete the source surface file to make sure that the new file
            # gets processed by Xuepu's program.
            new_list = mag_noaa_nameonly(new_list)      # Remove extensions
            tmp_list = mag_noaa_nameonly(dst_list)
            for name in new_list:                       # 'name' was 'file': shadowed a builtin
                tmp = local_prefix + name[name.find(remote_prefix) + len(remote_prefix):]
                if tmp in tmp_list:
                    print('existing file downloaded: ' + name)
                    tmp = os.path.join(destin, dst_list[tmp_list.index(tmp)])
                    print('removing source surface file: ' + hide_env(tmp))
                    os.remove(tmp)

    elif source == '':
        print(say + 'no source file or directory specified')
        sys.exit()
    elif destin == '':
        print(say + 'no destination directory specified')
        sys.exit()

    if os.path.isfile(source):
        # Read list of daily files from file in first cmd line argument
        with open(source, 'r') as fh:
            new_list = fh.read().split()
    elif os.path.isdir(source):
        # Fully-qualified file names
        new_list = [os.path.join(source, fname) for fname in os.listdir(source)]
    else:
        print(say + 'invalid source specified, ' + source)
        sys.exit()      # BUG FIX: original fell through and kept running with a bad source

    if obs == 'mwo':
        print(say + 'not yet implemented: ' + obs)
        sys.exit()

    # Pick up all photospheric maps that don't have a matching map
    # in the source surface directory. Only keep obs+'*' files.
    if sync:
        new_list = mag_noaa_diff(remote_prefix, mag_noaa_obs(remote_prefix, new_list),
                                 local_prefix, mag_noaa_obs(local_prefix, os.listdir(destin)))

    # Exclude maps listed in the 'badfiles_<obs>.txt' file in destin
    new_list = mag_noaa_badfiles(destin, obs, new_list)

    if len(new_list) == 0:
        print(say + 'no new photospheric maps')
        sys.exit()

    new_list.sort()

    # NOTE(review): tempfile.mktemp is race-prone (deprecated); kept because
    # the IDL program below deletes these files by plain name.
    main_pro = tempfile.mktemp('.pro')
    tmp_list = tempfile.mktemp('.txt')

    # Write main IDL program that runs xhcs.pro. Arguments are the destination
    # directory for the source surface files and the file containing the
    # fully-qualified names of photospheric maps.
    lines = [
        'xhcs, "' + obs + '", destination="' + destin + '", list="' + tmp_list +
        '", remote_prefix="' + remote_prefix + '", local_prefix="' + local_prefix +
        '", gzip=%d' % gzip,
        'tmp = do_file(/delete, "' + tmp_list + '", /silent)',
        'tmp = do_file(/delete, "' + main_pro + '", /silent)',
        'exit',
    ]

    # Write main program (BUG FIX: handles below were never closed explicitly)
    with open(main_pro, 'w') as fh:
        fh.write('\n'.join(lines) + '\n')

    # Write the input file with a list of photospheric maps
    new_list.reverse()
    with open(tmp_list, 'w') as fh:
        fh.write('\n'.join(new_list) + '\n')

    # Process the newly arrived files
    # Note that FTS reader is needed (readfits.pro)
    os.spawnlp(os.P_WAIT, 'idl', 'idl', '-quiet', main_pro)

    sys.exit()