#
#MODULE check_snally_jobs
#
#***********************************************************************
"""

**PURPOSE** --
Check the detailed status of the snallygaster batch jobs.

**DEVELOPER** --
Alan Patterson
derived from check_batch.py

**MODIFICATION HISTORY** --
Initial implementation app 04/24/03
Mods from python review app 05/06/03
Mod to catch missing job app 05/15/03
Write temporary file to /tmp
directory. dc 9/16/04
Mods for Mac OS X mdr 3/15/05
"""
#***********************************************************************
__version__ = "3/15/05"
import string, spss_sys_util, spst_getopt, batch_util, transfer_util
import time_util, file_util
import os, pickle, sys

SNALLY_LIST = ['chk_snally', 'fr_snallya', 'fr_snallyb', 'to_snallya',
               'to_snallyb']
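# These five names are the snallygaster-related batch jobs; the Unix job
# check below looks for a matching <jobname>_pickled.dat status file for
# each of them in the BATCH_PICKLE_DATA directories.
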
def run(*args):
"""Check the status of the snallygaster batch jobs and filespace

Usage:
do check_snally_jobs <-all>|<-unix>|<-snally> <-output=filename>

where <-unix> causes checking of the snallygaster related Unix
batch jobs
<-snally> causes checking of the Sogsfiles python job on
snallygaster
<-all> causes checking of the Unix batch jobs and the snallygaster
Sogsfiles job
<-output=filename> directs output to a file rather than the
screen
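
    For example, to run every check and send the report to a file (the
    output filename here is only illustrative):

        do check_snally_jobs -all -output=snally_status.txt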

    The output may display:
    1. The status of the five Unix snallygaster-related batch jobs,
       indicating whether their status and submission time are
       reasonable
    2. The elapsed time since the last execution of the Sogsfiles
       python job on Snallygaster (this should be less than 2 minutes)
    3. The number of files in several snallygaster transfer
       directories.  The directories are all those which are input
       directories for the five Snallygaster batch jobs

    """
    if not args:
        # Spew out the usage and quit when no parameters are provided.
        print run.__doc__
        return spss_sys_util.SUCCESS

    # Convert argument values to lower case
    low_args = []
    for a in args:
        low_args.append(string.lower(a))
    # Parse the arguments...
    optlist = ['all', 'unix', 'snally', 'output=']

    options, parms = spst_getopt.spst_getopt(low_args, optlist)

    if (options.has_key('-all') or options.has_key('-unix')) and \
       not (spss_sys_util.on_sun() or spss_sys_util.on_mac()):
        print 'This run of check_snally_jobs must be done'
        print 'from a Unix machine'
        return not spss_sys_util.SUCCESS

    # setup output stream
    if options.has_key('-output'):
        fout = open(options['-output'], 'w')
    else:
        fout = sys.stdout

    # remember current directory
    cwd = os.getcwd()

    if (options.has_key('-all') or options.has_key('-unix')):
        # Get a list of all the pickle files in BATCH_PICKLE_DATA
        ppaths = spss_sys_util.get_environ_variable('BATCH_PICKLE_DATA')
        for ppath in ppaths:
            os.chdir(ppath)
            files = spss_sys_util.glob('*_pickled.dat')
            for file in files:
                # Find the batch job name - by convention it is the early
                # part of the filename
                off = string.find(file, '_pickled.dat')
                leader = file[:off]
                # If the batch job is not in the snally-related list, bypass it
                if leader not in SNALLY_LIST:
                    continue
                # Find the job status
                job = pickle.Unpickler(open(file, 'r')).load()
                if job.check():
                    x = job.status()
                else:
                    x = {'state': 'Not Running'}
                if x.has_key('submit_time'):
                    delta = time_util.spss_time() - x['submit_time']
                else:
                    delta = 0
                # Check for allowed states and submission times: a queued
                # job ('qw') is always acceptable; a running or transferring
                # job ('r' or 't') is acceptable only if it was submitted
                # less than 180 seconds ago
                if x['state'] == 'qw' or \
                   ((x['state'] == 'r' or x['state'] == 't') \
                    and int(delta) < 180):
                    fout.write('\nJob %s is running OK\n' % leader)
                else:
                    fout.write('\nJob %s has state %s\n' % (leader,
                                                            x['state']))
                    fout.write(' and submit time %s in the past\n' %
                               delta)

    if options.has_key('-all') or options.has_key('-snally'):
        # setup ftp connection
        host = spss_sys_util.get_environ_variable("SPST_PC_FTP_NODE")[0]
        user = spss_sys_util.get_environ_variable("SPST_PC_FTP_USER")[0]
        word = string.strip(open(spss_sys_util.resolver('PE_DAT', user + '.dat'), 'r').readline())
        ftp = transfer_util.spstftp(host, user, word)
        # find files waiting for transfer on Snallygaster
        tosogs_ascii = ftp.nlst("tosogs/ascii")
        tosogs_binary = ftp.nlst("tosogs/binary")
        # find the file containing the timestamp of the last Sogsfiles loop
        ftp.cwd("Documents and Settings/planinstnt/My Documents")
        tempfile = os.path.join("/tmp", file_util.tempfile())
        ftp.get("Sogsfiles_lasttime.txt", tempfile)
        ftp.close()
        f = open(tempfile, 'r')
        snallyut = time_util.spss_time(string.strip(f.readline()))
        f.close()
        os.remove(tempfile)
        now = time_util.spss_time()
        delta_snally = now - snallyut
        fout.write('The last Sogsfiles loop on Snallygaster occurred\n')
        fout.write('%s ago\n' % delta_snally)

        todira = spss_sys_util.get_environ_variable("TOSNALLY_ASCII")[0]
        todirb = spss_sys_util.get_environ_variable("TOSNALLY_BINARY")[0]
        tosnallya = spss_sys_util.glob(os.path.join(todira, '*.*'))
        tosnallyb = spss_sys_util.glob(os.path.join(todirb, '*.*'))
        fromdir = spss_sys_util.get_environ_variable("FROMSNALLY")[0]
        fromsnally = spss_sys_util.glob(os.path.join(fromdir, '*.*'))
        fout.write('Number of files in TOSNALLY_ASCII %i\n' % len(tosnallya))
        fout.write('Number of files in TOSNALLY_BINARY %i\n' % len(tosnallyb))
        fout.write('Number of files in tosogs/ascii %i\n' % len(tosogs_ascii))
        fout.write('Number of files in tosogs/binary %i\n' % len(tosogs_binary))
        fout.write('Number of files in FROMSNALLY %i\n' % len(fromsnally))

    if fout != sys.stdout:
        fout.close()
    os.chdir(cwd)
    return spss_sys_util.SUCCESS
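
# Example (illustrative): the same checks can also be invoked directly from
# python rather than through the "do" command, e.g.
#     import check_snally_jobs
#     check_snally_jobs.run('-all')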


if __name__ == "__main__":
if len(sys.argv) > 1:
apply(run, tuple(sys.argv[1:]))
else:
run()