"""
##############################################################################################
Project : CRESCENDO/AEROCOM
Filename : organize_yaml.py
Author : Ramiro Checa-Garcia
email : rcheca@lsce.ipsl.fr
Purpose : Reorganize variables for LMDzINCAOR experiment runs.
Revision History ----------------------------------------------------------
Date Author Ref Revision
2018-Apr R.Checa First version
2018-Sep R.Checa Working as main with modules in lib
2018-Oct R.Checa Implemented yaml
2018-Nov R.Checa Final first version based on yaml
TODO LIST:
##############################################################################################
"""
# External modules -----------------------------------------------------------------
import yaml # Manage the settings, new variables and check files
import platform # Just to print info of the computer used on calcul.
import datetime
import xarray as xr
import numpy as np
import glob
import os
from optparse import OptionParser
# Internal modules ----------------------------------------------------------------
from lib.liborganize import (directory_structure,
process_files, myprint,
post_processing)
from lib.tabulate import tabulate
#### Parsing function ------------------------------------------------------------------------
#
def opt_parser(argv=None):
    """Parse the command-line options of the reorganization script.

    Parameters
    ----------
    argv : list of str, optional
        Argument list to parse. Defaults to ``None``, in which case
        optparse falls back to ``sys.argv[1:]`` (unchanged behavior).

    Returns
    -------
    tuple
        ``(options, args)`` where ``options.myfsettings`` and
        ``options.myfconfig`` are the YAML settings/configuration file
        names and ``args`` holds any remaining positional arguments.
    """
    parser = OptionParser(usage="usage: %prog [options] filename",
                          version="%prog 1.0")
    parser.add_option("-s", "--settings",
                      action="store",
                      dest="myfsettings",
                      default='settings.yaml',   # fixed typo: was 'settings.ymal'
                      help="Give the filename with the settings in YAML format")
    parser.add_option("-c", "--config",
                      action="store",
                      dest="myfconfig",
                      default='config.yaml',     # fixed typo: was 'config.ymal'
                      help="Give the filename with the configuration in YAML format")
    (options, args) = parser.parse_args(argv)
    return options, args
if __name__ == '__main__':
    #### GENERAL SETTINGS ------------------------------------------------------------------------
    #
    options, args = opt_parser()

    # Read the settings file (what to process).  safe_load is used because
    # yaml.load without an explicit Loader is deprecated and can construct
    # arbitrary Python objects from the stream; the files are closed via `with`.
    fname_set = options.myfsettings
    with open(fname_set, 'r') as fsettings:
        mysettings = yaml.safe_load(fsettings)

    # Read the configuration file (paths, log directory).
    # BUG FIX: the original re-opened fname_set here, so the --config file was
    # silently ignored and the settings file was parsed twice.
    fname_con = options.myfconfig
    with open(fname_con, 'r') as fconfig:
        myconfig = yaml.safe_load(fconfig)

    work_dir = myconfig['workdir']
    list_studies = mysettings['studies']  # As much as possible here the program should be
                                          # functional (same result for same settings)

    # The program does a loop on the different studies requested on the settings
    for study in list_studies:
        datenow = datetime.datetime.now().strftime("%Y%m%d-%H%M%S_")  # datetime of calculation
        # Per-study log file; closed in the finally clause even if a step fails.
        finfo = open(myconfig['info_logs']+datenow+study+'.loginfo', 'w')
        try:
            expcase = mysettings[study]['expIPSL']
            expID = mysettings[study]['expID']
            lyears = [str(y) for y in mysettings[study]['years']]
            pfiles = mysettings[study]['process']
            post_t = mysettings[study]['accum']

            # Show machine info
            machine_info = platform.uname()._asdict()
            myprint('--- Settings and info from: '+fname_set+'\n', finfo=finfo)
            for key, value in machine_info.items():
                myprint(' '+key+':'+value, finfo=finfo)

            # Show general study info from settings
            myprint('\n Processing ............... to subdir: '+study+
                    ' ... with experiment name: '+expID, finfo=finfo)
            myprint(' from '+mysettings['modelname']+' experiment ..... '+expcase, finfo=finfo)
            myprint(' for years ................ '+str(lyears), finfo=finfo)
            myprint(' with processing files .... '+str(pfiles)+'\n\n', finfo=finfo)

            # Checking file directories structure ---------------------------------------
            if mysettings['safety']['clean'] == True:
                # This clean directory outputs
                directory_structure(study, mysettings, clean=True, finfo=finfo)
            # This create directory outputs if necessary and returns directory structure
            dmon, dday, dhrs = directory_structure(study, mysettings, create=True, finfo=finfo)

            # Pre-processing step -------------------------------------------------------
            # NOTE(review): the *_vars lists are rebound on every year, so only the
            # last year's variable lists reach post-processing below — presumably
            # process_files accumulates per-variable state internally; confirm.
            for year in lyears:
                myprint('\n----- '+year+' ------ day datasets '+study, finfo=finfo)
                daily_vars = process_files(mysettings, year, expcase, study, pfiles['day'], finfo=finfo)
                myprint('\n----- '+year+' ------ mon datasets '+study, finfo=finfo)
                month_vars = process_files(mysettings, year, expcase, study, pfiles['mon'], finfo=finfo)
                myprint('\n----- '+year+' ------ hrs datasets '+study, finfo=finfo)
                hours_vars = process_files(mysettings, year, expcase, study, pfiles['hrs'], finfo=finfo)

            # Post-processing + checking values steps -----------------------------------
            if mysettings[study]['postprocess'] == True:
                if len(month_vars) >= 1:
                    test_vars_mon = post_processing(mysettings, dmon, month_vars, lyears, post_t,
                                                    study, expID, freq='mon', finfo=finfo)
                    with open('info_checks/'+datenow+study+'_mon_checks.txt', 'w') as ftable:
                        ftable.write(tabulate(test_vars_mon, tablefmt='simple'))
                if len(daily_vars) >= 1:
                    test_vars_day = post_processing(mysettings, dday, daily_vars, lyears, post_t,
                                                    study, expID, freq='day', finfo=finfo)
                    with open('info_checks/'+datenow+study+'_day_checks.txt', 'w') as ftable:
                        ftable.write(tabulate(test_vars_day, tablefmt='simple'))
                if len(hours_vars) >= 1:
                    test_vars_hrs = post_processing(mysettings, dhrs, hours_vars, lyears, post_t,
                                                    study, expID, freq='hr', finfo=finfo)
                    with open('info_checks/'+datenow+study+'_hrs_checks.txt', 'w') as ftable:
                        ftable.write(tabulate(test_vars_hrs, tablefmt='simple'))
        finally:
            finfo.close()