#!/bin/bash
# -e: abort on any failed command; -x: trace every command into the CI log.
# NOTE(review): -u is deliberately NOT used — this script depends on many
# environment variables inherited from nb_config.sh, and on $jobrunning
# being unset on the first pass of the job-monitoring loop below.
set -ex
#   This script is the main conversion step of the nightly build ci pipeline
#   It does the following:
# 
#   1. cleans and sets up a new output directory, in addition to setting 
#       up a directory to contain the logs/variable tables for this build.
#   2. configures a pycmor working directory, and sets up the environment.
#   3. updates the variable tables to match the current version of the 
#       CCCma-CMOR table spreadsheet, and stores them in the build directory.
#   4. checks if any new variables have been moved to the FINAL directory, 
#       and sets their status to 'frozen' in the stored 'var_hashes.json' 
#       file.
#   5. launches the conversion for the given CONVERT_LST, which is set at 
#       the trigger step, in 'trigger-pycmor-nightly-build'.
#   6. commits/pushes any changes to the frozen statuses in the stored 
#       'var_hashes.json' file.
#
#   Note: Environment variables are inherited when nb_config.sh is sourced
#   
#   Clint Seinen 2019-07-08

#==============================================================
# Abort early unless a conversion list was handed to this build
# (CONVERT_LST is exported by 'trigger-pycmor-nightly-build')
#==============================================================
[ -n "$CONVERT_LST" ] || {
    echo "NO CONVERSION LIST GIVEN!"
    echo "A CONVERSION LIST IS REQUIRED!"
    exit 1
}

#=====================
# configure repository
#=====================
# Identify this automated runner as the commit author for the push at the
# end of the script (local config only; does not touch the global gitconfig).
git config --local user.name "Nightly Build Runner"

# output commit information to be reviewed in the stdout
# (records exactly which commit and working-tree state this build ran from)
git log -n 1 --pretty=short
git status

#===========================
# Set up storage directories
#===========================
# Make sure storage directory exists (-p: succeed if it already exists,
# replacing the old '[ -d ] || mkdir' test-and-create)
mkdir -p "$NB_STRG_DIR"

# Clean the output directory and (re)create the build directory.
# ${VAR:?} aborts the script if the variable is unset or empty, so a
# broken inherited environment can never expand these into 'rm -rf /'.
# (rm -rf on a nonexistent path is a no-op, so the '[ -d ] &&' guards
# were unnecessary.)
rm -rf "${NB_STRG_DIR:?}/CMIP6"
rm -rf "${BLD_DIR:?}"
mkdir "$BLD_DIR"

#=====================================
# set up working directory/environment
#=====================================
pycmor/config-pycmor
cd netcdfconv
source path_export.sh
# stage the conversion driver and ESGF query tool into the working dir
# (paths quoted so space-containing values cannot split the cp arguments)
cp -f "${NCCONV_DIR}/${NB_SCRPT_DIR}/callcp.sh" .
cp -f "${NCCONV_DIR}/${NB_SCRPT_DIR}/query_esgf" .

# update variable tables and store them in the build directory
cd "$NCCONV_DIR/tables"
python get_ggle_ss.py
cp -rf variable_tables "${BLD_DIR}/"
cd -

# update published list to reflect what variables are on the ESGF server
HASH_FILE=${NCCONV_DIR}/${NB_SCRPT_DIR}/var_hashes.json
# NOTE(review): query_esgf is invoked by bare name even though it was just
# copied into the cwd — presumably path_export.sh puts the working
# directory on PATH; confirm, otherwise this should be ./query_esgf
updated_vars=$(query_esgf -q -u -f "$HASH_FILE")

#=======================
# submit conversion jobs
#=======================
# set some pertinent pycmor options
TABGRP_OPTS1="--check_deltas --no_exclusions -L"
TABGRP_OPTS2="$TABGRP_OPTS1 --update_hashes"
# CONVERT_LST is deliberately unquoted: it is a whitespace-separated list
# of runid:chunk:user triplets set by 'trigger-pycmor-nightly-build'
for conversion in $CONVERT_LST; do
    # parse the runid:chunk:user triplet
    # (read -r replaces the fragile 'vars=(`echo | tr`)' array trick,
    # which was subject to globbing and word-splitting)
    IFS=':' read -r runid chunk user <<< "$conversion"
    otp_f=${runid}_${chunk}_${user}_stdout_stderr
    jobname=${runid}_${chunk}_${user}_conversion

    # submit job
    echo "LAUNCHING CONVERSION ON ${user}'s $runid $chunk DATA!"
    ARGLST="WRK_DIR=$(pwd),runid=$runid,user=$user,chunk=$chunk,TABGRP_OPTS1=$TABGRP_OPTS1,TABGRP_OPTS2=$TABGRP_OPTS2"
    jobid=$(jobsub -c "$HDNODE" --cwd callcp.sh -- -v "$ARGLST" -N "$jobname" -o "$otp_f")

    # since we are writing hashes to a single file, conversions must run
    # serially: poll once a minute until the submitted job has finished
    jobrunning=1
    until [ "$jobrunning" == "0" ]; do
        sleep 60
        date
        jobrunning=0

        # if the job is still running, flip the switch back to 1
        # ('|| :' keeps a non-zero jobchk status from tripping set -e)
        jobchk -c "$HDNODE" "$jobid" && jobrunning=1 || :
    done
    echo "Conversion job ended"

    # after the first conversion, update TABGRP_OPTS1 to use the
    # 'update_hashes' option so the hash file is appended to for all
    # remaining conversions
    TABGRP_OPTS1=$TABGRP_OPTS2
done
#========================================================================
# Store working directory and commit/push any updates to published status
#========================================================================
# store working directory
STRD_WRK_DIR=${BLD_DIR}/work_dir
# basename of the storage dir (the pointless `echo` wrapper is removed —
# parameter expansion already yields the value directly)
LNKD_OUTPUT_DIR=${NB_STRG_DIR##*/}
mkdir "$STRD_WRK_DIR"
# ignore the input dir and the linked output directory.
# NOTE(review): parsing ls output is fragile with unusual filenames, but it
# is kept here to preserve the exact word-boundary ('\<input\>') and
# substring filter semantics of the original; $files is intentionally
# unquoted in the loop so it word-splits into one filename per iteration.
files=$(ls | grep -v "\<input\>" | grep -v "$LNKD_OUTPUT_DIR")
for f in $files; do
    cp -rL "$f" "${STRD_WRK_DIR}/"
done

# commit/push any changes in "frozen" status
if [ -n "$updated_vars" ]; then
    git add "$HASH_FILE"
    git commit -m "Set frozen = True for $updated_vars"
    git push "$NCCONV_REPO" HEAD:"$NB_BRANCH"
fi
