#!/bin/bash

# Make symlinks for gfs files and purge old ones
#
# Usage: proc_gfs_file.sh GFS_DIR [REMOVE_DAYS]
#   GFS_DIR      directory containing the gfs*/ forecast folders and the
#                forecast_data/ symlink folder
#   REMOVE_DAYS  optional; forecast folders older than this many days are removed

set -eu

cd "$1" # switch to gfs forecast directory

# nullglob: if no gfs*/ directories exist, dirlist becomes an empty array
# instead of holding the literal pattern 'gfs*/', which would otherwise feed
# garbage into the date parsing and rm below and abort under set -e.
# NOTE(review): expanding an empty array with "${dirlist[@]}" is safe under
# set -u on bash >= 4.4 — confirm target bash version.
shopt -s nullglob
dirlist=(gfs*/)

if [ $# -gt 1 ]; then
	remove_days=$2 # Delete forecast folders from longer than this number of days ago

	# Cutoff as epoch seconds; anything with an older cycle time is removed.
	cut_date=$(date +%s --date="${remove_days} days ago")

	for this_dir in "${dirlist[@]}"; do
		# Extract the cycle timestamp from the directory name.
		# NOTE(review): assumes the 2nd '_'-separated field is exactly
		# YYYYMMDDHH — confirm against the actual gfs_* naming scheme.
		this_YmdH=$(echo "${this_dir}" | cut -d'_' -f 2)
		this_Ymd=${this_YmdH::${#this_YmdH}-2} # YYYYMMDD
		this_H=${this_YmdH:8}                  # HH (cycle hour)

		dir_date=$(date +%s -d "${this_Ymd} ${this_H}00")

		if [ "$dir_date" -lt "$cut_date" ]; then
			# Quote and use '--' so unusual names cannot be word-split
			# or misread as rm options.
			rm -r -- "${this_dir}"
		fi
	done
fi

# Symlink forcing to timestamped files in forecast_data folder
# It relies on dirlist automatically sorting by name then reversing for using the latest possible forecasts for each file
cd forecast_data

# Clear previous links; -f makes this a silent no-op when none exist yet
# (the old 'rm ... || true' still printed an error to stderr).
rm -f gfs_forecast_*

# Walk forecast runs newest-first so the most recent run claims each valid time.
for ((i=${#dirlist[@]}-1; i>=0; i--)); do
	this_dir=${dirlist[i]}

	# Extract the cycle timestamp from the directory name.
	# NOTE(review): assumes the 2nd '_'-separated field is exactly
	# YYYYMMDDHH — confirm against the actual gfs_* naming scheme.
	this_YmdH=$(echo "${this_dir}" | cut -d'_' -f 2)
	this_Ymd=${this_YmdH::${#this_YmdH}-2} # YYYYMMDD
	this_H=${this_YmdH:8}                  # HH (cycle hour)

	for file in ../"${this_dir}"*; do
		[ -e "$file" ] || continue

		# Forecast lead hour: 7th '.'-separated field of the path,
		# with its leading letter stripped (e.g. 'f024' -> '024').
		# NOTE(review): field position depends on the GFS file naming
		# convention — confirm against the downloaded filenames.
		f_hr_raw=$(echo "${file}" | cut -d'.' -f 7)
		f_hr=${f_hr_raw:1}

		# Valid time = cycle time + lead hours.
		filename=gfs_forecast_$(date +%Y%m%d_%H%M -d "${this_Ymd} ${this_H}00 +${f_hr} hour")

		# Link only if a newer run has not already claimed this time.
		# $file is already relative to forecast_data (../gfs_*/...), so
		# it is the correct symlink target as-is; the old
		# '../${this_dir}/${file}' double-prefixed the path.
		if [ ! -f "${filename}" ]; then
			ln -s "${file}" "${filename}"
		fi
	done
done