Volcanic Aerosols from CMIP7 recommendations
Volcanic aerosol boundary conditions for 20CRv3 are specified as a set of self-describing text files.
1800-1809 MONTHLY VOLCANIC FORCING IN 45-DEGREE-LAT BANDS FROM N TO S
43 541 797 890 880 823 741 654 570 493 423 364
31 218 292 306 291 263 234 205 178 155 135 117
29 41 58 72 83 88 89 88 85 80 75 69
39 40 43 50 60 72 81 89 95 99 96 93
313 270 235 208 178 155 136 119 107 98 89 82
102 89 78 69 62 56 51 48 45 42 40 37
63 58 53 50 48 46 44 43 41 39 36 34
89 84 79 74 73 72 70 70 69 69 63 59
75 71 68 68 64 62 59 57 57 56 55 55
35 33 31 38 38 36 36 35 34 33 33 31
31 29 28 36 38 38 37 37 35 34 33 31
54 50 46 45 47 50 52 54 56 58 54 51
53 53 53 55 51 49 46 45 45 45 45 45
31 29 28 27 26 26 26 27 27 27 27 26
29 27 26 26 27 27 28 28 28 28 27 25
48 45 41 39 40 42 43 45 47 49 46 43
44 45 46 49 45 44 42 41 41 42 42 120
26 25 25 24 24 23 24 25 25 26 26 55
24 23 23 23 24 25 26 27 27 26 26 25
41 38 36 34 36 38 40 42 44 47 44 41
222 266 277 271 248 224 199 176 157 140 125 113
90 100 99 93 85 76 70 64 58 53 49 45
28 32 35 37 40 41 41 41 40 39 37 34
39 39 38 39 43 48 51 55 57 60 57 54
101 92 86 82 73 67 61 57 55 54 52 51
42 38 36 33 32 30 30 30 29 29 29 28
32 30 29 28 29 29 30 30 29 29 28 26
51 48 45 43 44 45 46 48 49 51 48 45
49 49 49 51 47 46 43 42 42 43 43 44
28 26 25 25 25 24 25 26 26 26 26 26
25 24 23 23 25 25 26 27 27 27 26 25
42 40 37 35 37 39 41 43 45 47 44 42
43 44 45 48 44 43 41 40 41 42 42 43
26 25 24 24 24 23 24 25 25 25 26 25
23 22 22 22 24 25 26 26 26 26 26 24
39 37 35 33 35 37 39 42 44 46 43 41
72 241 513 846 1201 1559 1898 2156 2381 2571 2724 2844
372 1045 1574 1986 2300 2531 2692 2731 2737 2717 2678 2622
300 849 1301 1671 1968 2200 2374 2440 2474 2484 2471 2441
62 209 445 732 1043 1354 1647 1872 2065 2228 2354 2453
----- ----- ----- ----- -----
NOTE: data in (12 months) by (4 lat-zones) by (10 years)
to get actual values of optical depth use a factor of 1.e-4
To make them we need a source of volcanic aerosol data. We use the CMIP7 recommendations from input4MIPs.
Script to download the extinctions file
#!/bin/bash
##############################################################################
# ESGF wget download script
#
# Template version: 0.4
# Generated by nimbus.llnl.gov - 2025/09/29 09:29:09
# Search URL: http://nimbus.llnl.gov/wget
# Request method: POST
#
###############################################################################
# first be sure it's bash... anything out of bash or sh will break
# and the test will assure we are not using sh instead of bash
if [ $BASH ] && [ `basename $BASH` != bash ]; then
echo "######## This is a bash script! ##############"
echo "Change the execution bit 'chmod u+x $0' or start with 'bash $0' instead of sh."
echo "Trying to recover automatically..."
sleep 1
# re-exec this script under bash and propagate its exit status
/bin/bash $0 $@
exit $?
fi
# template version, reported at startup
version=0.4
# per-script status file recording "filename mtime checksum" of completed downloads
CACHE_FILE=.$(basename $0).status
search_url='http://nimbus.llnl.gov/wget'
request_method='POST'
# dataset id plus originating data node; POSTed back to the search service
# when -u checks whether the server has regenerated this script
url_params=(
'input4MIPs.CMIP7.CMIP.uoexeter.UOEXETER-CMIP-2-2-1.atmos.mon.ext.gnz.v20250521|esgf-node.ornl.gov'
)
#These are the embedded files to be downloaded
# one line per file: 'name' 'url' 'checksum-type' 'checksum' -- parsed by download()
download_files="$(cat <<EOF--dataset.file.url.chksum_type.chksum
'ext_input4MIPs_aerosolProperties_CMIP_UOEXETER-CMIP-2-2-1_gnz_175001-202312.nc' 'https://esgf-node.ornl.gov/thredds/fileServer/user_pub_work/input4MIPs/CMIP7/CMIP/uoexeter/UOEXETER-CMIP-2-2-1/atmos/mon/ext/gnz/v20250521/ext_input4MIPs_aerosolProperties_CMIP_UOEXETER-CMIP-2-2-1_gnz_175001-202312.nc' 'SHA256' '2a3980ff420487f94816f53f3fa2b6518bd28104d30ad66d55dd5f24baabc422'
EOF--dataset.file.url.chksum_type.chksum
)"
check_os() {
  # Detect the host OS from uname and set the LINUX / MACOSX globals,
  # used later to pick the right stat(1) flags. Returns 1 on unknown OS.
  local detected
  detected=$(uname | awk '{print $1}')
  if [[ "$detected" == Linux ]]; then
    ((debug)) && echo "Linux operating system detected"
    LINUX=1
    MACOSX=0
  elif [[ "$detected" == Darwin ]]; then
    ((debug)) && echo "Mac OS X operating system detected"
    LINUX=0
    MACOSX=1
  else
    echo "Unrecognized OS [${detected}]"
    return 1
  fi
  return 0
}
#taken from http://stackoverflow.com/a/4025065/1182464
vercomp () {
  # Compare two dotted version strings.
  # Returns: 0 if $1 == $2, 1 if $1 > $2, 2 if $1 < $2.
  if [[ $1 == $2 ]]; then
    return 0
  fi
  local IFS=.
  # split on dots (via IFS) into arrays; deliberately unquoted
  local i v1=($1) v2=($2)
  # pad v1 with zeros until it is at least as long as v2
  for ((i=${#v1[@]}; i<${#v2[@]}; i++)); do
    v1[i]=0
  done
  for ((i=0; i<${#v1[@]}; i++)); do
    # a missing field in v2 counts as zero
    [[ -z ${v2[i]} ]] && v2[i]=0
    # 10# forces base 10 so leading zeros are not parsed as octal
    if ((10#${v1[i]} > 10#${v2[i]})); then
      return 1
    elif ((10#${v1[i]} < 10#${v2[i]})); then
      return 2
    fi
  done
  return 0
}
check_commands() {
  #check wget
  # Ensure the installed wget is at least MIN_WGET_VERSION; abort otherwise.
  local MIN_WGET_VERSION=1.10
  vercomp $(wget -V | sed -n 's/^.* \([1-9]\.[0-9.]*\) .*$/\1/p') $MIN_WGET_VERSION
  if [[ $? -eq 2 ]]; then
    # installed version compares lower than the minimum
    wget -V
    echo
    echo "** ERROR: wget version is too old. Use version $MIN_WGET_VERSION or greater. **" >&2
    exit 1
  fi
}
usage() {
# Print usage. The sed below scrapes the trailing "#<arg> : description"
# comments off the getopts case arms in this very script ($0) to build the
# flag list -- so the help text always matches the parser.
echo "Usage: $(basename $0) [flags]"
echo "Flags is one of:"
sed -n '/^while getopts/,/^done/ s/^\([^)]*\)[^#]*#\(.*$\)/\1 \2/p' $0
echo
echo "This command stores the states of the downloads in .$0.status"
}
#defaults
debug=0
clean_work=1
#parse flags
# NOTE: the "#<arg> : description" comments on the case arms below are scraped
# by usage() (sed over the while getopts ... done range) to build the help
# text -- keep their format intact and add no extra comments inside the loop.
while getopts 'F:w:iuUnSpdvqh' OPT; do
case $OPT in
F) input_file="$OPTARG";; #<file> : read input from file instead of the embedded one (use - to read from stdin)
w) output="$OPTARG";; #<file> : Write embedded files into a file and exit
i) insecure=1;; # : set insecure mode, i.e. don't check server certificate
u) update=1;; # : Issue the search again and see if something has changed.
U) update_files=1;; # : Update files from server overwriting local ones (detect with -u)
n) dry_run=1;; # : Don't download any files, just report.
S) skip_checksum=1;; # : Skip file checksum
p) clean_work=0;; # : preserve data that failed checksum
d) verbose=1;debug=1;; # : display debug information
v) verbose=1;; # : be more verbose
q) quiet=1;; # : be less verbose
h) usage && exit 0;; # : displays this help
\?) echo "Unknown option '$OPTARG'" >&2 && usage && exit 1;;
\:) echo "Missing parameter for flag '$OPTARG'" >&2 && usage && exit 1;;
esac
done
# drop the parsed flags; any remaining arguments stay in $@
shift $(($OPTIND - 1))
#setup input as desired by the user
# -F <file>: take the download list from a file (or from stdin with '-')
# instead of the embedded here-document
if [[ "$input_file" ]]; then
if [[ "$input_file" == '-' ]]; then
download_files="$(cat)" #read from STDIN
exec 0</dev/tty #reopen STDIN as cat closed it
else
download_files="$(cat $input_file)" #read from file
fi
fi
#if -w (output) was selected write file and finish:
if [[ "$output" ]]; then
#check the file
if [[ -f "$output" ]]; then
read -p "Overwrite existing file $output? (y/N) " answ
case $answ in y|Y|yes|Yes);; *) echo "Aborting then..."; exit 0;; esac
fi
echo "$download_files">$output
exit
fi
#assure we have everything we need
check_commands
# -u: re-run the original search and replace this script if the server has a
# newer version (lines that differ on every generation are ignored in the diff)
if ((update)); then
echo "Checking the server for changes..."
post_data=$(IFS="&" ; echo "${url_params[*]}")
new_wget="$(wget --post-data "$post_data" "$search_url" -qO -)"
compare_cmd="grep -vE '^(# Generated by|# Search URL|search_url=)'"
if diff -q <(eval $compare_cmd<<<"$new_wget") <(eval $compare_cmd $0) >/dev/null; then
echo "No changes detected."
else
# fixed typo in user-facing message: "Dowloading" -> "Downloading"
echo "Wget was changed. Downloading. (old renamed to $0.old.#N)"
counter=0
while [[ -f $0.old.$counter ]]; do ((counter++)); done
mv $0 $0.old.$counter
echo "$new_wget" > $0
fi
exit 0
fi
check_chksum() {
  # Compute the checksum of file $1 with algorithm $2 and print it on stdout
  # so the caller can compare it against the expected value $3.
  local file="$1"
  local chk_type=$2
  local chk_value=$3
  local local_chksum=Unknown
  case $chk_type in
    md5) local_chksum=$(md5sum_ "$file" | cut -f1 -d" ");;
    sha256) local_chksum=$(sha256sum_ "$file" | awk '{print $1}');;
    *)
      # Unknown algorithm. The notice must go to stderr: stdout is captured
      # by the caller as the computed checksum, and echoing the message there
      # made the comparison fail and triggered a pointless delete/retry loop.
      # Echo the expected value instead so verification is skipped gracefully.
      echo "Can't verify checksum." >&2
      echo "$chk_value"
      return 0;;
  esac
  #verify
  ((debug)) && echo "local:$local_chksum vs remote:$chk_value" >&2
  echo $local_chksum
}
#Our own md5sum function call that takes into account machines that don't have md5sum but do have md5 (i.e. mac os x)
md5sum_() {
  # Print "checksum filename" for the given file(s) with whichever tool exists.
  hash -r  # forget cached command locations before probing with type
  local out
  if type md5sum >& /dev/null; then
    out=$(md5sum $@)
  else
    # BSD md5 prints "MD5 (file) = hash"; rewrite it to "hash file"
    out=$(md5 $@ | sed -n 's/MD5[ ]*\(.*\)[^=]*=[ ]*\(.*$\)/\2 \1/p')
  fi
  # unquoted echo collapses runs of spaces, matching the original output shape
  echo $out
}
#Our own sha256sum function call that takes into account machines that don't have sha256sum but do have sha2 (i.e. mac os x)
sha256sum_() {
  # Print "checksum filename" for the given file(s) with whichever tool exists.
  hash -r  # forget cached command locations before probing with type
  local out
  if type sha256sum >& /dev/null; then
    out=$(sha256sum $@)
  elif type shasum >& /dev/null; then
    out=$(shasum -a 256 $@)
  else
    out=$(sha2 -q -256 $@)
  fi
  # unquoted echo collapses runs of spaces, matching the original output shape
  echo $out
}
get_mod_time_() {
# Print the modification time (seconds since epoch) of the given file(s).
# FIX: "$@" is now quoted -- the unquoted $@ word-split paths containing
# spaces, making stat fail on such filenames.
if ((MACOSX)); then
#on a mac modtime is stat -f %m <file>
echo "$(stat -f %m "$@")"
else
#on linux (cygwin) modtime is stat -c %Y <file>
echo "$(stat -c %Y "$@")"
fi
return 0;
}
remove_from_cache() {
  # Delete the status line beginning with $1 from CACHE_FILE and clear the
  # global $cached so callers re-read the cache.
  local entry="$1"
  local remaining
  remaining="$(grep -ve "^$entry" "$CACHE_FILE")"
  echo "$remaining" > "$CACHE_FILE"
  unset cached
}
download() {
# Fetch every file listed in $download_files, verify its checksum, and record
# "filename mtime checksum" in $CACHE_FILE on success. Honors the dry_run,
# update_files, skip_checksum, clean_work, insecure and quiet globals.
wget="wget ${insecure:+--no-check-certificate} ${quiet:+-q} ${quiet:--v}"
while read line
do
# read csv here document into proper variables
eval $(awk -F "' '" '{$0=substr($0,2,length($0)-2); $3=tolower($3); print "file=\""$1"\";url=\""$2"\";chksum_type=\""$3"\";chksum=\""$4"\""}' <(echo $line) )
#Process the file
echo -n "$file ..."
#get the cached entry if any.
cached="$(grep -e "^$file" "$CACHE_FILE")"
#if we have the cache entry but no file, clean it.
if [[ ! -f $file && "$cached" ]]; then
#the file was removed, clean the cache
remove_from_cache "$file"
unset cached
fi
#check it wasn't modified
if [[ -n "$cached" && "$(get_mod_time_ $file)" == $(echo "$cached" | cut -d ' ' -f2) ]]; then
if [[ "$chksum" == "$(echo "$cached" | cut -d ' ' -f3)" ]]; then
echo "Already downloaded and verified"
continue
elif ((update_files)); then
#user wants to overwrite newer files
rm $file
remove_from_cache "$file"
unset cached
else
#file on server is different from what we have.
echo "WARNING: The remote file was changed (probably a new version is available). Use -U to Update/overwrite"
continue
fi
fi
# retry budget: chksum_err_count re-downloads are allowed while the failing
# checksum keeps changing (transmission errors); a stable wrong checksum
# means the server copy is bad and we give up.
unset chksum_err_value chksum_err_count
while : ; do
# (if we had the file size, we could check before trying to complete)
echo "Downloading"
[[ ! -d "$(dirname "$file")" ]] && mkdir -p "$(dirname "$file")"
if ((dry_run)); then
#all important info was already displayed, if in dry_run mode just abort
#No status will be stored
break
else
$wget -O "$file" $url || { failed=1; break; }
fi
#check if file is there
if [[ -f $file ]]; then
((debug)) && echo file found
if ((skip_checksum)); then
echo "Skipping check of file checksum"
break
fi
if [[ ! "$chksum" ]]; then
echo "Checksum not provided, can't verify file integrity"
break
fi
result_chksum=$(check_chksum "$file" $chksum_type $chksum)
if [[ "$result_chksum" != "$chksum" ]]; then
echo " $chksum_type failed!"
if ((clean_work)); then
if !((chksum_err_count)); then
chksum_err_value=$result_chksum
chksum_err_count=2
# FIX: was "checksum_err_count--" (misspelled), which always evaluated an
# unset variable as 0 and bailed out on the second failure instead of
# spending the retry budget.
elif ((chksum_err_count--)); then
if [[ "$result_chksum" != "$chksum_err_value" ]]; then
#this is a real transmission problem
chksum_err_value=$result_chksum
chksum_err_count=2
fi
else
#ok if here we keep getting the same "different" checksum
echo "The file returns always a different checksum!"
echo "Contact the data owner to verify what is happening."
echo
sleep 1
break
fi
rm $file
#try again
echo -n " re-trying..."
continue
else
echo " don't use -p or remove manually."
fi
else
echo " $chksum_type ok. done!"
echo "$file" $(get_mod_time_ "$file") $chksum >> $CACHE_FILE
fi
fi
#done!
break
done
if ((failed)); then
echo "download failed"
unset failed
fi
done <<<"$download_files"
}
dedup_cache_() {
# Collapse duplicate entries in the status cache, keeping the newest record
# per filename, then restore ascending-mtime order.
# FIX: the dump of $tmp was guarded by ((DEBUG)), a variable never set
# anywhere in this script; the rest of the code uses lowercase "debug".
local file=${1:-${CACHE_FILE}}
((debug)) && echo "dedup'ing cache ${file} ..."
# reverse sort puts the newest entry for each file first; awk keeps only the
# first occurrence of each filename ($1)
local tmp=$(LC_ALL='C' sort -r -k1,2 $file | awk '!($1 in a) {a[$1];print $0}' | sort -k2,2)
((debug)) && echo "$tmp"
echo "$tmp" > $file
((debug)) && echo "(cache dedup'ed)"
}
#do we have old results? Create the file if not
[ ! -f $CACHE_FILE ] && echo "#filename mtime checksum" > $CACHE_FILE && chmod 666 $CACHE_FILE
#
# MAIN
#
echo "Running $(basename $0) version: $version"
((verbose)) && echo "we use other tools in here, don't try to user their proposed 'options' directly"
echo "Use $(basename $0) -h for help."$'\n'
# banner embedded at script-generation time: how many files this script carries
cat <<'EOF-MESSAGE'
Script created for 1 file(s)
(The count won't match if you manually edit this file!)
EOF-MESSAGE
sleep 1
# detect the OS (sets LINUX/MACOSX for the stat flags), download everything,
# then compact the status cache
check_os
download
dedup_cache_
echo "done"
To convert the data to the 20CRv3 format we use the script below:
#!/usr/bin/env python
# Read in the CMIP7 volcanic aerosol extinctions and convert to 20CR forcing
# (10 years x 4 45-degree latitude bands x 12 months, values scaled by 1e4).
import os
import iris
import numpy as np
import argparse
# Get the start year as an argument
parser = argparse.ArgumentParser()
parser.add_argument(
"--start_year",
type=int,
default=1800,
help="Start year for the 10-year file (default 1800)",
)
args = parser.parse_args()
# CMIP7 extinction file downloaded from ESGF (see the wget script above);
# the SCRATCH environment variable must point at the download directory.
input_file = (
"%s/20CR_volcanic_bc/ext_input4MIPs_aerosolProperties_CMIP_UOEXETER-CMIP-2-2-1_gnz_175001-202312.nc"
% os.getenv("SCRATCH")
)
# Load the extinctions for a single month
def load_month(year, month):
    """Load the extinction cube for one calendar month, masking missing data."""
    in_month = iris.Constraint(
        time=lambda cell: cell.point.year == year and cell.point.month == month
    )
    cube = iris.load_cube(input_file, iris.Constraint(name="ext") & in_month)
    # NaNs in the file become masked points so later collapses ignore them
    cube.data = np.ma.masked_invalid(cube.data)
    return cube
# Remove wavelength dependence by extracting value at 550nm
def extract_550nm(cube):
    """Slice the cube at the 550 nm wavelength (coordinate is in metres)."""
    target = 550.0 / 1000000000  # 550 nm in metres
    # NOTE(review): exact float comparison assumes the file stores precisely
    # 550e-9 on the radiation_wavelength coordinate -- confirm against the data
    return cube.extract(
        iris.Constraint(radiation_wavelength=lambda cell: cell == target)
    )
# Integrate over height to get total column extinction
def integrate_height(cube):
    """Sum extinction over height, weighting each level by its layer thickness.

    Height bounds are guessed if the file does not provide them; the result
    is the total column extinction (optical depth) in the cube's units.
    """
    height = cube.coord("height_above_mean_sea_level")
    # bounds are required to compute per-layer thicknesses
    if height.bounds is None:
        height.guess_bounds()
    layer_thickness = height.bounds[:, 1] - height.bounds[:, 0]
    # thickness-weighted SUM over height == a Riemann-sum column integral
    return cube.collapsed(height, iris.analysis.SUM, weights=layer_thickness)
# Area weighted average over latitude range
def area_weighted_lat_avg(cube, lat_min, lat_max):
    """Mean over latitudes in [lat_min, lat_max], weighted by cos(latitude)."""
    band = cube.extract(
        iris.Constraint(latitude=lambda cell: lat_min <= cell <= lat_max)
    )
    # cos(lat) is proportional to the area of each latitude row
    weights = np.cos(np.deg2rad(band.coord("latitude").points))
    weights /= weights.sum()  # normalise to sum to 1
    return band.collapsed("latitude", iris.analysis.MEAN, weights=weights)
# Get the forcings 20CR needs for a single month
def get_20cr_forcing(year, month):
    """Return the four 45-degree band-mean column extinctions, south to north."""
    column = integrate_height(extract_550nm(load_month(year, month)))
    bands = [(-90, -45), (-45, 0), (0, 45), (45, 90)]
    return [area_weighted_lat_avg(column, lo, hi).data.item() for lo, hi in bands]
# Get a decade of data
def get_decade_forcing(start_year):
    """Return a (120, 4) array of monthly band forcings for the decade
    starting at start_year.

    Months that cannot be loaded (e.g. outside the file's 1750-2023 span)
    fall back to zero forcing; previously this happened silently, now a
    warning is written to stderr so gaps are visible.
    """
    import sys

    decade_ext = []
    for year in range(start_year, start_year + 10):
        for month in range(1, 13):
            try:
                ext = get_20cr_forcing(year, month)
            except Exception as exc:
                print(
                    "Warning: no forcing for %04d-%02d (%s); using zeros"
                    % (year, month, exc),
                    file=sys.stderr,
                )
                ext = [0.0, 0.0, 0.0, 0.0]
            decade_ext.append(ext)
    return np.array(decade_ext)
# Print out the 20CR forcing file for the decade
# Output layout matches the legacy 20CR format shown at the top of this page:
# a header line, then for each of the 10 years, 4 rows (latitude bands N->S)
# of 12 monthly integers. Values are scaled by 1e4, so divide by 1e4 to
# recover optical depth.
ext = get_decade_forcing(args.start_year)
fn = "CMIP7_volcanic_forcing_20CR_%04d-%04d.txt" % (
args.start_year,
args.start_year + 9,
)
with open(fn, "w") as f:
f.write(
"%04d-%04d MONTHLY VOLCANIC FORCING IN 45-DEGREE-LAT BANDS FROM N TO S\n"
% (args.start_year, args.start_year + 9)
)
for yri in range(10):
for lati in range(4):
for mon in range(12):
# rows of ext are months since decade start; bands are stored S->N,
# so 3-lati flips them into the file's N->S order
f.write(" %5d" % int(ext[yri * 12 + mon, 3 - lati] * 1.0e4))
f.write("\n")
f.write("\n")
f.write(" ----- ----- ----- ----- -----\n")
f.write("NOTE: data in (12 months) by (4 lat-zones) by (10 years)\n")
f.write(" to get actual values of optical depth use a factor of 1.e-4\n")