import copy
import sys
from datetime import datetime, timedelta
import earthaccess # needed to discover and download TEMPO data
import h5py # needed to read DSCOVR_EPIC_L2_AER files
import matplotlib.pyplot as plt # needed to plot the resulting time series
import netCDF4 as nc # needed to read TEMPO data
import numpy as np
from shapely.geometry import Point, Polygon # needed to search a point within a polygon
from scipy.interpolate import griddata # needed to interpolate TEMPO data to the point of interest
TEMPO UV Aerosol Index from L2_O3TOT with EPIC and AERONET
Summary
This notebook illustrates a comparison of the TEMPO ultraviolet aerosol index (UVAI) against the DSCOVR EPIC UVAI, as well as a comparison of DSCOVR EPIC AOD against AERONET.
Since AERONET data files cannot be downloaded programmatically, the user is expected to upload an AERONET AOD level 1.5 or 2.0 data file and provide its name as a constant. Since TEMPO's spatial coverage is regional and limited to North America, it is the user's responsibility to select an AERONET station within TEMPO's field of regard (FOR). If the selected station is outside the FOR, no TEMPO time series will be generated.
The user chooses the time period of interest by providing start and end dates in the form YYYYMMDD. Please be aware that if the selected period of interest is outside the available time span of one of the sensors, the corresponding time series will not be generated.
TEMPO and DSCOVR data are downloaded on the fly with the earthaccess library.
TEMPO and DSCOVR L2 AER data are interpolated to the location of the selected AERONET station.
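The interpolation step relies on scipy's griddata; the following minimal sketch (synthetic pixel values, not part of the notebook's workflow) shows the idea of interpolating scattered satellite pixels to a station location:

# Minimal sketch of the interpolation idea used later in this notebook
# (synthetic pixel values; the real code is in get_prod_loc, section 6.4).
import numpy as np
from scipy.interpolate import griddata

pixel_lonlat = np.array([[-74.0, 40.8], [-73.9, 40.8], [-74.0, 40.9], [-73.9, 40.9]])
pixel_values = np.array([0.30, 0.32, 0.28, 0.31])  # e.g. AOD at the four pixels
station_lonlat = np.array([-73.949, 40.821])       # CCNY AERONET site

# griddata returns a length-1 array for a single target point.
[value_at_station] = griddata(
    pixel_lonlat, pixel_values, station_lonlat, method="linear", fill_value=-0.999
)
print(value_at_station)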
Dataset Information
“DSCOVR_EPIC_L2_AER_03 is the Deep Space Climate Observatory (DSCOVR) Enhanced Polychromatic Imaging Camera (EPIC) Level 2 UV Aerosol Version 3 data product. Observations for this data product are at 340 and 388 nm and are used to derive near UV (ultraviolet) aerosol properties. The EPIC aerosol retrieval algorithm (EPICAERUV) uses a set of aerosol models to account for the presence of carbonaceous aerosols from biomass burning and wildfires (BIO), desert dust (DST), and sulfate-based (SLF) aerosols. These aerosol models are identical to those assumed in the OMI (Ozone Monitoring Instrument) algorithm (Torres et al., 2007; Jethva and Torres, 2011).” (Source)
Total ozone Level 2 files provide ozone information at Tropospheric Emissions: Monitoring of Pollution (TEMPO)’s native spatial resolution, ~10 km^2 at the center of the Field of Regard (FOR), for individual granules. Each granule covers the entire North-South TEMPO FOR but only a portion of the East-West FOR.
Prerequisites
A free(!) account at https://www.earthdata.nasa.gov/ is needed to log in and download the appropriate files.
This notebook was last tested using Python 3.10.15, and requires these libraries:
- earthaccess
- h5py
- matplotlib
- netCDF4
- numpy
- shapely
- scipy
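If any of these libraries are missing, they can usually be installed from a notebook cell; the exact environment setup is up to the user (this install line is a suggestion, not part of the original notebook):

# Install missing dependencies from within a notebook cell (versions not pinned here).
%pip install earthaccess h5py matplotlib netCDF4 numpy shapely scipy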
1. Setup
2. Defining functions
2.1 function to read DSCOVR AER data files
The function read_epic_l2_AER reads the DSCOVR_EPIC_L2_AER product file given by its filename and returns arrays of 2D latitudes, longitudes, UVAI, and AOD along with their fill values, the granule time, and the wavelengths at which AOD is retrieved.
def get_dataset_array_and_fill_value(file_object: h5py.File, dataset_path: str):
    h5_dataset = file_object[dataset_path]
    return np.array(h5_dataset[:]), h5_dataset.fillvalue


def read_epic_l2_AER(filename: str):
    aod_name = "/HDFEOS/SWATHS/Aerosol NearUV Swath/Data Fields/FinalAerosolOpticalDepth"
    uvai_name = "/HDFEOS/SWATHS/Aerosol NearUV Swath/Data Fields/UVAerosolIndex"
    lat_name = "/HDFEOS/SWATHS/Aerosol NearUV Swath/Geolocation Fields/Latitude"
    lon_name = "/HDFEOS/SWATHS/Aerosol NearUV Swath/Geolocation Fields/Longitude"
    wl_name = "/HDFEOS/SWATHS/Aerosol NearUV Swath/Data Fields/Wavelength"

    arrays = {}
    fill_values = {}

    with h5py.File(filename, "r") as f:
        arrays["aod3D"], fill_values["aod"] = get_dataset_array_and_fill_value(f, aod_name)
        arrays["uvai2D"], fill_values["uvai"] = get_dataset_array_and_fill_value(f, uvai_name)
        arrays["lat2D"], fill_values["lat"] = get_dataset_array_and_fill_value(f, lat_name)
        arrays["lon2D"], fill_values["lon"] = get_dataset_array_and_fill_value(f, lon_name)
        arrays["wl"], fill_values["wl"] = get_dataset_array_and_fill_value(f, wl_name)

    # Get time from the granule's filename.
    timestamp = datetime.strptime(filename.split("_")[-2], "%Y%m%d%H%M%S")

    return arrays, fill_values, timestamp
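A call to this function might look as follows (hedged example; the granule below must first be downloaded, as done in section 6.3, and the `_ex` names are illustrative only):

# Hedged usage example; requires the granule to be present in the working directory.
arrays_ex, fill_values_ex, timestamp_ex = read_epic_l2_AER(
    "DSCOVR_EPIC_L2_AER_03_20230805135123_03.he5"
)
print(timestamp_ex, arrays_ex["wl"], arrays_ex["aod3D"].shape)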
2.2 function to read AERONET data files
The function read_aeronet_mw takes an AERONET file name, a set of wavelengths, and the start and end dates of the timeframe of interest as arguments and returns a timeseries array of AODs at these wavelengths.
If a wavelength is not in the list of wavelengths of the data file, or if there are no retrievals at it, an empty array is returned.
def read_aeronet_mw(
    filename: str, wavelengths: list, start_date: datetime.date, end_date: datetime.date
):
    sorted_wavelengths = np.array(copy.deepcopy(sorted(wavelengths)))

    with open(filename, "r") as aero_file:
        # Loop through and read lines until encountering the
        # header line (identified by 'Date') or null.
        while True:
            line = aero_file.readline()
            if not line:
                break
            if "Date" in line:
                header = line.split(",")
                break

        # Find positions of necessary information in the header.
        # Date and time are always 1st and 2nd.
        date_pos = 0
        time_pos = 1
        aod_pos = [header.index(f"AOD_{wavelength:d}nm") for wavelength in wavelengths]
        name_pos = header.index("AERONET_Site_Name")
        lat_pos = header.index("Site_Latitude(Degrees)")
        lon_pos = header.index("Site_Longitude(Degrees)")

        # Read the 1st line of data to get name, lat, lon.
        values = aero_file.readline().split(",")
        AERONET_name = values[name_pos]
        lat = float(values[lat_pos])
        lon = float(values[lon_pos])

        # If AOD is valid, then append a new row to the aod and date_time arrays.
        aod = np.empty([0, len(wavelengths)])
        date_time = np.empty([0, 2])
        date_loc = values[date_pos]
        if start_date <= datetime.strptime(date_loc, "%d:%m:%Y").date() <= end_date:
            aod_values = np.array([float(values[position]) for position in aod_pos])
            aod = np.append(aod, [aod_values], axis=0)
            date_time = np.append(date_time, [[date_loc, values[time_pos]]], axis=0)

        # Read all other lines of data.
        # For each AOD that is valid, append a new row to the aod and date_time arrays.
        while True:
            line = aero_file.readline()
            if not line:
                break
            values = line.split(",")

            date_loc = values[date_pos]
            date_stamp = datetime.strptime(date_loc, "%d:%m:%Y").date()
            if date_stamp < start_date or date_stamp > end_date:
                continue
            aod_values = np.array([float(values[position]) for position in aod_pos])
            aod = np.append(aod, [aod_values], axis=0)
            date_time = np.append(date_time, [[date_loc, values[time_pos]]], axis=0)

    return AERONET_name, lat, lon, sorted_wavelengths, date_time, aod
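A call might look like this (hedged example, assuming the AERONET file from section 5 is present in the working directory; the `_ex` names are illustrative only):

# Hedged usage example with the CCNY file used later in this notebook.
name_ex, lat_ex, lon_ex, wln_ex, date_time_ex, aod_ex = read_aeronet_mw(
    "20230801_20241231_CCNY.lev15",
    [500, 340, 380],
    datetime(2023, 8, 5).date(),
    datetime(2023, 8, 5).date(),
)
print(name_ex, lat_ex, lon_ex, wln_ex, aod_ex.shape)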
2.3 functions to work with TEMPO
2.3.1 function to read UVAI from TEMPO_O3TOT_L2
The function read_TEMPO_O3TOT_L2_UVAI reads the following arrays from the TEMPO L2 O3TOT product TEMPO_O3TOT_L2_V03(2):
uv_aerosol_index;
quality_flag;
and returns the respective fields along with their fill values and the coordinates of the pixels.
If one of the requested variables cannot be read, all returned variables are zeroed.
def read_TEMPO_O3TOT_L2_UVAI(filename):
    """Read the following product arrays from the TEMPO_O3TOT_L2_V03(2):
    - uv_aerosol_index
    - quality_flag
    and return the respective fields along with their fill values and coordinates of the pixels.
    """
    var_name = "uv_aerosol_index"
    var_QF_name = "quality_flag"

    arrays = {}
    fill_values = {}

    with nc.Dataset(filename) as ds:
        # Open the product group (/product), read the chosen UVAI variable and its quality flag.
        prod = ds.groups["product"]
        arrays["uvai"] = prod.variables[var_name][:]
        fill_values["uvai"] = prod.variables[var_name].getncattr("_FillValue")
        var_QF = prod.variables[var_QF_name]
        arrays["uvai_QF"] = var_QF[:]
        # Note: there is no fill value for the quality flag.
        # Once it is available in the next version of the product,
        # un-comment the line below and add fv_QF to the return line.
        # fv_QF = var_QF.getncattr('_FillValue')

        # Open the geolocation group (/geolocation) and
        # read the latitude and longitude variables into numpy arrays.
        geo = ds.groups["geolocation"]
        arrays["lat"] = geo.variables["latitude"][:]
        arrays["lon"] = geo.variables["longitude"][:]
        fill_values["geo"] = geo.variables["latitude"].getncattr("_FillValue")
        # Note: it appeared that garbage values of latitudes and longitudes in the L2 files
        # are 9.969209968386869E36 while the fill value is -1.2676506E30
        # (after a deeper search, the actual value in the file is -1.2676506002282294E30).
        # For this reason, fill_values["geo"] is set to 9.969209968386869E36 to make the code work.
        # Once the problem is resolved and garbage latitudes and longitudes
        # equal their fill value, the line below must be removed.
        fill_values["geo"] = 9.969209968386869e36

        arrays["time"] = geo.variables["time"][:]

    return arrays, fill_values
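A call might look like this (hedged example; the granule below must first be downloaded, as done in section 7.2, and the `_ex` names are illustrative only):

# Hedged usage example with a granule name from section 7.1.
tempo_ex, tempo_fv_ex = read_TEMPO_O3TOT_L2_UVAI(
    "TEMPO_O3TOT_L2_V03_20230805T122445Z_S001G03.nc"
)
print(tempo_ex["uvai"].shape, tempo_fv_ex["uvai"], tempo_fv_ex["geo"])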
2.3.2 function creating TEMPO O3 granule polygon
def TEMPO_L2_polygon(lat, lon, fv_geo):
    nx = lon.shape[0]
    ny = lon.shape[1]
    print("granule has %3d scanlines by %4d pixels" % (nx, ny))
    dpos = np.empty([0, 2])

    x_ind = np.empty([nx, ny], dtype=int)  # create array of x (scanline) indices
    for ix in range(nx):
        x_ind[ix, :] = ix  # populate array of x indices
    y_ind = np.empty([nx, ny], dtype=int)  # create array of y (pixel) indices
    for iy in range(ny):
        y_ind[:, iy] = iy  # populate array of y indices

    mask = (lon != fv_geo) & (lat != fv_geo)
    if len(lon[mask]) == 0:
        print("the granule is empty - no meaningful positions")
        return dpos

    # right boundary
    r_m = min(x_ind[mask].flatten())
    local_mask = (lon[r_m, :] != fv_geo) & (lat[r_m, :] != fv_geo)
    r_b = np.stack((lon[r_m, local_mask], lat[r_m, local_mask])).T

    # left boundary
    l_m = max(x_ind[mask].flatten())
    local_mask = (lon[l_m, :] != fv_geo) & (lat[l_m, :] != fv_geo)
    l_b = np.stack((lon[l_m, local_mask], lat[l_m, local_mask])).T

    # top and bottom boundaries
    t_b = np.empty([0, 2])
    b_b = np.empty([0, 2])
    for ix in range(r_m + 1, l_m):
        local_mask = (lon[ix, :] != fv_geo) & (lat[ix, :] != fv_geo)
        local_y_ind = y_ind[ix, local_mask]
        y_ind_top = min(local_y_ind)
        y_ind_bottom = max(local_y_ind)
        t_b = np.append(t_b, [[lon[ix, y_ind_top], lat[ix, y_ind_top]]], axis=0)
        b_b = np.append(b_b, [[lon[ix, y_ind_bottom], lat[ix, y_ind_bottom]]], axis=0)

    # Combine right, top, left, and bottom boundaries, going counterclockwise along the combined boundary.
    dpos = np.append(dpos, r_b[::-1, :], axis=0)  # right boundary, counterclockwise
    dpos = np.append(dpos, t_b, axis=0)  # top boundary, counterclockwise
    dpos = np.append(dpos, l_b, axis=0)  # left boundary, counterclockwise
    dpos = np.append(dpos, b_b[::-1, :], axis=0)  # bottom boundary, counterclockwise

    print("polygon shape: ", dpos.shape)
    return dpos
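Combined with shapely, the returned boundary can be used to test whether a point of interest falls inside a granule footprint (a sketch, assuming tempo_ex and tempo_fv_ex from the example in section 2.3.1):

# Hedged sketch: does the CCNY site fall inside this granule's footprint?
boundary_ex = TEMPO_L2_polygon(tempo_ex["lat"], tempo_ex["lon"], tempo_fv_ex["geo"])
if len(boundary_ex) > 0 and Point(-73.949, 40.821).within(Polygon(boundary_ex)):
    print("POI is inside the granule footprint")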
Main code begins here
3. Establishing access to EarthData
The user needs an account at https://www.earthdata.nasa.gov/. The function earthaccess.login prompts for the EarthData login and password.
auth = earthaccess.login(strategy="interactive", persist=True)
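For non-interactive runs, earthaccess also supports credential files and environment variables; the commented lines below sketch those alternatives:

# Non-interactive alternatives (sketch):
# "netrc" reads credentials from ~/.netrc;
# "environment" reads EARTHDATA_USERNAME and EARTHDATA_PASSWORD.
# auth = earthaccess.login(strategy="netrc")
# auth = earthaccess.login(strategy="environment")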
4. Select timeframe of interest
The timeframe is common to all instruments.
For the tutorial demonstration, 20230805 was used for both the start and end dates.
print("enter period of interest, start and end dates, in the form YYYYMMDD")
= input("enter start date of interest ")
datestamp_initial = input("enter end date of interest ")
datestamp_final
= datetime.strptime(datestamp_initial + "00:00:00.000000", "%Y%m%d%H:%M:%S.%f")
datetime_initial = datetime_initial.strftime("%Y-%m-%d %H:%M:%S")
date_start
= datetime.strptime(datestamp_final + "23:59:59.999999", "%Y%m%d%H:%M:%S.%f")
datetime_final = datetime_final.strftime("%Y-%m-%d %H:%M:%S")
date_end
print(date_start, date_end)
enter period of interest, start and end dates, in the form YYYYMMDD
enter start date of interest 20230805
enter end date of interest 20230805
2023-08-05 00:00:00 2023-08-05 23:59:59
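To run the notebook non-interactively, the input() calls above can be replaced by hard-coded date stamps, e.g. the values used in this demonstration:

# Non-interactive alternative to the input() prompts above.
datestamp_initial = "20230805"
datestamp_final = "20230805"
datetime_initial = datetime.strptime(datestamp_initial + "00:00:00.000000", "%Y%m%d%H:%M:%S.%f")
date_start = datetime_initial.strftime("%Y-%m-%d %H:%M:%S")
datetime_final = datetime.strptime(datestamp_final + "23:59:59.999999", "%Y%m%d%H:%M:%S.%f")
date_end = datetime_final.strftime("%Y-%m-%d %H:%M:%S")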
5. Work with AERONET data
As of April 24, 2024, AERONET data cannot be downloaded programmatically. The user must download an AERONET data file using the AERONET download tool at https://aeronet.gsfc.nasa.gov/new_web/webtool_aod_v3.html. The first line below sets the name of the AERONET file used in this example.
= "20230801_20241231_CCNY.lev15"
aeronet_filename = [500, 340, 380] wl
5.1 read AERONET data
AERONET_name, lat, lon, wln, date_time, aod = read_aeronet_mw(
    aeronet_filename, wl, datetime_initial.date(), datetime_final.date()
)
POI_lat = lat
POI_lon = lon
POI_name = AERONET_name
num_datetimes = len(date_time)

print(f"name {AERONET_name}, latitude {lat:>9.4f}, longitude {lon:>10.4f}")
nwl = len(wln)
for iwl in range(nwl):
    print(f"wavelength: {wln[iwl]}, length:{len(aod[aod[:, iwl] > 0, iwl])}")
name CCNY, latitude 40.8213, longitude -73.9490
wavelength: 340, length:22
wavelength: 380, length:22
wavelength: 500, length:22
5.2 create timeseries of AERONET AODs
Calculate the time difference, dt_loc, in fractional days between the AERONET observations and the beginning of the timeframe of interest; write the date and time as yyyy, mm, dd, hh, mn, ss, dt_loc along with the wavelengths and corresponding AODs to a file.
= "AOD_AERONET"
out_Q_AERONET with open(
f"{out_Q_AERONET}_{datestamp_initial}_{datestamp_final}_"
f"{POI_name}_{POI_lat:>08.4f}N_{-POI_lon:>08.4f}W.txt",
"w",
as fout:
)
fout.write(f"timeseries of {out_Q_AERONET} at {POI_name} {POI_lat:>08.4f}N {-POI_lon:>08.4f}W\n"
)"yyyy mm dd hh mn ss time,days wl,nm AOD wl,nm AOD wl,nm AOD\n")
fout.write(
for i, datetime_row in enumerate(date_time):
= datetime.strptime(datetime_row[0] + datetime_row[1], "%d:%m:%Y%H:%M:%S")
dt_AERONET = (dt_AERONET - datetime_initial).total_seconds() / 86400
dt_loc = (
time_str f"{dt_AERONET.year} {dt_AERONET.month:>02} {dt_AERONET.day:>02} "
f"{dt_AERONET.hour:>02} {dt_AERONET.minute:>02} {dt_AERONET.second:>02} "
f"{dt_loc:>9.6f}"
)
fout.write(time_str)for wl, tau in zip(wln, aod[i]):
f" {wl:>5.0f} {max(tau, -1.0):>6.3f}")
fout.write("\n")
fout.write(print(datetime_row, time_str, aod[i])
['05:08:2023' '12:21:54'] 2023 08 05 12 21 54 0.515208 [0.333314 0.494809 0.458947]
['05:08:2023' '12:37:38'] 2023 08 05 12 37 38 0.526134 [0.306128 0.460212 0.425241]
['05:08:2023' '12:39:36'] 2023 08 05 12 39 36 0.527500 [0.311574 0.468725 0.432512]
['05:08:2023' '12:41:35'] 2023 08 05 12 41 35 0.528877 [0.324803 0.484136 0.453612]
['05:08:2023' '12:43:34'] 2023 08 05 12 43 34 0.530255 [0.328589 0.488803 0.454636]
['05:08:2023' '12:45:32'] 2023 08 05 12 45 32 0.531620 [0.335888 0.490196 0.453847]
['05:08:2023' '12:54:52'] 2023 08 05 12 54 52 0.538102 [0.325635 0.478464 0.44467 ]
['05:08:2023' '13:10:01'] 2023 08 05 13 10 01 0.548623 [0.333986 0.494555 0.460215]
['05:08:2023' '13:19:13'] 2023 08 05 13 19 13 0.555012 [0.330567 0.486189 0.452743]
['05:08:2023' '13:35:01'] 2023 08 05 13 35 01 0.565984 [0.276587 0.418951 0.386741]
['05:08:2023' '13:44:19'] 2023 08 05 13 44 19 0.572442 [0.266198 0.404995 0.374061]
['05:08:2023' '13:59:33'] 2023 08 05 13 59 33 0.583021 [0.280744 0.427243 0.394882]
['05:08:2023' '14:09:10'] 2023 08 05 14 09 10 0.589699 [0.262378 0.400944 0.369637]
['05:08:2023' '14:24:59'] 2023 08 05 14 24 59 0.600683 [0.269201 0.402717 0.373348]
['05:08:2023' '14:29:02'] 2023 08 05 14 29 02 0.603495 [0.245482 0.376444 0.346455]
['05:08:2023' '15:32:29'] 2023 08 05 15 32 29 0.647558 [0.208331 0.326602 0.30022 ]
['05:08:2023' '15:42:29'] 2023 08 05 15 42 29 0.654502 [0.23699 0.35099 0.325715]
['05:08:2023' '15:47:29'] 2023 08 05 15 47 29 0.657975 [0.216409 0.328565 0.302978]
['05:08:2023' '16:02:30'] 2023 08 05 16 02 30 0.668403 [0.220808 0.334116 0.308406]
['05:08:2023' '16:04:48'] 2023 08 05 16 04 48 0.670000 [0.221642 0.334563 0.308769]
['05:08:2023' '16:14:27'] 2023 08 05 16 14 27 0.676701 [0.294151 0.405008 0.380736]
['05:08:2023' '16:30:17'] 2023 08 05 16 30 17 0.687697 [0.246413 0.356773 0.32694 ]
6. Work with DSCOVR data
6.1 search for DSCOVR EPIC granules
falling into the timeframe of interest and a 1-degree box surrounding the AERONET location
= "DSCOVR_EPIC_L2_AER" # collection name to search for in the EarthData
short_name
= (POI_lon - 0.5, POI_lat - 0.5, POI_lon + 0.5, POI_lat + 0.5)
bbox
= earthaccess.search_data(
POI_results_EPIC =short_name, temporal=(date_start, date_end), bounding_box=bbox
short_name
)
print(
f"total number of DSCOVR EPIC L2_AER granules found for POI {POI_name}\n",
f"within period of interest between {date_start} and {date_end} is {len(POI_results_EPIC)}",
)
total number of DSCOVR EPIC L2_AER granules found for POI CCNY
within period of interest between 2023-08-05 00:00:00 and 2023-08-05 23:59:59 is 11
6.2 ensure all granules have download links
The lines below, before the call of earthaccess.download(), check whether all found granules have download links. Granules without links are removed from the list of search results; without this step, those granules would crash the call of earthaccess.download().
def get_url_value_from_result(earthaccess_result):
    """Return a tuple of the result itself and the data URL, only if the URL field is accessible."""
    try:
        return earthaccess_result, earthaccess_result["umm"]["RelatedUrls"][0]["URL"]
    except Exception:
        return None


# Populate the list of results that have links.
good_EPIC_result_links = [
    get_url_value_from_result(result)
    for result in POI_results_EPIC
    if get_url_value_from_result(result) is not None
]

# Show the result links.
for r in sorted(good_EPIC_result_links, key=lambda x: x[1]):
    print(r[1])
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805114028_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805124555_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805135123_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805145650_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805160217_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805170745_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805181312_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805191839_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805202406_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805212934_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805223501_03.he5
6.3 download DSCOVR EPIC granules using earthaccess library
Check whether all downloads were successful, and try one more time to get the failed granules. If the second attempt is not successful, remove the failed granules from the list.
downloaded_files = earthaccess.download(
    POI_results_EPIC,
    local_path=".",
)
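The retry described above is not implemented by the download call itself; a minimal sketch (assuming local filenames can be derived from the result URLs, as in section 6.2, and retry_missing_downloads is a helper introduced here for illustration) might look like:

import os  # used to check for already-downloaded files


def retry_missing_downloads(result_links):
    """Sketch: re-download granules whose files are missing locally,
    then drop any granule that is still missing after the retry."""
    missing = [result for result, url in result_links if not os.path.exists(url.split("/")[-1])]
    if missing:
        earthaccess.download(missing, local_path=".")
    return [(result, url) for result, url in result_links if os.path.exists(url.split("/")[-1])]


good_EPIC_result_links = retry_missing_downloads(good_EPIC_result_links)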
6.4 read DSCOVR EPIC granules and compile timeseries of AODs and UVAI
# The parameter geo_deviation defines the maximum difference between lats and lons
# of EPIC pixels and those of the AERONET site.
geo_deviation = 0.075
POI_coordinate = np.array([POI_lon, POI_lat])

out_Q_EPIC = "AOD_UVAI_EPIC"


def get_prod_loc(num_loc: int, lon_loc_array, lat_loc_array, values_loc, fill_value: float):
    if num_loc < 1:
        prod_loc = fill_value
    else:
        points = np.empty([0, 2])
        ff = np.empty(0)

        for i in range(num_loc):
            points = np.append(points, [[lon_loc_array[i], lat_loc_array[i]]], axis=0)
            ff = np.append(ff, values_loc[i])

        try:
            [prod_loc] = griddata(
                points, ff, POI_coordinate, method="linear", fill_value=fill_value, rescale=False
            )
        except Exception:
            try:
                prod_loc = np.mean(ff)
            except Exception:
                prod_loc = fill_value

    return prod_loc


with open(
    f"{out_Q_EPIC}_{datestamp_initial}_{datestamp_final}_{POI_name}_"
    f"{POI_lat:>08.4f}N_{-POI_lon:>08.4f}W.txt",
    "w",
) as fout:
    fout.write(f"timeseries of {out_Q_EPIC} at {POI_name} {POI_lat:>08.4f}N {-POI_lon:>08.4f}W\n")
    fout.write("yyyy mm dd hh mn ss time,days wl,nm AOD wl,nm AOD wl,nm AOD UVAI\n")

    for _, granule_link in sorted(good_EPIC_result_links, key=lambda x: x[1]):
        EPIC_fname = granule_link.split("/")[-1]
        print(EPIC_fname)
        try:
            dscovr_arrays, dscovr_fill_values, dscovr_timestamp = read_epic_l2_AER(EPIC_fname)
        except Exception:
            print("Unable to find or read hdf5 input granule file ", EPIC_fname)
            continue
        dt_loc = (dscovr_timestamp - datetime_initial).total_seconds() / 86400
        time_str = (
            f"{dscovr_timestamp.year} {dscovr_timestamp.month:>02} {dscovr_timestamp.day:>02} "
            f"{dscovr_timestamp.hour:>02} {dscovr_timestamp.minute:>02} {dscovr_timestamp.second:>02} "
            f"{dt_loc:>9.6f}"
        )
        print(time_str)
        fout.write(time_str)

        # Check whether the POI is in the granule. If not - move to the next granule.
        mask_geo = (
            (dscovr_arrays["lat2D"] < POI_lat + geo_deviation)
            & (dscovr_arrays["lat2D"] > POI_lat - geo_deviation)
            & (dscovr_arrays["lon2D"] < POI_lon + geo_deviation)
            & (dscovr_arrays["lon2D"] > POI_lon - geo_deviation)
        )

        for iwl in range(len(dscovr_arrays["wl"])):
            mask = mask_geo & (dscovr_arrays["aod3D"][:, :, iwl] > 0.0)
            lat_loc = dscovr_arrays["lat2D"][mask]
            lon_loc = dscovr_arrays["lon2D"][mask]
            aod_loc = dscovr_arrays["aod3D"][mask, iwl]

            prod_loc = get_prod_loc(len(aod_loc), lon_loc, lat_loc, aod_loc, fill_value=-0.999)

            fout.write(f" {dscovr_arrays['wl'][iwl]:>5.0f} {prod_loc:>6.3f}")
            print(dscovr_arrays["wl"][iwl], prod_loc)

        mask = mask_geo & (dscovr_arrays["uvai2D"] != dscovr_fill_values["uvai"])
        lat_loc = dscovr_arrays["lat2D"][mask]
        lon_loc = dscovr_arrays["lon2D"][mask]
        uvai_loc = dscovr_arrays["uvai2D"][mask]

        prod_loc = get_prod_loc(len(uvai_loc), lon_loc, lat_loc, uvai_loc, fill_value=-99.0)

        fout.write(f" {prod_loc:>6.2f}\n")
        print("UVAI", prod_loc)
DSCOVR_EPIC_L2_AER_03_20230805114028_03.he5
2023 08 05 11 40 28 0.486435
340.0 -0.999
388.0 -0.999
500.0 -0.999
UVAI -99.0
DSCOVR_EPIC_L2_AER_03_20230805124555_03.he5
2023 08 05 12 45 55 0.531887
340.0 -0.999
388.0 -0.999
500.0 -0.999
UVAI -99.0
DSCOVR_EPIC_L2_AER_03_20230805135123_03.he5
2023 08 05 13 51 23 0.577350
340.0 0.658370852470398
388.0 0.6070993542671204
500.0 0.5208560228347778
UVAI 1.0490291118621826
DSCOVR_EPIC_L2_AER_03_20230805145650_03.he5
2023 08 05 14 56 50 0.622801
340.0 1.0037826597690582
388.0 0.925611823797226
500.0 0.7941211760044098
UVAI 1.269827425479889
DSCOVR_EPIC_L2_AER_03_20230805160217_03.he5
2023 08 05 16 02 17 0.668252
340.0 -0.999
388.0 -0.999
500.0 -0.999
UVAI -99.0
DSCOVR_EPIC_L2_AER_03_20230805170745_03.he5
2023 08 05 17 07 45 0.713715
340.0 2.0829079262483576
388.0 1.6970317278376417
500.0 1.0809463897588194
UVAI -0.14213168194983128
DSCOVR_EPIC_L2_AER_03_20230805181312_03.he5
2023 08 05 18 13 12 0.759167
340.0 1.6575589177292698
388.0 1.528474366786192
500.0 1.3113422907254944
UVAI 0.9618683264479623
DSCOVR_EPIC_L2_AER_03_20230805191839_03.he5
2023 08 05 19 18 39 0.804618
340.0 1.46158707100744
388.0 1.347764097820558
500.0 1.1563033843281367
UVAI 1.5943157504218373
DSCOVR_EPIC_L2_AER_03_20230805202406_03.he5
2023 08 05 20 24 06 0.850069
340.0 -0.999
388.0 -0.999
500.0 -0.999
UVAI -99.0
DSCOVR_EPIC_L2_AER_03_20230805212934_03.he5
2023 08 05 21 29 34 0.895532
340.0 4.511193513870239
388.0 4.159878492355347
500.0 3.568934202194214
UVAI 3.3587218523025513
DSCOVR_EPIC_L2_AER_03_20230805223501_03.he5
2023 08 05 22 35 01 0.940984
340.0 -0.999
388.0 -0.999
500.0 -0.999
UVAI -99.0
7. Work with TEMPO data
7.1 Search for TEMPO data files within a 0.5-degree range around the POI
(the position of the AERONET station) falling into the timeframe of interest
# Setting TEMPO name constants
short_name = "TEMPO_O3TOT_L2"  # collection name to search for in the EarthData
out_Q = "UVAI_TEMPO"  # name of the output quantity with unit
version = "V03"

# Searching TEMPO data files within a 0.5-degree range around the POI
bbox = (POI_lon - 0.5, POI_lat - 0.5, POI_lon + 0.5, POI_lat + 0.5)

POI_results = earthaccess.search_data(
    short_name=short_name, version=version, temporal=(date_start, date_end), bounding_box=bbox
)
n_gr = len(POI_results)
print(
    f"Total number of TEMPO version {version} granules found for POI {POI_name}\n",
    f"within period of interest between {date_start} and {date_end} is {n_gr}.",
)
if n_gr == 0:
    print("program terminated")
    sys.exit()

# Print links to the granules.
tempo_granule_links = []
for result in POI_results:
    tempo_granule_links.append(result["umm"]["RelatedUrls"][0]["URL"])
for granule_link in tempo_granule_links:
    print(granule_link)
Total number of TEMPO version V03 granules found for POI CCNY
within period of interest between 2023-08-05 00:00:00 and 2023-08-05 23:59:59 is 11.
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T122445Z_S001G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T132716Z_S002G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T142947Z_S003G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T153218Z_S004G03.nc.met
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T163449Z_S005G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T173720Z_S006G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T183951Z_S007G03.nc.met
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T194222Z_S008G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T204453Z_S009G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T214724Z_S010G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T224955Z_S011G03.nc
7.2 download TEMPO files
Check whether all downloads were successful, and try to download failed granules one more time. If the second attempt fails, remove those granules from the list.
# Downloading TEMPO data files
downloaded_files = earthaccess.download(POI_results, local_path=".")
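The same hedged retry sketch from section 6.3 can be reused here by pairing each TEMPO result with its link (assumes retry_missing_downloads from that sketch):

# Reuse the retry sketch from section 6.3 (retry_missing_downloads defined there).
tempo_result_links = list(zip(POI_results, tempo_granule_links))
tempo_result_links = retry_missing_downloads(tempo_result_links)
tempo_granule_links = [url for _, url in tempo_result_links]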
7.3 compile TEMPO UVAI timeseries
with open(
    f"{out_Q}_{datestamp_initial}_{datestamp_final}_{POI_name}_"
    f"{POI_lat:>08.4f}N_{-POI_lon:>08.4f}W.txt",
    "w",
) as fout:
    fout.write(f"timeseries of {out_Q} at {POI_name} {POI_lat:>08.4f}N {-POI_lon:>08.4f}W\n")
    fout.write("yyyy mm dd hh mn ss time,days UVAI\n")

    POI_coordinate = np.array([POI_lon, POI_lat])
    POI_point = Point(POI_coordinate)

    for granule_link in sorted(tempo_granule_links):
        last_slash_ind = granule_link.rfind("/")
        tempo_file_name = granule_link[last_slash_ind + 1 :]
        print(f"\ngranule {tempo_file_name}")
        try:
            tempo, tempo_fv = read_TEMPO_O3TOT_L2_UVAI(tempo_file_name)
        except Exception:
            print(f"TEMPO UVAI cannot be read in file {tempo_file_name}")
            continue

        # Get time from the granule filename.
        # This time corresponds to the 1st element of the time array above, which is GPS time in seconds.
        T_index = tempo_file_name.rfind("T")
        tempo_file_datetime = datetime.strptime(
            tempo_file_name[T_index - 8 : T_index + 7], "%Y%m%dT%H%M%S"
        )

        # Check whether the POI is in the granule. If not - move to the next granule.
        tempo_polygon = Polygon(
            list(TEMPO_L2_polygon(tempo["lat"], tempo["lon"], tempo_fv["geo"]))
        )  # create granule polygon
        if not POI_point.within(tempo_polygon):
            continue
        print(f"point {POI_point} is in granule polygon.")

        POI_found = False
        for ix in range(tempo["lon"].shape[0] - 1):
            for iy in range(tempo["lon"].shape[1] - 1):

                def subarray(X_array):
                    return np.array(
                        [
                            X_array[ix, iy],
                            X_array[ix, iy + 1],
                            X_array[ix + 1, iy + 1],
                            X_array[ix + 1, iy],
                        ]
                    )

                lat_loc = subarray(tempo["lat"])
                lon_loc = subarray(tempo["lon"])

                coords_poly_loc = [
                    [lon_loc[0], lat_loc[0]],
                    [lon_loc[1], lat_loc[1]],
                    [lon_loc[2], lat_loc[2]],
                    [lon_loc[3], lat_loc[3]],
                ]
                if np.any(np.array(coords_poly_loc) == tempo_fv["geo"]):
                    continue
                if POI_point.within(Polygon(coords_poly_loc)):
                    # Print the polygon coordinates within which this point has been found.
                    POI_found = True
                    print("scanl pixel latitude longitude UVAI UVAI_QF")
                    for scl in range(ix, ix + 2):
                        for pix in range(iy, iy + 2):
                            print(
                                " %3d %4d %9.6f %10.6f %5.1f %6i"
                                % (
                                    scl,
                                    pix,
                                    tempo["lat"][scl, pix],
                                    tempo["lon"][scl, pix],
                                    tempo["uvai"][scl, pix],
                                    tempo["uvai_QF"][scl, pix],
                                )
                            )
                    print(f"POI {POI_name} at {POI_lat}N {POI_lon}E found")

                    # Get the values for this polygon.
                    uvai_loc = subarray(tempo["uvai"])
                    mask_noFV = uvai_loc != tempo_fv["uvai"]
                    points_noFV = np.column_stack((lon_loc[mask_noFV], lat_loc[mask_noFV]))
                    ff_noFV = uvai_loc[mask_noFV]

                    if ff_noFV.shape[0] == 0:
                        continue
                    elif ff_noFV.shape[0] < 4:
                        uvai_noFV = np.mean(ff_noFV)
                    elif ff_noFV.shape[0] == 4:
                        uvai_noFV = griddata(
                            points_noFV,
                            ff_noFV,
                            POI_coordinate,
                            method="linear",
                            fill_value=-99.0,
                            rescale=False,
                        )[0]
                    if uvai_noFV == -99.0:
                        continue

                    # Get the scanline time average as the TEMPO time of observation,
                    # in seconds since the first scanline.
                    delta_t = timedelta(
                        seconds=(tempo["time"][ix + 1] + tempo["time"][ix]) * 0.5 - tempo["time"][0]
                    )
                    # Convert that delta of seconds into a UTC timestamp by adding it
                    # to the UTC timestamp from the filename.
                    mid_granule_datetime = tempo_file_datetime + delta_t

                    dt_loc = (mid_granule_datetime - datetime_initial).total_seconds() / 86400
                    time_and_uvai_str = (
                        f"{mid_granule_datetime.year} {mid_granule_datetime.month:>02} {mid_granule_datetime.day:>02} "
                        f"{mid_granule_datetime.hour:>02} {mid_granule_datetime.minute:>02} {mid_granule_datetime.second:>02} "
                        f"{dt_loc:>9.6f} {uvai_noFV:>10.3e}"
                    )
                    print(time_and_uvai_str)
                    fout.write(time_and_uvai_str)
                    fout.write(
                        f" {tempo['lat'][ix, iy]:>9.4f}N {-tempo['lon'][ix, iy]:>9.4f}W {tempo['uvai'][ix, iy]:>10.3e} "
                    )
                    fout.write(
                        f"{tempo['lat'][ix, iy + 1]:>9.4f}N {-tempo['lon'][ix, iy + 1]:>9.4f}W {tempo['uvai'][ix, iy + 1]:>10.3e} "
                    )
                    fout.write(
                        f"{tempo['lat'][ix + 1, iy + 1]:>9.4f}N {-tempo['lon'][ix + 1, iy + 1]:>9.4f}W {tempo['uvai'][ix + 1, iy + 1]:>10.3e} "
                    )
                    fout.write(
                        f"{tempo['lat'][ix + 1, iy]:>9.4f}N {-tempo['lon'][ix + 1, iy]:>9.4f}W {tempo['uvai'][ix + 1, iy]:>10.3e}\n"
                    )
                    break
            if POI_found:
                break
granule TEMPO_O3TOT_L2_V03_20230805T122445Z_S001G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon.
scanl pixel latitude longitude UVAI UVAI_QF
59 661 40.834648 -73.943382 -0.9 8
59 662 40.813667 -73.949684 -0.8 8
60 661 40.831947 -74.001534 -0.8 8
60 662 40.810905 -74.007851 -0.8 8
POI CCNY at 40.821313N -73.949036E found
2023 08 05 12 27 45 0.519276 -8.146e-01
granule TEMPO_O3TOT_L2_V03_20230805T132716Z_S002G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon.
scanl pixel latitude longitude UVAI UVAI_QF
59 660 40.834454 -73.943253 -1.8 13
59 661 40.813469 -73.949562 -1.4 13
60 660 40.833084 -74.000923 -1.8 13
60 661 40.812099 -74.007202 -1.5 13
POI CCNY at 40.821313N -73.949036E found
2023 08 05 13 30 16 0.562691 -1.555e+00
granule TEMPO_O3TOT_L2_V03_20230805T142947Z_S003G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon.
scanl pixel latitude longitude UVAI UVAI_QF
58 660 40.837505 -73.885612 -1.5 13
58 661 40.816528 -73.891937 -1.3 13
59 660 40.836674 -73.944603 -2.4 13
59 661 40.815800 -73.950844 -2.3 13
POI CCNY at 40.821313N -73.949036E found
2023 08 05 14 32 44 0.606070 -2.297e+00
granule TEMPO_O3TOT_L2_V03_20230805T153218Z_S004G03.nc.met
TEMPO UVAI cannot be read in file TEMPO_O3TOT_L2_V03_20230805T153218Z_S004G03.nc.met
granule TEMPO_O3TOT_L2_V03_20230805T163449Z_S005G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon.
scanl pixel latitude longitude UVAI UVAI_QF
58 660 40.824287 -73.898758 -1.1 13
58 661 40.803333 -73.905052 -1.5 13
59 660 40.823364 -73.957039 -1.6 13
59 661 40.802170 -73.963463 -1.7 13
POI CCNY at 40.821313N -73.949036E found
2023 08 05 16 37 46 0.692899 -1.584e+00
granule TEMPO_O3TOT_L2_V03_20230805T173720Z_S006G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon.
scanl pixel latitude longitude UVAI UVAI_QF
58 660 40.831722 -73.893791 -1.5 13
58 661 40.810757 -73.900108 -1.5 13
59 660 40.829990 -73.952919 -2.1 13
59 661 40.808990 -73.959221 -1.9 13
POI CCNY at 40.821313N -73.949036E found
2023 08 05 17 40 17 0.736313 -2.005e+00
granule TEMPO_O3TOT_L2_V03_20230805T183951Z_S007G03.nc.met
TEMPO UVAI cannot be read in file TEMPO_O3TOT_L2_V03_20230805T183951Z_S007G03.nc.met
granule TEMPO_O3TOT_L2_V03_20230805T194222Z_S008G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon.
scanl pixel latitude longitude UVAI UVAI_QF
59 660 40.837013 -73.913071 -1.0 8
59 661 40.815960 -73.919426 -0.9 13
60 660 40.835320 -73.974220 -1.1 8
60 661 40.814457 -73.980431 -0.8 13
POI CCNY at 40.821313N -73.949036E found
2023 08 05 19 45 22 0.823177 -9.506e-01
granule TEMPO_O3TOT_L2_V03_20230805T204453Z_S009G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon.
scanl pixel latitude longitude UVAI UVAI_QF
59 660 40.825069 -73.947708 -1.1 8
59 661 40.803844 -73.954155 -0.8 8
60 660 40.823502 -74.007164 -0.9 8
60 661 40.802341 -74.013550 -0.8 13
POI CCNY at 40.821313N -73.949036E found
2023 08 05 20 47 53 0.866591 -1.015e+00
granule TEMPO_O3TOT_L2_V03_20230805T214724Z_S010G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon.
scanl pixel latitude longitude UVAI UVAI_QF
59 660 40.833599 -73.914108 -1.0 8
59 661 40.812668 -73.920380 -1.1 8
60 660 40.830616 -73.972809 -1.2 8
60 661 40.810017 -73.978859 -0.9 8
POI CCNY at 40.821313N -73.949036E found
2023 08 05 21 50 24 0.910005 -1.131e+00
granule TEMPO_O3TOT_L2_V03_20230805T224955Z_S011G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon.
scanl pixel latitude longitude UVAI UVAI_QF
59 661 40.826336 -73.932693 -1.0 8
59 662 40.805405 -73.938957 -1.0 8
60 661 40.823360 -73.993324 -1.2 8
60 662 40.802219 -73.999695 -1.3 8
POI CCNY at 40.821313N -73.949036E found
2023 08 05 22 52 55 0.953420 -1.047e+00
8. Reading back the timeseries written to the files and plotting the results
8.1 AERONET
with open(
    f"{out_Q_AERONET}_{datestamp_initial}_{datestamp_final}_{POI_name}_"
    f"{POI_lat:>08.4f}N_{-POI_lon:>08.4f}W.txt",
    "r",
) as fin:
    data_lines = fin.readlines()

timeseries_AERONET_340 = np.empty([0, 2])
timeseries_AERONET_380 = np.empty([0, 2])
timeseries_AERONET_500 = np.empty([0, 2])
for data_line in data_lines[2:]:
    split = data_line.split()
    tt = float(split[6])
    aod_340 = float(split[8])
    aod_380 = float(split[10])
    aod_500 = float(split[12])
    if aod_340 > 0.0:
        timeseries_AERONET_340 = np.append(timeseries_AERONET_340, [[tt, aod_340]], axis=0)
    if aod_380 > 0.0:
        timeseries_AERONET_380 = np.append(timeseries_AERONET_380, [[tt, aod_380]], axis=0)
    if aod_500 > 0.0:
        timeseries_AERONET_500 = np.append(timeseries_AERONET_500, [[tt, aod_500]], axis=0)
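Because every token in the data lines written in section 5.2 is numeric, the manual parsing above could also be done with np.loadtxt (a sketch under that assumption; timeseries_AERONET_340_alt is an illustrative name):

# Alternative parsing sketch: the AERONET file is all-numeric after its two header lines.
data = np.loadtxt(
    f"{out_Q_AERONET}_{datestamp_initial}_{datestamp_final}_{POI_name}_"
    f"{POI_lat:>08.4f}N_{-POI_lon:>08.4f}W.txt",
    skiprows=2,
    ndmin=2,
)
tt = data[:, 6]       # fractional days since the start of the timeframe
aod_340 = data[:, 8]  # AOD columns sit right after their wavelength columns
mask = aod_340 > 0.0
timeseries_AERONET_340_alt = np.column_stack((tt[mask], aod_340[mask]))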
8.2 DSCOVR EPIC
with open(
    f"{out_Q_EPIC}_{datestamp_initial}_{datestamp_final}_{POI_name}_"
    f"{POI_lat:>08.4f}N_{-POI_lon:>08.4f}W.txt",
    "r",
) as fin:
    data_lines = fin.readlines()

timeseries_EPIC_340 = np.empty([0, 2])
timeseries_EPIC_388 = np.empty([0, 2])
timeseries_EPIC_500 = np.empty([0, 2])
timeseries_EPIC_UVAI = np.empty([0, 2])
for data_line in data_lines[2:]:
    split = data_line.split()
    tt = float(split[6])
    aod_EPIC_340 = float(split[8])
    aod_EPIC_388 = float(split[10])
    aod_EPIC_500 = float(split[12])
    EPIC_UVAI = float(split[13])
    if aod_EPIC_340 > 0.0:
        timeseries_EPIC_340 = np.append(timeseries_EPIC_340, [[tt, aod_EPIC_340]], axis=0)
    if aod_EPIC_388 > 0.0:
        timeseries_EPIC_388 = np.append(timeseries_EPIC_388, [[tt, aod_EPIC_388]], axis=0)
    if aod_EPIC_500 > 0.0:
        timeseries_EPIC_500 = np.append(timeseries_EPIC_500, [[tt, aod_EPIC_500]], axis=0)
    if EPIC_UVAI > -99.0:
        timeseries_EPIC_UVAI = np.append(timeseries_EPIC_UVAI, [[tt, EPIC_UVAI]], axis=0)
8.3 TEMPO
with open(
    f"{out_Q}_{datestamp_initial}_{datestamp_final}_{POI_name}_"
    f"{POI_lat:>08.4f}N_{-POI_lon:>08.4f}W.txt",
    "r",
) as fin:
    data_lines = fin.readlines()

timeseries_TEMPO_UVAI = np.empty([0, 2])
for data_line in data_lines[2:]:
    split = data_line.split()
    tt = float(split[6])
    TEMPO_UVAI = float(split[7])
    if TEMPO_UVAI > -99.0:
        timeseries_TEMPO_UVAI = np.append(timeseries_TEMPO_UVAI, [[tt, TEMPO_UVAI]], axis=0)
8.4 plot Aerosol Optical Depth (AOD) from AERONET and DSCOVR EPIC
= "AOD_" + datestamp_initial + "_" + datestamp_final + "\n" + POI_name
plot_title = "AOD_" + datestamp_initial + "_" + datestamp_final + "_" + POI_name + ".jpg"
img_name
plt.plot(0],
timeseries_AERONET_340[:, 1],
timeseries_AERONET_340[:, ="AOD AERONET at 340nm",
label="None",
linestyle=2,
markersize="s",
marker="r",
markerfacecolor="r",
markeredgecolor
)
plt.plot(0],
timeseries_AERONET_380[:, 1],
timeseries_AERONET_380[:, ="AOD AERONET at 380nm",
label="None",
linestyle=2,
markersize="o",
marker="r",
markerfacecolor="r",
markeredgecolor
)
plt.plot(0],
timeseries_AERONET_500[:, 1],
timeseries_AERONET_500[:, ="AOD AERONET at 500nm",
label="None",
linestyle=2,
markersize="D",
marker="r",
markerfacecolor="r",
markeredgecolor
)
plt.plot(0],
timeseries_EPIC_340[:, 1],
timeseries_EPIC_340[:, ="AOD EPIC at 340nm",
label="None",
linestyle=2,
markersize="s",
marker="c",
markerfacecolor="c",
markeredgecolor
)
plt.plot(0],
timeseries_EPIC_388[:, 1],
timeseries_EPIC_388[:, ="AOD EPIC at 388nm",
label="None",
linestyle=2,
markersize="o",
marker="c",
markerfacecolor="c",
markeredgecolor
)
plt.plot(0],
timeseries_EPIC_500[:, 1],
timeseries_EPIC_500[:, ="AOD EPIC at 500nm",
label="None",
linestyle=2,
markersize="D",
marker="c",
markerfacecolor="c",
markeredgecolor
)
# Set the range of x-axis
= 0.0
l_lim = ((datetime_final - datetime_initial).total_seconds() + 1.0) / 86400.0
u_lim
plt.xlim(l_lim, u_lim)
# Set the range of y-axis
= 0.0
l_lim = 2.0
u_lim
plt.ylim(l_lim, u_lim)
r"GMT, day from beginning of " + datestamp_initial, fontsize=12)
plt.xlabel(r"AOD", fontsize=12)
plt.ylabel(
="lower left")
plt.legend(loc
+ str(", %08.4fN %08.4fW" % (POI_lat, -POI_lon)))
plt.title(plot_title format="jpg", dpi=300)
plt.savefig(img_name,
plt.close()
8.5 plot UV Aerosol Index
= f"UVAI_{datestamp_initial}_{datestamp_final}\n{POI_name}"
plot_title = f"UVAI_{datestamp_initial}_{datestamp_final}_{POI_name}.jpg"
img_name
plt.plot(0],
timeseries_EPIC_UVAI[:, 1],
timeseries_EPIC_UVAI[:, ="UVAI EPIC",
label="None",
linestyle=2,
markersize="o",
marker="c",
markerfacecolor="c",
markeredgecolor
)
plt.plot(0],
timeseries_TEMPO_UVAI[:, 1],
timeseries_TEMPO_UVAI[:, ="UVAI TEMPO",
label="None",
linestyle=2,
markersize="o",
marker="m",
markerfacecolor="m",
markeredgecolor
)
# Set the range of x-axis
= 0.0
l_lim = ((datetime_final - datetime_initial).total_seconds() + 1.0) / 86400.0
u_lim
plt.xlim(l_lim, u_lim)
# Set the range of y-axis
= -3.0
l_lim = 3.0
u_lim
plt.ylim(l_lim, u_lim)
r"GMT, day from beginning of " + datestamp_initial, fontsize=12)
plt.xlabel(r"UV Aerosol Index", fontsize=12)
plt.ylabel(
="lower left")
plt.legend(loc
+ str(", %08.4fN %08.4fW" % (POI_lat, -POI_lon)))
plt.title(plot_title format="jpg", dpi=300)
plt.savefig(img_name,
plt.close()