TEMPO UVAI vs DSCOVR (spatial)
Summary
This notebook illustrates a comparison of the TEMPO ultraviolet aerosol index (UVAI) against the DSCOVR EPIC UVAI. TEMPO_O3TOT_L2_V03 and DSCOVR_EPIC_L2_AER_03 are the data collections used as sources of UVAI.
TEMPO and DSCOVR granules are downloaded on the fly with the earthaccess library, which may need to be installed first.
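If earthaccess (or any of the other imported packages) is not available in your environment, it can usually be installed from within the notebook; the package list below is an assumption based on the imports used in this notebook and may need adjusting for your setup.

# install third-party dependencies (assumed package set; adjust to your environment)
%pip install earthaccess netCDF4 h5py numpy scipy shapely matplotlib cartopy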
Dataset Information
“DSCOVR_EPIC_L2_AER_03 is the Deep Space Climate Observatory (DSCOVR) Enhanced Polychromatic Imaging Camera (EPIC) Level 2 UV Aerosol Version 3 data product. Observations for this data product are at 340 and 388 nm and are used to derive near UV (ultraviolet) aerosol properties. The EPIC aerosol retrieval algorithm (EPICAERUV) uses a set of aerosol models to account for the presence of carbonaceous aerosols from biomass burning and wildfires (BIO), desert dust (DST), and sulfate-based (SLF) aerosols. These aerosol models are identical to those assumed in the OMI (Ozone Monitoring Instrument) algorithm (Torres et al., 2007; Jethva and Torres, 2011).” (Source)
Total ozone Level 2 files provide ozone information at Tropospheric Emissions: Monitoring of Pollution (TEMPO)’s native spatial resolution, ~10 km^2 at the center of the Field of Regard (FOR), for individual granules. Each granule covers the entire North-South TEMPO FOR but only a portion of the East-West FOR.
Table of Contents
- Setup
- Define utility functions for DSCOVR and TEMPO data
- Establish access to Earthdata
- Select timeframe of interest
- Retrieving DSCOVR EPIC granules
- For every DSCOVR EPIC granule, find simultaneous TEMPO granules and re-map DSCOVR EPIC data to geolocations of TEMPO
Notebook’s general code outline:
- The timeframe of interest is selected by the user.
- DSCOVR EPIC granules within the TEMPO field of regard (FOR) and within the user's timeframe are searched for by means of the earthaccess library.
- After the DSCOVR EPIC granules are downloaded, a loop over these granules searches for TEMPO granules simultaneous with each DSCOVR EPIC one.
- If such TEMPO granules exist, DSCOVR EPIC UVAI retrievals are interpolated to the positions of the TEMPO pixels. The interpolated values are written into a netCDF file along with the TEMPO geolocations.
- Finally, the original UVAI from DSCOVR EPIC and TEMPO are plotted along with the interpolated DSCOVR EPIC values in the same figure. Output images are written to PNG files.
1. Setup
Import the packages used throughout the notebook.

import earthaccess # needed to discover and download TEMPO and DSCOVR EPIC data
import netCDF4 as nc # needed to read TEMPO data and to write the re-mapped output
import os
import sys
import platform
from subprocess import Popen
import shutil
from shapely.geometry import Point, Polygon # needed to test whether a point lies within a polygon
from scipy.interpolate import griddata # needed to interpolate DSCOVR EPIC data to TEMPO pixel locations
from scipy import stats
import requests
import codecs
import numpy as np
import h5py # needed to read DSCOVR_EPIC_L2_AER files
import matplotlib.pyplot as plt # needed to plot the resulting maps
from urllib.request import urlopen, Request
from pathlib import Path # needed to check whether a needed data file is already downloaded
from datetime import datetime, timedelta # needed to work with granule times
import cartopy.crs as ccrs
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
2. Define utility functions for DSCOVR and TEMPO data
2.1 Function to read DSCOVR AER data files
The function read_epic_l2_AER reads a DSCOVR_EPIC_L2_AER product file given by its name fname and returns the 2D arrays of latitudes, longitudes, UVAI, and AOD, the wavelength array, their respective fill values, and the granule time.
def read_epic_l2_AER(fname):
    aod_name = '/HDFEOS/SWATHS/Aerosol NearUV Swath/Data Fields/FinalAerosolOpticalDepth'
    uvai_name = '/HDFEOS/SWATHS/Aerosol NearUV Swath/Data Fields/UVAerosolIndex'
    lat_name = '/HDFEOS/SWATHS/Aerosol NearUV Swath/Geolocation Fields/Latitude'
    lon_name = '/HDFEOS/SWATHS/Aerosol NearUV Swath/Geolocation Fields/Longitude'
    wl_name = '/HDFEOS/SWATHS/Aerosol NearUV Swath/Data Fields/Wavelength'

    try:
        f = h5py.File(fname, "r")

        item = f[aod_name]
        aod2D = np.array(item[:])
        fv_aod = item.fillvalue

        item = f[uvai_name]
        uvai2D = np.array(item[:])
        fv_uvai = item.fillvalue

        item = f[lat_name]
        lat2D = np.array(item[:])
        fv_lat = item.fillvalue

        item = f[lon_name]
        lon2D = np.array(item[:])
        fv_lon = item.fillvalue

        item = f[wl_name]
        wl = np.array(item[:])
        fv_wl = item.fillvalue

        f.close()

        # get the timestamp from the granule's filename
        fname_split = fname.split('_')
        timestamp = fname_split[-2]
        yyyy = int(timestamp[0 : 4])
        mm = int(timestamp[4 : 6])
        dd = int(timestamp[6 : 8])
        hh = int(timestamp[8 : 10])
        mn = int(timestamp[10 : 12])
        ss = int(timestamp[12 : 14])

    except:
        print("Unable to find or read hdf5 input granule file ", fname)
        aod2D = 0.
        fv_aod = 0.
        uvai2D = 0.
        fv_uvai = 0.
        lat2D = 0.
        fv_lat = 0.
        lon2D = 0.
        fv_lon = 0.
        wl = 0.
        fv_wl = 0.
        yyyy = 0.
        mm = 0.
        dd = 0.
        hh = 0.
        mn = 0.
        ss = 0.

    return aod2D, fv_aod, uvai2D, fv_uvai, lat2D, fv_lat, lon2D, fv_lon, \
           wl, fv_wl, yyyy, mm, dd, hh, mn, ss
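A minimal usage sketch of this reader, assuming the granule named below (one of the files downloaded in Section 5.2) is already present in the working directory:

# quick check of a single downloaded EPIC granule (file name taken from the download list in Section 5)
aod2D, fv_aod, uvai2D, fv_uvai, lat2D, fv_lat, lon2D, fv_lon, \
    wl, fv_wl, yyyy, mm, dd, hh, mn, ss = read_epic_l2_AER('DSCOVR_EPIC_L2_AER_03_20230805004554_03.he5')
if not isinstance(lat2D, float): # the reader returns scalars if the file could not be read
    print('UVAI array shape:', uvai2D.shape,
          ' granule time: %4.4i-%2.2i-%2.2i %2.2i:%2.2i:%2.2i' % (yyyy, mm, dd, hh, mn, ss))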
2.2 Function to read UV Aerosol Index from TEMPO O3TOT data file
The function read_TEMPO_O3TOT_L2_UVAI reads the uv_aerosol_index and quality_flag arrays from a TEMPO L2 total ozone product file (TEMPO_O3TOT_L2) and returns them along with the coordinates and times of the pixels.
If one of the requested variables cannot be read, all returned variables are zeroed.
def read_TEMPO_O3TOT_L2_UVAI(fn):
    var_name = 'uv_aerosol_index'
    var_QF_name = 'quality_flag'

    try:
        ds = nc.Dataset(fn)

        prod = ds.groups['product'] # this opens the group /product as prod

        var = prod.variables[var_name] # this reads variable uv_aerosol_index from the product group
        uvai = np.array(var)
        uvai_fv = var.getncattr('_FillValue')

        var_QF = prod.variables[var_QF_name] # this reads variable quality_flag from the product group
        uvai_QF = np.array(var_QF)
        # there is no fill value for the quality flag.
        # Once it is available in the next version of the product,
        # un-comment the line below and add fv_QF to the return line.
        # fv_QF = var_QF.getncattr('_FillValue')

        geo = ds.groups['geolocation'] # this opens the group /geolocation as geo

        lat = np.array(geo.variables['latitude']) # this reads variable latitude from the geolocation group into a numpy array
        lon = np.array(geo.variables['longitude']) # this reads variable longitude from the geolocation group into a numpy array
        fv_geo = geo.variables['latitude'].getncattr('_FillValue')
        # It appeared that garbage values of latitudes and longitudes in the L2 files
        # are 9.969209968386869E36, while the fill value is -1.2676506E30
        # (after a deeper search it was found that the actual value in the file is -1.2676506002282294E30).
        # For this reason, fv_geo is set to 9.969209968386869E36 to make the code work.
        # Once the problem is resolved and garbage values of latitudes and longitudes
        # equal their fill value, the line below must be removed.
        fv_geo = 9.969209968386869E36

        time = np.array(geo.variables['time']) # this reads variable time from the geolocation group into a numpy array

        ds.close()

    except:
        print('variable ' + var_name + ' cannot be read in file ' + fn)
        lat = 0.
        lon = 0.
        time = 0.
        fv_geo = 0.
        uvai = 0.
        uvai_QF = 0.
        uvai_fv = 0.
        # fv_QF = -999

    return lat, lon, fv_geo, time, uvai, uvai_QF, uvai_fv
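A minimal usage sketch, assuming the TEMPO granule named below (one of the files downloaded in Section 6) is already in the working directory:

# quick look at one TEMPO granule (file name taken from the download log in Section 6)
lat, lon, fv_geo, time, uvai, uvai_QF, uvai_fv = \
    read_TEMPO_O3TOT_L2_UVAI('TEMPO_O3TOT_L2_V03_20230805T123711Z_S001G05.nc')
if not isinstance(lat, float): # the reader returns scalars if the file could not be read
    good = (lat != fv_geo)&(lon != fv_geo)&(uvai != uvai_fv)
    print('granule shape:', uvai.shape, ' valid UVAI pixels:', np.sum(good))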
2.3 Function creating TEMPO O3 granule polygon
def TEMPO_L2_polygon(lat, lon, fv_geo):
    nx = lon.shape[0]
    ny = lon.shape[1]
    print('granule has %3d scanlines by %4d pixels' %(nx, ny))

    dpos = np.empty([0, 2])

    x_ind = np.empty([nx, ny], dtype = int) # creating an array of x indices
    for ix in range(nx): x_ind[ix, :] = ix # populating the array of x indices
    y_ind = np.empty([nx, ny], dtype = int) # creating an array of y indices
    for iy in range(ny): y_ind[:, iy] = iy # populating the array of y indices

    mask = (lon != fv_geo)&(lat != fv_geo)
    if len(lon[mask]) == 0:
        print('the granule is empty - no meaningful positions')
        return dpos

    # right boundary
    r_m = min(x_ind[mask].flatten())
    local_mask = (lon[r_m, :] != fv_geo)&(lat[r_m, :] != fv_geo)
    r_b = np.stack((lon[r_m, local_mask], lat[r_m, local_mask])).T

    # left boundary
    l_m = max(x_ind[mask].flatten())
    local_mask = (lon[l_m, :] != fv_geo)&(lat[l_m, :] != fv_geo)
    l_b = np.stack((lon[l_m, local_mask], lat[l_m, local_mask])).T

    # top and bottom boundaries
    t_b = np.empty([0, 2])
    b_b = np.empty([0, 2])
    for ix in range(r_m + 1, l_m):
        local_mask = (lon[ix, :] != fv_geo)&(lat[ix, :] != fv_geo)
        local_y_ind = y_ind[ix, local_mask]
        y_ind_top = min(local_y_ind)
        y_ind_bottom = max(local_y_ind)
        t_b = np.append(t_b, [[lon[ix, y_ind_top], lat[ix, y_ind_top]]], axis=0)
        b_b = np.append(b_b, [[lon[ix, y_ind_bottom], lat[ix, y_ind_bottom]]], axis=0)

    # combine the right, top, left, and bottom boundaries, going along the combined boundary counterclockwise
    dpos = np.append(dpos, r_b[ : :-1, :], axis=0) # this adds the right boundary, counterclockwise
    dpos = np.append(dpos, t_b, axis=0) # this adds the top boundary, counterclockwise
    dpos = np.append(dpos, l_b, axis=0) # this adds the left boundary, counterclockwise
    dpos = np.append(dpos, b_b[ : :-1, :], axis=0) # this adds the bottom boundary, counterclockwise

    print('polygon shape: ', dpos.shape)

    return dpos
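The boundary returned by TEMPO_L2_polygon is used later (Section 6) to test whether DSCOVR EPIC pixels fall inside a TEMPO granule. A minimal sketch of that test, assuming lat, lon, and fv_geo were read with read_TEMPO_O3TOT_L2_UVAI; the test location is hypothetical:

# build a shapely polygon from the granule boundary and test a single (lon, lat) point against it
boundary = TEMPO_L2_polygon(lat, lon, fv_geo) # (N, 2) array of (lon, lat) vertices
if len(boundary) > 0:
    poly = Polygon(list(boundary))
    p = Point(-96.0, 39.0) # hypothetical test location (lon, lat)
    print('point inside TEMPO granule:', p.within(poly))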
2.4 Function writing DSCOVR EPIC UV Aerosol Index re-mapped to TEMPO granule locations
def write_DSCOVR_TEMPO_UVAI(fname, lat2D, lon2D, uvai2D):
    #
    # variables:
    # fname  - TEMPO file name, used to create the output file name
    # lat2D  - 2D array of TEMPO latitudes
    # lon2D  - 2D array of TEMPO longitudes
    # uvai2D - 2D array of DSCOVR EPIC UVAI re-mapped to TEMPO locations
    # the arrays above should all have the same shape
    try:
        (nx, ny) = lat2D.shape
        ncf = nc.Dataset('DSCOVR_UVAI_'+fname, mode='w', format='NETCDF4_CLASSIC')
        x_dim = ncf.createDimension('mirror_step', nx) # number of scanlines
        y_dim = ncf.createDimension('xtrack', ny) # number of pixels in a scanline

        lat = ncf.createVariable('lat', np.float32, ('mirror_step', 'xtrack'))
        lat.units = 'degrees_north'
        lat.long_name = 'latitude'
        lat[:,:] = lat2D

        lon = ncf.createVariable('lon', np.float32, ('mirror_step', 'xtrack'))
        lon.units = 'degrees_east'
        lon.long_name = 'longitude'
        lon[:,:] = lon2D

        uv_aerosol_index = ncf.createVariable('uv_aerosol_index', np.float32, ('mirror_step', 'xtrack'))
        uv_aerosol_index[:,:] = uvai2D

        ncf.close()

        success = True

    except: success = False

    return success
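A short sketch to verify a file produced by write_DSCOVR_TEMPO_UVAI; the output name follows the 'DSCOVR_UVAI_' + TEMPO file name pattern used by the function, and the TEMPO granule name here is an example from this notebook's run:

# read back the re-mapped UVAI written by write_DSCOVR_TEMPO_UVAI (example output file name)
out_fname = 'DSCOVR_UVAI_' + 'TEMPO_O3TOT_L2_V03_20230805T123711Z_S001G05.nc'
if os.path.exists(out_fname):
    with nc.Dataset(out_fname) as ncf:
        uvai_remapped = np.array(ncf.variables['uv_aerosol_index'])
        print(out_fname, uvai_remapped.shape, 'valid pixels:', np.sum(uvai_remapped > -999.))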
3. Establish access to Earthdata
3.1. Log in
The user needs to create an account at https://www.earthdata.nasa.gov/. The function earthaccess.login prompts for the Earthdata login and password.

auth = earthaccess.login(strategy="interactive", persist=True)
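If Earthdata credentials are already stored in a ~/.netrc file, the interactive prompt can be skipped; a sketch, assuming such a file exists:

# non-interactive alternative, assuming ~/.netrc already contains Earthdata Login credentials
if os.path.exists(os.path.expanduser("~/.netrc")):
    auth = earthaccess.login(strategy="netrc")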
3.2. Create the local .dodsrc configuration file
homeDir = os.path.expanduser("~") + os.sep

with open(homeDir + '.dodsrc', 'w') as file:
    file.write('HTTP.COOKIEJAR={}.urs_cookies\n'.format(homeDir))
    file.write('HTTP.NETRC={}.netrc'.format(homeDir))
    file.close()

print('Saved .dodsrc to:', homeDir)

# Set appropriate permissions for Linux/macOS
if platform.system() != "Windows":
    Popen('chmod og-rw ~/.netrc', shell=True)
else:
    # Copy .dodsrc to the working directory on Windows
    shutil.copy2(homeDir + '.dodsrc', os.getcwd())
    print('Copied .dodsrc to:', os.getcwd())
Saved .dodsrc to: /home/jovyan/
4. Select timeframe of interest
DSCOVR EPIC granules will be searched for within this timeframe.

print('enter period of interest, start and end dates, in the form YYYYMMDD')
datestamp_ini = input('enter start date of interest ')
datestamp_fin = input('enter end date of interest ')

start_date = int(datestamp_ini)
end_date = int(datestamp_fin)

yyyy_ini = start_date//10000
mm_ini = (start_date//100 - yyyy_ini*100)
dd_ini = (start_date - yyyy_ini*10000 - mm_ini*100)

yyyy_fin = end_date//10000
mm_fin = (end_date//100 - yyyy_fin*100)
dd_fin = (end_date - yyyy_fin*10000 - mm_fin*100)
print(yyyy_ini, mm_ini, dd_ini, yyyy_fin, mm_fin, dd_fin)

date_start = str('%4.4i-%2.2i-%2.2i 00:00:00' %(yyyy_ini, mm_ini, dd_ini))
date_end = str('%4.4i-%2.2i-%2.2i 23:59:59' %(yyyy_fin, mm_fin, dd_fin))
enter period of interest, start and end dates, in the form YYYYMMDD
enter start date of interest 20230805
enter end date of interest 20230805
2023 8 5 2023 8 5
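For non-interactive (batch) runs, the prompts above can be replaced with hard-coded dates; a sketch using the same dates entered in this run:

# non-interactive alternative: hard-code the period of interest (dates repeated from the run above)
datestamp_ini, datestamp_fin = '20230805', '20230805'
date_start = datetime.strptime(datestamp_ini, '%Y%m%d').strftime('%Y-%m-%d 00:00:00')
date_end = datetime.strptime(datestamp_fin, '%Y%m%d').strftime('%Y-%m-%d 23:59:59')
print(date_start, date_end)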
5. Retrieving DSCOVR EPIC granules
within the timeframe of interest and falling into the TEMPO FOR polygon.

short_name = 'DSCOVR_EPIC_L2_AER' # collection name to search for in Earthdata

# the polygon below is taken from the MMT description of TEMPO_O3TOT_L2,
# see https://mmt.earthdata.nasa.gov/collections/C2842849465-LARC_CLOUD
# Polygon: (10.0°, -170.0°), (10.0°, -10.0°), (80.0°, -10.0°), (80.0°, -170.0°), (10.0°, -170.0°)
bbox = (-170., 10., -10., 80.)

FOR_results_EPIC = earthaccess.search_data(short_name=short_name,
                                           temporal=(date_start, date_end),
                                           bounding_box=bbox)

n_EPIC = len(FOR_results_EPIC)

print('total number of DSCOVR EPIC L2_AER granules found for TEMPO FOR'
      '\nwithin period of interest between', date_start, 'and', date_end, 'is', n_EPIC)
Granules found: 21
total number of DSCOVR EPIC L2_AER granules found for TEMPO FOR
within period of interest between 2023-08-05 00:00:00 and 2023-08-05 23:59:59 is 21
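The bounding box passed to earthaccess.search_data is ordered (lower-left lon, lower-left lat, upper-right lon, upper-right lat); a sketch showing that the same tuple can be derived from the TEMPO FOR polygon corners quoted in the code comment above:

# derive the bounding box from the (lon, lat) corners of the TEMPO FOR polygon listed in the MMT record
FOR_corners = [(-170., 10.), (-10., 10.), (-10., 80.), (-170., 80.)]
FOR_poly = Polygon(FOR_corners)
print(FOR_poly.bounds) # (minx, miny, maxx, maxy) = (-170.0, 10.0, -10.0, 80.0), i.e. the bbox above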
5.1. Ensuring all discovered granules have download links
Without this step, granules lacking download links crash the call of earthaccess.download().

granule_links_EPIC = []
FOR_results_EPIC_bad = []
for result in FOR_results_EPIC:
    try:
        granule_links_EPIC.append(result['umm']['RelatedUrls'][0]['URL'])
    except:
        FOR_results_EPIC_bad.append(result)

for granule_link in sorted(granule_links_EPIC): print(granule_link)

for result in FOR_results_EPIC_bad: FOR_results_EPIC.remove(result)
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805004554_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805015122_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805025649_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805040216_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805050743_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805071838_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805082405_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805092932_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805103500_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805114028_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805124555_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805135123_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805145650_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805160217_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805170745_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805181312_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805191839_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805202406_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805212934_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805223501_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805234028_03.he5
5.2. Download DSCOVR EPIC granules
and ensuring that all granules have been downloaded.

downloaded_files = earthaccess.download(FOR_results_EPIC, local_path='.')

# Check whether all DSCOVR EPIC data files have been downloaded
# (iterate over a copy of the list so that removing links does not skip elements)
for granule_link in list(granule_links_EPIC):
    EPIC_fname = granule_link.split('/')[-1]
    # check if the file exists in the local directory
    if not os.path.exists(EPIC_fname):
        print(EPIC_fname, 'does not exist in local directory')
        # repeat the download attempt
        downloaded_files = earthaccess.download(granule_link, local_path='.')
        # if the file still does not exist in the directory, remove its link from the list of links
        if not os.path.exists(EPIC_fname): granule_links_EPIC.remove(granule_link)
Getting 21 granules, approx download size: 0.0 GB
File DSCOVR_EPIC_L2_AER_03_20230805135123_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805004554_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805114028_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805223501_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805170745_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805191839_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805015122_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805145650_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805181312_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805071838_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805212934_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805092932_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805025649_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805082405_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805202406_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805103500_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805050743_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805040216_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805124555_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805234028_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805160217_03.he5 already downloaded
6. For every DSCOVR EPIC granule, find simultaneous TEMPO granules and re-map DSCOVR EPIC data to geolocations of TEMPO
Write the re-mapped DSCOVR EPIC UVAI to a netCDF file and plot the original DSCOVR EPIC and TEMPO UVAI along with the re-mapped DSCOVR EPIC values.
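The core of the re-mapping performed below is scipy.interpolate.griddata, which linearly interpolates the scattered DSCOVR EPIC UVAI values onto the TEMPO pixel locations. A minimal synthetic sketch of that step (the toy coordinates and values are hypothetical):

# toy re-mapping: interpolate values defined at scattered (lon, lat) points onto target points
src_points = np.array([[0., 0.], [1., 0.], [0., 1.], [1., 1.]]) # hypothetical source lon/lat
src_values = np.array([0., 1., 1., 2.]) # hypothetical UVAI at the source points
target_points = np.array([[0.5, 0.5], [0.25, 0.75]]) # hypothetical target (TEMPO-like) lon/lat
print(griddata(src_points, src_values, target_points, method='linear', fill_value=-999.))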
# Set TEMPO name constants
short_name = 'TEMPO_O3TOT_L2' # collection name to search for in Earthdata
version = 'V03' # this is the latest available version as of August 02, 2024

# loop over the DSCOVR EPIC granules found
for granule_link in sorted(granule_links_EPIC):
    last_slash_ind = granule_link.rfind('/')
    Dfname = granule_link[last_slash_ind+1 : ]
    print(Dfname)

    aod2D, fv_aod, uvai2D, fv_uvai, lat2D, fv_lat, lon2D, fv_lon, \
        wl, fv_wl, yyyy, mm, dd, hh, mn, ss = read_epic_l2_AER(Dfname)

    if isinstance(lat2D, float): continue

    timestamp = datetime(yyyy, mm, dd, hh, mn, ss)
    # it was discovered that the actual timespan of an EPIC granule begins 289 s before
    # the granule timestamp and ends 107 s after it.
    # This timeframe is used for the search of TEMPO granules.
    timestamp1 = timestamp + timedelta(seconds = -289)
    timestamp2 = timestamp + timedelta(seconds = 107)
    print(timestamp, timestamp1, timestamp2)

    for attempt in range(2):
        try:
            results = earthaccess.search_data(short_name=short_name,
                                              version=version,
                                              temporal=(timestamp1, timestamp2))
            break
        except: continue

    try: n_gr = len(results)
    except: n_gr = 0
    print('total number of TEMPO version ', version, ' granules found',
          '\nwithin period of interest between', timestamp1, 'and', timestamp2,
          ' is', n_gr)
    if n_gr == 0: continue # if no TEMPO granules were found within the DSCOVR EPIC timeframe, go to the next EPIC granule

    # mask out DSCOVR fill values
    mask = (lat2D != fv_lat)&(lon2D != fv_lon)&(uvai2D != fv_uvai)
    points = np.column_stack((lon2D[mask], lat2D[mask]))
    ff = uvai2D[mask]

    downloaded_files = earthaccess.download(results, local_path='.')

    for r in results:
        granule_links = r.data_links()
        last_slash_ind = granule_links[0].rfind('/')
        Tfname = granule_links[0][last_slash_ind+1 : ]

        lat, lon, fv_geo, time, uvai, uvai_QF, uvai_fv = read_TEMPO_O3TOT_L2_UVAI(Tfname)

        polygon = TEMPO_L2_polygon(lat, lon, fv_geo)
        coords_poly = list(polygon)
        poly = Polygon(coords_poly)

        # create arrays of indices to restore 2D arrays after re-mapping
        (nx, ny) = lat.shape
        y_ind = np.tile(np.linspace(0, ny, ny, endpoint = False, dtype = int), (nx, 1))
        x_ind = np.tile(np.linspace(0, nx, nx, endpoint = False, dtype = int), (ny, 1)).transpose()

        # mask out fill values of TEMPO lat/lon positions
        mask_TEMPO = (lat != fv_geo)&(lon != fv_geo)&(uvai != uvai_fv)
        lon1D = lon[mask_TEMPO]
        lat1D = lat[mask_TEMPO]
        pp = np.column_stack((lon1D, lat1D))

        x_ind_m = x_ind[mask_TEMPO]
        y_ind_m = y_ind[mask_TEMPO]

        # limit DSCOVR UVAI to the lat/lon ranges of the TEMPO granule
        min_TEMPO_lon = min(lon1D)
        max_TEMPO_lon = max(lon1D)
        min_TEMPO_lat = min(lat1D)
        max_TEMPO_lat = max(lat1D)

        mask_DSCOVR = (uvai2D != fv_uvai)\
                     &(lat2D > min_TEMPO_lat)&(lat2D < max_TEMPO_lat)\
                     &(lon2D > min_TEMPO_lon)&(lon2D < max_TEMPO_lon)
        lon1D_DSCOVR = lon2D[mask_DSCOVR]
        lat1D_DSCOVR = lat2D[mask_DSCOVR]
        uvai1D_DSCOVR = uvai2D[mask_DSCOVR]
        # number of DSCOVR pixels falling into the ranges min_TEMPO_lat < lat2D < max_TEMPO_lat, min_TEMPO_lon < lon2D < max_TEMPO_lon
        n_DSCOVR_TEMPO = len(uvai1D_DSCOVR)
        if n_DSCOVR_TEMPO == 0:
            print('no original DSCOVR pixels within TEMPO granule')
            continue

        mask_DSCOVR_TEMPO = np.empty(n_DSCOVR_TEMPO, dtype = np.bool_)
        for i in range(n_DSCOVR_TEMPO):
            pp_DSCOVR = np.array([lon1D_DSCOVR[i], lat1D_DSCOVR[i]])
            p = Point(pp_DSCOVR)
            mask_DSCOVR_TEMPO[i] = p.within(poly)

        lon1D_DSCOVR_TEMPO = lon1D_DSCOVR[mask_DSCOVR_TEMPO]
        lat1D_DSCOVR_TEMPO = lat1D_DSCOVR[mask_DSCOVR_TEMPO]
        uvai1D_DSCOVR_TEMPO = uvai1D_DSCOVR[mask_DSCOVR_TEMPO]

        # the line below performs the re-mapping
        DSCOVR_TEMPO_uvai = griddata(points, ff, pp, method='linear',
                                     fill_value=-999., rescale=False)
        # check whether there are any values within the valid range
        valid_mask = (DSCOVR_TEMPO_uvai > -30)&(DSCOVR_TEMPO_uvai < 30)
        if len(DSCOVR_TEMPO_uvai[valid_mask]) == 0:
            print('no re-mapped DSCOVR pixels within TEMPO granule')
            continue

        # create and fill 2D arrays to be restored
        lat2D_TEMPO = np.empty([nx, ny])
        lat2D_TEMPO[:, :] = -999.
        lon2D_TEMPO = np.empty([nx, ny])
        lon2D_TEMPO[:, :] = -999.
        uvai2D_TEMPO = np.empty([nx, ny])
        uvai2D_TEMPO[:, :] = -999.

        # restore the 2D arrays
        for ix, iy, lon1, lat1, uvai1 in \
            zip(x_ind_m, y_ind_m, lon1D, lat1D, DSCOVR_TEMPO_uvai):
            lat2D_TEMPO[ix, iy] = lat1
            lon2D_TEMPO[ix, iy] = lon1
            uvai2D_TEMPO[ix, iy] = uvai1

        # write the restored 2D arrays to a netCDF file
        output_success = write_DSCOVR_TEMPO_UVAI(Tfname, lat2D_TEMPO, lon2D_TEMPO, uvai2D_TEMPO)
        if not output_success: print('failed to write DSCOVR UVAI re-mapped to TEMPO granule into the output file')

        # plot the output comparing TEMPO and DSCOVR EPIC UVAI
        fig = plt.figure(figsize=(20, 9), dpi=300, facecolor=None)

        proj = ccrs.LambertConformal(central_longitude=(min_TEMPO_lon + max_TEMPO_lon)*.5, # -96.0
                                     central_latitude=39.0,
                                     false_easting=0.0,
                                     false_northing=0.0,
                                     standard_parallels=(33, 45),
                                     globe=None,
                                     cutoff=10)
        transform = ccrs.PlateCarree()

        mask_TEMPO = (lat != fv_geo)&(lon != fv_geo)&(uvai != uvai_fv)
        lon1D = lon[mask_TEMPO]
        lat1D = lat[mask_TEMPO]
        uvai1D = uvai[mask_TEMPO]

        ax1 = fig.add_subplot(132, projection=proj)
        ax1.set_extent([min_TEMPO_lon, max_TEMPO_lon, min_TEMPO_lat, max_TEMPO_lat], crs=transform)
        im1 = ax1.scatter(lon1D, lat1D, c=uvai1D, s=1, cmap=plt.cm.jet,
                          vmin=-4., vmax=4., transform=transform)
        ax1.coastlines(resolution='50m', color='black', linewidth=1)
        gl = ax1.gridlines(draw_labels=True, dms=True)
        gl.xformatter = LONGITUDE_FORMATTER
        gl.yformatter = LATITUDE_FORMATTER
        cb1 = plt.colorbar(im1, ticks=[-4, -2, 0, 2, 4], fraction=0.022, pad=0.01)
        cb1.set_label('UVAI', fontsize=10)
        ax1.set_title('UVAI '+Tfname, size=10)

        ax2 = fig.add_subplot(133, projection=proj)
        ax2.set_extent([min_TEMPO_lon, max_TEMPO_lon, min_TEMPO_lat, max_TEMPO_lat], crs=transform)
        im2 = ax2.scatter(pp[valid_mask, 0], pp[valid_mask, 1],
                          c=DSCOVR_TEMPO_uvai[valid_mask], s=1, cmap=plt.cm.jet,
                          vmin=-4., vmax=4., transform=transform)
        ax2.coastlines(resolution='50m', color='black', linewidth=1)
        gl = ax2.gridlines(draw_labels=True, dms=True)
        gl.xformatter = LONGITUDE_FORMATTER
        gl.yformatter = LATITUDE_FORMATTER
        cb2 = plt.colorbar(im2, ticks=[-4, -2, 0, 2, 4], fraction=0.022, pad=0.01)
        cb2.set_label('UVAI', fontsize=10)
        ax2.set_title('DSCOVR EPIC UVAI re-mapped', size=10)

        ax3 = fig.add_subplot(131, projection=proj)
        ax3.set_extent([min_TEMPO_lon, max_TEMPO_lon, min_TEMPO_lat, max_TEMPO_lat], crs=transform)
        im3 = ax3.scatter(lon1D_DSCOVR_TEMPO, lat1D_DSCOVR_TEMPO, c=uvai1D_DSCOVR_TEMPO, s=1, cmap=plt.cm.jet,
                          vmin=-4., vmax=4., transform=transform)
        ax3.coastlines(resolution='50m', color='black', linewidth=1)
        gl = ax3.gridlines(draw_labels=True, dms=True)
        gl.xformatter = LONGITUDE_FORMATTER
        gl.yformatter = LATITUDE_FORMATTER
        cb3 = plt.colorbar(im3, ticks=[-4, -2, 0, 2, 4], fraction=0.022, pad=0.01)
        cb3.set_label('UVAI', fontsize=10)
        ax3.set_title('UVAI '+Dfname, size=10)

        plt.savefig('UVAI_'+Tfname+'.png', dpi=300)
        plt.close()
DSCOVR_EPIC_L2_AER_03_20230805004554_03.he5
2023-08-05 00:45:54 2023-08-05 00:41:05 2023-08-05 00:47:41
Granules found: 0
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 00:41:05 and 2023-08-05 00:47:41 is 0
DSCOVR_EPIC_L2_AER_03_20230805015122_03.he5
2023-08-05 01:51:22 2023-08-05 01:46:33 2023-08-05 01:53:09
Granules found: 0
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 01:46:33 and 2023-08-05 01:53:09 is 0
DSCOVR_EPIC_L2_AER_03_20230805025649_03.he5
2023-08-05 02:56:49 2023-08-05 02:52:00 2023-08-05 02:58:36
Granules found: 0
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 02:52:00 and 2023-08-05 02:58:36 is 0
DSCOVR_EPIC_L2_AER_03_20230805040216_03.he5
2023-08-05 04:02:16 2023-08-05 03:57:27 2023-08-05 04:04:03
Granules found: 0
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 03:57:27 and 2023-08-05 04:04:03 is 0
DSCOVR_EPIC_L2_AER_03_20230805050743_03.he5
2023-08-05 05:07:43 2023-08-05 05:02:54 2023-08-05 05:09:30
Granules found: 0
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 05:02:54 and 2023-08-05 05:09:30 is 0
DSCOVR_EPIC_L2_AER_03_20230805071838_03.he5
2023-08-05 07:18:38 2023-08-05 07:13:49 2023-08-05 07:20:25
Granules found: 0
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 07:13:49 and 2023-08-05 07:20:25 is 0
DSCOVR_EPIC_L2_AER_03_20230805082405_03.he5
2023-08-05 08:24:05 2023-08-05 08:19:16 2023-08-05 08:25:52
Granules found: 0
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 08:19:16 and 2023-08-05 08:25:52 is 0
DSCOVR_EPIC_L2_AER_03_20230805092932_03.he5
2023-08-05 09:29:32 2023-08-05 09:24:43 2023-08-05 09:31:19
Granules found: 0
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 09:24:43 and 2023-08-05 09:31:19 is 0
DSCOVR_EPIC_L2_AER_03_20230805103500_03.he5
2023-08-05 10:35:00 2023-08-05 10:30:11 2023-08-05 10:36:47
Granules found: 0
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 10:30:11 and 2023-08-05 10:36:47 is 0
DSCOVR_EPIC_L2_AER_03_20230805114028_03.he5
2023-08-05 11:40:28 2023-08-05 11:35:39 2023-08-05 11:42:15
Granules found: 0
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 11:35:39 and 2023-08-05 11:42:15 is 0
DSCOVR_EPIC_L2_AER_03_20230805124555_03.he5
2023-08-05 12:45:55 2023-08-05 12:41:06 2023-08-05 12:47:42
Granules found: 2
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 12:41:06 and 2023-08-05 12:47:42 is 2
Getting 2 granules, approx download size: 0.21 GB
Accessing cloud dataset using dataset endpoint credentials: https://data.asdc.earthdata.nasa.gov/s3credentials
Downloaded: TEMPO_O3TOT_L2_V03_20230805T123711Z_S001G05.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T124324Z_S001G06.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
DSCOVR_EPIC_L2_AER_03_20230805135123_03.he5
2023-08-05 13:51:23 2023-08-05 13:46:34 2023-08-05 13:53:10
Granules found: 2
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 13:46:34 and 2023-08-05 13:53:10 is 2
Getting 2 granules, approx download size: 0.2 GB
Accessing cloud dataset using dataset endpoint credentials: https://data.asdc.earthdata.nasa.gov/s3credentials
Downloaded: TEMPO_O3TOT_L2_V03_20230805T134555Z_S002G06.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T135208Z_S002G07.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
DSCOVR_EPIC_L2_AER_03_20230805145650_03.he5
2023-08-05 14:56:50 2023-08-05 14:52:01 2023-08-05 14:58:37
Granules found: 2
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 14:52:01 and 2023-08-05 14:58:37 is 2
Getting 2 granules, approx download size: 0.2 GB
Accessing cloud dataset using dataset endpoint credentials: https://data.asdc.earthdata.nasa.gov/s3credentials
Downloaded: TEMPO_O3TOT_L2_V03_20230805T144826Z_S003G06.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T145439Z_S003G07.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
DSCOVR_EPIC_L2_AER_03_20230805160217_03.he5
2023-08-05 16:02:17 2023-08-05 15:57:28 2023-08-05 16:04:04
Granules found: 2
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 15:57:28 and 2023-08-05 16:04:04 is 2
Getting 2 granules, approx download size: 0.2 GB
Accessing cloud dataset using dataset endpoint credentials: https://data.asdc.earthdata.nasa.gov/s3credentials
Downloaded: TEMPO_O3TOT_L2_V03_20230805T155710Z_S004G07.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T160323Z_S004G08.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
DSCOVR_EPIC_L2_AER_03_20230805170745_03.he5
2023-08-05 17:07:45 2023-08-05 17:02:56 2023-08-05 17:09:32
Granules found: 2
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 17:02:56 and 2023-08-05 17:09:32 is 2
Getting 2 granules, approx download size: 0.21 GB
Accessing cloud dataset using dataset endpoint credentials: https://data.asdc.earthdata.nasa.gov/s3credentials
Downloaded: TEMPO_O3TOT_L2_V03_20230805T165941Z_S005G07.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T170554Z_S005G08.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
DSCOVR_EPIC_L2_AER_03_20230805181312_03.he5
2023-08-05 18:13:12 2023-08-05 18:08:23 2023-08-05 18:14:59
Granules found: 3
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 18:08:23 and 2023-08-05 18:14:59 is 3
Getting 3 granules, approx download size: 0.31 GB
Accessing cloud dataset using dataset endpoint credentials: https://data.asdc.earthdata.nasa.gov/s3credentials
Downloaded: TEMPO_O3TOT_L2_V03_20230805T180212Z_S006G07.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T180825Z_S006G08.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T181438Z_S006G09.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
DSCOVR_EPIC_L2_AER_03_20230805191839_03.he5
2023-08-05 19:18:39 2023-08-05 19:13:50 2023-08-05 19:20:26
Granules found: 2
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 19:13:50 and 2023-08-05 19:20:26 is 2
Getting 2 granules, approx download size: 0.21 GB
Accessing cloud dataset using dataset endpoint credentials: https://data.asdc.earthdata.nasa.gov/s3credentials
Downloaded: TEMPO_O3TOT_L2_V03_20230805T191056Z_S007G08.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T191709Z_S007G09.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
DSCOVR_EPIC_L2_AER_03_20230805202406_03.he5
2023-08-05 20:24:06 2023-08-05 20:19:17 2023-08-05 20:25:53
Granules found: 3
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 20:19:17 and 2023-08-05 20:25:53 is 3
Getting 3 granules, approx download size: 0.31 GB
Accessing cloud dataset using dataset endpoint credentials: https://data.asdc.earthdata.nasa.gov/s3credentials
Downloaded: TEMPO_O3TOT_L2_V03_20230805T201327Z_S008G08.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T201940Z_S008G09.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T202553Z_S008G10.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
DSCOVR_EPIC_L2_AER_03_20230805212934_03.he5
2023-08-05 21:29:34 2023-08-05 21:24:45 2023-08-05 21:31:21
Granules found: 2
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 21:24:45 and 2023-08-05 21:31:21 is 2
Getting 2 granules, approx download size: 0.21 GB
Accessing cloud dataset using dataset endpoint credentials: https://data.asdc.earthdata.nasa.gov/s3credentials
Downloaded: TEMPO_O3TOT_L2_V03_20230805T212211Z_S009G09.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T212824Z_S009G10.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
DSCOVR_EPIC_L2_AER_03_20230805223501_03.he5
2023-08-05 22:35:01 2023-08-05 22:30:12 2023-08-05 22:36:48
Granules found: 2
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 22:30:12 and 2023-08-05 22:36:48 is 2
Getting 2 granules, approx download size: 0.21 GB
Accessing cloud dataset using dataset endpoint credentials: https://data.asdc.earthdata.nasa.gov/s3credentials
Downloaded: TEMPO_O3TOT_L2_V03_20230805T222442Z_S010G09.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T223055Z_S010G10.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
DSCOVR_EPIC_L2_AER_03_20230805234028_03.he5
2023-08-05 23:40:28 2023-08-05 23:35:39 2023-08-05 23:42:15
Granules found: 1
total number of TEMPO version V03 granules found
within period of interest between 2023-08-05 23:35:39 and 2023-08-05 23:42:15 is 1
Getting 1 granules, approx download size: 0.1 GB
Accessing cloud dataset using dataset endpoint credentials: https://data.asdc.earthdata.nasa.gov/s3credentials
Downloaded: TEMPO_O3TOT_L2_V03_20230805T233326Z_S011G10.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
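As a final check, the sketch below lists the files produced by the loop above; the name patterns follow those used in write_DSCOVR_TEMPO_UVAI and plt.savefig, and which files exist depends on which granules overlapped.

import glob # not imported in the setup above; used only for this quick listing
# list the re-mapped netCDF files and the comparison plots produced by the loop
for f in sorted(glob.glob('DSCOVR_UVAI_TEMPO_O3TOT_L2_*.nc')) + sorted(glob.glob('UVAI_TEMPO_O3TOT_L2_*.png')):
    print(f)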