import earthaccess # needed to discover and download TEMPO data
import netCDF4 as nc # needed to read TEMPO data
import os
import sys
import copy
import platform
from subprocess import Popen
import shutil
from shapely.geometry import Point, Polygon # needed to search a point within a polygon
from scipy.interpolate import griddata # needed to interpolate TEMPO data to the point of interest
from scipy import stats # needed for linear regression analysis
import requests # needed to search for and download Pandora data
import codecs # needed to read Pandora data
import numpy as np
import h5py # needed to read DSCOVR_EPIC_L2_TO3 files
import matplotlib.pyplot as plt # needed to plot the resulting time series
from urllib.request import urlopen, Request # needed to search for and download Pandora data
from pathlib import Path # needed to check whether a needed data file is already downloaded
from datetime import datetime, timedelta # needed to work with time in plotting time series
TEMPO UV Aerosol Index from L2_O3TOT
This notebook illustrates comparison of TEMPO ultra-violet aerosol index (UVAI) against DSCOVR EPIC UVAI as well as comparison of DSCOVR EPIC AOD against AERONET.
Since AERONET data files cannot be downloaded programmatically, the user is expected to upload an AERONET AOD level 1.5 or 2.0 data file and provide its name as a constant. Since TEMPO spatial coverage is regional and limited to North America, it is the user's responsibility to select an AERONET station within TEMPO's field of regard (FOR). If the selected station is outside the FOR, no TEMPO time series will be generated.
The user is allowed to choose the time period of interest by providing start and end dates in the form YYYYMMDD. Please be aware that if the selected period of interest is outside the available time span of one of the sensors, the corresponding time series will not be generated.
TEMPO and DSCOVR data are downloaded on-the-fly with earthaccess library.
TEMPO and DSCOVR L2 AER data are interpolated to the location of the selected AERONET station.
1 Importing necessary libraries
2 Defining functions
2.1 function to read DSCOVR AER data files
function read_epic_l2_AER reads DSCOVR_EPIC_L2_AER product file given by its fname and returns arrays of 2D latitudes, longitudes, UVAI, and AOD along with their fill values, time, and wavelengths at which AOD are retrieved
def read_epic_l2_AER(fname):
    """Read a DSCOVR_EPIC_L2_AER granule (HDF5/HDF-EOS5 file).

    Parameters
    ----------
    fname : str
        Name of the DSCOVR_EPIC_L2_AER file. The observation timestamp is
        parsed from the next-to-last '_'-separated token of the file name
        (format YYYYMMDDhhmmss).

    Returns
    -------
    tuple
        (aod3D, fv_aod, uvai2D, fv_uvai, lat2D, fv_lat, lon2D, fv_lon,
         wl, fv_wl, yyyy, mm, dd, hh, mn, ss):
        3D AOD and 2D UVAI, latitude, and longitude arrays with their fill
        values, the wavelengths at which AOD is retrieved, and the granule
        time split into components. On any read failure every returned
        element is 0. so the caller can detect the error.
    """
    aod_name = '/HDFEOS/SWATHS/Aerosol NearUV Swath/Data Fields/FinalAerosolOpticalDepth'
    uvai_name = '/HDFEOS/SWATHS/Aerosol NearUV Swath/Data Fields/UVAerosolIndex'
    lat_name = '/HDFEOS/SWATHS/Aerosol NearUV Swath/Geolocation Fields/Latitude'
    lon_name = '/HDFEOS/SWATHS/Aerosol NearUV Swath/Geolocation Fields/Longitude'
    wl_name = '/HDFEOS/SWATHS/Aerosol NearUV Swath/Data Fields/Wavelength'

    try:
        f = h5py.File(fname, "r")

        item = f[aod_name]
        aod3D = np.array(item[:])
        fv_aod = item.fillvalue

        item = f[uvai_name]
        uvai2D = np.array(item[:])
        fv_uvai = item.fillvalue

        item = f[lat_name]
        lat2D = np.array(item[:])
        fv_lat = item.fillvalue

        item = f[lon_name]
        lon2D = np.array(item[:])
        fv_lon = item.fillvalue

        item = f[wl_name]
        wl = np.array(item[:])
        fv_wl = item.fillvalue

        f.close()

        # getting time from the granule's filename (…_YYYYMMDDhhmmss_NN.he5)
        fname_split = fname.split('_')
        timestamp = fname_split[-2]
        yyyy = int(timestamp[0 : 4])
        mm = int(timestamp[4 : 6])
        dd = int(timestamp[6 : 8])
        hh = int(timestamp[8 : 10])
        mn = int(timestamp[10 : 12])
        ss = int(timestamp[12 : 14])
    # narrowed from a bare except so SystemExit/KeyboardInterrupt are not swallowed
    except Exception:
        print("Unable to find or read hdf5 input granule file ", fname)
        aod3D = 0.
        fv_aod = 0.
        uvai2D = 0.
        fv_uvai = 0.
        lat2D = 0.
        fv_lat = 0.
        lon2D = 0.
        fv_lon = 0.
        wl = 0.
        fv_wl = 0.
        yyyy = 0.
        mm = 0.
        dd = 0.
        hh = 0.
        mn = 0.
        ss = 0.

    return aod3D, fv_aod, uvai2D, fv_uvai, lat2D, fv_lat, lon2D, fv_lon\
        , wl, fv_wl, yyyy, mm, dd, hh, mn, ss
2.2 function to read AERONET data files
Function read_aeronet_mw takes aeronet file name, a set of wavelengths, and start and end dates of the timeframe of interest as arguments and returns a timeseries array of AODs at these wavelengths.
If the wavelength is not in the list of wavelengths of the data file, or if there are no retrieval at it, empty array is returned.
def read_aeronet_mw(fname, wl, start_date, end_date):
    """Read an AERONET AOD time series at the requested wavelengths.

    Parameters
    ----------
    fname : str
        Name of an AERONET level 1.5/2.0 data file (CSV with a header line
        containing 'Date'; date and time are always columns 0 and 1).
    wl : sequence of int
        Wavelengths in nm; only those present in the file as 'AOD_<wl>nm'
        columns are kept (sorted ascending).
    start_date, end_date : int
        Inclusive period of interest as YYYYMMDD stamps.

    Returns
    -------
    tuple
        (AERONET_name, lat, lon, wln, date_time, aod): site name and
        coordinates, the wavelengths actually found, an (nt, 2) array of
        date/time strings, and an (nt, nwl) array of AODs. If none of the
        wavelengths is found, empty arrays and placeholder name/coordinates
        are returned.
    """
    wln = np.array(copy.deepcopy(sorted(wl)))
    print(wln)

    # 'with' guarantees the file is closed on every path — the original
    # leaked the handle when returning early on "wavelengths not found"
    with open(fname, 'r') as f_aero:
        # skip leading metadata lines until the header (contains 'Date')
        while True:
            line = f_aero.readline()
            if not line: break
            if 'Date' in line:
                header = line.split(',')
                nheader = len(header)
                break

        # finding positions of necessary information in the header
        # date and time are always 1st and 2nd, so no need to search for these names in the header
        date_pos = 0
        time_pos = 1

        # wavelengths first
        nwl = len(wln)
        aod_pos = np.empty([nwl], dtype = int)
        aod_pos[:] = -1
        for iwl in range(nwl):
            aod_str = str('AOD_%dnm' %wln[iwl])
            print(aod_str)
            for i in range(nheader):
                if header[i] == aod_str: aod_pos[iwl] = i

        # keep only wavelengths that actually have a column in the file
        wln = wln[aod_pos >= 0]
        nwl = len(wln)
        aod_pos = aod_pos[aod_pos >= 0]
        print(aod_pos)
        print(wln)

        if nwl == 0:
            print('wavelengths not found')
            aod = np.empty([0])
            wln = []
            date_time = np.empty([0, 2])
            lat = -999.
            lon = -999.
            AERONET_name = ''
            return AERONET_name, lat, lon, wln, date_time, aod

        # AERONET Site name
        name_pos = -1
        name_str = 'AERONET_Site_Name'
        for i in range(nheader):
            if header[i] == name_str: name_pos = i

        # AERONET Site latitude
        lat_pos = -1
        lat_str = 'Site_Latitude(Degrees)'
        for i in range(nheader):
            if header[i] == lat_str: lat_pos = i

        # AERONET Site longitude
        lon_pos = -1
        lon_str = 'Site_Longitude(Degrees)'
        for i in range(nheader):
            if header[i] == lon_str: lon_pos = i

        aod = np.empty([0, nwl])
        date_time = np.empty([0, 2])

        # reading the 1st line of data, get name, lat, lon,
        # and if aod is valid append arrays aod and date_time
        line = f_aero.readline()
        values = line.split(',')
        AERONET_name = values[name_pos]
        lat = float(values[lat_pos])
        lon = float(values[lon_pos])
        aod_loc = np.empty([nwl])
        date_loc = values[date_pos]
        time_loc = values[time_pos]

        # AERONET date format is dd:mm:yyyy
        dd = int(date_loc[0:2])
        mm = int(date_loc[3:5])
        yyyy = int(date_loc[6:10])
        date_stamp = yyyy*10000 + mm*100 + dd
        if date_stamp >= start_date and date_stamp <= end_date:
            for iwl in range(nwl): aod_loc[iwl] = float(values[aod_pos[iwl]])
            aod = np.append(aod, [aod_loc], axis = 0)
            date_time = np.append(date_time, [[date_loc, time_loc]], axis = 0)

        # reading all other lines of data,
        # if aod is valid append arrays aod and date_time
        while True:
            line = f_aero.readline()
            if not line: break
            values = line.split(',')

            aod_loc = np.empty([nwl])
            date_loc = values[date_pos]
            time_loc = values[time_pos]

            dd = int(date_loc[0:2])
            mm = int(date_loc[3:5])
            yyyy = int(date_loc[6:10])
            date_stamp = yyyy*10000 + mm*100 + dd
            if date_stamp < start_date or date_stamp > end_date: continue
            for iwl in range(nwl): aod_loc[iwl] = float(values[aod_pos[iwl]])
            aod = np.append(aod, [aod_loc], axis = 0)
            date_time = np.append(date_time, [[date_loc, time_loc]], axis = 0)

    return AERONET_name, lat, lon, wln, date_time, aod
2.3 functions to work with TEMPO
2.3.1 function to read UVAI from TEMPO_O3TOT_L2
function read_TEMPO_O3TOT_L2_UVAI reads the following arrays from the TEMPO L2 O3TOT product TEMPO_O3TOT_L2_V01(2):
uv_aerosol_index;
quality_flag;
and returns respective fields along with their fill values and coordinates of the pixels.
If one of the requested variables cannot be read, all returned variables are zeroed.
def read_TEMPO_O3TOT_L2_UVAI(fn):
    """Read UV aerosol index and its quality flag from a TEMPO_O3TOT_L2 granule.

    Parameters
    ----------
    fn : str
        Name of a TEMPO_O3TOT_L2_V01(2/3) netCDF file.

    Returns
    -------
    tuple
        (lat, lon, fv_geo, time, uvai, uvai_QF, uvai_fv): pixel coordinates
        with their (overridden) fill value, scanline times, the UVAI field
        with its fill value, and the per-pixel quality flags. If any
        requested variable cannot be read, all returned values are 0.
    """
    var_name = 'uv_aerosol_index'
    var_QF_name = 'quality_flag'

    try:
        ds = nc.Dataset(fn)

        prod = ds.groups['product'] # this opens group product, /product, as prod

        var = prod.variables[var_name] # uv_aerosol_index from group /product
        uvai = np.array(var)
        uvai_fv = var.getncattr('_FillValue')

        var_QF = prod.variables[var_QF_name] # quality_flag from group /product
        uvai_QF = np.array(var_QF)
        # there is no fill value for the quality flag.
        # Once it is available in the next version of the product,
        # un-comment the line below and add fv_QF to the return line.
        # fv_QF = var_QF.getncattr('_FillValue')

        geo = ds.groups['geolocation'] # this opens group geolocation, /geolocation, as geo

        lat = np.array(geo.variables['latitude']) # latitude from group /geolocation
        lon = np.array(geo.variables['longitude']) # longitude from group /geolocation
        fv_geo = geo.variables['latitude'].getncattr('_FillValue')
        # it appeared that garbage values of latitudes and longitudes in the L2 files
        # are 9.969209968386869E36 while fill value is -1.2676506E30
        # (after deeper search it was found that actual value in the file is -1.2676506002282294E30).
        # For this reason, fv_geo is set to 9.96921E36 to make the code working.
        # Once the problem is resolved and garbage values of latitudes and longitudes
        # equal to their fill value, the line below must be removed.
        fv_geo = 9.969209968386869E36

        time = np.array(geo.variables['time']) # scanline times from group /geolocation

        ds.close()
    # narrowed from a bare except so SystemExit/KeyboardInterrupt are not swallowed
    except Exception:
        print('variable '+var_name+' cannot be read in file '+fn)
        lat = 0.
        lon = 0.
        time = 0.
        fv_geo = 0.
        uvai = 0.
        uvai_QF = 0.
        # BUG FIX: the original assigned 'fv_uvai' here while returning 'uvai_fv',
        # which raised NameError whenever the file could not be read
        uvai_fv = 0.
        # fv_QF = -999

    return lat, lon, fv_geo, time, uvai, uvai_QF, uvai_fv
2.3.2 function creating TEMPO O3 granule polygon
def TEMPO_L2_polygon(lat, lon, fv_geo):
    """Build a counterclockwise boundary polygon of the valid granule pixels.

    Parameters
    ----------
    lat, lon : 2D numpy arrays
        Pixel latitudes and longitudes, shape (scanlines, pixels).
    fv_geo : float
        Value marking invalid coordinates.

    Returns
    -------
    numpy.ndarray
        (n, 2) array of (lon, lat) vertices along the right, top, left, and
        bottom boundaries of the valid region, ordered counterclockwise;
        empty if the granule has no valid pixels.
    """
    nx = lon.shape[0]
    ny = lon.shape[1]
    print('granule has %3d scanlines by %4d pixels' %(nx, ny))
    dpos = np.empty([0, 2])

    x_ind = np.empty([nx, ny], dtype = int) # creating array of x indices
    for ix in range(nx): x_ind[ix, :] = ix # populating array of x indices
    y_ind = np.empty([nx, ny], dtype = int) # creating array of y indices
    for iy in range(ny): y_ind[:, iy] = iy # populating array of y indices

    # BUG FIX: the mask must cover the whole 2D arrays; the original tested a
    # single leftover pixel [ix, iy] from the index-filling loops above
    mask = (lon != fv_geo)&(lat != fv_geo)
    if len(lon[mask]) == 0:
        print('the granule is empty - no meaningful positions')
        return dpos

    # right boundary: scanline with the smallest x index holding valid pixels
    r_m = min(x_ind[mask].flatten())
    local_mask = (lon[r_m, :] != fv_geo)&(lat[r_m, :] != fv_geo)
    r_b = np.stack((lon[r_m, local_mask], lat[r_m, local_mask])).T

    # left boundary: scanline with the largest x index holding valid pixels
    l_m = max(x_ind[mask].flatten())
    local_mask = (lon[l_m, :] != fv_geo)&(lat[l_m, :] != fv_geo)
    l_b = np.stack((lon[l_m, local_mask], lat[l_m, local_mask])).T

    # top and bottom boundaries: first/last valid pixel of every interior scanline
    t_b = np.empty([0, 2])
    b_b = np.empty([0, 2])
    for ix in range(r_m + 1, l_m):
        local_mask = (lon[ix, :] != fv_geo)&(lat[ix, :] != fv_geo)
        local_y_ind = y_ind[ix, local_mask]
        # robustness: skip scanlines with no valid pixels (min/max of an
        # empty sequence would raise ValueError)
        if len(local_y_ind) == 0: continue
        y_ind_top = min(local_y_ind)
        y_ind_bottom = max(local_y_ind)
        t_b = np.append(t_b, [[lon[ix, y_ind_top], lat[ix, y_ind_top]]], axis=0)
        b_b = np.append(b_b, [[lon[ix, y_ind_bottom], lat[ix, y_ind_bottom]]], axis=0)

    # combining right, top, left, and bottom boundaries together, going along the combined boundary counterclockwise
    dpos = np.append(dpos, r_b[ : :-1, :], axis=0) # this adds right boundary, counterclockwise
    dpos = np.append(dpos, t_b, axis=0) # this adds top boundary, counterclockwise
    dpos = np.append(dpos, l_b, axis=0) # this adds left boundary, counterclockwise
    dpos = np.append(dpos, b_b[ : :-1, :], axis=0) # this adds bottom boundary, counterclockwise

    print('polygon shape: ', dpos.shape)
    return dpos
Main code begins here
3 Establishing access to EarthData
User needs to create an account at https://www.earthdata.nasa.gov/ Function earthaccess.login prompts for EarthData login and password.
# logging in: prompts for EarthData credentials (account at https://www.earthdata.nasa.gov/)
auth = earthaccess.login(strategy="interactive", persist=True)

# Creating a .dodsrc file in the home directory so OPeNDAP-aware clients
# can find the EarthData cookie jar and .netrc credentials
homeDir = os.path.expanduser("~") + os.sep

# 'with' closes the file automatically; the explicit close() of the original was redundant
with open(homeDir + '.dodsrc', 'w') as file:
    file.write('HTTP.COOKIEJAR={}.urs_cookies\n'.format(homeDir))
    file.write('HTTP.NETRC={}.netrc'.format(homeDir))

print('Saved .dodsrc to:', homeDir)

# Set appropriate permissions for Linux/macOS
if platform.system() != "Windows":
    # make ~/.netrc inaccessible to group and others
    Popen('chmod og-rw ~/.netrc', shell=True)
else:
    # Copy dodsrc to working directory in Windows
    shutil.copy2(homeDir + '.dodsrc', os.getcwd())
    print('Copied .dodsrc to:', os.getcwd())
Saved .dodsrc to: /home/jovyan/
4 Select timeframe of interest
the timeframe is going to be common for all instruments
# Select the timeframe of interest (common for all instruments) as YYYYMMDD stamps
print('enter period of interest, start and end dates, in the form YYYYMMDD')
datestamp_ini = input('enter start date of interest ')
datestamp_fin = input('enter end date of interest ')

start_date = int(datestamp_ini)
end_date = int(datestamp_fin)

# split the integer YYYYMMDD stamps into year, month, day components
yyyy_ini = start_date//10000
mm_ini = (start_date//100 - yyyy_ini*100)
dd_ini = (start_date - yyyy_ini*10000 - mm_ini*100)

yyyy_fin = end_date//10000
mm_fin = (end_date//100 - yyyy_fin*100)
dd_fin = (end_date - yyyy_fin*10000 - mm_fin*100)
print(yyyy_ini, mm_ini, dd_ini, yyyy_fin, mm_fin, dd_fin)

# datetime strings bounding the period, used as 'temporal' bounds in earthaccess searches
date_start = str('%4.4i-%2.2i-%2.2i 00:00:00' %(yyyy_ini, mm_ini, dd_ini))
date_end = str('%4.4i-%2.2i-%2.2i 23:59:59' %(yyyy_fin, mm_fin, dd_fin))
enter period of interest, start and end dates, in the form YYYYMMDD
enter start date of interest 20230805
enter end date of interest 20230805
2023 8 5 2023 8 5
4.1 calculate datetime objects for the beginning and end of the timeframe of interest
using datetime library
# datetime object for the very beginning of the timeframe of interest
yyyy = yyyy_ini
mm = mm_ini
dd = dd_ini
hh = 0
mn = 0
ss = 0
us = 0 # microseconds
dt_ini = datetime(yyyy, mm, dd, hh, mn, ss, us)

# datetime object for the very end of the timeframe of interest
yyyy = yyyy_fin
mm = mm_fin
dd = dd_fin
hh = 23
mn = 59
ss = 59
us = 999999 # microseconds
dt_fin = datetime(yyyy, mm, dd, hh, mn, ss, us) # last representable moment of the final day (1 microsecond before midnight)
5 work with AERONET data
As of now, April 24, 2024, AERONET data cannot be downloaded programmatically. User must upload an AERONET data file using AERONET download tool here https://aeronet.gsfc.nasa.gov/new_web/webtool_aod_v3.html. The first line below is the name of the AERONET file used in this example.
# name of the user-uploaded AERONET level 1.5 data file (see the AERONET download tool)
fn = '20230801_20241231_CCNY.lev15'
# wavelengths of interest, nm
wl = [500, 340, 380]
5.1 read AERONET data
# read AERONET data; the station location becomes the point of interest (POI)
AERONET_name, lat, lon, wln, date_time, aod = read_aeronet_mw(fn, wl, start_date, end_date)
POI_lat = lat
POI_lon = lon
POI_name = AERONET_name
nt = len(date_time)

print(wln)
print(len(aod))
print('name %s, latitude %9.4f, longitude %10.4f' %(AERONET_name, lat, lon))
nwl = len(wln)
# report the number of positive (valid) AOD retrievals per wavelength
for iwl in range(nwl): print(wln[iwl], len(aod[aod[:, iwl] > 0, iwl]))
[340 380 500]
AOD_340nm
AOD_380nm
AOD_500nm
[25 24 18]
[340 380 500]
[340 380 500]
22
name CCNY, latitude 40.8213, longitude -73.9490
340 22
380 22
500 22
5.2 create timeseries of AERONET AODs
calculate time difference, dy_loc, in fractional days between AERONET observations and the beginning of the timeframe of interest, write date and time as yyyy, mm, dd, hh, mn, ss, dt_loc along with wavelengths and corresponding AODs to a file
# write the AERONET AOD timeseries to a text file named after quantity, period, and site;
# times are converted to fractional days since the beginning of the timeframe (dt_ini)
out_Q_AERONET = 'AOD_AERONET'
fname_out_AERONET = out_Q_AERONET+'_'+datestamp_ini+'_'+datestamp_fin+'_'\
                   +POI_name+'_'+str('%08.4fN_%08.4fW.txt' %(POI_lat, -POI_lon))
# 'with' guarantees the output file is closed even if a row fails to parse
with open(fname_out_AERONET, 'w') as fout:
    fout.write('timeseries of '+out_Q_AERONET+' at '+POI_name+' '+str('%08.4fN %08.4fW' %(POI_lat, -POI_lon))+'\n')
    fout.write('yyyy mm dd hh mn ss time,days wl,nm AOD wl,nm AOD wl,nm AOD\n')

    for it in range(nt):
        d_t = date_time[it]
        # AERONET date is dd:mm:yyyy, time is hh:mm:ss
        yyyy = int(d_t[0][6:10])
        mm = int(d_t[0][3:5])
        dd = int(d_t[0][0:2])
        hh = int(d_t[1][0:2])
        mn = int(d_t[1][3:5])
        ss = int(d_t[1][6:8])
        us = 0 # microseconds
        dt_AERONET = datetime(yyyy, mm, dd, hh, mn, ss, us)
        dt_loc = (dt_AERONET - dt_ini).total_seconds()/86400 # fractional days since dt_ini
        fout.write('%4i %2.2i %2.2i %2.2i %2.2i %2.2i %9.6f' %(yyyy, mm, dd, hh, mn, ss, dt_loc))
        # loop variable renamed from 'wl' — the original clobbered the
        # module-level wavelength list; AODs are clipped from below at -1
        # to keep the fixed-width format intact
        for wl_loc, tau in zip(wln, aod[it]): fout.write(' %5.0f %6.3f' %(wl_loc, max(tau, -1.)))
        fout.write('\n')
        print(d_t, yyyy, mm, dd, hh, mn, ss, dt_loc, aod[it])
['05:08:2023' '12:21:54'] 2023 8 5 12 21 54 0.5152083333333334 [0.494809 0.458947 0.333314]
['05:08:2023' '12:37:38'] 2023 8 5 12 37 38 0.5261342592592593 [0.460212 0.425241 0.306128]
['05:08:2023' '12:39:36'] 2023 8 5 12 39 36 0.5275 [0.468725 0.432512 0.311574]
['05:08:2023' '12:41:35'] 2023 8 5 12 41 35 0.5288773148148148 [0.484136 0.453612 0.324803]
['05:08:2023' '12:43:34'] 2023 8 5 12 43 34 0.5302546296296297 [0.488803 0.454636 0.328589]
['05:08:2023' '12:45:32'] 2023 8 5 12 45 32 0.5316203703703704 [0.490196 0.453847 0.335888]
['05:08:2023' '12:54:52'] 2023 8 5 12 54 52 0.5381018518518519 [0.478464 0.44467 0.325635]
['05:08:2023' '13:10:01'] 2023 8 5 13 10 1 0.5486226851851852 [0.494555 0.460215 0.333986]
['05:08:2023' '13:19:13'] 2023 8 5 13 19 13 0.5550115740740741 [0.486189 0.452743 0.330567]
['05:08:2023' '13:35:01'] 2023 8 5 13 35 1 0.5659837962962962 [0.418951 0.386741 0.276587]
['05:08:2023' '13:44:19'] 2023 8 5 13 44 19 0.5724421296296296 [0.404995 0.374061 0.266198]
['05:08:2023' '13:59:33'] 2023 8 5 13 59 33 0.5830208333333333 [0.427243 0.394882 0.280744]
['05:08:2023' '14:09:10'] 2023 8 5 14 9 10 0.5896990740740741 [0.400944 0.369637 0.262378]
['05:08:2023' '14:24:59'] 2023 8 5 14 24 59 0.6006828703703704 [0.402717 0.373348 0.269201]
['05:08:2023' '14:29:02'] 2023 8 5 14 29 2 0.6034953703703704 [0.376444 0.346455 0.245482]
['05:08:2023' '15:32:29'] 2023 8 5 15 32 29 0.6475578703703704 [0.326602 0.30022 0.208331]
['05:08:2023' '15:42:29'] 2023 8 5 15 42 29 0.6545023148148148 [0.35099 0.325715 0.23699 ]
['05:08:2023' '15:47:29'] 2023 8 5 15 47 29 0.657974537037037 [0.328565 0.302978 0.216409]
['05:08:2023' '16:02:30'] 2023 8 5 16 2 30 0.6684027777777778 [0.334116 0.308406 0.220808]
['05:08:2023' '16:04:48'] 2023 8 5 16 4 48 0.67 [0.334563 0.308769 0.221642]
['05:08:2023' '16:14:27'] 2023 8 5 16 14 27 0.6767013888888889 [0.405008 0.380736 0.294151]
['05:08:2023' '16:30:17'] 2023 8 5 16 30 17 0.6876967592592592 [0.356773 0.32694 0.246413]
6 work with DSCOVR data
6.1 search for DSCOVR EPIC granules
falling into the timeframe of interest and 1 degree box surrounding the AERONET location
# search for DSCOVR EPIC L2 AER granules within the timeframe of interest
# and a 1x1 degree box surrounding the AERONET location
short_name = 'DSCOVR_EPIC_L2_AER' # collection name to search for in the EarthData

bbox = (POI_lon - 0.5, POI_lat - 0.5, POI_lon + 0.5, POI_lat + 0.5)

POI_results_EPIC = earthaccess.search_data(short_name = short_name\
                                           , temporal = (date_start, date_end)\
                                           , bounding_box = bbox)

n_EPIC = len(POI_results_EPIC)

print('total number of DSCOVR EPIC L2_AER granules found for POI', POI_name\
      , '\nwithin period of interes between', date_start, 'and', date_end, '\nis', n_EPIC)
Granules found: 11
total number of DSCOVR EPIC L2_AER granules found for POI CCNY
within period of interes between 2023-08-05 00:00:00 and 2023-08-05 23:59:59
is 11
6.2 ensure all granules have download links
The lines below, executed before the call of earthaccess.download(), check whether all found granules have download links; granules without links are removed from the list of search results. Without this step, those granules would crash the call of earthaccess.download().
# collect download links; granules without links would crash earthaccess.download(),
# so they are removed from the list of search results
granule_links_EPIC = []
POI_results_EPIC_bad = []
for result in POI_results_EPIC:
    try:
        granule_links_EPIC.append(result['umm']['RelatedUrls'][0]['URL'])
    # narrowed from a bare except: only missing/odd metadata structure is expected here
    except (KeyError, IndexError, TypeError):
        POI_results_EPIC_bad.append(result)

for granule_link in granule_links_EPIC: print(granule_link)
for result in POI_results_EPIC_bad: POI_results_EPIC.remove(result)
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805135123_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805114028_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805223501_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805191839_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805170745_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805145650_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805181312_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805212934_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805160217_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805202406_03.he5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L2_AER_03/2023/08/DSCOVR_EPIC_L2_AER_03_20230805124555_03.he5
6.3 download DSCOVR EPIC granules using earthaccess library
Check whether all downloads were successful and try one more time to get the failed granules. If the second attempt is not successful, remove the failed granules from the list.
# download DSCOVR EPIC granules with the earthaccess library
downloaded_files = earthaccess.download(POI_results_EPIC, local_path='.',)

# Checking whether all DSCOVR EPIC data files have been downloaded.
# BUG FIX: iterate over a copy of the list — the original removed elements
# from granule_links_EPIC while iterating it, which skips elements
for granule_link in list(granule_links_EPIC):
    EPIC_fname = granule_link.split('/')[-1]
    # check if file exists in the local directory
    if not os.path.exists(EPIC_fname):
        print(EPIC_fname, 'does not exist in local directory')
        # repeat attempt to download
        downloaded_files = earthaccess.download(granule_link,
                                                local_path='.')
        # if file still does not exist in the directory, remove its link from the list of links
        if not os.path.exists(EPIC_fname): granule_links_EPIC.remove(granule_link)
Getting 11 granules, approx download size: 0.0 GB
File DSCOVR_EPIC_L2_AER_03_20230805135123_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805223501_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805114028_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805170745_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805145650_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805191839_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805181312_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805212934_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805160217_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805202406_03.he5 already downloaded
File DSCOVR_EPIC_L2_AER_03_20230805124555_03.he5 already downloaded
6.4 read DSCOVR EPIC granules and compile timeseries of AODs and UVAI
# read DSCOVR EPIC granules and compile timeseries of AODs and UVAI interpolated to the POI.
# parameter geo_deviation defines maximum difference between lats and lons of EPIC pixels and that of AERONET
geo_deviation = 0.075
pp = np.array([POI_lon, POI_lat]) # interpolation target point (lon, lat)

out_Q_EPIC = 'AOD_UVAI_EPIC'

fout = open(out_Q_EPIC+'_'+datestamp_ini+'_'+datestamp_fin+'_'\
            +POI_name+'_'+str('%08.4fN_%08.4fW.txt' %(POI_lat, -POI_lon)), 'w')
fout.write('timeseries of '+out_Q_EPIC+' at '+POI_name+' '+str('%08.4fN %08.4fW' %(POI_lat, -POI_lon))+'\n')
fout.write('yyyy mm dd hh mn ss time,days wl,nm AOD wl,nm AOD wl,nm AOD UVAI\n')

for granule_link in sorted(granule_links_EPIC):
    EPIC_fname = granule_link.split('/')[-1]
    print(EPIC_fname)

    aod3D, fv_aod, uvai2D, fv_uvai, lat2D, fv_lat, lon2D, fv_lon\
    , wl, fv_wl, yyyy, mm, dd, hh, mn, ss = read_epic_l2_AER(EPIC_fname)

    dt_EPIC = datetime(yyyy, mm, dd, hh, mn, ss)
    dt_loc = (dt_EPIC - dt_ini).total_seconds()/86400 # fractional days since dt_ini
    print(yyyy, mm, dd, hh, mn, ss, (dt_EPIC - dt_ini).total_seconds()/86400)
    fout.write('%4i %2.2i %2.2i %2.2i %2.2i %2.2i %9.6f' %(yyyy, mm, dd, hh, mn, ss, dt_loc))

    # pixels within geo_deviation of the POI in both coordinates
    mask_geo = (lat2D < POI_lat+geo_deviation)&(lat2D > POI_lat-geo_deviation)\
              &(lon2D < POI_lon+geo_deviation)&(lon2D > POI_lon-geo_deviation)
    nwl_EPIC = len(wl)

    # AOD at each EPIC wavelength: interpolate nearby positive retrievals to the POI
    for iwl in range(nwl_EPIC):
        mask = mask_geo&(aod3D[:, :, iwl] > 0.)

        lat_loc = lat2D[mask]
        lon_loc = lon2D[mask]
        aod_loc = aod3D[mask, iwl]
        n_loc = len(aod_loc)
        if n_loc < 1: prod_loc = -0.999 # no valid pixels near the POI
        else:
            points = np.empty([0, 2])
            ff = np.empty(0)
            for i in range(n_loc):
                points = np.append(points, [[lon_loc[i], lat_loc[i]]], axis=0)
                ff = np.append(ff, aod_loc[i])
            try:
                [prod_loc] = griddata(points, ff, pp, method='linear', fill_value=-0.999, rescale=False)
            # griddata fails e.g. when points are degenerate; fall back to the mean
            except Exception:
                try:
                    prod_loc = np.mean(ff)
                except Exception: prod_loc = -0.999
        fout.write(' %5.0f %6.3f' %(wl[iwl], prod_loc))
        print(wl[iwl], prod_loc)

    # UVAI: same interpolation, using the fill value to reject invalid pixels
    mask = mask_geo&(uvai2D != fv_uvai)

    lat_loc = lat2D[mask]
    lon_loc = lon2D[mask]
    uvai_loc = uvai2D[mask]
    # BUG FIX: the original counted len(aod_loc) left over from the AOD loop
    n_loc = len(uvai_loc)
    if n_loc < 1: prod_loc = -99. # no valid pixels near the POI
    else:
        points = np.empty([0, 2])
        ff = np.empty(0)
        for i in range(n_loc):
            points = np.append(points, [[lon_loc[i], lat_loc[i]]], axis=0)
            ff = np.append(ff, uvai_loc[i])
        try:
            [prod_loc] = griddata(points, ff, pp, method='linear', fill_value=-99., rescale=False)
        except Exception:
            try:
                prod_loc = np.mean(ff)
            except Exception: prod_loc = -99.
    fout.write(' %6.2f\n' %prod_loc)
    print('UVAI', prod_loc)

fout.close()
DSCOVR_EPIC_L2_AER_03_20230805114028_03.he5
2023 8 5 11 40 28 0.4864351851851852
340.0 -0.999
388.0 -0.999
500.0 -0.999
UVAI -99.0
DSCOVR_EPIC_L2_AER_03_20230805124555_03.he5
2023 8 5 12 45 55 0.531886574074074
340.0 -0.999
388.0 -0.999
500.0 -0.999
UVAI -99.0
DSCOVR_EPIC_L2_AER_03_20230805135123_03.he5
2023 8 5 13 51 23 0.5773495370370371
340.0 0.658370852470398
388.0 0.6070993542671204
500.0 0.5208560228347778
UVAI 1.0490291118621826
DSCOVR_EPIC_L2_AER_03_20230805145650_03.he5
2023 8 5 14 56 50 0.622800925925926
340.0 1.0037826597690582
388.0 0.925611823797226
500.0 0.7941211760044098
UVAI 1.269827425479889
DSCOVR_EPIC_L2_AER_03_20230805160217_03.he5
2023 8 5 16 2 17 0.6682523148148148
340.0 -0.999
388.0 -0.999
500.0 -0.999
UVAI -99.0
DSCOVR_EPIC_L2_AER_03_20230805170745_03.he5
2023 8 5 17 7 45 0.7137152777777778
340.0 2.0829079262483576
388.0 1.6970317278376417
500.0 1.0809463897588194
UVAI -0.14213168194983128
DSCOVR_EPIC_L2_AER_03_20230805181312_03.he5
2023 8 5 18 13 12 0.7591666666666667
340.0 1.6575589177292698
388.0 1.528474366786192
500.0 1.3113422907254944
UVAI 0.9618683264479624
DSCOVR_EPIC_L2_AER_03_20230805191839_03.he5
2023 8 5 19 18 39 0.8046180555555555
340.0 1.46158707100744
388.0 1.347764097820558
500.0 1.1563033843281367
UVAI 1.5943157504218373
DSCOVR_EPIC_L2_AER_03_20230805202406_03.he5
2023 8 5 20 24 6 0.8500694444444444
340.0 -0.999
388.0 -0.999
500.0 -0.999
UVAI -99.0
DSCOVR_EPIC_L2_AER_03_20230805212934_03.he5
2023 8 5 21 29 34 0.8955324074074074
340.0 4.511193513870239
388.0 4.159878492355347
500.0 3.568934202194214
UVAI 3.3587218523025513
DSCOVR_EPIC_L2_AER_03_20230805223501_03.he5
2023 8 5 22 35 1 0.9409837962962962
340.0 -0.999
388.0 -0.999
500.0 -0.999
UVAI -99.0
7 Work with TEMPO data
7.1 Search for TEMPO data files within 0.5 degree range around the POI
(position of the AERONET station) falling into the timeframe of interest
# Setting TEMPO name constants
short_name = 'TEMPO_O3TOT_L2' # collection name to search for in the EarthData
out_Q = 'UVAI_TEMPO' # name of the output quantity with unit
version = 'V03'

# Searching TEMPO data files within 0.5 degree range around the POI
bbox = (POI_lon - 0.5, POI_lat - 0.5, POI_lon + 0.5, POI_lat + 0.5)

POI_results = earthaccess.search_data(short_name = short_name\
                                      , version = version\
                                      , temporal = (date_start, date_end)\
                                      , bounding_box = bbox)
n_gr = len(POI_results)
print('total number of TEMPO version ', version,' granules found for POI', POI_name, \
      '\nwithin period of interes between', date_start, 'and', date_end, ' is', n_gr)

# without TEMPO granules there is nothing to compare against — stop here
if n_gr == 0:
    print('program terminated')
    sys.exit()

# Print links to the granules.
granule_links = []
for result in POI_results: granule_links.append(result['umm']['RelatedUrls'][0]['URL'])
for granule_link in granule_links: print(granule_link)
Granules found: 11
total number of TEMPO version V03 granules found for POI CCNY
within period of interes between 2023-08-05 00:00:00 and 2023-08-05 23:59:59 is 11
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T122445Z_S001G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T132716Z_S002G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T142947Z_S003G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T153218Z_S004G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T163449Z_S005G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T173720Z_S006G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T183951Z_S007G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T194222Z_S008G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T204453Z_S009G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T214724Z_S010G03.nc
https://data.asdc.earthdata.nasa.gov/asdc-prod-protected/TEMPO/TEMPO_O3TOT_L2_V03/2023.08.05/TEMPO_O3TOT_L2_V03_20230805T224955Z_S011G03.nc
7.2 download TEMPO files
Check whether all downloads were successful and try to download the failed granules one more time. If the second attempt fails, remove those granules from the list.
# Downloading TEMPO data files
downloaded_files = earthaccess.download(
    POI_results,
    local_path='.')

# Checking whether all TEMPO data files have been downloaded.
# BUG FIX: iterate over a copy of the list — the original removed elements
# from granule_links while iterating it, which skips elements
for granule_link in list(granule_links):
    TEMPO_fname = granule_link.split('/')[-1]
    # check if file exists in the local directory
    if not os.path.exists(TEMPO_fname):
        print(TEMPO_fname, 'does not exist in local directory')
        # repeat attempt to download
        downloaded_files = earthaccess.download(granule_link,
                                                local_path='.')
        # if file still does not exist in the directory, remove its link from the list of links
        if not os.path.exists(TEMPO_fname): granule_links.remove(granule_link)
Getting 11 granules, approx download size: 1.14 GB
Accessing cloud dataset using dataset endpoint credentials: https://data.asdc.earthdata.nasa.gov/s3credentials
Downloaded: TEMPO_O3TOT_L2_V03_20230805T122445Z_S001G03.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T132716Z_S002G03.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T142947Z_S003G03.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T153218Z_S004G03.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T163449Z_S005G03.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T173720Z_S006G03.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T183951Z_S007G03.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T194222Z_S008G03.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T204453Z_S009G03.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T214724Z_S010G03.nc
Downloaded: TEMPO_O3TOT_L2_V03_20230805T224955Z_S011G03.nc
7.3 compile TEMPO UVAI timeseries
# --- Compile the TEMPO UVAI time series at the point of interest (POI) ---

# Month lengths for a non-leap year; index 0 = January.
days = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]

# Output text file for the TEMPO UVAI time series at the POI.
fout = open(out_Q+'_'+datestamp_ini+'_'+datestamp_fin+'_'\
            +POI_name+'_'+str('%08.4fN_%08.4fW.txt' %(POI_lat, -POI_lon)), 'w')
fout.write('timeseries of '+out_Q+' at '+POI_name+' '\
           +str('%08.4fN %08.4fW' %(POI_lat, -POI_lon))+'\n')
fout.write('yyyy mm dd hh mn ss time,days UVAI\n')

# POI as a shapely Point; note (lon, lat) ordering.
pp = np.array([POI_lon, POI_lat])
p = Point(pp)

for granule_link in sorted(granule_links):
    last_slash_ind = granule_link.rfind('/')
    fname = granule_link[last_slash_ind+1 : ]
    print('\ngranule ', fname)

    lat, lon, fv_geo, time, uvai, uvai_QF, uvai_fv = read_TEMPO_O3TOT_L2_UVAI(fname)
    # The reader returns a scalar on failure - skip unreadable granules.
    if isinstance(lat, float): continue

    # Granule footprint polygon, used to quickly reject granules not covering the POI.
    polygon = TEMPO_L2_polygon(lat, lon, fv_geo)  # create granule polygon
    coords_poly = list(polygon)
    poly = Polygon(coords_poly)

    nx = lon.shape[0]  # number of scanlines (mirror steps)
    ny = lon.shape[1]  # number of pixels per scanline

    # Getting time from the granule filename (...YYYYMMDDThhmmssZ...).
    # This time corresponds to the 1st element of array `time` above,
    # which is GPS time in seconds.
    Tind = fname.rfind('T')
    yyyy = int(fname[Tind-8 : Tind-4])
    mm = int(fname[Tind-4 : Tind-2])
    dd = int(fname[Tind-2 : Tind])
    hh = int(fname[Tind+1 : Tind+3])
    mn = int(fname[Tind+3 : Tind+5])
    ss = float(fname[Tind+5 : Tind+7])

    # Check whether POI is in the granule. If not - move to the next granule.
    if not p.within(poly): continue
    print('point', p, 'is in granule polygon' )

    POI_found = False
    for ix in range(nx-1):
        for iy in range(ny-1):
            # All four corners of the local cell must have valid geolocation.
            if lon[ix, iy] == fv_geo: continue
            if lat[ix, iy] == fv_geo: continue
            if lon[ix, iy+1] == fv_geo: continue
            if lat[ix, iy+1] == fv_geo: continue
            if lon[ix+1, iy+1] == fv_geo: continue
            if lat[ix+1, iy+1] == fv_geo: continue
            if lon[ix+1, iy] == fv_geo: continue
            if lat[ix+1, iy] == fv_geo: continue

            coords_poly_loc = [[lon[ix, iy], lat[ix, iy]], [lon[ix, iy+1], lat[ix, iy+1]]\
                             , [lon[ix+1, iy+1], lat[ix+1, iy+1]], [lon[ix+1, iy], lat[ix+1, iy]]]
            poly_loc = Polygon(coords_poly_loc)

            if p.within(poly_loc):
                print('scanl pixel latitude longitude UVAI UVAI_QF')
                for scl in range(ix, ix+2, 1):
                    for pix in range(iy, iy+2, 1):
                        print(" %3d %4d %9.6f %10.6f %5.1f %6i"\
                              %(scl, pix, lat[scl, pix], lon[scl, pix]\
                              , uvai[scl, pix], uvai_QF[scl, pix]))
                POI_found = True
                print('POI', POI_name, 'at', POI_lat,'N ', POI_lon, 'E found')

                # The four cell corners surrounding the POI.
                uvai_loc = np.array([uvai[ix, iy],\
                                     uvai[ix, iy+1],\
                                     uvai[ix+1, iy+1],\
                                     uvai[ix+1, iy]])
                # QF kept for the (currently disabled) stricter mask below.
                uvai_QF_loc = np.array([uvai_QF[ix, iy],\
                                        uvai_QF[ix, iy+1],\
                                        uvai_QF[ix+1, iy+1],\
                                        uvai_QF[ix+1, iy]])
                lat_loc = np.array([lat[ix, iy], lat[ix, iy+1],\
                                    lat[ix+1, iy+1], lat[ix+1, iy]])
                lon_loc = np.array([lon[ix, iy], lon[ix, iy+1],\
                                    lon[ix+1, iy+1], lon[ix+1, iy]])
                # mask_noFV = (uvai_loc != uvai_fv)&\
                #             (uvai_QF_loc == 0)
                mask_noFV = (uvai_loc != uvai_fv)

                points_noFV = np.column_stack((lon_loc[mask_noFV], lat_loc[mask_noFV]))
                ff_noFV = uvai_loc[mask_noFV]

                # Interpolate to the POI: skip if no valid corners; average if
                # fewer than 4; bilinear (griddata) if all 4 corners are valid.
                if ff_noFV.shape[0] == 0:
                    continue
                elif ff_noFV.shape[0] < 4:
                    uvai_noFV = np.mean(ff_noFV)
                elif ff_noFV.shape[0] == 4:
                    uvai_noFV = griddata(points_noFV, ff_noFV, pp,\
                                         method='linear', fill_value=-99., rescale=False)[0]
                if uvai_noFV == -99.: continue

                # Handling time first: the cell time is the mean of the two
                # bracketing scanline times, taken as an offset (seconds) from
                # the granule start time parsed from the filename.
                delta_t = (time[ix+1] + time[ix])*0.5 - time[0]
                ss = ss + delta_t
                if ss >= 60.:
                    delta_mn = int(ss/60.)
                    ss = ss - 60.*delta_mn
                    mn = mn + delta_mn
                    if mn >= 60:
                        mn = mn - 60
                        hh = hh + 1
                        if hh == 24:
                            hh = hh - 24
                            dd = dd + 1
                            # bug fix: `days` is 0-based while mm is 1-based,
                            # so the month length is days[mm-1], not days[mm].
                            day_month = days[mm-1]
                            if (yyyy//4)*4 == yyyy and mm == 2: day_month = day_month + 1
                            if dd > day_month:
                                dd = 1
                                mm = mm + 1
                                if mm > 12:
                                    mm = 1
                                    yyyy = yyyy + 1

                int_ss = int(ss)
                us = int((ss - int_ss)*10**6)  # fractional second -> microseconds
                dt_TEMPO = datetime(yyyy, mm, dd, hh, mn, int_ss, us)
                # Time axis value: days since the beginning of the period.
                dt_loc = (dt_TEMPO - dt_ini).total_seconds()/86400

                fout.write(str('%4.4i %2.2i %2.2i %2.2i %2.2i %2.2i %9.6f %10.3e '\
                               %(yyyy, mm, dd, hh, mn, ss, dt_loc, uvai_noFV)))
                fout.write(str('%9.4fN %9.4fW %10.3e '\
                               %(lat[ix, iy], -lon[ix, iy], uvai[ix, iy])))
                fout.write(str('%9.4fN %9.4fW %10.3e '\
                               %(lat[ix, iy+1], -lon[ix, iy+1], uvai[ix, iy+1])))
                fout.write(str('%9.4fN %9.4fW %10.3e '\
                               %(lat[ix+1, iy+1], -lon[ix+1, iy+1], uvai[ix+1, iy+1])))
                fout.write(str('%9.4fN %9.4fW %10.3e\n'\
                               %(lat[ix+1, iy], -lon[ix+1, iy], uvai[ix+1, iy])))
                break
        if POI_found: break

fout.close()
granule TEMPO_O3TOT_L2_V03_20230805T122445Z_S001G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon
scanl pixel latitude longitude UVAI UVAI_QF
59 661 40.834648 -73.943382 -0.9 8
59 662 40.813667 -73.949684 -0.8 8
60 661 40.831947 -74.001534 -0.8 8
60 662 40.810905 -74.007851 -0.8 8
POI CCNY at 40.821313 N -73.949036 E found
granule TEMPO_O3TOT_L2_V03_20230805T132716Z_S002G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon
scanl pixel latitude longitude UVAI UVAI_QF
59 660 40.834454 -73.943253 -1.8 13
59 661 40.813469 -73.949562 -1.4 13
60 660 40.833084 -74.000923 -1.8 13
60 661 40.812099 -74.007202 -1.5 13
POI CCNY at 40.821313 N -73.949036 E found
granule TEMPO_O3TOT_L2_V03_20230805T142947Z_S003G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon
scanl pixel latitude longitude UVAI UVAI_QF
58 660 40.837505 -73.885612 -1.5 13
58 661 40.816528 -73.891937 -1.3 13
59 660 40.836674 -73.944603 -2.4 13
59 661 40.815800 -73.950844 -2.3 13
POI CCNY at 40.821313 N -73.949036 E found
granule TEMPO_O3TOT_L2_V03_20230805T153218Z_S004G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon
scanl pixel latitude longitude UVAI UVAI_QF
58 660 40.821766 -73.911758 -2.4 13
58 661 40.800797 -73.918060 -2.4 13
59 660 40.821194 -73.968063 -2.6 13
59 661 40.800110 -73.974419 -2.5 13
POI CCNY at 40.821313 N -73.949036 E found
granule TEMPO_O3TOT_L2_V03_20230805T163449Z_S005G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon
scanl pixel latitude longitude UVAI UVAI_QF
58 660 40.824287 -73.898758 -1.1 13
58 661 40.803333 -73.905052 -1.5 13
59 660 40.823364 -73.957039 -1.6 13
59 661 40.802170 -73.963463 -1.7 13
POI CCNY at 40.821313 N -73.949036 E found
granule TEMPO_O3TOT_L2_V03_20230805T173720Z_S006G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon
scanl pixel latitude longitude UVAI UVAI_QF
58 660 40.831722 -73.893791 -1.5 13
58 661 40.810757 -73.900108 -1.5 13
59 660 40.829990 -73.952919 -2.1 13
59 661 40.808990 -73.959221 -1.9 13
POI CCNY at 40.821313 N -73.949036 E found
granule TEMPO_O3TOT_L2_V03_20230805T183951Z_S007G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon
scanl pixel latitude longitude UVAI UVAI_QF
58 659 40.843174 -73.891533 -1.4 13
58 660 40.822109 -73.897896 -1.5 13
59 659 40.841095 -73.952232 -1.2 13
59 660 40.820091 -73.958534 -1.6 13
POI CCNY at 40.821313 N -73.949036 E found
granule TEMPO_O3TOT_L2_V03_20230805T194222Z_S008G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon
scanl pixel latitude longitude UVAI UVAI_QF
59 660 40.837013 -73.913071 -1.0 8
59 661 40.815960 -73.919426 -0.9 13
60 660 40.835320 -73.974220 -1.1 8
60 661 40.814457 -73.980431 -0.8 13
POI CCNY at 40.821313 N -73.949036 E found
granule TEMPO_O3TOT_L2_V03_20230805T204453Z_S009G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon
scanl pixel latitude longitude UVAI UVAI_QF
59 660 40.825069 -73.947708 -1.1 8
59 661 40.803844 -73.954155 -0.8 8
60 660 40.823502 -74.007164 -0.9 8
60 661 40.802341 -74.013550 -0.8 13
POI CCNY at 40.821313 N -73.949036 E found
granule TEMPO_O3TOT_L2_V03_20230805T214724Z_S010G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon
scanl pixel latitude longitude UVAI UVAI_QF
59 660 40.833599 -73.914108 -1.0 8
59 661 40.812668 -73.920380 -1.1 8
60 660 40.830616 -73.972809 -1.2 8
60 661 40.810017 -73.978859 -0.9 8
POI CCNY at 40.821313 N -73.949036 E found
granule TEMPO_O3TOT_L2_V03_20230805T224955Z_S011G03.nc
granule has 123 scanlines by 2048 pixels
polygon shape: (4338, 2)
point POINT (-73.949036 40.821313) is in granule polygon
scanl pixel latitude longitude UVAI UVAI_QF
59 661 40.826336 -73.932693 -1.0 8
59 662 40.805405 -73.938957 -1.0 8
60 661 40.823360 -73.993324 -1.2 8
60 662 40.802219 -73.999695 -1.3 8
POI CCNY at 40.821313 N -73.949036 E found
8 Read back the time series written to the files and plot the results
8.1 AERONET
# --- Read back the AERONET AOD time series written earlier ---
# Accumulate [time, AOD] rows per wavelength in plain lists (avoids the
# O(n^2) cost of np.append in a loop), then convert to (n, 2) arrays.
with open(out_Q_AERONET+'_'+datestamp_ini+'_'+datestamp_fin+'_'\
          +POI_name+'_'+str('%08.4fN_%08.4fW.txt' %(POI_lat, -POI_lon)), 'r') as fin:
    data_lines = fin.readlines()

rows_340 = []
rows_380 = []
rows_500 = []
for data_line in data_lines[2 : ]:  # first two lines are headers
    split = data_line.split()
    tt = float(split[6])       # time, days since beginning of the period
    aod_340 = float(split[8])
    aod_380 = float(split[10])
    aod_500 = float(split[12])
    # Keep only positive (valid) AOD values.
    if aod_340 > 0.: rows_340.append([tt, aod_340])
    if aod_380 > 0.: rows_380.append([tt, aod_380])
    if aod_500 > 0.: rows_500.append([tt, aod_500])

# reshape(-1, 2) preserves the (0, 2) shape when a list is empty.
timeseries_AERONET_340 = np.array(rows_340, dtype=float).reshape(-1, 2)
timeseries_AERONET_380 = np.array(rows_380, dtype=float).reshape(-1, 2)
timeseries_AERONET_500 = np.array(rows_500, dtype=float).reshape(-1, 2)
8.2 DSCOVR EPIC
# --- Read back the DSCOVR EPIC AOD and UVAI time series written earlier ---
# Same list-accumulation pattern as for AERONET: build rows, convert once.
with open(out_Q_EPIC+'_'+datestamp_ini+'_'+datestamp_fin+'_'\
          +POI_name+'_'+str('%08.4fN_%08.4fW.txt' %(POI_lat, -POI_lon)), 'r') as fin:
    data_lines = fin.readlines()

rows_EPIC_340 = []
rows_EPIC_388 = []
rows_EPIC_500 = []
rows_EPIC_UVAI = []
for data_line in data_lines[2 : ]:  # first two lines are headers
    split = data_line.split()
    tt = float(split[6])            # time, days since beginning of the period
    aod_EPIC_340 = float(split[8])
    aod_EPIC_388 = float(split[10])
    aod_EPIC_500 = float(split[12])
    EPIC_UVAI = float(split[13])
    # AOD must be positive to be valid; UVAI may be negative, so its
    # validity threshold is the -99 fill value instead.
    if aod_EPIC_340 > 0.: rows_EPIC_340.append([tt, aod_EPIC_340])
    if aod_EPIC_388 > 0.: rows_EPIC_388.append([tt, aod_EPIC_388])
    if aod_EPIC_500 > 0.: rows_EPIC_500.append([tt, aod_EPIC_500])
    if EPIC_UVAI > -99.: rows_EPIC_UVAI.append([tt, EPIC_UVAI])

# reshape(-1, 2) preserves the (0, 2) shape when a list is empty.
timeseries_EPIC_340 = np.array(rows_EPIC_340, dtype=float).reshape(-1, 2)
timeseries_EPIC_388 = np.array(rows_EPIC_388, dtype=float).reshape(-1, 2)
timeseries_EPIC_500 = np.array(rows_EPIC_500, dtype=float).reshape(-1, 2)
timeseries_EPIC_UVAI = np.array(rows_EPIC_UVAI, dtype=float).reshape(-1, 2)
8.3 TEMPO
# --- Read back the TEMPO UVAI time series written in section 7.3 ---
with open(out_Q+'_'+datestamp_ini+'_'+datestamp_fin+'_'\
          +POI_name+'_'+str('%08.4fN_%08.4fW.txt' %(POI_lat, -POI_lon)), 'r') as fin:
    data_lines = fin.readlines()

rows_TEMPO_UVAI = []
for data_line in data_lines[2 : ]:  # first two lines are headers
    split = data_line.split()
    tt = float(split[6])            # time, days since beginning of the period
    TEMPO_UVAI = float(split[7])
    # -99 is the fill value; keep only valid UVAI entries.
    if TEMPO_UVAI > -99.: rows_TEMPO_UVAI.append([tt, TEMPO_UVAI])

# reshape(-1, 2) preserves the (0, 2) shape when the list is empty.
timeseries_TEMPO_UVAI = np.array(rows_TEMPO_UVAI, dtype=float).reshape(-1, 2)
8.4 plot Aerosol Optical Depth (AOD) from AERONET and DSCOVR EPIC
# --- Plot AOD time series: AERONET (red) vs DSCOVR EPIC (cyan) ---
plot_title = 'AOD_'+datestamp_ini+'_'+datestamp_fin+'\n'+POI_name
img_name = 'AOD_'+datestamp_ini+'_'+datestamp_fin+'_'+POI_name+'.jpg'

# One entry per series: (data, label, marker, color). Marker shape encodes
# the wavelength, color encodes the instrument.
aod_series = [
    (timeseries_AERONET_340, "AOD AERONET at 340nm", 's', 'r'),
    (timeseries_AERONET_380, "AOD AERONET at 380nm", 'o', 'r'),
    (timeseries_AERONET_500, "AOD AERONET at 500nm", 'D', 'r'),
    (timeseries_EPIC_340,    "AOD EPIC at 340nm",    's', 'c'),
    (timeseries_EPIC_388,    "AOD EPIC at 388nm",    'o', 'c'),
    (timeseries_EPIC_500,    "AOD EPIC at 500nm",    'D', 'c'),
]
for series, series_label, series_marker, series_color in aod_series:
    plt.plot(series[:, 0], series[:, 1], label=series_label,
             linestyle='None', markersize=2, marker=series_marker,
             markerfacecolor=series_color, markeredgecolor=series_color)

# Set the range of x-axis: the whole requested period, in days.
l_lim = 0.
u_lim = ((dt_fin - dt_ini).total_seconds() + 1.)/86400.
plt.xlim(l_lim, u_lim)

# Set the range of y-axis.
l_lim = 0.
u_lim = 2.
plt.ylim(l_lim, u_lim)

plt.xlabel(r'GMT, day from beginning of '+datestamp_ini, fontsize=12)
plt.ylabel(r'AOD', fontsize=12)
plt.legend(loc='lower left')
plt.title(plot_title+str(', %08.4fN %08.4fW' %(POI_lat, -POI_lon)))
plt.savefig(img_name, format='jpg', dpi=300)
plt.close()
8.5 plot UV Aerosol Index
# --- Plot UV Aerosol Index time series: DSCOVR EPIC (cyan) vs TEMPO (magenta) ---
plot_title = 'UVAI_'+datestamp_ini+'_'+datestamp_fin+'\n'+POI_name
img_name = 'UVAI_'+datestamp_ini+'_'+datestamp_fin+'_'+POI_name+'.jpg'

# One entry per series: (data, label, color); both use circle markers.
uvai_series = [
    (timeseries_EPIC_UVAI,  "UVAI EPIC",  'c'),
    (timeseries_TEMPO_UVAI, "UVAI TEMPO", 'm'),
]
for series, series_label, series_color in uvai_series:
    plt.plot(series[:, 0], series[:, 1], label=series_label,
             linestyle='None', markersize=2, marker='o',
             markerfacecolor=series_color, markeredgecolor=series_color)

# Set the range of x-axis: the whole requested period, in days.
l_lim = 0.
u_lim = ((dt_fin - dt_ini).total_seconds() + 1.)/86400.
plt.xlim(l_lim, u_lim)

# Set the range of y-axis.
l_lim = -3.
u_lim = 3.
plt.ylim(l_lim, u_lim)

plt.xlabel(r'GMT, day from beginning of '+datestamp_ini, fontsize=12)
plt.ylabel(r'UV Aerosol Index', fontsize=12)
plt.legend(loc='lower left')
plt.title(plot_title+str(', %08.4fN %08.4fW' %(POI_lat, -POI_lon)))
plt.savefig(img_name, format='jpg', dpi=300)
plt.close()