PRO GRID_COMPOSITE, LIST, NAME, DATAGRID, $
  DIMENSION=DIMENSION, DAY=DAY, NIGHT=NIGHT, RANGE=RANGE, GEOLIST=GEOLIST

;+
; Composite a variable from a list of MODIS Level-2 product files onto a
; fixed global grid, accumulating per-cell count, total, minimum, and
; maximum statistics.
;
; Usage:
;   grid_composite, list, 'Total_Ozone', datagrid, /day
;   grid_composite, list, 'Retrieved_Temperature_Profile', dimension=10
;
; Arguments:
;   LIST      (in)  String array of Level-2 HDF product file names.
;   NAME      (in)  Name of the SDS variable to composite.
;   DATAGRID  (out) Float array (4, ngrid): rows are [count, total, min,
;                   max] for each grid cell.
;
; Keywords:
;   DIMENSION  Index of the band to extract when the SDS is 3-D
;              (default 0).
;   DAY        If set, composite only day pixels (solar zenith <= 85 deg).
;   NIGHT      If set, composite only night pixels (solar zenith > 85 deg).
;   RANGE      Two-element array [min, max]; only unscaled data within
;              this range are composited.
;   GEOLIST    String array of geolocation file names. If omitted,
;              geolocation is read from each product file itself.
;-

;- Check arguments
if (n_elements(list) eq 0) then message, 'Argument LIST is undefined'
if (n_elements(name) eq 0) then message, 'Argument NAME is undefined'
if (arg_present(datagrid) eq 0) then message, 'Argument DATAGRID cannot be set'
if (n_elements(dimension) eq 0) then dimension = 0

;- Save start time
t0 = systime(1)

;- Initialize compositing arrays
;- (ngrid is the number of cells in the fixed global grid expected by
;-  get_grid_index; previously the literal 814880 was repeated four times)
ngrid = 814880L
numarr = fltarr(ngrid)
totarr = fltarr(ngrid)
minarr = replicate( 1.0e20, ngrid)
maxarr = replicate(-1.0e20, ngrid)

;- Loop through each file
for file_index = 0L, n_elements(list) - 1L do begin

  ;- Open this file
  hdf_id = hdf_sd_start(list[file_index])

  ;- Read the SDS data and extract one band if the SDS is 3-D
  hdf_sd_varread, hdf_id, name, data
  ndims = size(data, /n_dimensions)
  if (ndims eq 3) then data = reform(data[*, *, dimension])

  ;- Read the SDS attributes used for unscaling and fill detection
  result = hdf_sd_attinfo(hdf_id, name, 'scale_factor')
  scale_factor = result.data
  result = hdf_sd_attinfo(hdf_id, name, 'add_offset')
  add_offset = result.data
  result = hdf_sd_attinfo(hdf_id, name, '_FillValue')
  fill_value = result.data

  ;- Get geolocation information
  if (n_elements(geolist) eq 0) then begin

    ;- Read geolocation information from the product file
    geo_id = hdf_id
    zen_name = 'Solar_Zenith'

  endif else begin

    ;- Find the geolocation file whose name contains the same
    ;- date/time fields as the product file name
    ;- (fields 1 and 2 of the dot-delimited file name; verify this
    ;-  matches the local file naming convention)
    result = strsplit(list[file_index], '.', /extract)
    search_string = result[1] + '.' + result[2]
    loc = where(strpos(geolist, search_string) ge 0, count)
    if (count eq 0) then begin
      print, 'No geolocation file found for ', list[file_index]
      goto, next_file
    endif else begin
      geo_id = hdf_sd_start(geolist[loc[0]])
      zen_name = 'SolarZenith'
    endelse

  endelse

  ;- Read the lat/lon/solar-zenith arrays
  ;- (stored zenith is scaled by 100; convert to degrees)
  hdf_sd_varread, geo_id, 'Latitude', lat
  hdf_sd_varread, geo_id, 'Longitude', lon
  hdf_sd_varread, geo_id, zen_name, zen
  zen = zen * 0.01

  ;- Close the input file(s)
  if (geo_id ne hdf_id) then hdf_sd_end, geo_id
  hdf_sd_end, hdf_id

  ;- Check data size
  if (n_elements(data) ne n_elements(lat)) then begin
    help, data, lat
    message, 'DATA and LAT arrays are not the same size'
  endif

  ;- Check for valid pixels
  ;- (fill-value test is done BEFORE unscaling, on the raw stored values)
  test = (data ne fill_value) and $
         (lat ge  -90.0 and lat le  90.0) and $
         (lon ge -180.0 and lon le 180.0) and $
         (zen ge    0.0 and zen le 180.0)

  ;- Check for day or night pixels if required
  if keyword_set(day)   then test = test and (zen le 85.0)
  if keyword_set(night) then test = test and (zen gt 85.0)

  ;- Unscale the data (HDF/netCDF standard method)
  data = (temporary(data) - add_offset) * scale_factor

  ;- Check for data in valid range if required
  if (n_elements(range) eq 2) then begin
    test = test and (data ge range[0]) and (data le range[1])
  endif

  ;- Get indices of valid pixels
  valid_index = where(test, valid_count)

  ;- Composite the valid pixels
  if (valid_count gt 0) then begin

    ;- Extract valid pixels
    data = (temporary(data))[valid_index]
    lat  = (temporary(lat))[valid_index]
    lon  = (temporary(lon))[valid_index]

    ;- Get grid indices
    grid_index = get_grid_index(lat, lon)

    ;- Composite the statistics arrays
    numarr = vecadd(numarr, grid_index, replicate(1.0, n_elements(grid_index)))
    totarr = vecadd(totarr, grid_index, data)
    minarr = vecmin(minarr, grid_index, data)
    maxarr = vecmax(maxarr, grid_index, data)

  endif

  ;- Target for skipping a file with no matching geolocation
  ;- (label renamed from 'continue', which is a reserved statement
  ;-  in IDL 5.4 and later)
  next_file:
  print, 'Processed ', list[file_index]

endfor

;- Create output array: row 0 = count, 1 = total, 2 = min, 3 = max
datagrid = transpose([[numarr], [totarr], [minarr], [maxarr]])

;- Compute processing rate
elapsed = systime(1) - t0
print, 'Processing rate (files/sec): ', n_elements(list) / elapsed

END