Adding Time Dimension to an existing Voxel / NetCDF file or creating a Voxel layer with time dimension included

05-28-2021 12:32 AM
TasBarsdell
New Contributor II

Hi all - using ArcGIS Pro 2.8, so-so python skills

I am trying to create a time series showing air quality over time at a specific sample site. I can create several voxel layers of the data for different time stamps (days), but I am unsure how to:

  1. Combine the voxel layers into one voxel layer that can be time enabled OR
  2. Create time enabled Voxel layers to use in a time series.

Essentially, I would like to create something similar to the oceans output linked here.

The workflow I have followed is similar to that of the Interpolate 3D oxygen measurements in Monterey Bay tutorial found here. I have 5 days' worth of air quality data captured from a drone at different elevations. This input data has a time field. I create interpolated results for each day using the Empirical Bayesian Kriging 3D tool, then convert each output to a voxel layer to visualise it as a predicted volumetric cube using the GA Layer 3D to NetCDF tool.

So I have 5 voxel layers with no time dimension. What is the best approach to create a time-capable voxel layer? Can you edit existing voxel layers to add a time dimension, or do you have to follow a different procedure?

My Python code for the process is below for reference:

 

# import modules
import os
import arcpy
import datetime as dt


def EBK_Process(pm_field):
    '''Main Process creating interpolation output'''

    try:

        # To prevent overwriting outputs change option to False.
        arcpy.env.overwriteOutput = True
        arcpy.env.workspace = r"C:\Users\TasB\DroneMapping.gdb"
        # Check out any necessary licenses.
        arcpy.CheckOutExtension("GeoStats")
        print("Checked out GeoStats License")

        # input variables
        drone_csv = r"C:\Users\TasB\Inputs\DroneAirQuality.csv"
        out_points_fc = "drone_loc"
        x_coords = "Longitude"
        y_coords = "Latitude"
        z_coords = "Altitude m"

        # create points fc from csv table
        xy_fc = arcpy.XYTableToPoint_management(drone_csv, out_points_fc,
                                                x_coords, y_coords, z_coords,
                                                arcpy.SpatialReference(4326, 115700))

        # create a list of date values to loop through
        date_list = []
        with arcpy.da.SearchCursor(xy_fc, "GMT_Date") as date_cursor:
            for row in date_cursor:
                if row[0] not in date_list:
                    date_list.append(row[0])
        del date_cursor

        for date in date_list:

            # create date_str for output names
            date_str = dt.datetime.strptime(date, '%m/%d/%Y').strftime('%Y%m%d')

            # select date field for sample process
            clause = """ "GMT_Date" = '%s' """ % date
            new_select = arcpy.SelectLayerByAttribute_management(xy_fc, "NEW_SELECTION", clause)
            select_copy = arcpy.CopyFeatures_management(new_select,"xy_copy")

            # specify input spatial reference from existing fc
            dsc = arcpy.Describe("DroneAirQuality_Proj")
            coord_sys = dsc.SpatialReference

            # define projection
            copy_proj = arcpy.Project_management(select_copy,
                                                 "drone_loc_proj", coord_sys)

            # run ebk 3D analysis tool
            output_ebk_layer = "ebk_" + pm_field + "_" + date_str
            out_ebk = arcpy.EmpiricalBayesianKriging3D_ga(copy_proj,
                                                          "Shape.Z",
                                                          pm_field,
                                                          output_ebk_layer,
                                                          "METER", "",
                                                          "EXPONENTIAL",
                                                          "LOGEMPIRICAL",
                                                          100,
                                                          1, 100,
                                                          "FIRST", None,
                                                          "NBRTYPE=Standard3D RADIUS=nan NBR_MAX=2 NBR_MIN=1 SECTOR_TYPE=TWELVE_SECTORS",
                                                          None, "PREDICTION",
                                                          0.5, "EXCEED", None)

            print("EBK Created")
            # use a separate variable for the output name so pm_field still
            # matches the source field on later loop iterations
            out_field = "PM25" if pm_field == "PM2_5" else pm_field
            out_voxel = os.path.join(r"C:\Users\TasB\Outputs", out_field + "_" + date_str + ".nc")

            predict_points = output_ebk_layer + " PREDICTION #;" + output_ebk_layer + " PREDICTION_STANDARD_ERROR #"
            arcpy.GALayer3DToNetCDF_ga(out_ebk, out_voxel, "3D_GRIDDED_POINTS", "2.21662307693026 Meters", "2.01896153844129 Meters", "3.12820512820513 Meters", None, predict_points, None)

        arcpy.CheckInExtension("GeoStats")
        print("Checked in GeoStats License")

        print("Process Completed Successuflly")

    except arcpy.ExecuteError:
        print(arcpy.GetMessages(2))


if __name__ == '__main__':
    # Global Environment settings
    EBK_Process("PM2_5")
    EBK_Process("PM10")

 

 

2 Replies
TasBarsdell
New Contributor II

Apparently there are two alternative workflows that would allow adding a time slider to voxel layers, as per below. I will give these a try and update once completed.

1. Create a Mosaic Dataset that stores all the NetCDF files, and add a date field to the Mosaic Dataset that stores the datetime attributes (see the sketch after this list).

2. There is an ArcGIS Blog post that covers advanced methods of creating NetCDF files using Python that can preserve custom attributes.

Felix10546
New Contributor III

Thanks TasBarsdell for providing the solutions. I have a similar problem: I want to add the time attribute to the NetCDF generated by GA Layer 3D to NetCDF.

I followed method 2 to learn the technique, but I found that the example works with x, y and t, where t is shown as the height on the z-axis in ArcGIS Pro (pic1_work_with_t,y,x).

However, I then added a fourth axis to the data so that ET(time, z, y, x) has the values 1.1, 2.1 and 3.1 on three different days.

Problem 1: the layer cannot be rendered (pic2_not_work_with_t_z_y_x).

I am sure the resulting file has a date field recognized by ArcGIS Pro, since I can select using the time range of this layer.

Problem 2: when I start playing the time bar, ArcGIS Pro just crashes (pic3_crash_after_start_playing_in_time).

I am still looking for a directly importable t,z,y,x dataset online to see what data format works, but most of the time formats I find are not recognized by ArcGIS Pro.

 

Can anyone help on this topic? Thanks very much.

Here is my code for generating the t,z,y,x data, adapted from the blog post. The original data cannot be downloaded, so I just replaced it with constants:


from netCDF4 import Dataset
import numpy as np
import datetime as dt

 

nc = Dataset('esri_with_time.nc', 'w')

# Global attributes
nc.title = 'ET-Amazon'
nc.summary = ('Actual monthly evapotranspiration in Rondonia, Brazil '
              'for June 2005, 2009, and 2013')
nc.keywords = 'Evapotranspiration, Amazon, Water cycle'
nc.license = ('This work is licensed under a Creative Commons '
              'Attribution 4.0 International License.')
nc.references = ('Paca, V.H., Espinoza-Davalos, G.E., Hessels, T.M., '
                 'Moreira, D., Comair, G.F., Bastiaanssen, W. (2019). '
                 'The Spatial Variability of Actual Evapotranspiration '
                 'Across the Amazon River Basin Based on Remote Sensing '
                 'Models Validated with Flux-Towers. Ecological Processes. '
                 '8(1), 6. https://doi.org/10.1186/s13717-019-0158-8')
nc.source = 'https://www.hydroshare.org/resource/24792a48a6394dcba52da62fa324ae40/'
nc.Conventions = 'CF-1.6'
nc.institution = 'Esri'
nc.history = '1'

#dim
lat_dim = nc.createDimension('latitude', 176)
lon_dim = nc.createDimension('longitude', 244)
z_dim = nc.createDimension('z', 3)
tim_dim = nc.createDimension('time', 3)

#var
lat_var = nc.createVariable('latitude', np.float64, ('latitude'))
lat_var.units = 'degrees_north'
lat_var.standard_name = 'latitude'
lat_var.axis = 'Y'

lon_var = nc.createVariable('longitude', np.float64, ('longitude'))
lon_var.units = 'degrees_east'
lon_var.standard_name = 'longitude'
lon_var.axis = 'X'

z_var = nc.createVariable('z', np.float64, ('z'))
z_var.units = 'z'
z_var.standard_name = 'z'
z_var.axis = 'Z'


time_var = nc.createVariable('time', np.int32, ('time'))
time_var.standard_name = 'time'
time_var.calendar = 'gregorian'
time_var.time_step = 'Monthly'
time_var.units = 'Seconds since 1970-01-01 00:00:00'
time_var.axis = 'T'

crs_var = nc.createVariable('crs', np.int8, ())
crs_var.standard_name = 'crs'
crs_var.grid_mapping_name = 'latitude_longitude'
crs_var.crs_wkt = ("GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',"
                   "SPHEROID['WGS_1984',6378137.0,298.257223563]],"
                   "PRIMEM['Greenwich',0.0],"
                   "UNIT['Degree',0.0174532925199433]]")

#np.int16
#et_var = nc.createVariable('ET', np.int16, ('time', 'z','latitude', 'longitude'),
#                           fill_value=9999)


et_var = nc.createVariable('ET', np.float64, ('time', 'z','latitude', 'longitude'),
                           fill_value=9999)

et_var.units = 'mm/month'
et_var.long_name = 'Actual Evapotranspiration'
et_var.short_name = 'ET'
et_var.grid_mapping = 'crs'

#assign values
lat_values = np.arange(-9.6491667 - 0.0025/2, -10.0891667, -0.0025)
lon_values = np.arange(-64.735 + 0.0025/2, -64.125, 0.0025)
z_values= np.arange(0,1400,500)
lat_var[:] = lat_values
lon_var[:] = lon_values
z_var[:] = z_values


date_200506 = int((dt.datetime(2005,6,1) - dt.datetime(1970,1,1)).total_seconds())
date_200906 = int((dt.datetime(2009,6,1) - dt.datetime(1970,1,1)).total_seconds())
date_201306 = int((dt.datetime(2013,6,1) - dt.datetime(1970,1,1)).total_seconds())
time_values = [date_200506, date_200906, date_201306]
time_var[:] = time_values

# fill each time step with a constant value (shape matches the (z, lat, lon) slice)
et_var[0, :, :, :] = np.ones((3, 176, 244)) * 1.1
et_var[1, :, :, :] = np.ones((3, 176, 244)) * 2.1
et_var[2, :, :, :] = np.ones((3, 176, 244)) * 3.1

nc.close()