# Import modules... import arcpy from arcpy.sa import * import multiprocessing import os # Define the worker function... def calcVS(vsFC, elevationRas, outputWS, count): arcpy.CheckOutExtension("Spatial") scratchWS = os.path.join(outputWS, "VS_%s" % (count)) os.mkdir(scratchWS) arcpy.env.scratchWorkspace = scratchWS outVS = arcpy.sa.Viewshed(elevationRas, vsFC) outVS.save("vs_%s.img" % (count)) return "Completed: vs_%s" % (count) if __name__ == "__main__": # list of shapefiles to be used as observers, # pretend this is populated... fcs = [] elevRas = r"C:\Working01\DEM.img" outWS = r"C:\Working01\Test01" cores = multiprocessing.cpu_count() # Start pool pool = multiprocessing.Pool(cores - 1) jobs = [] counter = 1 for fc in fcs: jobs.append(pool.apply_async(calcVS, (fc, elevRas, outWS, counter))) # Clean up pool... pool.close() pool.join() # Print results results = [job.get() for job in jobs] print results
Solved! Go to Solution.
# Imports... import arcpy from arcpy.sa import * import multiprocessing import ntpath import os from re import split import shutil import time def calcVS(argsCSV): import arcpy argsList = split(",", argsCSV) vsFC = argsList[0] elevRas = argsList[1] outWS = argsList[2] head, tail = ntpath.split(vsFC) buffVS = os.path.join(outWS, "buff_%s" % (tail[:-4])) arcpy.Buffer_analysis(vsFC, buffVS, "RADIUS2") arcpy.env.extent = buffVS arcpy.Delete_management(buffVS) scratchWS = os.path.join(outWS, tail[:-4]) os.mkdir(scratchWS) arcpy.env.scratchWorkspace = scratchWS arcpy.CheckOutExtension("Spatial") outViewshed = Viewshed(elevRas, vsFC) outViewshed.save(os.path.join(outWS, "%s.tif" % (tail[:-4]))) shutil.rmtree(scratchWS) arcpy.Delete_management(vsFC) if __name__ == "__main__": obsShp = r"C:\Data\test.shp" uidField = "UID" elevationRas = r"C:\Data\DEM.img" outputWS = r"C:\Working01" arcpy.env.workspace = outputWS arcpy.gp.overwriteOutput = True start = time.clock() rows = arcpy.SearchCursor(obsShp) obsInfoList = [row.getValue(uidField) for row in rows] del row, rows vsList = [] for obs in obsList: obsFileName = "%s_obs_base.shp" % (obs) arcpy.Select_analysis(obsShp, obsFileName, uidField+" = "+str(obs)) vsList.append("%s,%s,%s" % (os.path.join(outputWS, obsFileName), elevationRas, outputWS)) pool = multiprocessing.Pool() pool.map(calcVS, vsList) pool.close() pool.join() end = time.clock() duration = end - start hours, remainder = divmod(duration, 3600) minutes, seconds = divmod(remainder, 60) print "Completed in %dh:%dm:%fs" % (hours, minutes, seconds)
# Imports... import arcpy from arcpy.sa import * import multiprocessing import ntpath import os from re import split import shutil import time def calcVS(argsCSV): import arcpy argsList = split(",", argsCSV) vsFC = argsList[0] elevRas = argsList[1] outWS = argsList[2] head, tail = ntpath.split(vsFC) buffVS = os.path.join(outWS, "buff_%s" % (tail[:-4])) arcpy.Buffer_analysis(vsFC, buffVS, "RADIUS2") arcpy.env.extent = buffVS arcpy.Delete_management(buffVS) scratchWS = os.path.join(outWS, tail[:-4]) os.mkdir(scratchWS) arcpy.env.scratchWorkspace = scratchWS arcpy.CheckOutExtension("Spatial") outViewshed = Viewshed(elevRas, vsFC) outViewshed.save(os.path.join(outWS, "%s.tif" % (tail[:-4]))) shutil.rmtree(scratchWS) arcpy.Delete_management(vsFC) if __name__ == "__main__": obsShp = r"C:\Data\test.shp" uidField = "UID" elevationRas = r"C:\Data\DEM.img" outputWS = r"C:\Working01" arcpy.env.workspace = outputWS arcpy.gp.overwriteOutput = True start = time.clock() rows = arcpy.SearchCursor(obsShp) obsInfoList = [row.getValue(uidField) for row in rows] del row, rows vsList = [] for obs in obsList: obsFileName = "%s_obs_base.shp" % (obs) arcpy.Select_analysis(obsShp, obsFileName, uidField+" = "+str(obs)) vsList.append("%s,%s,%s" % (os.path.join(outputWS, obsFileName), elevationRas, outputWS)) pool = multiprocessing.Pool() pool.map(calcVS, vsList) pool.close() pool.join() end = time.clock() duration = end - start hours, remainder = divmod(duration, 3600) minutes, seconds = divmod(remainder, 60) print "Completed in %dh:%dm:%fs" % (hours, minutes, seconds)
@Dwight
Nice implementation. Just a couple questions I had. Did you find a particular advantage to using arcpy.gp.overwriteOutput vs .env? Also, how do you pass in your obsList object? Or is that supposed to be the obsInfoList?
Awesome! This thread has just saved me from wanting to jump off the nearest bridge!
I've been developing some multiprocessing code which makes use of several arcpy environment settings and spatial analyst. My code was consistently failing/aborting and sometimes completing without error! I came across this thread and saw that you were importing arcpy within the worker function. I updated my code and now everything is running as I expect. I owe you a pint!
Thanks for this really useful thread.