Python API timing out

08-31-2020 03:29 PM
GrantHaynes
Occasional Contributor

Hi all, I'm having some trouble with the ArcGIS API for Python. I'm getting error code 10054 (connection reset by peer) when the script pushes data to AGOL, and I can't figure out why.

import os, datetime, sys, traceback, time, arcpy, arcgis
from arcgis.gis import GIS

# Logging function to keep track of activity and errors
def Logger(logMessage):
    try:
        with open("Log.txt", "a") as logFile:
            logFile.write("\n{0} ; {1}".format(logMessage, datetime.datetime.now().strftime("%H:%M:%S , %m/%d/%Y")))
    except Exception as ex:
        print("Activity logging failed {}".format(ex))

def TruncateWebLayer(gis=None, target=None):
    try:
        lyr = arcgis.features.FeatureLayer(target, gis)
        lyr.manager.truncate()
    except:
        print("Failed truncating: " + str(target))
        sys.exit()

# User input dialog, note raw input for the file paths
userContinue = "N"
priorityMarkShpPath = input("Priority Mark Shapefile Path: ")
hex10kmShpPath = input("10km Hex Bin Shapefile path: ")
hex2kmShpPath = input("2km Hex Bin Shapefile path: ")
userContinue = input("Continue Y/N: ")
if userContinue in ("N", "n", "no"):
    sys.exit(0)

try:
    if os.path.exists(hex10kmShpPath) and os.path.exists(hex2kmShpPath) and os.path.exists(priorityMarkShpPath):

        # Convert the input shapefiles into JSON
        print("Converting Shapefiles to JSON, this may take some time...")
        priorityMarkShpJSONResult = arcpy.FeaturesToJSON_conversion(priorityMarkShpPath, "priorityMarkShpJSON.json", "FORMATTED")
        hex10kmJSONResult = arcpy.FeaturesToJSON_conversion(hex10kmShpPath, "hex10kmJSON.json", "FORMATTED")
        hex2kmJSONResult = arcpy.FeaturesToJSON_conversion(hex2kmShpPath, "hex2kmJSON.json", "FORMATTED")

        # Create a FeatureSet objects using the API from the JSONs
        priorityMarkNew = arcgis.features.FeatureSet.from_json(open("priorityMarkShpJSON.json").read())
        hex10kmNew = arcgis.features.FeatureSet.from_json(open("hex10kmJSON.json").read())
        hex2kmNew = arcgis.features.FeatureSet.from_json(open("hex2kmJSON.json").read())
        
        # URLs to individual layers of the service
        priorityMarkURL = r"#####"
        hex10kmURL = r"#####"
        hex2kmURL = r"#####"

        print("Logging into AGOL...")
        gis = GIS(url="#####", username="#####", password="#####")
        gpsOnBmTestFS = gis.content.get("#####")
        foo = gpsOnBmTestFS[0].properties.capabilities
        # Remove all features from the existing web layers
        print("Truncating existing web layers...")
        TruncateWebLayer(gis, priorityMarkURL)
        TruncateWebLayer(gis, hex10kmURL)
        TruncateWebLayer(gis, hex2kmURL)

        # Reference the empty layers
        priorityMarkEmptyFl = arcgis.features.FeatureLayer(priorityMarkURL, gis)
        hex10kmEmptyFl = arcgis.features.FeatureLayer(hex10kmURL, gis)
        hex2kmEmptyFl = arcgis.features.FeatureLayer(hex2kmURL, gis)

        # Now add those FeatureSet objects to the empty web layers
        # The error happens here
        print("Publishing new Layers...")
        priorityMarkEmptyFl.edit_features(adds = priorityMarkNew)
        time.sleep(5)
        hex10kmEmptyFl.edit_features(adds = hex10kmNew)
        time.sleep(5)
        hex2kmEmptyFl.edit_features(adds = hex2kmNew)

        print("Completed Successfully")
    else:
        sys.exit("Invalid files please try again")
except Exception as ex:
    print("Error see log for traceback")
    Logger(traceback.format_exc())
finally:
    print("Deleting Temp Files...")
    if os.path.exists("priorityMarkShpJSON.json"😞
        os.remove("priorityMarkShpJSON.json")
    if os.path.exists("hex10kmJSON.json"😞
        os.remove("hex10kmJSON.json")
    if os.path.exists("hex2kmJSON.json"😞
        os.remove("hex2kmJSON.json")
3 Replies
VictorTey
Esri Contributor

Grant Haynes

Can you check that the priorityMarkNew you are adding is an array? Can you also print out priorityMarkNew?
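
Something along these lines should show what is actually being passed in (a minimal sketch, reusing the variable names from the script above):

# Inspect the FeatureSet before sending it to edit_features
print(type(priorityMarkNew))             # expect arcgis.features.FeatureSet
print(len(priorityMarkNew.features))     # how many features are about to be added
print(priorityMarkNew.features[0])       # peek at the first feature

# edit_features also accepts a plain list of Feature objects
result = priorityMarkEmptyFl.edit_features(adds=priorityMarkNew.features)
print(result["addResults"][:5])          # per-feature success flags / error messages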

GrantHaynes
Occasional Contributor

I figured it out: the datasets were too big, so I wrote a function that breaks the data into smaller chunks and pushes each chunk up separately.
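
The general idea is sketched below; the function name, chunk size, and logging are illustrative rather than the exact code, and it reuses the Logger function and layer variables from the script above.

def AddFeaturesInChunks(featureLayer, featureSet, chunkSize=500):
    # Push the features up in smaller batches so each request stays small
    features = featureSet.features
    for start in range(0, len(features), chunkSize):
        chunk = features[start:start + chunkSize]
        result = featureLayer.edit_features(adds=chunk)
        # Log any per-feature failures instead of failing silently
        failed = [r for r in result["addResults"] if not r.get("success")]
        if failed:
            Logger("Failed adds in chunk starting at {0}: {1}".format(start, failed))
        time.sleep(2)  # small pause between batches

# Used in place of the single edit_features calls above:
AddFeaturesInChunks(priorityMarkEmptyFl, priorityMarkNew)
AddFeaturesInChunks(hex10kmEmptyFl, hex10kmNew)
AddFeaturesInChunks(hex2kmEmptyFl, hex2kmNew)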

JonPainter
New Contributor II

Do you mind sharing the code for that function? I'm getting the same error as you describe above.

Thanks for your time.