
Feature Layer Group Downloads Get Throttled

I am having trouble adapting the code that downloads a hosted feature service so that it downloads all 200+ of our hosted feature layers in a single run (so we can update the local backups of all our layers every few months). (Third code cell from: https://github.com/pklingman/arcgis-online-python-notebooks/blob/master/Download%20Hosted%20Feature%...)
As far as I can tell, the downloads are getting throttled on ESRI's servers. I tried breaking the downloads into small blocks with breaks between them, but this didn't help. The script runs through the first block, then gets hung up waiting for ESRI's servers to send the file download for a layer in the second block. A sketch of the status-polling approach I'm considering instead of fixed sleeps is below, just before the code.
Is there a way to get ESRI's servers to consistently process the download requests, or do I need to keep a master list of items that the program keeps retrying until everything completes (sketch at the end of this post)? Other ideas?
Thanks for any help you can provide.
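
For context, this is roughly the status-polling pattern I have in mind instead of the fixed time.sleep() calls in the code below. It is only an untested sketch: the wait_for_export name, the poll interval, and the 30-minute timeout are placeholders I made up, and it assumes the export item's status eventually reads "completed" the way my get_status helper (defined in the code below) expects.

import time

def wait_for_export(export_item, timeout_minutes=30, poll_seconds=30):
    # Poll the export item's status until it finishes, fails, or times out.
    deadline = time.time() + timeout_minutes * 60
    while time.time() < deadline:
        status = get_status(export_item)  # helper defined in the code below
        if status in ("completed", "complete"):
            return True
        if status in ("failed", "esrijobfailed"):
            return False
        time.sleep(poll_seconds)
    return False  # timed out; the caller could retry this item later

The idea would be for download_as_fgdb to call wait_for_export(export_item) right after item.export(...) and only call export_item.download(...) once it returns True.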
Code for Reference:
import traceback
import time
import datetime as dt
# Disable HTTPS (InsecureRequestWarning) warnings so they don't choke the AGOL downloads
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


def chunk_list(lst, size):
    for i in range(0, len(lst), size):
        yield lst[i:i+size]


def get_status(item):
    s = getattr(item, "status", None)

    # If status is a function (older API)
    if callable(s):
        try:
            return (s() or "").lower()
        except Exception:
            return ""

    # If status is a string (newer API)
    if isinstance(s, str):
        return s.lower()

    # Try properties
    if hasattr(item, "properties"):
        return (item.properties.get("status") or "").lower()

    return ""

def download_as_fgdb(item_list, backup_location):

    skip_titles = ["Parcels_2025", "County Parcels 2024", "County_AddressPoints_2025"]
    
    completed = []
    failed = []

    for item in item_list:
        try:
            print(f"\n Processing: {item.title}")
            print(f" Type keywords: {item.typeKeywords}")
            # Skip view layers up front
            if 'View Service' in item.typeKeywords:
                print(f"   {item.title} is a view, not downloading. Skipping.")
                continue
            # Skip manually excluded layers
            if item.title in skip_titles:
                print(f"   Skipping {item.title} (manually excluded).")
                continue
            # 2) Check for View or non-hosted layer
            if any(kw in item.typeKeywords for kw in ["View Service", "Map Service", "Image Service"]):
                print(f"   {item.title}: View or non-hosted service, skipping.")
                continue 

            # 3) Check for the "Hosted Service" keyword
            if "Hosted Service" not in item.typeKeywords:
                print(f"   {item.title}: Not a hosted feature layer (likely ArcGIS Server). Skipping")
                continue 

            # 4) Check export capability from the item data
            data = item.get_data()
            if data and "capabilities" in data:
                capabilities = data["capabilities"].lower()
                if "extract" not in capabilities:
                    print(f"   {item.title}: Service does not support extract/export. Skipping")
                    continue 

            # 5) Finally, check the export setting from the item data (reuse 'data' from above)
            if isinstance(data, dict) and data.get("exportEnabled") is False:
                print(f"   {item.title}: Export explicitly disabled. Skipping")
                continue

           
        
            print("Downloading " + item.title)
            version = dt.datetime.now().strftime("%d_%b_%Y")
            time.sleep(10)
            print("Version")
            export_item = item.export(item.title + "_" + version, "File Geodatabase")
            print("Export Item")
            time.sleep(10)

                        
            export_item.download(save_path=backup_location)
            time.sleep(5)
            print("Download Successful")
            completed.append(item)

            try:
                # Clean up the temporary export item in AGOL after downloading
                export_item.delete()
                print("Deleted temporary export item for " + item.title)
            except Exception as e:
                print(f"An error occurred deleting {item.title}: {e}")
            print("Successfully downloaded " + item.title)

        except Exception as e:
            print(f" An error occurred downloading {item.title}: {e}")
            traceback.print_exc()
            failed.append(item)

    return completed, failed


def run_in_blocks(items, folder_path, block_size=10, break_minutes=45):
    all_completed = []
    all_failed = []

    blocks = list(chunk_list(items, block_size))

    for i, block in enumerate(blocks, start=1):
        print(f"\n================= BLOCK {i}/{len(blocks)} =================")

        completed, failed = download_as_fgdb(block, folder_path)

        all_completed += completed
        all_failed += failed



        if i < len(blocks):
            print(f"\n Waiting {break_minutes} minutes before next block...\n")
            time.sleep(break_minutes * 60)

    print("\n ALL BLOCKS COMPLETE")
    print(f" Total successful: {len(all_completed)}")
    print(f" Total failed: {len(all_failed)}")

    return all_completed, all_failed

    
# 'items' and 'folder_path' are defined in the earlier notebook cells
# (the content search for our hosted feature layers and the local backup folder).
#download_as_fgdb(items, folder_path)
completed, failed = run_in_blocks(items, folder_path, block_size=10, break_minutes=45)
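
For completeness, this is the kind of "master list that keeps retrying" wrapper I was thinking of as a fallback. Again just a sketch: retry_until_complete, max_rounds, and the 15-minute pause between rounds are arbitrary choices of mine, and it assumes download_as_fgdb returns (completed, failed) as above.

def retry_until_complete(items, folder_path, max_rounds=5, wait_minutes=15):
    remaining = list(items)   # master list of items still to download
    done = []
    for round_num in range(1, max_rounds + 1):
        print(f"\n========== RETRY ROUND {round_num}: {len(remaining)} item(s) left ==========")
        completed, failed = download_as_fgdb(remaining, folder_path)
        done += completed
        remaining = failed    # only the failures carry over to the next round
        if not remaining:
            break
        print(f"Waiting {wait_minutes} minutes before retrying {len(remaining)} failed item(s)...")
        time.sleep(wait_minutes * 60)
    return done, remaining

Of course that only helps if the stuck requests eventually error out instead of hanging forever, which is part of what I'm unsure about.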

 
