
Layers not showing up despite being shown in the REST endpoint?

HritikArora1 (New Contributor) · 5 hours ago

Hey guys! I was tinkering around with Python to try to automate updates of certain layers to ArcGIS Online, and I've run into something quite strange. I was hoping someone else has run into this issue too.

After my code runs to update a hosted feature layer, any additional layers I add to the ArcGIS Pro map don't seem to be reflected in the hosted feature layer, even though the code reports that it successfully overwrote the layer and the SD file. What's even stranger is that the new layers do show up in the REST endpoint, but not in the actual feature layer, in Map Viewer, or even when I open that layer in ArcGIS Pro from the portal.

(No, the layers are not out of order or anything like that.)
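
To double-check the REST side, here's the quick sanity check I've been running against the service endpoint (the URL below is a placeholder, and a token would be needed if the layer isn't shared publicly):

import requests

# Placeholder: root URL of the overwritten hosted feature service
SERVICE_URL = "https://services.arcgis.com/<org-id>/arcgis/rest/services/<service-name>/FeatureServer"

resp = requests.get(SERVICE_URL, params={"f": "json"})
resp.raise_for_status()
service_info = resp.json()

# The service definition at the REST endpoint lists every sublayer it knows about
for lyr in service_info.get("layers", []):
    print(lyr["id"], lyr["name"])

The new layers all show up in that output, which is what makes the Map Viewer behaviour so confusing.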

Has anyone else come across this issue?

I've attached a few screenshots to this post, and here's a snippet of the code:

# NOTE: config constants (PORTAL_URL, PORTAL_USER, PORTAL_PASS, GIS_PROFILE, LOG_DIR,
# CSV_MANIFEST, BLANK_APRX, EMAIL_* and SMTP_*) are defined earlier in the script.
import csv
import datetime
import os
import smtplib
import tempfile
import time
from email.message import EmailMessage

import arcpy
from arcgis.gis import GIS
from arcgis.features import FeatureLayerCollection

# ---------- robust logging ----------
class RunLogger:
    def __init__(self, base_dir: str, prefix: str = "AGOL_SD_Overwrite"):
        self._log_path = self._make_log_path(base_dir, prefix)
        self._lines = []

    @staticmethod
    def _make_log_path(base_dir, prefix):
        stamp_date = datetime.datetime.now().strftime("%Y-%m-%d")
        stamp_full = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        target_dir = os.path.join(base_dir, stamp_date)
        try:
            os.makedirs(target_dir, exist_ok=True)
            return os.path.join(target_dir, f"{prefix}_{stamp_full}.log")
        except Exception:
            td = os.path.join(tempfile.gettempdir(), "AGOL_SD", stamp_date)
            os.makedirs(td, exist_ok=True)
            return os.path.join(td, f"{prefix}_{stamp_full}.log")

    @property
    def path(self):
        return self._log_path

    def log(self, msg):
        stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        line = f"[{stamp}] {msg}"
        self._lines.append(line)
        print(line)

    def flush(self):
        folder = os.path.dirname(self._log_path)
        os.makedirs(folder, exist_ok=True)
        tmp = self._log_path + ".tmp"
        with open(tmp, "w", encoding="utf-8") as f:
            f.write("\n".join(self._lines) + "\n")
        os.replace(tmp, self._log_path)

# ---------- helpers ----------
def norm_key(k: str) -> str:
    return (k or "").strip().strip('"').strip().lower()

def norm_val(v: str) -> str:
    return (v or "").strip().strip('"').strip()

def is_truthy(s: str) -> bool:
    return norm_val(s).upper() in ("Y", "YES", "TRUE", "1")

def connect_gis(logger: RunLogger):
    if GIS_PROFILE:
        gis = GIS(GIS_PROFILE)
    else:
        gis = GIS(PORTAL_URL, PORTAL_USER, PORTAL_PASS)
    logger.log(f"Connected to AGOL as: {gis.users.me.username}")
    arcpy.SignInToPortal(PORTAL_URL, PORTAL_USER, PORTAL_PASS)
    return gis

def send_email(subject, body, attachments=None):
    if not EMAIL_ENABLE:
        return
    msg = EmailMessage()
    msg["From"] = EMAIL_FROM
    msg["To"] = ", ".join(EMAIL_TO)
    msg["Subject"] = subject
    msg.set_content(body)

    for att in attachments or []:
        try:
            with open(att, "rb") as f:
                data = f.read()
            msg.add_attachment(
                data, maintype="text", subtype="plain",
                filename=os.path.basename(att)
            )
        except Exception as e:
            body += f"\n\n(Attachment failed: {att} - {e})"
            msg.set_content(body)

    with smtplib.SMTP(SMTP_SERVER, SMTP_PORT, timeout=60) as s:
        s.ehlo()
        try:
            s.starttls(); s.ehlo()
        except Exception:
            pass
        if SMTP_USERNAME and SMTP_PASSWORD:
            s.login(SMTP_USERNAME, SMTP_PASSWORD)
        s.send_message(msg)

# ---------- build SD from curated APRX map (no binding) ----------
def build_sd_from_aprx_map(aprx_path, service_name, log):
    """
    Open the curated APRX, select the map whose name == service_name,
    and export & stage an SD. Assumes map layers already point to the correct Master GDB.
    """
    if not os.path.exists(aprx_path):
        raise FileNotFoundError(f"APRX not found: {aprx_path}")

    work = tempfile.mkdtemp(prefix="sdaprx_")
    temp_aprx = os.path.join(work, "temp.aprx")
    arcpy.mp.ArcGISProject(aprx_path).saveACopy(temp_aprx)
    aprx = arcpy.mp.ArcGISProject(temp_aprx)

    maps = {m.name: m for m in aprx.listMaps()}
    if service_name in maps:
        m = maps[service_name]
    elif len(maps) == 1:
        m = list(maps.values())[0]
        log(f"Map named '{service_name}' not found; using the only map in APRX: '{m.name}'")
    else:
        raise RuntimeError(f"No map named '{service_name}' in APRX and multiple maps exist. Add a map named exactly '{service_name}'.")

    log(f"Using curated map: '{m.name}' (no rebinding)")
    try:
        aprx.save()
    except Exception:
        pass

    sddraft = os.path.join(work, f"{service_name}.sddraft")
    sd_file = os.path.join(work, f"{service_name}.sd")
    sharing_draft = m.getWebLayerSharingDraft("HOSTING_SERVER", "FEATURE", service_name)
    sharing_draft.overwriteExistingService = True
    sharing_draft.copyDataToServer = True

    log(f"Export SDDraft: {sddraft}")
    sharing_draft.exportToSDDraft(sddraft)

    log(f"Stage SD: {sd_file}")
    arcpy.env.overwriteOutput = True
    arcpy.SetLogHistory(False)
    arcpy.StageService_server(sddraft, sd_file)

    try:
        aprx.save()
    except Exception:
        pass
    return sd_file

# ---------------------------------------------------
def main():
    logger = RunLogger(LOG_DIR)
    log = logger.log

    try:
        gis = connect_gis(logger)
    except Exception as e:
        log(f"ERROR: cannot connect/login: {e}")
        logger.flush()
        send_email("AGOL SD overwrite: FAILED (login)", f"Login failed.\nLog: {logger.path}", [logger.path])
        return

    # Read manifest (CSV of services to process)
    try:
        with open(CSV_MANIFEST, newline="", encoding="utf-8-sig") as f:
            rdr = csv.DictReader(f)
            raw_rows = list(rdr)
    except Exception as e:
        log(f"ERROR: cannot read manifest: {CSV_MANIFEST} ({e})")
        logger.flush()
        send_email("AGOL SD overwrite: FAILED (manifest)", f"Could not read manifest.\nLog: {logger.path}", [logger.path])
        return

    rows = [{norm_key(k): v for k, v in r.items()} for r in raw_rows]
    if rows:
        log(f"Manifest headers detected: {sorted(rows[0].keys())}")
    else:
        log("Manifest appears empty.")
        logger.flush()
        return

    log(f"=== Start SD overwrite run: {len(rows)} rows in manifest ===")
    results = [] # (service_name, ok_bool, msg)

    for r in rows:
        en = is_truthy(r.get("enabled", ""))
        if not en:
            continue

        gdb = norm_val(r.get("mastergdb", ""))
        sname = norm_val(r.get("servicename", "")) or os.path.splitext(os.path.basename(gdb))[0]
        sdid = norm_val(r.get("sditemid", ""))

        share_org = is_truthy(r.get("shareorg", ""))
        share_everyone = is_truthy(r.get("shareeveryone", ""))
        share_groups = [g.strip() for g in norm_val(r.get("sharegroups", "")).split(",") if g.strip()]

        try:

            sd_item = gis.content.get(sdid)
            if not sd_item or sd_item.type != "Service Definition":
                msg = f"SKIP (bad SDItemID or not SD): {sdid}"
                log(msg); results.append((sname, False, msg)); continue

            log(f"Preparing SD from curated map for service: '{sname}' ")
            sd_file = build_sd_from_aprx_map(BLANK_APRX, sname, log)

            log(f"Updating SD item data: {sd_item.title} ({sd_item.id})")
            sd_item.update(data=sd_file)

            log(f"Publishing overwrite for: {sd_item.title}")
            fs_item = sd_item.publish(overwrite=True)
            
            #refresh service definition
            flc = FeatureLayerCollection.fromitem(fs_item)
            try:
                flc.manager.refresh()
            except Exception:
                try:
                    gis._con.post(f"{flc.url}/refresh", {})
                except Exception:
                    pass
                    
            #nudge item metadata to force a reindex
            try:
                fs_item.update(item_properties={"snippet": fs_item.snippet or ""})
            except Exception:
                pass

            if share_org or share_everyone or share_groups:
                try:
                    fs_item.share(org=share_org, everyone=share_everyone,
                                  groups=",".join(share_groups) if share_groups else None)
                    log(f"Shared: org={share_org} everyone={share_everyone} groups={share_groups}")
                except Exception as e:
                    log(f"Share warning: {e}")

            log(f"Overwrite OK: {fs_item.title} ({fs_item.id})")
            results.append((sname, True, "OK"))

            time.sleep(2)

        except Exception as e:
            msg = f"ERROR processing {sname}: {e}"
            log(msg)
            results.append((sname, False, str(e)))

    ok = sum(1 for _, okb, _ in results if okb)
    fail = sum(1 for _, okb, _ in results if not okb)
    log(f"=== End SD overwrite run: OK={ok} FAIL={fail} ===")

    logger.flush()

    if EMAIL_ENABLE:
        subject = f"AGOL SD Overwrite: OK={ok} FAIL={fail}"
        body = [subject, "", "Details:"]
        for n, okb, msg in results:
            body.append(f"- {n}: {'OK' if okb else 'FAIL'} ({msg})")
        send_email(subject, "\n".join(body), attachments=[logger.path])


if __name__ == "__main__":
    main()
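
For comparison, here's the portal-side check I run after the overwrite (reusing the gis connection and imports from the snippet above; the item ID is a placeholder):

# Portal-side check (item ID is a placeholder)
item = gis.content.get("<feature-layer-item-id>")

# Layers as the ArcGIS API for Python exposes them on the item
print("item.layers:", [lyr.properties.name for lyr in item.layers])

# Layers as the service definition reports them
flc = FeatureLayerCollection.fromitem(item)
print("service definition:", [lyr.name for lyr in flc.properties.layers])

# Item data can carry layer configuration saved from Map Viewer;
# checking whether it still references only the old layers
print("item data:", item.get_data())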



Looking forward to your thoughts!
