Answered (Assumed Answered)

join data after unzipping a service replica

Question asked by alex.gole_ELDORADOCOUNTY on Sep 5, 2017
Latest reply on Feb 27, 2018 by ahuygen

Hi all,

I am trying to create a script that would allow me to join data programmatically after a replica is created.


I am not sure how to select the feature class and table located in the unzipped gdb.


Here is the script:

import urllib, urllib2, json, time, datetime, os,fnmatch, zipfile, arcpy, sys, logging
# Glob patterns used when walking the download folder for archives / gdbs.
pattern = '*.zip'
gdbextension =  '*.gdb'

# Log to a UNC share so scheduled (cron) runs leave an audit trail.
logging.basicConfig(filename=r"\\arcgisserver\cronjobs\stormwater\backup.log",format='%(asctime)s %(levelname)s: %(message)s',datefmt='%m/%d/%Y %I:%M:%S %p',level=logging.INFO)

# Deployment-specific settings — fill these in before running.
username = ""                                             #CHANGE
password = ""                                             #CHANGE
replicaURL = ""    #CHANGE
replicaLayers = []                                               #CHANGE
replicaName = ""                                       #CHANGE

def sendRequest(request):
    """Send an HTTP request and return the decoded JSON response.

    :param request: a urllib2.Request (or URL string) to open.
    :return: the response body parsed with json.loads.
    """
    response = urllib2.urlopen(request)
    # NOTE(review): the original line was truncated ("readResponse =");
    # reading the response body is the only sensible reconstruction.
    readResponse = response.read()
    jsonResponse = json.loads(readResponse)
    return jsonResponse

# NOTE(review): reconstructed from a garbled fused line
# ("return jsonResponse'..Starting process..')").
logging.info('..Starting process..')
print ("Generating token")
# Request an ArcGIS token from the generateToken endpoint.
# url is a deployment placeholder (see the #CHANGE config block above).
url = ""
data = {'username': username,
        'password': password,
        'referer': "",
        'f': 'json'}
request = urllib2.Request(url, urllib.urlencode(data))
jsonResponse = sendRequest(request)
# The token authorizes every subsequent REST call below.
token = jsonResponse['token']

print("Creating the replica")
# POST to the service's createReplica endpoint: an asynchronous file-gdb
# export of the configured layers.  syncModel 'none' makes this a one-way
# data extract rather than a two-way sync replica.
data = {'f' : 'json',
    'replicaName' : replicaName,
    'layers' : replicaLayers,
    'returnAttachments' : 'true',
    'returnAttachmentsDatabyURL' : 'false',
    'syncModel' : 'none',
    'dataFormat' : 'filegdb',
    'async' : 'true',
    'token': token}
request = urllib2.Request(replicaURL, urllib.urlencode(data))
# Response for an async job carries a statusUrl to poll (used below).
jsonResponse = sendRequest(request)


print("Pinging the server")
# Poll the async replica job's status URL until it reports completion.
responseUrl = jsonResponse['statusUrl']
url = "{}?f=json&token={}".format(responseUrl, token)
request = urllib2.Request(url)
jsonResponse = sendRequest(request)
while not jsonResponse.get("status") == "Completed":
    # Bail out instead of polling forever if the job ends in error.
    if jsonResponse.get("status") in ("Failed", "CompletedWithErrors"):
        logging.error("Replica job ended with status: %s", jsonResponse.get("status"))
        sys.exit(1)
    # Original loop busy-waited; sleep between polls to avoid hammering
    # the server ('time' is already imported at the top of the file).
    time.sleep(5)
    request = urllib2.Request(url)
    jsonResponse = sendRequest(request)
# Create string date for today's date.
# NOTE(review): the original line was truncated ("dt = str(").  An ISO
# date (e.g. '2018-02-27') is assumed here — confirm the intended format.
dt = str(datetime.date.today())
# Create a new download folder named after today's date.
newfolder = 'C:\\replicas' + "\\" + dt
if not os.path.exists(newfolder): os.makedirs(newfolder)

print("Downloading the replica. In case this fails note that the replica URL is: \n")
jres = jsonResponse['resultUrl']
url = "{0}?token={1}".format(jres, token)
print(url)
# NOTE(review): reconstructed from a garbled fused line
# ('print(url)"Replica URL: ")').
logging.info("Replica URL: " + url)
f = urllib2.urlopen(url)
# Stream the zipped file geodatabase into the dated folder.  The original
# 'with' block had no body, so the file was never actually written.
with open(newfolder + "\\" + os.path.basename(jres), "wb") as local_file:
    local_file.write(f.read())
print("\n Finished!")
# NOTE(review): reconstructed from a garbled fused line
# ('print("\n Finished!")"finished creating replica")').
logging.info("finished creating replica")

# Walk the download folder and extract each *.zip into a sibling
# subfolder named after the archive (minus the .zip extension).
for root, dirs, files in os.walk(newfolder):
    for filename in fnmatch.filter(files, pattern):
        archive = os.path.join(root, filename)
        print(archive)
        target = os.path.join(root, os.path.splitext(filename)[0])
        # Use a context manager so the archive handle is closed; the
        # original never closed the ZipFile object (resource leak).
        with zipfile.ZipFile(archive) as zf:
            zf.extractall(target)