Hi Marco,
Sorry for the late reply. Here's what I ended up with — it works, and it does a bit of logging for you too:
# -*- coding: utf-8 -*-
import os
from os import path
import arcpy
from arcpy import env
from sets import Set
import time, datetime
from datetime import date
# Let geoprocessing tools overwrite existing outputs (reruns won't fail).
arcpy.env.overwriteOutput = True
# Target spatial reference used by defineProjection() below.
newProjection = arcpy.SpatialReference(25832) # wkid for 'ETRS_1989_UTM_Zone_32N'
# ################### #
# FUNCTIONS #
# ################### #
def recursiveDatasets(workspace):
    """Collect the paths of all geographic datasets below *workspace*.

    Walks the workspace and its subfolders (including the contents of
    geodatabases) with arcpy.da.Walk and gathers:
      * the path of every feature dataset encountered, and
      * the full path of every feature class that does NOT live inside
        a feature dataset.

    Parameter:
        workspace -- folder or geodatabase to search.
    Returns:
        list of unique dataset paths (order not guaranteed).
    """
    paths = []
    for dirpath, dirnames, filenames in arcpy.da.Walk(workspace):
        for filename in filenames:
            desc = arcpy.Describe(os.path.join(dirpath, filename))
            if hasattr(desc, 'path'):
                descPth = arcpy.Describe(desc.path)
                if hasattr(descPth, 'dataType'):
                    if descPth.dataType == 'FeatureDataset':
                        # Members of a feature dataset are covered by
                        # recording the dataset's own path (dirpath).
                        paths.append(dirpath)
                    elif desc.datasetType == 'FeatureClass':
                        paths.append(os.path.join(dirpath, filename))
    # De-duplicate once after the walk: a feature dataset path is
    # appended once per member feature class.
    return list(set(paths))
def defineProjection(workspace, toRedefine):
    """Redefine the projection of matching datasets in *workspace*.

    Every dataset found by recursiveDatasets() whose current spatial
    reference name equals *toRedefine* has its projection redefined to
    the module-level `newProjection`. DefineProjection only rewrites the
    coordinate-system metadata; no geometry is reprojected.

    Parameters:
        workspace  -- folder or geodatabase to process.
        toRedefine -- spatial reference name to look for.
    """
    # NOTE: loop variable renamed from `path`, which shadowed
    # `from os import path` at module level.
    for datasetPath in recursiveDatasets(workspace):
        desc = arcpy.Describe(datasetPath)
        if desc.spatialReference.Name == toRedefine:
            # Pass the full dataset path, not desc.Name: the bare name is
            # ambiguous outside the current workspace and inside nested
            # feature datasets.
            arcpy.management.DefineProjection(datasetPath, newProjection)
            print(desc.Name + ' wurde neu definiert.')
        else:
            print(desc.Name + ' wurde nicht neu definiert.')
def Logging(myLog, writeFormat, runDate, headers, list1, list2, colWdth):
    """Generate a two-column log file.

    Writes the run date, then the header line, then one formatted line
    per (list1, list2) pair. Pairs are written up to the length of the
    shorter list.

    Parameters:
        myLog       -- full path of the log file.
        writeFormat -- mode passed to open(), e.g. 'a' or 'w'.
        runDate     -- first line written (date of execution).
        headers     -- second line written (column headers).
        list1       -- values for the first column.
        list2       -- values for the second column.
        colWdth     -- format string applied to each pair,
                       e.g. '{:<125}' * 2 + '\\n'.
    """
    # Bind the file handle to its own name instead of shadowing the
    # `myLog` path parameter.
    with open(myLog, writeFormat) as logFile:
        logFile.write(runDate)
        logFile.write(headers)
        for pair in zip(list1, list2):
            logFile.write(colWdth.format(*pair))
# ################# #
# MAIN #
# ################# #
def main():
    """Redefine projections in the target geodatabase and log the result."""
    print('Processing geodatabase...')
    workspace = arcpy.env.workspace = r'C:\Scripts\Redefine_Projection\Thomas\2016-01-25_GeoDB03_gd_Export.gdb'
    toRedefine = 'ETRS_1989_UTM_Zone_32N_6stellen'
    defineProjection(workspace, toRedefine)
    datasets = recursiveDatasets(workspace)
    print('Logging results...')
    # Current spatial reference name of each dataset, in dataset order.
    projections = [arcpy.Describe(ds).spatialReference.Name for ds in datasets]
    # Append a dated two-column report (dataset path / spatial reference).
    myLog = r"C:\Scripts\Redefine_Projection\Log\RedefineProjection.log"
    runDate = 'Tag der Durchführung: ' + date.today().strftime("%Y.%m.%d") + '\n'
    # First column header left-justified in a 125-character field so it
    # lines up with the data rows below.
    headers = '{0: <125}'.format('Pfad der Datensatz') + 'Spatial Reference' + '\n'
    colWdth = "{:<125}" * 2 + "\n"  # two columns, 125 characters wide each
    Logging(myLog, 'a', runDate, headers, datasets, projections, colWdth)


if __name__ == '__main__':
    main()