Python script for batch conversion from DBF to CSV files

2640
3
08-22-2011 06:14 PM
ZiaAhmed
New Contributor III
The following Python script (http://forums.esri.com/Thread.asp?c=93&f=1729&t=279777) works well for conversion of a single DBF file to a CSV file. But I want to convert hundreds of DBF files to CSV files. I need help adding a loop to this script that lists the DBF files in an input directory ("K:\\SSURGO_TABLE\\DATA\\DBF") and saves each one as a CSV file in an output directory ("K:\\SSURGO_TABLE\\DATA\\CSV").
Thanks
Zia


#-------------------------------------
import sys, traceback, os
import arcgisscripting

# Create the ArcGIS 9.x geoprocessor object.
gp = arcgisscripting.create()

# NOTE: a raw string already keeps backslashes literal, so single
# backslashes are correct here. The original r'K:\\...' form embedded
# doubled backslashes into the path.
path = r'K:\SSURGO_TABLE\DATA\DBF'
#outWorkspace = r'K:\SSURGO_TABLE\DATA\CSV'
gp.workspace = path
#table = gp.ListTables()
# Single hard-coded table for now; use gp.ListTables() to batch-convert.
table = 'co_cornmukey.dbf'
csvseparator = ','
outputpath = os.path.join(path, 'co_cornmukey.csv')

def print_exception():
    """Print a formatted traceback for the exception currently being handled.

    Must be called from inside an ``except`` block; reads the active
    exception via sys.exc_info().
    """
    # sys.exc_info() replaces the deprecated (and not thread-safe)
    # sys.exc_type / sys.exc_value globals used previously.
    exc_type, exc_value, tb = sys.exc_info()
    # Reverse the frames so the innermost (failing) frame is listed first.
    frames = traceback.format_tb(tb)
    frames.reverse()
    tbinfo = "".join(frames)
    pymsg = ("ERROR:\nTraceback Info:\n" + tbinfo +
             "Error Info:\n    " + str(exc_type) + ": " + str(exc_value))
    print(pymsg)

def get_fieldnames(fields, ignorefields=None):
    """Return the names of *fields*, skipping any listed in *ignorefields*.

    *fields* is a geoprocessor field enumerator: calling ``fields.next()``
    returns successive field objects (each with a ``.name``) until it
    returns None.
    """
    # Avoid a mutable default argument ([] is shared across calls).
    if ignorefields is None:
        ignorefields = []
    return [field.name
            for field in iter(fields.next, None)
            if field.name not in ignorefields]

try:
    # Discover the table's fields, then dump the header plus every row
    # to a comma-separated text file.
    info = gp.describe(table)
    ignore_fields = []
    fieldnames = get_fieldnames(info.fields, ignore_fields)
    print(fieldnames)

    rows = gp.searchcursor(table)

    # First line is the CSV header, then one joined line per record.
    output = []
    output.append(csvseparator.join(fieldnames))
    for row in iter(rows.next, None):
        outputrow = [str(row.getvalue(fieldname)) for fieldname in fieldnames]
        output.append(csvseparator.join(outputrow))
    # -1: don't count the header line as a data row.
    print('found ' + str(len(output) - 1) + ' rows')

    # with-statement guarantees the file is closed even if the write fails.
    with open(outputpath, 'w') as f:
        f.write('\n'.join(output))
except Exception:
    print_exception()
    # Bug fix: the original discarded the return value of getmessages().
    print(gp.getmessages(2))
Tags (2)
0 Kudos
3 Replies
StacyRendall1
Occasional Contributor III
Try something like os.walk("K:\\SSURGO_TABLE\\DATA\\DBF")... Read around for more info on how to use it (I can't find an example on my computer right now).
0 Kudos
StacyRendall1
Occasional Contributor III
Check out this post - it has an example of os.walk() used in an arcpy script...

http://forums.arcgis.com/threads/37795-Read-a-MXD-file-and-change-all-comma-and-blank-spaces-to-noth...
0 Kudos
RaphaelR
Occasional Contributor II
The glob module should work for this as well.

# Import modules (indentation normalized: as pasted, the mixed 3/4/5-space
# top-level indents were a syntax error).
import glob
import os
import arcpy

# Set geoprocessing workspace and working directory
path = r'C:\example'
print("Setting workspace to: " + path)
arcpy.env.workspace = path
os.chdir(path)

# Collect every DBF file in the workspace
listFiles = glob.glob('*.dbf')
print(listFiles)

# Loop through the list of DBF files
for currentFile in listFiles:
    # Current file name without its 4-char ".dbf" extension
    filename = currentFile[:-4]
    # os.path.join builds the path with the correct separator for the OS
    FilePath = os.path.join(path, currentFile)

    # then run your conversion stuff on the current filename
0 Kudos