A while back I showed how to import a .csv to create a shapefile. The script that I had written for a client went unused for quite some time, and after trying out the resulting shapefile it was determined that the fields from the .csv were being formatted incorrectly in the resulting shapefile.
Last night I set out to fix this. The idea was to create a blank database table with the proper field formats and then copy the values from the .csv to the database table. What I thought was going to take 30 minutes to complete, turned into a long adventure. About 6 hours later I finally figured it out. Interestingly enough, I found almost NO info on what I was trying to do. Finally I found a partial reference in a forum thread from 2006 and wasn’t sure if it would even work.
# Import a .csv and stage it for copying into a pre-formatted table
# template (ArcGIS 9.x geoprocessor / arcgisscripting).
#
# NOTE(review): the blog formatting stripped the list indices from every
# sys.argv reference; they are restored here from the adjacent comments
# (argv[1] = input .csv, argv[2] = output shapefile, argv[3] = template).

# import required modules
import sys, os, glob
import arcgisscripting

# Create the geoprocessor object
gp = arcgisscripting.create()

# I was having issues with a table view not deleting so I added the overwrite
gp.overwriteoutput = 1

## System arguments
# Input .csv
File_in = sys.argv[1]
# Output shapefile
File_out = sys.argv[2]
# Input table_template (blank table carrying the correct field formats)
tabletemp = sys.argv[3]

# Set the workspace for the processing files (directory of the input .csv)
gp.workspace = os.path.dirname(sys.argv[1])

# Convert the .csv to .dbf in the directory from sys.argv[1]
gp.AddMessage("\nCreating processing .dbf from .csv...")
gp.TableToDbase_conversion(File_in, os.path.dirname(sys.argv[1]))

## Make copies so that processing doesn't take place on the originals.
gp.AddMessage("\nCreating processing file...")
# File_in[:-4] drops the ".csv" extension; the conversion above wrote a
# .dbf with the same base name next to the input file.
gp.Copy_management(File_in[:-4] + ".dbf", "tempfile.dbf")
gp.Copy_management(tabletemp, "temptable.dbf")

# Need to use a search cursor to read the values from the input file
# and then use an insert cursor to write to the new table.
# The part I could not figure out is the SetValue function. By using a
# while statement we can read thru each of the records in the input .csv
# and then write them out using SetValue.
# The SetValue takes two parameters; the first is the field name in the new
# table and the second is the field name in the read file. Quite similar
# to Field Mapping actually. The input field name must not match the output
# field name, however the formats do need to match.
# Copy every record from the .csv-derived table into the template table,
# then build the output point shapefile and clean up the scratch files.
gp.AddMessage("\nSearch cursor process...")
intable = "tempfile.dbf"
outtable = "temptable.dbf"

table = gp.SearchCursor(intable)
run = table.Next()
outRows = gp.InsertCursor(outtable)
while run:
    outRow = outRows.NewRow()
    # SetValue(target_field, source_value): the target field name lives in
    # the template table; the formats of the two fields must match.
    outRow.SetValue("FieldName1", run.FieldName1)
    outRow.SetValue("FieldName2", run.FieldName2)
    outRow.SetValue("FieldName3", run.FieldName3)
    outRow.SetValue("FieldName4", run.FieldName4)
    # BUG FIX: the original never committed the row. arcgisscripting's
    # insert-cursor pattern is NewRow() -> SetValue(...) -> InsertRow(row);
    # without InsertRow the output table stays empty.
    outRows.InsertRow(outRow)
    run = table.Next()
# Release the insert cursor so the .dbf lock is dropped before we use it.
del outRows

# From here you can do whatever you wish with the new table "temptable.dbf".
# If you want to make a points shapefile you will need to use MakeEventLayer
# as shown below.
gp.AddMessage("Creating the output points file...")

# Make a point shapefile based on the X,Y in the table
gp.MakeXYEventLayer_management("temptable.dbf", "X", "Y", "Table_output")
gp.CopyFeatures_management("Table_output", "temp.shp")

# Create a prj file for temp.shp
coordsys = "Coordinate Systems/Geographic Coordinate Systems/World/WGS 1984.prj"
gp.defineprojection("temp.shp", coordsys)

gp.AddMessage("Creating final output file...")
# Create the output files based on the user's input
gp.Rename_management("temp.shp", File_out)

gp.AddMessage("Cleanup...")
# Delete "process" files (renamed loop variable: `file` shadows a builtin)
for fname in glob.glob(gp.workspace + "/temp*"):
    os.remove(fname)
for fname in glob.glob(gp.workspace + "/*xml"):
    os.remove(fname)

gp.AddMessage("\nImport Successful.\n")
Hope this helps!