added automated column support

1 parent 7e187c1 · commit 7a672202293453ce930e7de44368a8c846c246e0
Andrew Willingham committed Oct 15, 2012
Showing with 14 additions and 6 deletions.
  1. +1 −1 README.md
  2. +13 −5 csv2coll.py
README.md
@@ -6,4 +6,4 @@ python script that converts an excel-exported .csv file into a .txt file readabl
USAGE:
===========
-Replace "INPUT FILE NAME.csv" and "OUTPUT FILE NAME.txt" with your filenames and set "columns" variable to the number of columns in your inputfile. Run, reference the .txt output file in a coll object in max/msp (make sure the .txt is in your max/msp project directory), and you're good to go!
+Replace "INPUT FILE NAME.csv" and "OUTPUT FILE NAME.txt" with your filenames. Run, reference the .txt output file in a coll object in max/msp (make sure the .txt is in your max/msp project directory), and you're good to go!
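
With this change, the only edits a user makes are the two filename assignments near the top of csv2coll.py. A quick illustration (the filenames below are placeholders, not files shipped with the repo):

    inputfile = "mydata.csv"   #the .csv file exported from Excel (placeholder name)
    outputfile = "mydata.txt"  #the coll-readable output you reference from max/msp (placeholder name)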
csv2coll.py
@@ -1,11 +1,19 @@
+#csv2coll by Andrew Willingham
+#updated 10/15/2012
+#www.wilhelmsound.com
+#http://github.com/wilione/csv2coll
+
import csv
index = 0;
-datalist = []
-inputfile = "INPUT FILE NAME.csv" #the name of the .csv file you want to convert
-outputfile = "OUTPUT FILE NAME.txt" #the desired name of the output file
-columns = 7; #number of columns in the .csv file
+inputfile = "INPUT FILENAME.csv" #the name of the .csv file you want to convert
+outputfile = "OUTPUT FILENAME.txt" #the desired name of the output file
+
+#calculate number of columns in .csv file
+columnstemp = csv.reader(open(inputfile, "rU"), dialect=csv.excel)
+columns = len(zip(*columnstemp))
+#read inputfile into array/index
infile = csv.reader(open(inputfile, "rU"), dialect=csv.excel)
lines = [[0] + l for l in infile] #read the csv file into an array and create column for index value
@@ -18,4 +26,4 @@
with open(outputfile, mode="w") as outfile:
writer = csv.writer(outfile, delimiter=' ')
- writer.writerows(lines)
+ writer.writerows(lines)
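
The automated column support works by opening the input .csv a second time and transposing its rows with zip(), so the length of the transposed result is the number of columns. A minimal, self-contained sketch of that idea (the filename is hypothetical; this version targets Python 3, where zip() must be wrapped in list() and newline="" replaces the old "rU" open mode):

    import csv

    def count_columns(path):
        #transpose the rows with zip(*rows): each column becomes one tuple,
        #so the length of the transposed list is the number of columns
        with open(path, newline="") as f:
            rows = csv.reader(f, dialect=csv.excel)
            return len(list(zip(*rows)))

    print(count_columns("example.csv"))  #hypothetical input file

Note that zip() truncates to the shortest row, so this assumes every row has the same number of fields; counting the fields in just the first row (len(next(reader))) would give the same answer without scanning the whole file, but the commit keeps the zip() approach.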
