Made csvimport somewhat tolerant of load errors.

It used to abruptly abort if a csv file were missing, which wasn't very
nice when I'd just added a new table definition and was trying to reload
everything else.

Now it prints a status per table while loading, and will declare missing
tables to be...  missing.
Eevee 2009-05-01 06:24:09 -07:00
parent 9f6f210fa5
commit d9a2d96ede
1 changed file with 11 additions and 2 deletions
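
The heart of the change is wrapping the per-table open() in a try/except so
one missing file no longer kills the whole import. As a rough modern-Python
sketch of that pattern, independent of this codebase (the load_tables name,
the table list, and the loader body are purely illustrative):

    import csv
    import os

    def load_tables(table_names, directory='.'):
        """Load each table's CSV, skipping tables whose file is missing."""
        for table_name in table_names:
            # Print the table name, padded so the statuses line up in one column
            print('%-40s' % (table_name + '...'), end=' ')
            path = os.path.join(directory, table_name + '.csv')
            try:
                csvfile = open(path, newline='')
            except IOError:
                # No file for this table; report it and move on to the next one
                print('no data!')
                continue
            with csvfile:
                for row in csv.reader(csvfile):
                    pass  # insert the row into the database here
            print('loaded')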

@@ -42,9 +42,17 @@ def csvimport(engine_uri, dir='.'):
     for table in sorted(instrumentation_registry.manager_finders.keys(),
                         key=lambda self: self.__table__.name):
         table_name = table.__table__.name
-        print table_name
+        # Print the table name but leave the cursor in a fixed column
+        print table_name + '...', ' ' * (40 - len(table_name)),
 
-        reader = csv.reader(open("%s/%s.csv" % (dir, table_name), 'rb'), lineterminator='\n')
+        try:
+            csvfile = open("%s/%s.csv" % (dir, table_name), 'rb')
+        except IOError:
+            # File doesn't exist; don't load anything!
+            print 'no data!'
+            continue
+
+        reader = csv.reader(csvfile, lineterminator='\n')
         column_names = [unicode(column) for column in reader.next()]
 
         for csvs in reader:
@@ -71,6 +79,7 @@ def csvimport(engine_uri, dir='.'):
             session.add(row)
 
         session.commit()
+        print 'loaded'
 
     # Shouldn't matter since this is usually the end of the program and thus
     # the connection too, but let's change this back just in case
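
With this in place the importer reports one status line per table; the output
looks roughly like the following (table names and exact alignment are
illustrative):

    abilities...                             loaded
    berry_flavors...                         no data!
    egg_groups...                            loaded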