X-Git-Url: http://git.veekun.com/zzz-pokedex.git/blobdiff_plain/830d38a5620c676f9369e7acadbd972464e57984..638bc0eb815f3cb8bff0bfc027d4d0d931ab7273:/pokedex/__init__.py

diff --git a/pokedex/__init__.py b/pokedex/__init__.py
index 38c0487..02b8ce5 100644
--- a/pokedex/__init__.py
+++ b/pokedex/__init__.py
@@ -1,9 +1,10 @@
 # encoding: utf8
 import sys
 
+from sqlalchemy.exc import IntegrityError
 import sqlalchemy.types
 
-from .db import connect, metadata
+from .db import connect, metadata, tables as tables_module
 
 def main():
     if len(sys.argv) <= 1:
@@ -20,33 +21,50 @@ def main():
         help()
 
 
-def csvimport(engine_uri, dir='.'):
+def csvimport(engine_uri, directory='.'):
     import csv
     from sqlalchemy.orm.attributes import instrumentation_registry
 
-    session = connect(engine_uri)
+    # Use autocommit in case rows fail due to foreign key incest
+    session = connect(engine_uri, autocommit=True, autoflush=False)
 
     metadata.create_all()
 
-    # Oh, mysql-chan.
-    # TODO try to insert data in preorder so we don't need this hack and won't
-    # break similarly on other engines
-    if 'mysql' in engine_uri:
-        session.execute('SET FOREIGN_KEY_CHECKS = 0')
-
-    # This is a secret attribute on a secret singleton of a secret class that
-    # appears to hopefully contain all registered classes as keys.
-    # There is no other way to accomplish this, as far as I can tell.
-    # Fuck.
-    for table in sorted(instrumentation_registry.manager_finders.keys(),
-                        key=lambda self: self.__table__.name):
-        table_name = table.__table__.name
+    # SQLAlchemy is retarded and there is no way for me to get a list of ORM
+    # classes besides to inspect the module they all happen to live in for
+    # things that look right.
+    table_base = tables_module.TableBase
+    orm_classes = {}     # table object => table class
+
+    for name in dir(tables_module):
+        # dir() returns strings!  How /convenient/.
+        thingy = getattr(tables_module, name)
+
+        if not isinstance(thingy, type):
+            # Not a class; bail
+            continue
+        elif not issubclass(thingy, table_base):
+            # Not a declarative table; bail
+            continue
+        elif thingy == table_base:
+            # Declarative table base, so not a real table; bail
+            continue
+
+        # thingy is definitely a table class!  Hallelujah.
+        orm_classes[thingy.__table__] = thingy
+
+    # Okay, run through the tables and actually load the data now
+    for table_obj in metadata.sorted_tables:
+        table_class = orm_classes[table_obj]
+        table_name = table_obj.name
+
+        # Print the table name but leave the cursor in a fixed column
         print table_name + '...', ' ' * (40 - len(table_name)),
+        sys.stdout.flush()
 
         try:
-            csvfile = open("%s/%s.csv" % (dir, table_name), 'rb')
+            csvfile = open("%s/%s.csv" % (directory, table_name), 'rb')
         except IOError:
             # File doesn't exist; don't load anything!
             print 'no data!'
@@ -55,11 +73,17 @@ def csvimport(engine_uri, dir='.'):
         reader = csv.reader(csvfile, lineterminator='\n')
         column_names = [unicode(column) for column in reader.next()]
 
+        # Self-referential tables may contain rows with foreign keys of
+        # other rows in the same table that do not yet exist.  We'll keep
+        # a running list of these and try inserting them again after the
+        # rest are done
+        failed_rows = []
+
         for csvs in reader:
-            row = table()
+            row = table_class()
 
             for column_name, value in zip(column_names, csvs):
-                column = table.__table__.c[column_name]
+                column = table_obj.c[column_name]
                 if column.nullable and value == '':
                     # Empty string in a nullable column really means NULL
                     value = None
@@ -76,18 +100,35 @@ def csvimport(engine_uri, dir='.'):
 
                 setattr(row, column_name, value)
 
-            session.add(row)
-
-        session.commit()
-        print 'loaded'
-
-    # Shouldn't matter since this is usually the end of the program and thus
-    # the connection too, but let's change this back just in case
-    if 'mysql' in engine_uri:
-        session.execute('SET FOREIGN_KEY_CHECKS = 1')
-
-
-def csvexport(engine_uri, dir='.'):
+            try:
+                session.add(row)
+                session.flush()
+            except IntegrityError as e:
+                failed_rows.append(row)
+
+        # Loop over the failed rows and keep trying to insert them.  If a loop
+        # doesn't manage to insert any rows, bail.
+        do_another_loop = True
+        while failed_rows and do_another_loop:
+            do_another_loop = False
+
+            for i, row in enumerate(failed_rows):
+                try:
+                    session.add(row)
+                    session.flush()
+
+                    # Success!
+                    del failed_rows[i]
+                    do_another_loop = True
+                except IntegrityError as e:
+                    pass
+
+        if failed_rows:
+            print len(failed_rows), "rows failed"
+        else:
+            print 'loaded'
+
+def csvexport(engine_uri, directory='.'):
     import csv
     session = connect(engine_uri)
 
@@ -95,7 +136,8 @@ def csvexport(engine_uri, dir='.'):
         print table_name
         table = metadata.tables[table_name]
 
-        writer = csv.writer(open("%s/%s.csv" % (dir, table_name), 'wb'), lineterminator='\n')
+        writer = csv.writer(open("%s/%s.csv" % (directory, table_name), 'wb'),
+                            lineterminator='\n')
         columns = [col.name for col in table.columns]
         writer.writerow(columns)