@squarepegsys
Last active August 20, 2020 14:33
Creating a large data insert in a Django migration
# Generated by Django 3.1 on 2020-08-19 15:28
import csv
from pathlib import Path

from django.db import migrations, transaction


def parse_file(apps, schema_editor):
    # Use the historical versions of the models so the migration keeps
    # working even if the model definitions change later.
    State = apps.get_model("states", "State")
    County = apps.get_model("states", "County")
    City = apps.get_model("states", "City")

    us_info = Path("us_cities_states_counties.csv")

    # Manage transactions by hand so the insert can be committed in batches
    # instead of as one huge transaction.
    transaction.set_autocommit(False)
    with us_info.open() as info_fp:
        rows = csv.DictReader(info_fp, delimiter="|")
        obj_count = 0
        for row in rows:
            if not row["State full"] or not row["State short"]:
                continue

            state, created = State.objects.get_or_create(
                name=row["State full"], symbol=row["State short"],
            )
            if created:
                print(state.name)
                obj_count += 1

            county, created = County.objects.get_or_create(
                name=row["County"], state=state
            )
            if created:
                obj_count += 1

            city, created = City.objects.get_or_create(name=row["City"], county=county)
            if created:
                obj_count += 1

            # Commit every 200 newly created objects to keep the working set small.
            if obj_count >= 200:
                transaction.commit()
                obj_count = 0

    transaction.commit()


class Migration(migrations.Migration):
    # atomic = False lets parse_file manage its own commits; otherwise the
    # whole RunPython operation would run inside a single transaction.
    atomic = False

    dependencies = [
        ("states", "0001_initial"),
    ]

    operations = [
        migrations.RunPython(parse_file),
    ]
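For context, the migration reads a pipe-delimited CSV with at least the columns "City", "County", "State short", and "State full", and it assumes the "states" app's initial migration already created State, County, and City models. Below is a minimal sketch of what those models might look like, inferred from the get_or_create() calls above; the field lengths and on_delete choices are assumptions, not taken from the gist.

from django.db import models


class State(models.Model):
    # Field names match the migration's get_or_create() lookups.
    name = models.CharField(max_length=100)      # assumed length
    symbol = models.CharField(max_length=2)      # assumed length


class County(models.Model):
    name = models.CharField(max_length=100)      # assumed length
    state = models.ForeignKey(State, on_delete=models.CASCADE)  # assumed on_delete


class City(models.Model):
    name = models.CharField(max_length=100)      # assumed length
    county = models.ForeignKey(County, on_delete=models.CASCADE)  # assumed on_delete


Because us_cities_states_counties.csv is opened with a relative path, run the migration (python manage.py migrate states) from the directory that contains the CSV file.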