How to speed up Python CSV Read to MySQL Write

I have a 5GB CSV of IP addresses that I need to parse to a MySQL database.

Currently I am reading rows from the CSV one at a time and inserting each into MySQL. It works, but I would love to make it faster.

Could I parallelize the reading and writing somehow? Or perhaps chunk the CSV into pieces and spawn a separate process to read & write each split CSV?

import csv
from csv import reader
from csv import writer
import mysql.connector

# Stream rows from the CSV into MySQL in batches.
# NOTE(review): credentials/host kept as in the original — fill in real values.
cnx = mysql.connector.connect(user='root', password='', host='', database='ips')
cursor = cnx.cursor()

# Parameterized query: the driver escapes values itself, which prevents SQL
# injection and breakage on quotes, and is what executemany() expects.
query = "INSERT INTO ips (ip_start,ip_end,continent) VALUES (%s, %s, %s)"
BATCH_SIZE = 10000

with open('iplist.csv', 'r', newline='') as read_obj:
    csv_reader = reader(read_obj)
    batch = []
    for row in csv_reader:
        batch.append((row[0], row[1], row[2]))
        # Flush in fixed-size batches so the 5GB file never has to fit in memory
        # and each round trip to MySQL carries many rows.
        if len(batch) >= BATCH_SIZE:
            cursor.executemany(query, batch)
            batch = []
    if batch:  # remaining partial batch
        cursor.executemany(query, batch)

# The original only print()-ed the query and never executed or committed it;
# mysql.connector has autocommit off by default, so commit explicitly.
cnx.commit()
cursor.close()
cnx.close()

Any help is appreciated.


I created a pseudo-random CSV file where each row is of the style “111.222.333.444,555.666.777.888,A continent”. The file contains 33 million rows. The following code was able to insert all rows into a MySQL database table in ~3 minutes:

import mysql.connector
import time
import concurrent.futures
import csv
import itertools


def doBulkInsert(rows):
    """Insert one chunk of (ip_start, ip_end, continent) tuples in a single round trip.

    Each worker thread opens its own connection: mysql.connector connection
    objects are not thread-safe, so they must not be shared across the
    ThreadPoolExecutor threads that call this function.
    """
    with mysql.connector.connect(user='andy', password='monster', host='localhost', database='andy') as connection:
        # Plain string literal — the original used an f-string with no
        # placeholders; the %s markers are filled in by the driver from `rows`.
        connection.cursor().executemany('INSERT INTO ips (ip_start, ip_end, continent) VALUES (%s, %s, %s)', rows)
        # autocommit is off by default in mysql.connector; without an explicit
        # commit the inserts may be rolled back when the connection closes.
        connection.commit()

def main():
    """Read CSVFILE in CHUNK-row slices and fan the inserts out to a thread pool.

    CSVFILE and CHUNK are module-level constants (defined elsewhere in the
    script) — presumably the CSV path and the rows-per-batch size; confirm
    against the full file.
    """
    # newline='' is the documented way to open files for the csv module
    # (prevents mangled rows on platforms with \r\n line endings).
    with open(CSVFILE, newline='') as csvfile:
        csvdata = csv.reader(csvfile)
        # Single timer start — the original assigned _s twice; the first
        # assignment was dead code.
        start = time.perf_counter()
        with concurrent.futures.ThreadPoolExecutor() as executor:
            # islice pulls CHUNK rows per iteration without ever loading the
            # whole file; the walrus loop stops on the first empty slice.
            while (data := list(itertools.islice(csvdata, CHUNK))):
                executor.submit(doBulkInsert, data)
        # Report AFTER the executor context exits: ThreadPoolExecutor.__exit__
        # blocks until all submitted tasks finish, so the original print
        # (placed inside the with-block) excluded most of the insert work
        # from the measured duration.
        print(f'Duration = {time.perf_counter()-start}')

if __name__ == '__main__':