Cleanup and refactoring: separation of concerns (extract CSV-loading helper into db/util/insert_file_contents.py).
This commit is contained in:
33
db/util/insert_file_contents.py
Normal file
33
db/util/insert_file_contents.py
Normal file
@@ -0,0 +1,33 @@
|
||||
import csv
|
||||
from psycopg2 import sql
|
||||
|
||||
# function to read from a given csv file into postgres
|
||||
def insert_file_contents(conn, cur, file_path, table_name, column_count):
    """Load rows from a CSV file into a Postgres table.

    The first CSV row is treated as a header whose values name the target
    columns; every subsequent row becomes one parameterized INSERT. The
    connection is committed once after all rows are inserted.

    Args:
        conn: open psycopg2 connection (committed at the end).
        cur: cursor created from ``conn``.
        file_path: path of the CSV file to read.
        table_name: name of the destination table.
        column_count: expected number of columns; must match the header.

    Raises:
        ValueError: if the CSV is empty or its header does not contain
            exactly ``column_count`` values.
    """
    # utf-8-sig strips a leading BOM that spreadsheet exports often emit
    with open(file_path, 'r', encoding='utf-8-sig') as f:
        reader = csv.reader(f)

        # First row is the header naming the target columns.
        header_names = next(reader, None)
        if header_names is None or len(header_names) != column_count:
            found = 0 if header_names is None else len(header_names)
            raise ValueError(
                f"expected {column_count} column(s) in {file_path}, found {found}"
            )

        # Compose the statement with sql.Identifier so table/column names are
        # safely quoted, rather than interpolated as raw text via str.format.
        # This also generalizes to any column count, not just 1 or 3.
        statement = sql.SQL("INSERT INTO {table} ({fields}) VALUES ({values})").format(
            table=sql.Identifier(table_name),
            fields=sql.SQL(', ').join(sql.Identifier(name) for name in header_names),
            values=sql.SQL(', ').join(sql.Placeholder() * column_count),
        )

        for row in reader:
            cur.execute(statement, row)

    conn.commit()
|
||||
@@ -1,7 +1,8 @@
|
||||
import psycopg2
|
||||
from psycopg2 import sql
|
||||
import csv
|
||||
import os
|
||||
from insert_file_contents import insert_file_contents
|
||||
from psycopg2 import sql
|
||||
|
||||
# read data from environment if present
|
||||
env_path = "../../.env"
|
||||
@@ -26,45 +27,9 @@ os.close(fd)
|
||||
# Open the database connection and a cursor for the load script.
conn = psycopg2.connect("dbname=e-commerce-092122 user=mikayladobson")
cur = conn.cursor()

# Sanity check: print the current contents of the users table.
cur.execute("SELECT * FROM users;")
for record in cur.fetchall():
    print(record)
|
||||
|
||||
# function to read from a given csv file into postgres
|
||||
def insert_file_contents(file_path, table_name, column_count):
    """Load rows from a CSV file into a Postgres table.

    Relies on the module-level ``conn``/``cur``. The first CSV row is a
    header whose values name the target columns; every later row becomes
    one parameterized INSERT. Commits once after all rows are inserted.

    Args:
        file_path: path of the CSV file to read.
        table_name: name of the destination table.
        column_count: expected number of columns; must match the header.

    Raises:
        ValueError: if the CSV is empty or its header does not contain
            exactly ``column_count`` values.
    """
    # utf-8-sig strips a leading BOM that spreadsheet exports often emit
    with open(file_path, 'r', encoding='utf-8-sig') as f:
        reader = csv.reader(f)

        # First row is the header naming the target columns.
        header_names = next(reader, None)
        if header_names is None or len(header_names) != column_count:
            found = 0 if header_names is None else len(header_names)
            raise ValueError(
                f"expected {column_count} column(s) in {file_path}, found {found}"
            )

        # Compose the statement with sql.Identifier so table/column names are
        # safely quoted, rather than interpolated as raw text via str.format.
        # This also generalizes to any column count, not just 1 or 3.
        statement = sql.SQL("INSERT INTO {table} ({fields}) VALUES ({values})").format(
            table=sql.Identifier(table_name),
            fields=sql.SQL(', ').join(sql.Identifier(name) for name in header_names),
            values=sql.SQL(', ').join(sql.Placeholder() * column_count),
        )

        for row in reader:
            cur.execute(statement, row)

    conn.commit()
|
||||
|
||||
|
||||
# Read the contents of each seed file into Postgres. The stale pre-refactor
# calls (3-argument form without conn/cur, pointing at the old ./ paths) are
# removed: they no longer match the helper's signature and would raise
# TypeError at runtime.
insert_file_contents(conn, cur, "./data/categories.csv", 'category', 1)
insert_file_contents(conn, cur, "./data/regions.csv", 'region', 1)
insert_file_contents(conn, cur, "./data/products.csv", 'product', 3)

print("Database insertions executed successfully.")
|
||||
Reference in New Issue
Block a user