Cleanup: refactoring and separation of concerns

This commit is contained in:
Mikayla Dobson
2022-09-23 11:18:24 -05:00
parent eb18065135
commit 42fc75c006
7 changed files with 44 additions and 46 deletions

5
.gitignore vendored
View File

@@ -1,5 +1,4 @@
node_modules/
*.env
__pycache__
.DS_Store
.env
config.env

View File

@@ -0,0 +1,33 @@
import csv
from psycopg2 import sql
# function to read from a given csv file into postgres
def insert_file_contents(conn, cur, file_path, table_name, column_count):
    """Insert every data row of a CSV file into a Postgres table.

    The first row of the file is treated as the header; its values become
    the column names of the INSERT statement.

    Args:
        conn: open psycopg2 connection; committed once after all inserts.
        cur: cursor created from ``conn``.
        file_path: path to the CSV file (opened with 'utf-8-sig' so a
            leading BOM is ignored).
        table_name: name of the destination table.
        column_count: expected number of columns; must match the header.

    Raises:
        ValueError: if the header row does not have ``column_count`` columns.
    """
    with open(file_path, 'r', encoding='utf-8-sig') as f:
        reader = csv.reader(f)
        # First row is the header; None means the file is empty.
        header = next(reader, None)
        if header is not None:
            if len(header) != column_count:
                # Original code used a bare `raise` (RuntimeError: "No active
                # exception to re-raise"); raise a meaningful error instead.
                raise ValueError(
                    f"expected {column_count} columns in {file_path}, "
                    f"got {len(header)}"
                )
            # Compose the statement with sql.Identifier so the table and
            # column names are safely quoted -- the previous str.format()
            # approach interpolated them unescaped, allowing SQL injection
            # through the file's header row.  Building the column/placeholder
            # lists dynamically also generalizes the function to any column
            # count (the original supported only 1 or 3).
            statement = sql.SQL("INSERT INTO {} ({}) VALUES ({})").format(
                sql.Identifier(table_name),
                sql.SQL(", ").join(sql.Identifier(name) for name in header),
                sql.SQL(", ").join(sql.Placeholder() for _ in header),
            )
            for row in reader:
                cur.execute(statement, row)
    conn.commit()

View File

@@ -1,7 +1,8 @@
import psycopg2
from psycopg2 import sql
import csv
import os
from insert_file_contents import insert_file_contents
from psycopg2 import sql
# read data from environment if present
env_path = "../../.env"
@@ -26,45 +27,9 @@ os.close(fd)
# Connect to the local Postgres database.
# NOTE(review): dbname/user are hard-coded here even though an .env file is
# read above -- confirm whether the DSN should come from the environment.
conn = psycopg2.connect("dbname=e-commerce-092122 user=mikayladobson")
cur = conn.cursor()
# read and print data from selection
cur.execute("SELECT * FROM users;")
# Sanity check: dump every row of the users table to stdout.
for i in cur.fetchall():
print(i)
# function to read from a given csv file into postgres
# NOTE(review): pre-refactor version (deleted by this commit); it closes over
# the module-level `conn` and `cur` instead of taking them as parameters.
# Indentation appears stripped by the diff rendering.
def insert_file_contents(file_path, table_name, column_count):
with open(file_path, 'r', encoding='utf-8-sig') as f:
reader = csv.reader(f)
# Header names collected from the first row; starts as "" until then.
header_names = ""
first_row_accessed = False
for row in reader:
# get row values from first row of reader
if not first_row_accessed:
header_names = [item for item in row]
first_row_accessed = True
continue
# execute table insertion for each row of csv based on number of columns
if column_count == 3:
# NOTE(review): str.format() interpolates the table and header names
# unescaped before sql.SQL() -- SQL-injection risk via the CSV header;
# psycopg2's sql.Identifier should be used for identifiers.
cur.execute(sql.SQL("INSERT INTO {table} ({h1}, {h2}, {h3}) VALUES (%s, %s, %s)".format(
table=table_name,
h1=header_names[0],
h2=header_names[1],
h3=header_names[2]
)),
row
)
elif column_count == 1:
cur.execute(sql.SQL("INSERT INTO {table} ({field}) VALUES (%s)".format(table=table_name, field=header_names[0])), row)
else:
# NOTE(review): bare `raise` with no active exception raises
# RuntimeError("No active exception to re-raise") -- should be an
# explicit ValueError with a message.
raise
conn.commit()
# NOTE(review): the next three calls use the pre-refactor signature (no
# conn/cur arguments) and the old file locations; in the diff they appear to
# be the removed lines, superseded by the conn/cur calls below.
insert_file_contents("./categories.csv", 'category', 1)
insert_file_contents("./regions.csv", 'region', 1)
insert_file_contents("./products.csv", 'product', 3)
# read contents of each file into postgres
insert_file_contents(conn, cur, "./data/categories.csv", 'category', 1)
insert_file_contents(conn, cur, "./data/regions.csv", 'region', 1)
insert_file_contents(conn, cur, "./data/products.csv", 'product', 3)
print("Database insertions executed successfully.")

View File

@@ -6,7 +6,8 @@
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "nodemon server.js",
"seed": "cd db && node seed.js"
"seed": "cd db && node seed.js",
"populate": "cd db/util && python3 main.py"
},
"engines": {
"node": "v16.13.1"