Cleanup; refactor for separation of concerns
This commit is contained in:
7
.gitignore
vendored
7
.gitignore
vendored
@@ -1,5 +1,4 @@
|
|||||||
node_modules/
|
node_modules/
|
||||||
.DS_Store
|
*.env
|
||||||
|
__pycache__
|
||||||
.env
|
.DS_Store
|
||||||
config.env
|
|
||||||
33
db/util/insert_file_contents.py
Normal file
33
db/util/insert_file_contents.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
import csv

from psycopg2 import sql


def insert_file_contents(conn, cur, file_path, table_name, column_count):
    """Read rows from a CSV file and insert them into a Postgres table.

    The first CSV row is treated as a header whose cells name the target
    columns; every subsequent row becomes one parameterized INSERT.

    Args:
        conn: open psycopg2 connection; committed once after all inserts.
        cur: cursor created from ``conn``.
        file_path: path of the CSV file to load.
        table_name: name of the destination table.
        column_count: expected number of columns; must match the header width.

    Raises:
        ValueError: if the file has no header row, or the header width
            does not equal ``column_count``.
    """
    # utf-8-sig strips the leading BOM that spreadsheet-exported CSVs often carry.
    with open(file_path, 'r', encoding='utf-8-sig', newline='') as f:
        reader = csv.reader(f)

        # First row is the header; next() also detects an empty file,
        # replacing the original first_row_accessed flag dance.
        header_names = next(reader, None)
        if header_names is None:
            raise ValueError(f"{file_path} is empty: no header row")
        if len(header_names) != column_count:
            raise ValueError(
                f"{file_path}: expected {column_count} columns, "
                f"header has {len(header_names)}"
            )

        # Build ONE statement up front (hoisted out of the row loop).
        # Identifiers go through sql.Identifier so table/column names are
        # properly quoted and cannot inject SQL — the original formatted
        # them into the raw string before sql.SQL(), which escapes nothing.
        # Joining one Placeholder per column also generalizes the old
        # hard-coded 1-column / 3-column branches to any width.
        insert_stmt = sql.SQL(
            "INSERT INTO {table} ({fields}) VALUES ({values})"
        ).format(
            table=sql.Identifier(table_name),
            fields=sql.SQL(', ').join(sql.Identifier(h) for h in header_names),
            values=sql.SQL(', ').join(sql.Placeholder() for _ in header_names),
        )

        for row in reader:
            cur.execute(insert_stmt, row)

    # Single commit after the whole file, matching the original behavior.
    conn.commit()
||||||
@@ -1,7 +1,8 @@
|
|||||||
import psycopg2
|
import psycopg2
|
||||||
from psycopg2 import sql
|
|
||||||
import csv
|
import csv
|
||||||
import os
|
import os
|
||||||
|
from insert_file_contents import insert_file_contents
|
||||||
|
from psycopg2 import sql
|
||||||
|
|
||||||
# read data from environment if present
|
# read data from environment if present
|
||||||
env_path = "../../.env"
|
env_path = "../../.env"
|
||||||
@@ -26,45 +27,9 @@ os.close(fd)
|
|||||||
conn = psycopg2.connect("dbname=e-commerce-092122 user=mikayladobson")
|
conn = psycopg2.connect("dbname=e-commerce-092122 user=mikayladobson")
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
|
|
||||||
# read and print data from selection
|
# read contents of each file into postgres
|
||||||
cur.execute("SELECT * FROM users;")
|
insert_file_contents(conn, cur, "./data/categories.csv", 'category', 1)
|
||||||
for i in cur.fetchall():
|
insert_file_contents(conn, cur, "./data/regions.csv", 'region', 1)
|
||||||
print(i)
|
insert_file_contents(conn, cur, "./data/products.csv", 'product', 3)
|
||||||
|
|
||||||
# function to read from a given csv file into postgres
|
|
||||||
def insert_file_contents(file_path, table_name, column_count):
|
|
||||||
with open(file_path, 'r', encoding='utf-8-sig') as f:
|
|
||||||
reader = csv.reader(f)
|
|
||||||
header_names = ""
|
|
||||||
first_row_accessed = False
|
|
||||||
|
|
||||||
for row in reader:
|
|
||||||
# get row values from first row of reader
|
|
||||||
if not first_row_accessed:
|
|
||||||
header_names = [item for item in row]
|
|
||||||
first_row_accessed = True
|
|
||||||
continue
|
|
||||||
|
|
||||||
# execute table insertion for each row of csv based on number of columns
|
|
||||||
if column_count == 3:
|
|
||||||
cur.execute(sql.SQL("INSERT INTO {table} ({h1}, {h2}, {h3}) VALUES (%s, %s, %s)".format(
|
|
||||||
table=table_name,
|
|
||||||
h1=header_names[0],
|
|
||||||
h2=header_names[1],
|
|
||||||
h3=header_names[2]
|
|
||||||
)),
|
|
||||||
row
|
|
||||||
)
|
|
||||||
elif column_count == 1:
|
|
||||||
cur.execute(sql.SQL("INSERT INTO {table} ({field}) VALUES (%s)".format(table=table_name, field=header_names[0])), row)
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
|
|
||||||
conn.commit()
|
|
||||||
|
|
||||||
|
|
||||||
insert_file_contents("./categories.csv", 'category', 1)
|
|
||||||
insert_file_contents("./regions.csv", 'region', 1)
|
|
||||||
insert_file_contents("./products.csv", 'product', 3)
|
|
||||||
|
|
||||||
print("Database insertions executed successfully.")
|
print("Database insertions executed successfully.")
|
||||||
@@ -6,7 +6,8 @@
|
|||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "echo \"Error: no test specified\" && exit 1",
|
"test": "echo \"Error: no test specified\" && exit 1",
|
||||||
"start": "nodemon server.js",
|
"start": "nodemon server.js",
|
||||||
"seed": "cd db && node seed.js"
|
"seed": "cd db && node seed.js",
|
||||||
|
"populate": "cd db/util && python3 main.py"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": "v16.13.1"
|
"node": "v16.13.1"
|
||||||
|
|||||||
Reference in New Issue
Block a user