diff --git a/db/Seed.js b/db/Seed.js
index 88da143..21cdec3 100644
--- a/db/Seed.js
+++ b/db/Seed.js
@@ -1,5 +1,5 @@
 const { Client } = require('pg');
-const insertAll = require('./util/insertAll');
+const readCSV = require('./util/readCSV');
 require('dotenv').config({ path: "../.env" });
 
 async function main() {
@@ -72,10 +72,6 @@ async function main() {
     );
   `;
 
-  // const populateProductTable = `
-  //   \copy products(name, regionid, categoryid, price, inventory, unit, description) FROM './data/products.csv' DELIMITER ',' CSV HEADER
-  `;
-
   // products_carts
   const createProductsCarts = `
     CREATE TABLE IF NOT EXISTS products_carts (
@@ -101,6 +97,14 @@ async function main() {
     createProductsCarts,
     createProductsOrders
   ];
+  const categoryInsert = readCSV('./util/data/categories.csv', 'category');
+  const regionInsert = readCSV('./util/data/regions.csv', 'region');
+  const productInsert = readCSV('./util/data/products.csv', 'product');
+
+  const allInsertions = [
+    categoryInsert, regionInsert, productInsert
+  ];
+
   let status;
 
   try {
@@ -110,6 +114,12 @@ async function main() {
       await client.query(q);
     }
 
+    for (let section of allInsertions) {
+      for (let s of section) {
+        await client.query(s);
+      }
+    }
+
     await client.end();
     status = "Database initialization successful.";
   } catch(e) {
diff --git a/db/util/insertAll.js b/db/util/insertAll.js
deleted file mode 100644
index ba10fd1..0000000
--- a/db/util/insertAll.js
+++ /dev/null
@@ -1,11 +0,0 @@
-const { readFileSync } = require('fs');
-
-// insertFromCSV('./data/products.csv');
-
-module.exports = (path) => {
-  return readFileSync(path)
-    .toString()
-    .split('\n')
-    .map(s => s.trim())
-    .map(s => s.split(',').map(s => s.trim()));
-}
\ No newline at end of file
diff --git a/db/util/insert_file_contents.py b/db/util/insert_file_contents.py
deleted file mode 100644
index 6c15439..0000000
--- a/db/util/insert_file_contents.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import csv
-import psycopg2
-from psycopg2 import sql
-
-# function to read from a given csv file into postgres
-def insert_file_contents(conn, cur, file_path, table_name):
-    with open(file_path, 'r', encoding='utf-8-sig') as f:
-        reader = csv.reader(f)
-        first_row_accessed = False
-        header_names = ""
-        num_columns = 0
-
-        for row in reader:
-            # get row values from first row of reader
-            if not first_row_accessed:
-                header_names = [item for item in row]
-                num_columns = len(header_names)
-                first_row_accessed = True
-                continue
-
-            mapped_columns = [header_names[i] for i in range(num_columns)]
-            prepared_q = sql.SQL("INSERT INTO {TABLE} ({COLS}) VALUES ({VALS})").format(
-                TABLE=sql.Identifier(table_name),
-                COLS=sql.SQL(', ').join(map(sql.Identifier, mapped_columns)),
-                VALS=sql.SQL(', ').join(sql.Placeholder() * len(mapped_columns))
-            )
-
-            cur.execute(prepared_q, [item for item in row])
-
-    conn.commit()
\ No newline at end of file
diff --git a/db/util/main.py b/db/util/main.py
deleted file mode 100644
index a289432..0000000
--- a/db/util/main.py
+++ /dev/null
@@ -1,38 +0,0 @@
-from insert_file_contents import insert_file_contents
-import psycopg2
-import os
-
-# read data from environment if present
-supabase_string = os.getenv('CONNECTION')
-
-env_path = "../../.env"
-fd = os.open(env_path, os.O_RDONLY)
-n = 300
-
-file_data = os.read(fd, n)
-parsed_string = ""
-
-# convert each ASCII value to its corresponding character
-for c in file_data:
-    parsed_string = parsed_string + chr(c)
-
-start = parsed_string.find('postgres')
-end = parsed_string.find('EXPRESS')
-
-# receive conn string from env
-constring = parsed_string[start:end]
-os.close(fd)
-
-# connect to local database instance and open a cursor
-conn = psycopg2.connect("dbname=e-commerce-092122 user=mikayladobson")
-cur = conn.cursor()
-
-print("Now attempting to populate database...")
-
-# read contents of each file into postgres
-insert_file_contents(conn, cur, "./data/categories.csv", 'category')
-insert_file_contents(conn, cur, "./data/regions.csv", 'region')
-insert_file_contents(conn, cur, "./data/products.csv", 'product')
-
-print("Insertions executed successfully.")
-print("Database preparations complete!")
\ No newline at end of file
diff --git a/db/util/parseOutput.js b/db/util/parseOutput.js
deleted file mode 100644
index 1068bfc..0000000
--- a/db/util/parseOutput.js
+++ /dev/null
@@ -1,26 +0,0 @@
-const insertAll = require('./insertAll');
-const pgp = require('pg-promise')({ capSQL: true });
-
-const parseOutput = (arr, tableName) => {
-  let i = 0;
-  let data;
-  let cols = null
-
-  for (let row of arr) {
-    if (i == 0) {
-      cols = row;
-      i++;
-    } else {
-      if (cols == null) {
-        cols = row;
-      } else {
-        data.concat(row);
-      }
-    }
-  }
-
-  let query = pgp.helpers.insert(data, cols, tableName);
-  console.log(query);
-}
-
-parseOutput(insertAll('./data/products.csv'), 'products');
\ No newline at end of file
diff --git a/db/util/readCSV.js b/db/util/readCSV.js
new file mode 100644
index 0000000..767aefc
--- /dev/null
+++ b/db/util/readCSV.js
@@ -0,0 +1,31 @@
+const { readFileSync } = require('fs');
+const pgp = require('pg-promise')({ capSQL: true });
+
+// Read a headered CSV file and build SQL INSERT statement(s) for tableName.
+// Returns an array of SQL strings for the caller to execute in order.
+// NOTE: naive comma split -- quoted fields containing embedded commas are
+// not supported, so keep the seed CSVs free of them.
+module.exports = (path, tableName) => {
+  const rows = readFileSync(path)
+    .toString()
+    .split('\n')
+    .map(line => line.trim())
+    // drop blank lines, e.g. the empty row a trailing newline produces
+    .filter(line => line.length > 0)
+    .map(line => line.split(',').map(cell => cell.trim()));
+
+  // The first row is the header; every following row is a data record.
+  const [cols, ...records] = rows;
+
+  const data = records.map(record => {
+    const formatted = {};
+    for (let j = 0; j < record.length; j++) {
+      formatted[cols[j]] = record[j];
+    }
+    return formatted;
+  });
+
+  // One multi-row INSERT per file (a single round trip) instead of one
+  // statement per record; an empty file yields no statements.
+  return data.length > 0 ? [pgp.helpers.insert(data, cols, tableName)] : [];
+}
diff --git a/package.json b/package.json
index 998a08a..4696030 100644
--- a/package.json
+++ b/package.json
@@ -7,7 +7,7 @@
     "test": "echo \"Error: no test specified\" && exit 1",
     "start": "node server.js",
     "dev": "nodemon server.js",
-    "seed": "cd db && node seed.js && cd util && python3 main.py"
+    "seed": "cd db && node Seed.js"
   },
   "engines": {
     "node": "v16.13.1"