Database seed now integrates with Supabase; Python scripts replaced with Node
This commit is contained in:
20
db/Seed.js
20
db/Seed.js
@@ -1,5 +1,5 @@
|
||||
const { Client } = require('pg');
|
||||
const insertAll = require('./util/insertAll');
|
||||
const readCSV = require('./util/readCSV');
|
||||
require('dotenv').config({ path: "../.env" });
|
||||
|
||||
async function main() {
|
||||
@@ -72,10 +72,6 @@ async function main() {
|
||||
);
|
||||
`;
|
||||
|
||||
// const populateProductTable = `
|
||||
// \copy products(name, regionid, categoryid, price, inventory, unit, description) FROM './data/products.csv' DELIMITER ',' CSV HEADER
|
||||
// `;
|
||||
|
||||
// products_carts
|
||||
const createProductsCarts = `
|
||||
CREATE TABLE IF NOT EXISTS products_carts (
|
||||
@@ -101,6 +97,14 @@ async function main() {
|
||||
createProductsCarts, createProductsOrders
|
||||
];
|
||||
|
||||
const categoryInsert = readCSV('./util/data/categories.csv', 'category');
|
||||
const regionInsert = readCSV('./util/data/regions.csv', 'region');
|
||||
const productInsert = readCSV('./util/data/products.csv', 'product');
|
||||
|
||||
const allInsertions = [
|
||||
categoryInsert, regionInsert, productInsert
|
||||
]
|
||||
|
||||
let status;
|
||||
|
||||
try {
|
||||
@@ -110,6 +114,12 @@ async function main() {
|
||||
await client.query(q);
|
||||
}
|
||||
|
||||
for (let section of allInsertions) {
|
||||
for (let s of section) {
|
||||
await client.query(s);
|
||||
}
|
||||
}
|
||||
|
||||
await client.end();
|
||||
status = "Database initialization successful.";
|
||||
} catch(e) {
|
||||
|
||||
@@ -1,11 +0,0 @@
|
||||
const { readFileSync } = require('fs');
|
||||
|
||||
// insertFromCSV('./data/products.csv');
|
||||
|
||||
module.exports = (path) => {
|
||||
return readFileSync(path)
|
||||
.toString()
|
||||
.split('\n')
|
||||
.map(s => s.trim())
|
||||
.map(s => s.split(',').map(s => s.trim()));
|
||||
}
|
||||
@@ -1,30 +0,0 @@
|
||||
import csv
|
||||
import psycopg2
|
||||
from psycopg2 import sql
|
||||
|
||||
# function to read from a given csv file into postgres
def insert_file_contents(conn, cur, file_path, table_name):
    """Insert every data row of a CSV file into a Postgres table.

    The first row of the file supplies the column names; each subsequent
    row is inserted with a parameterized INSERT built via psycopg2.sql.
    A single commit is issued after all rows are executed.

    conn       -- open psycopg2 connection (committed at the end)
    cur        -- cursor belonging to `conn`
    file_path  -- path to the CSV file ('utf-8-sig' strips a BOM if present)
    table_name -- target table; quoted safely as a SQL identifier
    """
    with open(file_path, 'r', encoding='utf-8-sig') as f:
        rows = csv.reader(f)

        # Header row provides the column list; an empty file inserts nothing.
        columns = next(rows, None)
        if columns is not None:
            # Build the prepared statement once — identifiers are escaped,
            # values go through placeholders, so nothing is string-formatted.
            insert_stmt = sql.SQL("INSERT INTO {TABLE} ({COLS}) VALUES ({VALS})").format(
                TABLE=sql.Identifier(table_name),
                COLS=sql.SQL(', ').join(map(sql.Identifier, columns)),
                VALS=sql.SQL(', ').join(sql.Placeholder() * len(columns))
            )

            for row in rows:
                cur.execute(insert_stmt, list(row))

        conn.commit()
|
||||
@@ -1,38 +0,0 @@
|
||||
from insert_file_contents import insert_file_contents
import psycopg2
import os

# Seed script: reads three CSV files and bulk-inserts their rows into a
# local Postgres database via insert_file_contents.

# read data from environment if present
# NOTE(review): supabase_string is read but never used below — looks like a
# leftover from an earlier Supabase connection path; confirm before removing.
supabase_string = os.getenv('CONNECTION')

# Read up to 300 raw bytes of the .env file two directories up.
env_path = "../../.env"
fd = os.open(env_path, os.O_RDONLY)
n = 300

file_data = os.read(fd, n)
parsed_string = ""

# convert each ASCII value to its corresponding character
for c in file_data:
    parsed_string = parsed_string + chr(c)

# Slice the connection string out of the .env text: it is assumed to start
# at the literal 'postgres' and end where the 'EXPRESS' variable begins.
# NOTE(review): if either marker is missing, find() returns -1 and the slice
# silently yields the wrong substring — TODO confirm the .env layout.
start = parsed_string.find('postgres')
end = parsed_string.find('EXPRESS')

# receive conn string from env
# NOTE(review): constring is computed but never used — the connect() call
# below uses a hard-coded local DSN instead; verify which one is intended.
constring = parsed_string[start:end]
os.close(fd)

# connect to local database instance and open a cursor
conn = psycopg2.connect("dbname=e-commerce-092122 user=mikayladobson")
cur = conn.cursor()

print("Now attempting to populate database...")

# read contents of each file into postgres
insert_file_contents(conn, cur, "./data/categories.csv", 'category')
insert_file_contents(conn, cur, "./data/regions.csv", 'region')
insert_file_contents(conn, cur, "./data/products.csv", 'product')

print("Insertions executed successfully.")
print("Database preparations complete!")
|
||||
@@ -1,26 +0,0 @@
|
||||
const insertAll = require('./insertAll');
const pgp = require('pg-promise')({ capSQL: true });

/**
 * Builds and logs a multi-row INSERT statement for the rows of a parsed CSV.
 *
 * The first row of `arr` is taken as the header (column names); every
 * subsequent row becomes one record in the generated INSERT.
 *
 * @param {string[][]} arr - CSV rows; first row is the header.
 * @param {string} tableName - Target table name for the INSERT.
 */
const parseOutput = (arr, tableName) => {
  let cols = null;
  // Bug fix: `data` was declared but never initialized, so the first data
  // row crashed with a TypeError; it is now a real array.
  const data = [];

  for (const row of arr) {
    if (cols === null) {
      // Header row supplies the column names.
      cols = row;
    } else {
      // pg-promise reads each value by property name, so map every cell to
      // its column. (Bug fix: the old `data.concat(row)` never mutated
      // `data`, and raw arrays have no named properties for pgp to read.)
      const record = {};
      cols.forEach((col, idx) => {
        record[col] = row[idx];
      });
      data.push(record);
    }
  }

  const query = pgp.helpers.insert(data, cols, tableName);
  console.log(query);
};

parseOutput(insertAll('./data/products.csv'), 'products');
|
||||
35
db/util/readCSV.js
Normal file
35
db/util/readCSV.js
Normal file
@@ -0,0 +1,35 @@
|
||||
const { readFileSync } = require('fs');
|
||||
const pgp = require('pg-promise')({ capSQL: true });
|
||||
|
||||
module.exports = (path, tableName) => {
|
||||
const arr = readFileSync(path)
|
||||
.toString()
|
||||
.split('\n')
|
||||
.map(s => s.trim())
|
||||
.map(s => s.split(',').map(s => s.trim()));
|
||||
|
||||
let data = [];
|
||||
let queries = [];
|
||||
let cols;
|
||||
|
||||
for (let row of arr) {
|
||||
if (!cols) {
|
||||
cols = row;
|
||||
} else {
|
||||
let formattedData = {};
|
||||
for (let j = 0; j < row.length; j++) {
|
||||
const key = cols[j];
|
||||
const value = row[j];
|
||||
formattedData[key] = value;
|
||||
}
|
||||
|
||||
data.push(formattedData);
|
||||
}
|
||||
}
|
||||
|
||||
for (let each of data) {
|
||||
queries.push(pgp.helpers.insert(each, cols, tableName));
|
||||
}
|
||||
|
||||
return queries;
|
||||
}
|
||||
Reference in New Issue
Block a user