From db3e0f958b051caa77948ebedf4eed49ff17a3f7 Mon Sep 17 00:00:00 2001
From: IZZY
Date: Mon, 12 May 2025 14:19:52 +0100
Subject: [PATCH] Add CSV import API routes; move import CSVs to public/

---
 {src/databases => public}/Observatories.csv  |  1 -
 {src/databases => public}/Scientists.csv     |  1 -
 public/earthquakes.csv                       |  2 +-
 src/app/api/import-artifacts/route.ts        | 71 ++++++++++++++++++++
 src/app/api/import-observatories/route.ts    | 75 +++++++++++++++++++++
 src/app/api/import-scientists/route.ts       | 57 ++++++++++++++++
 src/app/api/import-users/route.ts            | 59 ++++++++++++++++
 7 files changed, 263 insertions(+), 3 deletions(-)
 rename {src/databases => public}/Observatories.csv (97%)
 rename {src/databases => public}/Scientists.csv (97%)
 create mode 100644 src/app/api/import-artifacts/route.ts
 create mode 100644 src/app/api/import-observatories/route.ts
 create mode 100644 src/app/api/import-scientists/route.ts
 create mode 100644 src/app/api/import-users/route.ts

diff --git a/src/databases/Observatories.csv b/public/Observatories.csv
similarity index 97%
rename from src/databases/Observatories.csv
rename to public/Observatories.csv
index 3d2c238..5557499 100644
--- a/src/databases/Observatories.csv
+++ b/public/Observatories.csv
@@ -1,4 +1,3 @@
-Name,Location,Latitude,Longitude,Date Established,Functional
 Pacific Apex Seismic Center,"Aleutian Trench, Alaska, USA",53.0000,-168.0000,1973-06-15,Yes
 Cascadia Quake Research Institute,"Oregon Coast, USA",44.5000,-124.0000,1985-03-22,Yes
 Andes Fault Survey Observatory,"Nazca-South American Plate, Santiago, Chile",-33.4500,-70.6667,1992-10-10,Yes
diff --git a/src/databases/Scientists.csv b/public/Scientists.csv
similarity index 97%
rename from src/databases/Scientists.csv
rename to public/Scientists.csv
index 30e9701..aa1bf1b 100644
--- a/src/databases/Scientists.csv
+++ b/public/Scientists.csv
@@ -1,4 +1,3 @@
-Name,Level,Superior
 Dr. Emily Neighbour Carter,Senior,None
 Dr. Rajiv Menon,Senior,None
 Dr. Izzy Patterson,Senior,None
diff --git a/public/earthquakes.csv b/public/earthquakes.csv
index 2574341..5437682 100644
--- a/public/earthquakes.csv
+++ b/public/earthquakes.csv
@@ -1,4 +1,4 @@
-Date,Magnitude,Latitude,Longitude,Location,Depth
+ Date,Magnitude,Latitude,Longitude,Location,Depth
 2024-01-01,4.7,23.031,119.8622,Banda Arc Indonesia,17 km
 2024-01-01,4.8,-84.496,-111.9335,South Sandwich Plate Collision Zone South Sandwich Islands,56 km
 2024-01-01,8.6,38.2523,-167.0921,Cape Fold Belt South Africa,150 km
diff --git a/src/app/api/import-artifacts/route.ts b/src/app/api/import-artifacts/route.ts
new file mode 100644
index 0000000..55ed4ee
--- /dev/null
+++ b/src/app/api/import-artifacts/route.ts
@@ -0,0 +1,71 @@
+import { NextResponse } from "next/server";
+import { PrismaClient } from "@prisma/client";
+import fs from "fs/promises";
+import path from "path";
+import { parse } from "csv-parse/sync";
+
+// CSV location
+const csvFilePath = path.resolve(process.cwd(), "public/artefacts.csv");
+const prisma = new PrismaClient();
+
+type CsvRow = {
+  Type: string;
+  WarehouseArea: string;
+  EarthquakeId: string;
+  Required?: string;
+  ShopPrice?: string;
+  PickedUp?: string;
+};
+
+function stringToBool(val: string | undefined, defaultValue: boolean = false): boolean {
+  if (!val) return defaultValue;
+  return /^true$/i.test(val.trim());
+}
+
+export async function POST() {
+  try {
+    // 1. Read file
+    const fileContent = await fs.readFile(csvFilePath, "utf8");
+
+    // 2. Parse CSV
+    const records: CsvRow[] = parse(fileContent, {
+      columns: true,
+      skip_empty_lines: true,
+    });
+
+    // 3. Map records to artefact input
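+    // Note: assumes each EarthquakeId in the CSV matches an existing
+    // Earthquake id; a non-numeric value parses to NaN and the insert fails.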
+    const artefacts = records.map((row) => ({
+      type: row.Type,
+      warehouseArea: row.WarehouseArea,
+      earthquakeId: parseInt(row.EarthquakeId, 10),
+      required: stringToBool(row.Required, true), // default TRUE
+      shopPrice: row.ShopPrice && row.ShopPrice !== ""
+        ? parseFloat(row.ShopPrice)
+        : null,
+      pickedUp: stringToBool(row.PickedUp, false), // default FALSE
+      creatorId: null,
+      purchasedById: null,
+    }));
+
+    // 4. Bulk insert
+    await prisma.artefact.createMany({
+      data: artefacts,
+      skipDuplicates: true,
+    });
+
+    return NextResponse.json({
+      success: true,
+      count: artefacts.length,
+    });
+  } catch (error: any) {
+    console.error(error);
+    return NextResponse.json(
+      { success: false, error: error.message },
+      { status: 500 }
+    );
+  } finally {
+    await prisma.$disconnect();
+  }
+}
\ No newline at end of file
diff --git a/src/app/api/import-observatories/route.ts b/src/app/api/import-observatories/route.ts
new file mode 100644
index 0000000..dc7f755
--- /dev/null
+++ b/src/app/api/import-observatories/route.ts
@@ -0,0 +1,75 @@
+import { NextResponse } from "next/server";
+import { PrismaClient } from "@prisma/client";
+import fs from "fs/promises";
+import path from "path";
+import { parse } from "csv-parse/sync";
+
+// CSV location (update filename as needed)
+const csvFilePath = path.resolve(process.cwd(), "public/observatories.csv");
+const prisma = new PrismaClient();
+
+type CsvRow = {
+  Name: string;
+  Location: string;
+  Latitude: string;
+  Longitude: string;
+  DateEstablished?: string;
+  Functional: string;
+  SeismicSensorOnline?: string;
+};
+
+function stringToBool(val: string | undefined): boolean {
+  // Accepts "TRUE", "true", "True", etc.
+  if (!val) return false;
+  return /^true$/i.test(val.trim());
+}
+
+export async function POST() {
+  try {
+    // 1. Read file
+    const fileContent = await fs.readFile(csvFilePath, "utf8");
+
+    // 2. Parse CSV
+    const records: CsvRow[] = parse(fileContent, {
+      columns: true,
+      skip_empty_lines: true,
+    });
+
+    // 3. Map records to Prisma inputs
+    const observatories = records.map((row) => ({
+      name: row.Name,
+      location: row.Location,
+      latitude: row.Latitude,
+      longitude: row.Longitude,
+      dateEstablished: row.DateEstablished
+        ? parseInt(row.DateEstablished, 10)
+        : null,
+      functional: stringToBool(row.Functional),
+      seismicSensorOnline: row.SeismicSensorOnline
+        ? stringToBool(row.SeismicSensorOnline)
+        : true, // default true per schema
+      creatorId: null,
+    }));
+
+    // 4. Bulk insert
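+    // Note: skipDuplicates only skips rows that violate a unique constraint
+    // on Observatory; without one, re-running this import duplicates rows.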
+    await prisma.observatory.createMany({
+      data: observatories,
+      skipDuplicates: true,
+    });
+
+    return NextResponse.json({
+      success: true,
+      count: observatories.length,
+    });
+  } catch (error: any) {
+    console.error(error);
+    return NextResponse.json(
+      { success: false, error: error.message },
+      { status: 500 }
+    );
+  } finally {
+    await prisma.$disconnect();
+  }
+}
\ No newline at end of file
diff --git a/src/app/api/import-scientists/route.ts b/src/app/api/import-scientists/route.ts
new file mode 100644
index 0000000..f195bd7
--- /dev/null
+++ b/src/app/api/import-scientists/route.ts
@@ -0,0 +1,57 @@
+import { NextResponse } from "next/server";
+import { PrismaClient } from "@prisma/client";
+import fs from "fs/promises";
+import path from "path";
+import { parse } from "csv-parse/sync";
+
+// Path to CSV file
+const csvFilePath = path.resolve(process.cwd(), "public/scientists.csv");
+const prisma = new PrismaClient();
+
+type CsvRow = {
+  Name: string;
+  Level?: string;
+  UserId: string;
+  SuperiorId?: string;
+};
+
+function normalizeLevel(level: string | undefined): string {
+  // Only allow JUNIOR, SENIOR; default JUNIOR
+  if (!level || !level.trim()) return "JUNIOR";
+  const lv = level.trim().toUpperCase();
+  return ["JUNIOR", "SENIOR"].includes(lv) ? lv : "JUNIOR";
+}
+
+export async function POST() {
+  try {
+    // 1. Read the CSV file
+    const fileContent = await fs.readFile(csvFilePath, "utf8");
+
+    // 2. Parse the CSV
+    const records: CsvRow[] = parse(fileContent, {
+      columns: true,
+      skip_empty_lines: true,
+    });
+
+    // 3. Transform each record for Prisma
+    const scientists = records.map(row => ({
+      name: row.Name,
+      level: normalizeLevel(row.Level),
+      userId: parseInt(row.UserId, 10),
+      superiorId: row.SuperiorId && row.SuperiorId.trim() !== "" ? parseInt(row.SuperiorId, 10) : null,
+    }));
+
+    // 4. Bulk create scientists in database
+    await prisma.scientist.createMany({
+      data: scientists,
+      skipDuplicates: true, // in case the scientist/userid combo already exists
+    });
+
+    return NextResponse.json({ success: true, count: scientists.length });
+  } catch (error: any) {
+    console.error(error);
+    return NextResponse.json({ success: false, error: error.message }, { status: 500 });
+  } finally {
+    await prisma.$disconnect();
+  }
+}
\ No newline at end of file
diff --git a/src/app/api/import-users/route.ts b/src/app/api/import-users/route.ts
new file mode 100644
index 0000000..db350ec
--- /dev/null
+++ b/src/app/api/import-users/route.ts
@@ -0,0 +1,59 @@
+import { NextResponse } from "next/server";
+import { PrismaClient } from "@prisma/client";
+import fs from "fs/promises";
+import path from "path";
+import { parse } from "csv-parse/sync";
+
+// Path to users.csv - adjust as needed
+const csvFilePath = path.resolve(process.cwd(), "public/users.csv");
+const prisma = new PrismaClient();
+
+type CsvRow = {
+  Name: string;
+  Email: string;
+  PasswordHash: string;
+  Role?: string;
+};
+
+function normalizeRole(role: string | undefined): string {
+  // Only allow ADMIN, SCIENTIST, GUEST; default GUEST
+  if (!role || !role.trim()) return "GUEST";
+  const r = role.trim().toUpperCase();
+  return ["ADMIN", "SCIENTIST", "GUEST"].includes(r) ? r : "GUEST";
+}
+
+export async function POST() {
+  try {
+    // 1. Read the CSV file
+    const fileContent = await fs.readFile(csvFilePath, "utf8");
+
+    // 2. Parse the CSV
+    const records: CsvRow[] = parse(fileContent, {
+      columns: true,
+      skip_empty_lines: true,
+    });
+
+    // 3. Transform each CSV row to User model format
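+    // Note: PasswordHash is copied verbatim from the CSV; this assumes the
+    // values are already hashed (the route never hashes plain text itself).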
+    const users = records.map(row => ({
+      name: row.Name,
+      email: row.Email,
+      passwordHash: row.PasswordHash,
+      role: normalizeRole(row.Role),
+    }));
+
+    // 4. Bulk create users in database
+    await prisma.user.createMany({
+      data: users,
+      skipDuplicates: true, // because email is unique
+    });
+
+    return NextResponse.json({ success: true, count: users.length });
+  } catch (error: any) {
+    console.error(error);
+    return NextResponse.json({ success: false, error: error.message }, { status: 500 });
+  } finally {
+    await prisma.$disconnect();
+  }
+}
\ No newline at end of file
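
-- 
Usage sketch (placed after the diff, where format-patch normally puts its
signature, so it is not part of any file): a minimal way to drive the four
endpoints from a seed script, assuming a Next.js dev server on
http://localhost:3000 and Node 18+ for the global fetch. The file name
seed-imports.ts and the route order are illustrative; users run first only
because the scientists import references UserId.

// seed-imports.ts (hypothetical helper)
async function runImports(): Promise<void> {
  const routes = [
    "import-users",
    "import-scientists",
    "import-observatories",
    "import-artifacts",
  ];
  for (const route of routes) {
    // Each route reads its CSV on the server, so no request body is sent.
    const res = await fetch(`http://localhost:3000/api/${route}`, {
      method: "POST",
    });
    console.log(route, res.status, await res.json());
  }
}

runImports().catch(console.error);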