Switched to local prismaclient

Author: Tim Howitz, 2025-05-19 14:36:29 +01:00
parent de02f94f6a
commit 1bd327ea1a
17 changed files with 605 additions and 1141 deletions
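The substance of the commit: every route that previously imported PrismaClient from "@prisma/client" now imports it from "@prismaclient", a path alias added to tsconfig.json that resolves to ./src/generated/prisma/client (last hunk below). A minimal sketch of the resulting pattern in a route handler, assuming the client has already been generated into src/generated/prisma; the schema.prisma generator change that produces that output is not part of this diff, and the GET handler here is purely illustrative:

import { NextResponse } from "next/server";
// Resolved via the "@prismaclient" tsconfig path alias instead of node_modules
import { PrismaClient } from "@prismaclient";

const prisma = new PrismaClient();

export async function GET() {
  // earthquake is one of the models the seeding routes below write to
  const earthquakes = await prisma.earthquake.findMany();
  return NextResponse.json(earthquakes);
}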

View File

@@ -55,6 +55,7 @@
 },
 "importSorter.generalConfiguration.sortOnBeforeSave": true,
 "cSpell.words": [
+"prismaclient",
 "vars"
 ]
 }

package-lock.json (generated): 1140 changed lines
File diff suppressed because it is too large

View File

@@ -42,7 +42,6 @@
 },
 "devDependencies": {
 "@eslint/eslintrc": "^3",
-"@types/bcryptjs": "^5.0.2",
 "@types/express": "^5.0.1",
 "@types/node": "^20",
 "@types/react": "^19",
@@ -53,4 +52,4 @@
 "tailwindcss": "^3.4.1",
 "typescript": "^5"
 }
-}
+}

View File

@@ -1,6 +1,6 @@
 import { NextResponse } from "next/server";
-import { PrismaClient } from "@prisma/client";
+import { PrismaClient } from "@prismaclient";
 const usingPrisma = false;
 let prisma: PrismaClient;

View File

@@ -1,69 +1,65 @@
-import { NextResponse } from "next/server";
-import { PrismaClient } from "@prisma/client";
-import fs from "fs/promises";
-import path from "path";
 import { parse } from "csv-parse/sync";
+import fs from "fs/promises";
+import { NextResponse } from "next/server";
+import path from "path";
+import { PrismaClient } from "@prismaclient";
 // CSV location
 const csvFilePath = path.resolve(process.cwd(), "public/artefacts.csv");
 const prisma = new PrismaClient();
 type CsvRow = {
 Type: string;
 WarehouseArea: string;
 EarthquakeId: string;
 Required?: string;
 ShopPrice?: string;
 PickedUp?: string;
 };
 function stringToBool(val: string | undefined, defaultValue: boolean = false): boolean {
 if (!val) return defaultValue;
 return /^true$/i.test(val.trim());
 }
 export async function POST() {
 try {
 // 1. Read file
 const fileContent = await fs.readFile(csvFilePath, "utf8");
 // 2. Parse CSV
 const records: CsvRow[] = parse(fileContent, {
 columns: true,
 skip_empty_lines: true,
 });
 // 3. Map records to artefact input
 const artefacts = records.map((row) => ({
 type: row.Type,
 warehouseArea: row.WarehouseArea,
 earthquakeId: parseInt(row.EarthquakeId, 10),
 required: stringToBool(row.Required, true), // default TRUE
-shopPrice: row.ShopPrice && row.ShopPrice !== ""
-? parseFloat(row.ShopPrice)
-: null,
+shopPrice: row.ShopPrice && row.ShopPrice !== "" ? parseFloat(row.ShopPrice) : null,
 pickedUp: stringToBool(row.PickedUp, false), // default FALSE
 creatorId: null,
 purchasedById: null,
 }));
 // 4. Bulk insert
 await prisma.artefact.createMany({
 data: artefacts,
 skipDuplicates: true,
 });
 return NextResponse.json({
 success: true,
 count: artefacts.length,
 });
 } catch (error: any) {
 console.error(error);
-return NextResponse.json(
-{ success: false, error: error.message },
-{ status: 500 }
-);
+return NextResponse.json({ success: false, error: error.message }, { status: 500 });
 } finally {
 await prisma.$disconnect();
 }
 }

View File

@@ -1,55 +1,56 @@
-import { NextResponse } from "next/server";
-import { PrismaClient } from "@prisma/client";
-import fs from "fs/promises";
-import path from "path";
 import { parse } from "csv-parse/sync";
+import fs from "fs/promises";
+import { NextResponse } from "next/server";
+import path from "path";
+import { PrismaClient } from "@prismaclient";
 // Path to your earthquakes.csv
 const csvFilePath = path.resolve(process.cwd(), "public/earthquakes.csv");
 const prisma = new PrismaClient();
 type CsvRow = {
 Date: string;
 Code: string;
 Magnitude: string;
 Type: string;
 Latitude: string;
 Longitude: string;
 Location: string;
 Depth: string;
 };
 export async function POST() {
 try {
 // 1. Read the CSV file
 const fileContent = await fs.readFile(csvFilePath, "utf8");
 // 2. Parse the CSV
 const records: CsvRow[] = parse(fileContent, {
 columns: true,
-skip_empty_lines: true
+skip_empty_lines: true,
 });
 // 3. Transform to fit Earthquake model
-const earthquakes = records.map(row => ({
+const earthquakes = records.map((row) => ({
 date: new Date(row.Date),
 code: row.Code,
 magnitude: parseFloat(row.Magnitude),
 type: row.Type,
 latitude: parseFloat(row.Latitude),
 longitude: parseFloat(row.Longitude),
 location: row.Location,
 depth: row.Depth, // store as received
-creatorId: null
+creatorId: null,
 }));
 // 4. Bulk create earthquakes in database:
 await prisma.earthquake.createMany({
 data: earthquakes,
 skipDuplicates: true,
 });
 return NextResponse.json({ success: true, count: earthquakes.length });
 } catch (error: any) {
 console.error(error);
 return NextResponse.json({ success: false, error: error.message }, { status: 500 });
 } finally {
 await prisma.$disconnect();
 }
 }

View File

@@ -1,73 +1,67 @@
-import { NextResponse } from "next/server";
-import { PrismaClient } from "@prisma/client";
-import fs from "fs/promises";
-import path from "path";
 import { parse } from "csv-parse/sync";
+import fs from "fs/promises";
+import { NextResponse } from "next/server";
+import path from "path";
+import { PrismaClient } from "@prismaclient";
 // CSV location (update filename as needed)
 const csvFilePath = path.resolve(process.cwd(), "public/observatories.csv");
 const prisma = new PrismaClient();
 type CsvRow = {
 Name: string;
 Location: string;
 Latitude: string;
 Longitude: string;
 DateEstablished?: string;
 Functional: string;
 SeismicSensorOnline?: string;
 };
 function stringToBool(val: string | undefined): boolean {
 // Accepts "TRUE", "true", "True", etc.
 if (!val) return false;
 return /^true$/i.test(val.trim());
 }
 export async function POST() {
 try {
 // 1. Read file
 const fileContent = await fs.readFile(csvFilePath, "utf8");
 // 2. Parse CSV
 const records: CsvRow[] = parse(fileContent, {
 columns: true,
 skip_empty_lines: true,
 });
 // 3. Map records to Prisma inputs
 const observatories = records.map((row) => ({
 name: row.Name,
 location: row.Location,
 latitude: row.Latitude,
 longitude: row.Longitude,
-dateEstablished: row.DateEstablished
-? parseInt(row.DateEstablished, 10)
-: null,
+dateEstablished: row.DateEstablished ? parseInt(row.DateEstablished, 10) : null,
 functional: stringToBool(row.Functional),
-seismicSensorOnline: row.SeismicSensorOnline
-? stringToBool(row.SeismicSensorOnline)
-: true, // default true per schema
+seismicSensorOnline: row.SeismicSensorOnline ? stringToBool(row.SeismicSensorOnline) : true, // default true per schema
 creatorId: null,
 }));
 // 4. Bulk insert
 await prisma.observatory.createMany({
 data: observatories,
 skipDuplicates: true,
 });
 return NextResponse.json({
 success: true,
 count: observatories.length,
 });
 } catch (error: any) {
 console.error(error);
-return NextResponse.json(
-{ success: false, error: error.message },
-{ status: 500 }
-);
+return NextResponse.json({ success: false, error: error.message }, { status: 500 });
 } finally {
 await prisma.$disconnect();
 }
 }

View File

@@ -1,8 +1,9 @@
-import { NextResponse } from "next/server";
-import { PrismaClient } from "@prisma/client";
-import fs from "fs/promises";
-import path from "path";
 import { parse } from "csv-parse/sync";
+import fs from "fs/promises";
+import { NextResponse } from "next/server";
+import path from "path";
+import { PrismaClient } from "@prismaclient";
 const csvFilePath = path.resolve(process.cwd(), "public/requests.csv");
 const prisma = new PrismaClient();
@@ -11,58 +12,52 @@ type RequestType = "NEW_USER" | "CHANGE_LEVEL" | "DELETE";
 type RequestOutcome = "FULFILLED" | "REJECTED" | "IN_PROGRESS" | "CANCELLED" | "OTHER";
 type CsvRow = {
 RequestType: string;
 RequestingUserId: string;
 Outcome?: string;
 };
 const validRequestTypes: RequestType[] = ["NEW_USER", "CHANGE_LEVEL", "DELETE"];
-const validOutcomes: RequestOutcome[] = [
-"FULFILLED",
-"REJECTED",
-"IN_PROGRESS",
-"CANCELLED",
-"OTHER",
-];
+const validOutcomes: RequestOutcome[] = ["FULFILLED", "REJECTED", "IN_PROGRESS", "CANCELLED", "OTHER"];
 function normalizeRequestType(type: string | undefined): RequestType {
 if (!type) return "NEW_USER";
 const norm = type.trim().toUpperCase().replace(" ", "_");
 return (validRequestTypes.includes(norm as RequestType) ? norm : "NEW_USER") as RequestType;
 }
 function normalizeOutcome(outcome: string | undefined): RequestOutcome {
 if (!outcome) return "IN_PROGRESS";
 const norm = outcome.trim().toUpperCase().replace(" ", "_");
 return (validOutcomes.includes(norm as RequestOutcome) ? norm : "IN_PROGRESS") as RequestOutcome;
 }
 export async function POST() {
 try {
 const fileContent = await fs.readFile(csvFilePath, "utf8");
 const records: CsvRow[] = parse(fileContent, {
 columns: true,
 skip_empty_lines: true,
 });
-const requests = records.map(row => ({
+const requests = records.map((row) => ({
 requestType: normalizeRequestType(row.RequestType),
 requestingUserId: parseInt(row.RequestingUserId, 10),
 outcome: normalizeOutcome(row.Outcome),
 }));
-const filteredRequests = requests.filter(r => !isNaN(r.requestingUserId));
+const filteredRequests = requests.filter((r) => !isNaN(r.requestingUserId));
 await prisma.request.createMany({
 data: filteredRequests,
 skipDuplicates: true,
 });
 return NextResponse.json({ success: true, count: filteredRequests.length });
 } catch (error: any) {
 console.error(error);
 return NextResponse.json({ success: false, error: error.message }, { status: 500 });
 } finally {
 await prisma.$disconnect();
 }
 }

View File

@@ -1,57 +1,58 @@
-import { NextResponse } from "next/server";
-import { PrismaClient } from "@prisma/client";
-import fs from "fs/promises";
-import path from "path";
 import { parse } from "csv-parse/sync";
+import fs from "fs/promises";
+import { NextResponse } from "next/server";
+import path from "path";
+import { PrismaClient } from "@prismaclient";
 // Path to CSV file
 const csvFilePath = path.resolve(process.cwd(), "public/scientists.csv");
 const prisma = new PrismaClient();
 type CsvRow = {
 Name: string;
 Level?: string;
 UserId: string;
 SuperiorId?: string;
 };
 function normalizeLevel(level: string | undefined): string {
 // Only allow JUNIOR, SENIOR; default JUNIOR
 if (!level || !level.trim()) return "JUNIOR";
 const lv = level.trim().toUpperCase();
 return ["JUNIOR", "SENIOR"].includes(lv) ? lv : "JUNIOR";
 }
 export async function POST() {
 try {
 // 1. Read the CSV file
 const fileContent = await fs.readFile(csvFilePath, "utf8");
 // 2. Parse the CSV
 const records: CsvRow[] = parse(fileContent, {
 columns: true,
 skip_empty_lines: true,
 });
 // 3. Transform each record for Prisma
-const scientists = records.map(row => ({
+const scientists = records.map((row) => ({
 name: row.Name,
 level: normalizeLevel(row.Level),
 userId: parseInt(row.UserId, 10),
 superiorId: row.SuperiorId && row.SuperiorId.trim() !== "" ? parseInt(row.SuperiorId, 10) : null,
 }));
 // 4. Bulk create scientists in database
 await prisma.scientist.createMany({
 data: scientists,
 skipDuplicates: true, // in case the scientist/userid combo already exists
 });
 return NextResponse.json({ success: true, count: scientists.length });
 } catch (error: any) {
 console.error(error);
 return NextResponse.json({ success: false, error: error.message }, { status: 500 });
 } finally {
 await prisma.$disconnect();
 }
 }

View File

@@ -1,57 +1,58 @@
-import { NextResponse } from "next/server";
-import { PrismaClient } from "@prisma/client";
-import fs from "fs/promises";
-import path from "path";
 import { parse } from "csv-parse/sync";
+import fs from "fs/promises";
+import { NextResponse } from "next/server";
+import path from "path";
+import { PrismaClient } from "@prismaclient";
 // Path to users.csv - adjust as needed
 const csvFilePath = path.resolve(process.cwd(), "public/users.csv");
 const prisma = new PrismaClient();
 type CsvRow = {
 Name: string;
 Email: string;
 PasswordHash: string;
 Role?: string;
 };
 function normalizeRole(role: string | undefined): string {
 // Only allow ADMIN, SCIENTIST, GUEST; default GUEST
 if (!role || !role.trim()) return "GUEST";
 const r = role.trim().toUpperCase();
 return ["ADMIN", "SCIENTIST", "GUEST"].includes(r) ? r : "GUEST";
 }
 export async function POST() {
 try {
 // 1. Read the CSV file
 const fileContent = await fs.readFile(csvFilePath, "utf8");
 // 2. Parse the CSV
 const records: CsvRow[] = parse(fileContent, {
 columns: true,
 skip_empty_lines: true,
 });
 // 3. Transform each CSV row to User model format
-const users = records.map(row => ({
+const users = records.map((row) => ({
 name: row.Name,
 email: row.Email,
 passwordHash: row.PasswordHash,
 role: normalizeRole(row.Role),
 }));
 // 4. Bulk create users in database
 await prisma.user.createMany({
 data: users,
 skipDuplicates: true, // because email is unique
 });
 return NextResponse.json({ success: true, count: users.length });
 } catch (error: any) {
 console.error(error);
 return NextResponse.json({ success: false, error: error.message }, { status: 500 });
 } finally {
 await prisma.$disconnect();
 }
 }

View File

@@ -1,11 +1,11 @@
-import bcryptjs from 'bcryptjs';
-import { SignJWT } from 'jose';
-import { NextResponse } from 'next/server';
+import bcryptjs from "bcryptjs";
+import { SignJWT } from "jose";
+import { NextResponse } from "next/server";
-import { PrismaClient } from '@prisma/client';
-import { env } from '@utils/env';
+import { PrismaClient } from "@prismaclient";
+import { env } from "@utils/env";
-import { findUserByEmail, readUserCsv, User } from '../functions/csvReadWrite';
+import { findUserByEmail, readUserCsv, User } from "../functions/csvReadWrite";
 const usingPrisma = false;
 let prisma: PrismaClient;

View File

@@ -1,6 +1,6 @@
 import { NextResponse } from "next/server";
-import { PrismaClient } from "@prisma/client";
+import { PrismaClient } from "@prismaclient";
 const usingPrisma = false;
 let prisma: PrismaClient;

View File

@@ -1,13 +1,11 @@
-import bcryptjs from 'bcryptjs';
-import { SignJWT } from 'jose';
-import { NextResponse } from 'next/server';
+import bcryptjs from "bcryptjs";
+import { SignJWT } from "jose";
+import { NextResponse } from "next/server";
-import { PrismaClient } from '@prisma/client';
-import { env } from '@utils/env';
+import { PrismaClient } from "@prismaclient";
+import { env } from "@utils/env";
-import {
-findUserByEmail, passwordStrengthCheck, readUserCsv, User, writeUserCsv
-} from '../functions/csvReadWrite';
+import { findUserByEmail, passwordStrengthCheck, readUserCsv, User, writeUserCsv } from "../functions/csvReadWrite";
 const usingPrisma = false;
 let prisma: PrismaClient;

View File

@@ -1,7 +1,7 @@
 import { NextResponse } from "next/server";
-import { env } from "@utils/env";
-import { PrismaClient } from "@prisma/client";
+import { PrismaClient } from "@prismaclient";
+import { env } from "@utils/env";
 import { verifyJwt } from "@utils/verifyJwt";
 const usingPrisma = false;

View File

@@ -1,11 +1,12 @@
 "use client";
-import { useState, useMemo } from "react";
-import { FaCalendarPlus, FaWarehouse, FaCartShopping } from "react-icons/fa6";
-import { IoFilter, IoFilterCircleOutline, IoFilterOutline, IoToday } from "react-icons/io5";
+import { Dispatch, SetStateAction, useMemo, useState } from "react";
 import { FaTimes } from "react-icons/fa";
-import { SetStateAction, Dispatch } from "react";
-// import type { Artefact } from "@prisma/client";
-import { Artefact } from "@appTypes/Prisma";
+import { FaCalendarPlus, FaCartShopping, FaWarehouse } from "react-icons/fa6";
+import { IoFilter, IoFilterCircleOutline, IoFilterOutline, IoToday } from "react-icons/io5";
+// import { Artefact } from "@appTypes/Prisma";
+import type { Artefact } from "@prismaclient";
 // Warehouse Artefacts Data
 const warehouseArtefacts: Artefact[] = [
@@ -14,7 +15,7 @@ const warehouseArtefacts: Artefact[] = [
 name: "Solidified Lava Chunk",
 description: "A chunk of solidified lava from the 2023 Iceland eruption.",
 location: "Reykjanes, Iceland",
-earthquakeId: "EQ2023ICL",
+code: "EQ2023ICL",
 isRequired: true,
 isSold: false,
 isCollected: false,
@@ -25,7 +26,7 @@ const warehouseArtefacts: Artefact[] = [
 name: "Tephra Sample",
 description: "Foreign debris from the 2022 Tonga volcanic eruption.",
 location: "Tonga",
-earthquakeId: "EQ2022TGA",
+code: "EQ2022TGA",
 isRequired: false,
 isSold: true,
 isCollected: true,
@@ -36,7 +37,7 @@ const warehouseArtefacts: Artefact[] = [
 name: "Ash Sample",
 description: "Volcanic ash from the 2021 La Palma eruption.",
 location: "La Palma, Spain",
-earthquakeId: "EQ2021LPA",
+code: "EQ2021LPA",
 isRequired: false,
 isSold: false,
 isCollected: false,
@@ -47,7 +48,7 @@ const warehouseArtefacts: Artefact[] = [
 name: "Ground Soil",
 description: "Soil sample from the 2020 Croatia earthquake site.",
 location: "Zagreb, Croatia",
-earthquakeId: "EQ2020CRO",
+code: "EQ2020CRO",
 isRequired: true,
 isSold: false,
 isCollected: false,
@@ -58,7 +59,7 @@ const warehouseArtefacts: Artefact[] = [
 name: "Basalt Fragment",
 description: "Basalt rock from the 2019 New Zealand eruption.",
 location: "White Island, New Zealand",
-earthquakeId: "EQ2019NZL",
+code: "EQ2019NZL",
 isRequired: false,
 isSold: true,
 isCollected: false,

View File

@@ -1,4 +1,4 @@
-import { PrismaClient } from "@prisma/client";
+import { PrismaClient } from "@prismaclient";
 const prisma = new PrismaClient();

View File

@@ -25,6 +25,7 @@
 "@utils/*": ["./src/utils/*"],
 "@appTypes/*": ["./src/types/*"],
 "@zod/*": ["./src/zod/*"],
+"@prismaclient": ["./src/generated/prisma/client"],
 "@/*": ["./src/*"]
 }
 },
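With this mapping in place, the locally generated client supplies both the runtime PrismaClient and the generated model types, which is what the warehouse component above relies on when it imports the Artefact type from "@prismaclient". A small usage sketch; the listArtefacts helper is hypothetical and only illustrates the typed import:

import { PrismaClient } from "@prismaclient";
import type { Artefact } from "@prismaclient";

const prisma = new PrismaClient();

// artefact is the model seeded from public/artefacts.csv in the route above
async function listArtefacts(): Promise<Artefact[]> {
  return prisma.artefact.findMany();
}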