Initial commit: add .gitignore and README
Some checks failed
CI / lint-and-test (push) Has been cancelled

This commit is contained in:
defiQUG
2026-02-09 21:51:50 -08:00
commit 93df3c8c20
116 changed files with 10080 additions and 0 deletions

38
.github/workflows/ci.yml vendored Normal file
View File

@@ -0,0 +1,38 @@
# CI: lint, test, and build every push / pull request targeting main.
name: CI

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

# Cancel superseded in-flight runs for the same ref to save CI minutes.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  lint-and-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # pnpm must be installed before setup-node so the "cache: pnpm" option works.
      - uses: pnpm/action-setup@v4
        with:
          version: 9
      - uses: actions/setup-node@v4
        with:
          node-version: "20"
          cache: "pnpm"
      - name: Install dependencies
        run: pnpm install --frozen-lockfile
      - name: Lint
        run: pnpm run lint
      - name: Test
        run: pnpm run test
      - name: Build
        run: pnpm run build

16
.gitignore vendored Normal file
View File

@@ -0,0 +1,16 @@
# Dependencies and build output
node_modules
dist
build
# Local environment files (keep the committed example template)
.env
.env.*
!.env.example
# Logs and OS cruft
*.log
.DS_Store
# Test coverage output
coverage
.nyc_output
# Tooling caches
.turbo
*.tsbuildinfo
# Editor settings (shared VS Code recommendations stay tracked)
.idea
.vscode/*
!.vscode/extensions.json
!.vscode/settings.json

1
.nvmrc Normal file
View File

@@ -0,0 +1 @@
20

42
README.md Normal file
View File

@@ -0,0 +1,42 @@
# Sankofa HW Infra
Hardware procurement, inventory, and operations platform for **sovereign cloud operations**: offer intake, inspection workflows, purchasing controls, asset lifecycle, multi-site management, and integrations. UniFi is used as a **hardware-aware, compliance-relevant infrastructure layer** (product intelligence, support horizon, per-sovereign controller topology). See [docs/architecture.md](docs/architecture.md), [docs/integration-spec-unifi.md](docs/integration-spec-unifi.md), and [docs/sovereign-controller-topology.md](docs/sovereign-controller-topology.md).
## Stack
- **Monorepo**: pnpm workspaces
- **API**: Fastify (Node), REST `/api/v1`, JWT + RBAC/ABAC
- **Web**: React + Vite
- **DB**: PostgreSQL (Drizzle), S3-compatible object storage
- **Workflow**: Embedded state machines (PO approval, inspection)
## Quick start
1. Copy `env.example` to `.env` and set `DATABASE_URL`, optional `S3_*`, `JWT_SECRET`.
2. Install dependencies: `pnpm install`
3. Start Postgres: `cd infra && docker compose up -d`
4. Run migrations: `pnpm db:migrate`
5. API: `pnpm --filter @sankofa/api run dev` (port 4000)
6. Web: `pnpm --filter @sankofa/web run dev` (port 3000)
## Scripts
- `pnpm run build` — build all packages
- `pnpm run test` — run tests
- `pnpm run lint` — lint
- `pnpm db:migrate` — run DB migrations
## Docs
- [Architecture](docs/architecture.md)
- [ERD](docs/erd.md)
- [OpenAPI](docs/openapi.yaml)
- [RBAC sovereign operations](docs/rbac-sovereign-operations.md), [Compliance profiles](docs/compliance-profiles.md)
- [CI/CD](docs/cicd.md)
- [Integration specs](docs/integration-spec-unifi.md), [Proxmox](docs/integration-spec-proxmox.md), [Redfish](docs/integration-spec-redfish.md)
- [Purchasing feedback loop](docs/purchasing-feedback-loop.md), [Sovereign controller topology](docs/sovereign-controller-topology.md)
- [Capacity dashboard spec](docs/capacity-dashboard-spec.md) (RU utilization, power headroom, GPU inventory; UI at `/capacity`)
- [Operational baseline](docs/operational-baseline.md) (current hardware in-hand; see [data/operational-baseline-hardware.json](data/operational-baseline-hardware.json) for structured import)
- [Vendor portal](docs/vendor-portal.md) (vendor user login, scoped offers/POs), [Offer ingestion](docs/offer-ingestion.md) (scrape + email intake)
- [Next steps before Swagger and UI](docs/next-steps-before-swagger-and-ui.md)
- [Runbooks](docs/runbooks/)

36
apps/api/package.json Normal file
View File

@@ -0,0 +1,36 @@
{
"name": "@sankofa/api",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
"build": "tsc",
"dev": "tsx watch src/index.ts",
"start": "node dist/index.js",
"test": "vitest run",
"lint": "eslint src --ext .ts"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.700.0",
"@aws-sdk/s3-request-presigner": "^3.700.0",
"@fastify/cors": "^10.0.0",
"@fastify/jwt": "^9.0.0",
"@fastify/multipart": "^9.0.0",
"@fastify/sensible": "^6.0.0",
"@fastify/swagger": "^9.7.0",
"@fastify/swagger-ui": "^5.2.5",
"@sankofa/auth": "workspace:*",
"@sankofa/schema": "workspace:*",
"@sankofa/workflow": "workspace:*",
"drizzle-orm": "^0.36.0",
"fastify": "^5.1.0",
"yaml": "^2.8.2"
},
"devDependencies": {
"@types/node": "^22.10.0",
"eslint": "^9.15.0",
"tsx": "^4.19.0",
"typescript": "^5.7.0",
"vitest": "^2.1.0"
}
}

31
apps/api/src/audit.ts Normal file
View File

@@ -0,0 +1,31 @@
import type { FastifyRequest } from "fastify";
import { auditEvents } from "@sankofa/schema";
/** One audit-trail entry as written by writeAudit. */
export interface AuditPayload {
  orgId: string;
  actorId?: string;
  actorEmail?: string;
  action: string;
  resourceType: string;
  resourceId: string;
  beforeState?: Record<string, unknown>;
  afterState?: Record<string, unknown>;
}

/**
 * Resolve the acting user for audit purposes.
 *
 * Prefers the verified JWT payload attached as `req.user`; falls back to the
 * `x-user-email` request header when the token carries no email.
 *
 * Fix: HTTP headers may legally arrive as `string[]` when repeated; the
 * previous `as string` cast could place an array into `actorEmail`. We now
 * normalize to the first value.
 */
export function getActorFromRequest(req: FastifyRequest): { actorId?: string; actorEmail?: string } {
  const user = (req as unknown as { user?: { sub?: string; email?: string } }).user;
  const rawHeader = req.headers["x-user-email"];
  const headerEmail = Array.isArray(rawHeader) ? rawHeader[0] : rawHeader;
  return { actorId: user?.sub, actorEmail: user?.email ?? headerEmail };
}
/**
 * Persist one audit event row.
 *
 * Optional payload fields are normalized to explicit nulls so the inserted
 * shape is stable regardless of which fields the caller supplied.
 */
export async function writeAudit(db: ReturnType<typeof import("@sankofa/schema").getDb>, payload: AuditPayload) {
  const row = {
    orgId: payload.orgId,
    actorId: payload.actorId ?? null,
    actorEmail: payload.actorEmail ?? null,
    action: payload.action,
    resourceType: payload.resourceType,
    resourceId: payload.resourceId,
    beforeState: payload.beforeState ?? null,
    afterState: payload.afterState ?? null,
  };
  await db.insert(auditEvents).values(row);
}

70
apps/api/src/auth.ts Normal file
View File

@@ -0,0 +1,70 @@
import type { FastifyInstance, FastifyRequest } from "fastify";
import { hasPermission, hasAnyPermission, type RoleName, type Permission } from "@sankofa/auth";
// Header names used for org scoping and (fallback) role resolution.
const ORG_HEADER = "x-org-id";
const ROLES_HEADER = "x-roles";

/**
 * Auth plugin.
 *
 * Decorates the app with org/role/vendor accessors and permission-check
 * factories, then registers two preHandler hooks:
 *  1. best-effort JWT verification — a missing/invalid token is tolerated
 *     here, individual routes enforce authentication themselves;
 *  2. enforcement of a declarative per-route `config.permission`.
 */
export async function authPlugin(app: FastifyInstance) {
  // Caller's org from the x-org-id header; "default" when absent.
  app.decorate("orgId", (req: FastifyRequest): string => {
    const h = (req.headers[ORG_HEADER] as string) || "";
    return h || "default";
  });
  // Role list: a comma-separated x-roles header wins over JWT roles.
  // NOTE(review): if x-roles is client-controllable (not stripped at the
  // edge), this precedence allows privilege escalation — confirm deployment.
  app.decorate("getRoles", (req: FastifyRequest): RoleName[] => {
    const header = (req.headers[ROLES_HEADER] as string) || "";
    if (header) return header.split(",").map((r) => r.trim() as RoleName).filter(Boolean);
    const payload = (req as unknown as { user?: { roles?: string[] } }).user;
    return (payload?.roles as RoleName[]) ?? [];
  });
  // Vendor scoping for vendor-portal users; null for internal users.
  app.decorate("vendorId", (req: FastifyRequest): string | null => {
    const payload = (req as unknown as { user?: { vendorId?: string } }).user;
    return payload?.vendorId ?? null;
  });
  // Factory: returns a preHandler requiring one specific permission.
  app.decorate("requirePermission", (permission: Permission) => async (req: FastifyRequest) => {
    const payload = (req as unknown as { user?: { sub?: string } }).user;
    if (!payload?.sub) throw app.httpErrors.unauthorized("Authentication required");
    const roles = app.getRoles(req);
    if (!hasPermission(roles, permission)) throw app.httpErrors.forbidden("Insufficient permission");
  });
  // Factory: returns a preHandler requiring any one of several permissions.
  app.decorate("requireAnyPermission", (permissions: Permission[]) => async (req: FastifyRequest) => {
    const payload = (req as unknown as { user?: { sub?: string } }).user;
    if (!payload?.sub) throw app.httpErrors.unauthorized("Authentication required");
    const roles = app.getRoles(req);
    if (!hasAnyPermission(roles, permissions)) throw app.httpErrors.forbidden("Insufficient permission");
  });
  // Hook 1: optional JWT verification — failures are swallowed on purpose so
  // public routes still work; authenticated routes check req.user themselves.
  app.addHook("preHandler", async (req) => {
    try {
      await req.jwtVerify();
      (req as unknown as { user?: unknown }).user = (req as unknown as { user: unknown }).user ?? {};
    } catch {
      // Optional JWT
    }
  });
  // Hook 2: enforce a route's declared `config.permission`, when present.
  app.addHook("preHandler", async (req) => {
    const permission = (req.routeOptions.config as { permission?: Permission } | undefined)?.permission;
    if (!permission) return;
    const payload = (req as unknown as { user?: { sub?: string } }).user;
    if (!payload?.sub) throw app.httpErrors.unauthorized("Authentication required");
    const roles = app.getRoles(req);
    if (!hasPermission(roles, permission)) throw app.httpErrors.forbidden("Insufficient permission");
  });
}

// Type augmentation so the decorators above are visible on FastifyInstance
// and `config.permission` is typed on route options.
declare module "fastify" {
  interface FastifyInstance {
    orgId: (req: FastifyRequest) => string;
    getRoles: (req: FastifyRequest) => RoleName[];
    vendorId: (req: FastifyRequest) => string | null;
    requirePermission: (permission: Permission) => (req: FastifyRequest) => Promise<void>;
    requireAnyPermission: (permissions: Permission[]) => (req: FastifyRequest) => Promise<void>;
    db: ReturnType<typeof import("@sankofa/schema").getDb>;
  }
  interface FastifyContextConfig {
    permission?: Permission;
  }
}

View File

@@ -0,0 +1,7 @@
import { describe, it, expect } from "vitest";
// Placeholder spec: keeps the vitest suite non-empty (and CI green) until
// real API unit tests exist.
describe("api", () => {
  it("placeholder", () => {
    expect(1).toBe(1);
  });
});

69
apps/api/src/index.ts Normal file
View File

@@ -0,0 +1,69 @@
import { fileURLToPath } from "node:url";
import Fastify from "fastify";
import cors from "@fastify/cors";
import jwt from "@fastify/jwt";
import multipart from "@fastify/multipart";
import sensible from "@fastify/sensible";
import { getDb } from "@sankofa/schema";
import { authPlugin } from "./auth.js";
import { registerV1Routes } from "./routes/v1/index.js";
import { errorCodes, type ApiErrorPayload } from "./schemas/errors.js";
import { openApiSpec } from "./openapi-spec.js";
// Server bind configuration, overridable via environment variables.
const PORT = Number(process.env.API_PORT) || 4000;
// Fix: default was "0.0.0", which is not a valid bind address; listen on all
// interfaces ("0.0.0.0") by default.
const HOST = process.env.API_HOST || "0.0.0.0";

// Map HTTP status codes to stable machine-readable error codes for the global
// error handler; unmapped statuses fall back to INTERNAL_ERROR.
const statusToCode: Record<number, string> = {
  400: errorCodes.BAD_REQUEST,
  401: errorCodes.UNAUTHORIZED,
  403: errorCodes.FORBIDDEN,
  404: errorCodes.NOT_FOUND,
  409: errorCodes.CONFLICT,
};
/**
 * Construct the Fastify app with all plugins and routes registered.
 *
 * Exported separately from main() so tests can build the app and use
 * app.inject() without binding a network port.
 */
export async function buildApp() {
  const app = Fastify({ logger: false });
  await app.register(cors, { origin: true });
  await app.register(sensible);
  // NOTE(review): falls back to a hard-coded dev secret when JWT_SECRET is
  // unset — acceptable for local dev only; must be set in production.
  await app.register(jwt, {
    secret: process.env.JWT_SECRET || "dev-secret-change-in-production",
  });
  // Multipart uploads capped at 50 MiB.
  await app.register(multipart, { limits: { fileSize: 50 * 1024 * 1024 } });
  app.decorate("db", getDb());
  await app.register(authPlugin);
  await app.register(registerV1Routes, { prefix: "/api/v1" });
  app.get("/health", async () => ({ status: "ok" }));
  app.get("/api/openapi.json", async (_req, reply) => reply.type("application/json").send(openApiSpec));
  // Minimal Swagger UI shell pointing at /api/openapi.json.
  // NOTE(review): assets load from the unpkg CDN without SRI hashes —
  // consider pinning exact versions or serving them locally.
  app.get("/api/docs", async (_req, reply) => {
    reply.type("text/html").send(`
<!DOCTYPE html>
<html>
<head><meta charset="utf-8"/><title>Sankofa API</title><link rel="stylesheet" href="https://unpkg.com/swagger-ui-dist@5/swagger-ui.css"/></head>
<body><div id="swagger-ui"></div>
<script src="https://unpkg.com/swagger-ui-dist@5/swagger-ui-bundle.js"></script>
<script>SwaggerUIBundle({ url: '/api/openapi.json', dom_id: '#swagger-ui' });</script>
</body>
</html>`);
  });
  // Normalize every thrown error into { error, code, details? }.
  app.setErrorHandler((err: { statusCode?: number; message?: string; validation?: unknown }, _req, reply) => {
    const status = err.statusCode ?? 500;
    const payload: ApiErrorPayload = {
      error: err.message ?? "Internal Server Error",
      code: statusToCode[status] ?? "INTERNAL_ERROR",
    };
    if (err.validation) payload.details = err.validation;
    return reply.status(status).send(payload);
  });
  return app;
}
/** Build the app and start listening on the configured host/port. */
async function main() {
  const app = await buildApp();
  await app.listen({ port: PORT, host: HOST });
}
// Only start the server when this file is the entry script, so tests can
// import buildApp without opening a port.
const isMain = process.argv[1] === fileURLToPath(import.meta.url);
if (isMain) main().catch((err) => { console.error(err); process.exit(1); });

View File

@@ -0,0 +1,2 @@
/** Minimal shape of a Proxmox cluster node. */
export interface ProxmoxNode {
  node: string;
  status?: string;
}

/**
 * Proxmox integration stub: always resolves to an empty node list.
 * Parameters are accepted now so the call signature is stable once the real
 * client is implemented.
 */
export async function listProxmoxNodes(_baseUrl: string, _token: string): Promise<ProxmoxNode[]> {
  const nodes: ProxmoxNode[] = [];
  return nodes;
}

View File

@@ -0,0 +1,2 @@
/** Minimal shape of a Redfish computer system resource. */
export interface RedfishSystem {
  id: string;
  serialNumber?: string;
}

/**
 * Redfish integration stub: always resolves to null (system not found).
 * Signature is fixed now so callers compile against the eventual client.
 */
export async function getRedfishSystem(_baseUrl: string, _token: string, _systemId: string): Promise<RedfishSystem | null> {
  const system: RedfishSystem | null = null;
  return system;
}

View File

@@ -0,0 +1,26 @@
/**
 * Device record surfaced by the UniFi integration; generation and
 * supportHorizon are optionally filled in from the product catalog.
 */
export interface UnifiDevice {
  id: string;
  name: string;
  model?: string;
  generation?: string;
  supportHorizon?: string;
}

/**
 * UniFi controller client stub: always resolves to an empty device list.
 * Parameters are accepted for signature stability until the real client lands.
 */
export async function listUnifiDevices(_baseUrl: string, _token: string): Promise<UnifiDevice[]> {
  const devices: UnifiDevice[] = [];
  return devices;
}
/** Row from the UniFi product catalog table used for enrichment lookups. */
export type CatalogRow = { sku: string; modelName: string; generation: string; supportHorizon: string | null };

/**
 * Annotate controller devices with catalog metadata (generation, support
 * horizon). A device's `model` field is matched first as a SKU, then as a
 * model name; devices with no catalog match pass through unchanged.
 */
export function enrichDevicesWithCatalog(
  devices: UnifiDevice[],
  catalog: CatalogRow[]
): UnifiDevice[] {
  const skuIndex = new Map(catalog.map((row) => [row.sku, row]));
  const modelIndex = new Map(catalog.map((row) => [row.modelName, row]));
  return devices.map((device) => {
    let hit: CatalogRow | undefined;
    if (device.model) {
      hit = skuIndex.get(device.model) ?? modelIndex.get(device.model);
    }
    if (!hit) return device;
    return {
      ...device,
      generation: hit.generation,
      supportHorizon: hit.supportHorizon ?? undefined,
    };
  });
}

View File

@@ -0,0 +1,20 @@
import { readFileSync } from "fs";
import { join } from "path";
import { parse } from "yaml";
/**
 * Load the OpenAPI document from docs/openapi.yaml (relative to the process
 * working directory). Any failure — missing file, unreadable content — yields
 * a minimal but valid fallback spec so /api/docs still renders.
 */
function loadSpec(): Record<string, unknown> {
  const fallback: Record<string, unknown> = {
    openapi: "3.0.3",
    info: { title: "Sankofa HW Infra API", version: "0.1.0" },
    servers: [{ url: "/api/v1" }],
    paths: {},
  };
  try {
    const specPath = join(process.cwd(), "docs", "openapi.yaml");
    return parse(readFileSync(specPath, "utf8")) as Record<string, unknown>;
  } catch {
    return fallback;
  }
}

// Loaded once at module import; served by GET /api/openapi.json.
export const openApiSpec = loadSpec();

View File

@@ -0,0 +1,30 @@
import type { FastifyInstance } from "fastify";
import { eq, and } from "drizzle-orm";
import { assetComponents as acTable, assets as assetsTable } from "@sankofa/schema";
/**
 * Asset component (parent/child) routes.
 *
 * A component row links a child asset (e.g. a GPU) into a slot of a parent
 * asset (e.g. a server chassis). Mutations are scoped to the caller's org via
 * a lookup of the parent asset.
 */
export async function assetComponentsRoutes(app: FastifyInstance) {
  const db = app.db;

  // NOTE(review): this listing is NOT org-scoped — it returns component links
  // across all orgs. Scoping requires a join through the parent asset (which
  // would change the response shape); confirm whether this endpoint is
  // admin-only before exposing it broadly.
  app.get("/", async (req, reply) => {
    const list = await db.select().from(acTable);
    return reply.send(list);
  });

  // List the components of one parent asset; 404 unless the parent exists in
  // the caller's org.
  app.get<{ Params: { assetId: string } }>("/by-parent/:assetId", async (req, reply) => {
    const orgId = app.orgId(req);
    const [parent] = await db
      .select()
      .from(assetsTable)
      .where(and(eq(assetsTable.id, req.params.assetId), eq(assetsTable.orgId, orgId)));
    if (!parent) return reply.notFound();
    const list = await db.select().from(acTable).where(eq(acTable.parentAssetId, req.params.assetId));
    return reply.send(list);
  });

  // Attach a child asset to a parent (parent must belong to the caller's org).
  app.post<{ Body: { parentAssetId: string; childAssetId: string; role: string; slotIndex?: number } }>("/", async (req, reply) => {
    const orgId = app.orgId(req);
    const [parent] = await db
      .select()
      .from(assetsTable)
      .where(and(eq(assetsTable.id, req.body.parentAssetId), eq(assetsTable.orgId, orgId)));
    if (!parent) return reply.notFound();
    const [inserted] = await db
      .insert(acTable)
      .values({
        parentAssetId: req.body.parentAssetId,
        childAssetId: req.body.childAssetId,
        role: req.body.role,
        slotIndex: req.body.slotIndex ?? null,
      })
      .returning();
    return reply.code(201).send(inserted);
  });

  // Detach a component link.
  // Fix: the delete is now org-scoped — previously any authenticated caller
  // could delete a component row by id even when its parent asset belonged to
  // a different org.
  app.delete<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const [link] = await db.select().from(acTable).where(eq(acTable.id, req.params.id));
    if (!link) return reply.notFound();
    const [parent] = await db
      .select()
      .from(assetsTable)
      .where(and(eq(assetsTable.id, link.parentAssetId), eq(assetsTable.orgId, orgId)));
    if (!parent) return reply.notFound();
    const [deleted] = await db.delete(acTable).where(eq(acTable.id, req.params.id)).returning({ id: acTable.id });
    if (!deleted) return reply.notFound();
    return reply.code(204).send();
  });
}

View File

@@ -0,0 +1,90 @@
import type { FastifyInstance } from "fastify";
import { eq, and } from "drizzle-orm";
import { assets as assetsTable } from "@sankofa/schema";
/**
 * Asset CRUD routes. Every operation is scoped to the caller's org
 * (app.orgId), so ids from other orgs behave as 404.
 */
export async function assetsRoutes(app: FastifyInstance) {
  const db = app.db;

  // List every asset in the caller's org.
  app.get("/", async (req, reply) => {
    const org = app.orgId(req);
    const rows = await db.select().from(assetsTable).where(eq(assetsTable.orgId, org));
    return reply.send(rows);
  });

  // Fetch a single asset by id.
  app.get<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const org = app.orgId(req);
    const found = await db
      .select()
      .from(assetsTable)
      .where(and(eq(assetsTable.id, req.params.id), eq(assetsTable.orgId, org)));
    if (found.length === 0) return reply.notFound();
    return reply.send(found[0]);
  });

  // Create an asset; optional fields are stored as explicit nulls.
  app.post<{
    Body: {
      assetId: string;
      category: string;
      manufacturerSerial?: string;
      serviceTag?: string;
      partNumber?: string;
      condition?: string;
      warranty?: string;
      siteId?: string;
      projectId?: string;
      sensitivityTier?: string;
    };
  }>("/", async (req, reply) => {
    const org = app.orgId(req);
    const b = req.body;
    const created = await db
      .insert(assetsTable)
      .values({
        orgId: org,
        assetId: b.assetId,
        category: b.category,
        manufacturerSerial: b.manufacturerSerial ?? null,
        serviceTag: b.serviceTag ?? null,
        partNumber: b.partNumber ?? null,
        condition: b.condition ?? null,
        warranty: b.warranty ?? null,
        siteId: b.siteId ?? null,
        projectId: b.projectId ?? null,
        sensitivityTier: b.sensitivityTier ?? null,
      })
      .returning();
    return reply.code(201).send(created[0]);
  });

  // Partial update; bumps updatedAt on every successful patch.
  app.patch<{
    Params: { id: string };
    Body: Partial<{
      assetId: string;
      category: string;
      status: string;
      siteId: string;
      positionId: string;
      ownerId: string;
      projectId: string;
      sensitivityTier: string;
    }>;
  }>("/:id", async (req, reply) => {
    const org = app.orgId(req);
    const changed = await db
      .update(assetsTable)
      .set({ ...req.body, updatedAt: new Date() })
      .where(and(eq(assetsTable.id, req.params.id), eq(assetsTable.orgId, org)))
      .returning();
    if (changed.length === 0) return reply.notFound();
    return reply.send(changed[0]);
  });

  // Delete; 204 on success, 404 when the id is absent or in another org.
  app.delete<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const org = app.orgId(req);
    const removed = await db
      .delete(assetsTable)
      .where(and(eq(assetsTable.id, req.params.id), eq(assetsTable.orgId, org)))
      .returning({ id: assetsTable.id });
    if (removed.length === 0) return reply.notFound();
    return reply.code(204).send();
  });
}

View File

@@ -0,0 +1,25 @@
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import { buildApp } from "../../index.js";
// Integration-style smoke test: builds the real app and injects a request
// (no network). The assertion tolerates 500 because CI may run without a
// reachable database.
describe("auth", () => {
  let app: Awaited<ReturnType<typeof buildApp>>;
  beforeAll(async () => {
    app = await buildApp();
  });
  afterAll(async () => {
    await app.close();
  });
  it("POST /api/v1/auth/token with unknown email returns 401 or 500 when DB unavailable", async () => {
    const res = await app.inject({
      method: "POST",
      url: "/api/v1/auth/token",
      headers: { "content-type": "application/json" },
      payload: { email: "nobody@example.com" },
    });
    // 401 when the DB answers (user not found); 500 when the DB is down.
    expect([401, 500]).toContain(res.statusCode);
    expect(JSON.parse(res.payload).error).toBeDefined();
  });
});

View File

@@ -0,0 +1,42 @@
import type { FastifyInstance } from "fastify";
import { eq, and } from "drizzle-orm";
import { users as usersTable, userRoles, roles as rolesTable } from "@sankofa/schema";
import type { RoleName } from "@sankofa/auth";
/**
 * Auth token endpoint.
 *
 * POST /token exchanges an email (org-scoped via the x-org-id header) for a
 * signed JWT carrying the user's id, email, role names, vendor id, and org.
 *
 * NOTE(review): `password` is accepted by the body schema but never checked —
 * only `email` is read below, so any request naming a known email receives a
 * valid token. Real credential verification is required before production.
 */
export async function authRoutes(app: FastifyInstance) {
  const db = app.db;
  app.post<{
    Body: { email: string; password?: string };
  }>(
    "/token",
    {
      schema: {
        body: {
          type: "object",
          required: ["email"],
          properties: { email: { type: "string", format: "email" }, password: { type: "string" } },
        },
        response: { 200: { type: "object", properties: { token: { type: "string" }, user: { type: "object" } } } },
      },
    },
    async (req, reply) => {
      // Org from header; defaults to "default" for single-tenant dev setups.
      const orgId = (req.headers["x-org-id"] as string) || "default";
      const { email } = req.body;
      const [user] = await db.select().from(usersTable).where(and(eq(usersTable.email, email), eq(usersTable.orgId, orgId)));
      // Generic message: do not reveal whether the email exists.
      if (!user) return reply.code(401).send({ error: "Invalid email or password", code: "UNAUTHORIZED" });
      // Resolve the user's role names for embedding in the token.
      const ur = await db.select({ roleName: rolesTable.name }).from(userRoles).innerJoin(rolesTable, eq(userRoles.roleId, rolesTable.id)).where(eq(userRoles.userId, user.id));
      const roleNames = ur.map((r) => r.roleName as RoleName).filter(Boolean);
      const token = app.jwt.sign({
        sub: user.id,
        email: user.email,
        roles: roleNames,
        vendorId: user.vendorId ?? undefined,
        orgId: user.orgId,
      });
      return reply.send({ token, user: { id: user.id, email: user.email, name: user.name, roles: roleNames, vendorId: user.vendorId ?? null } });
    }
  );
}

View File

@@ -0,0 +1,89 @@
import type { FastifyInstance } from "fastify";
import { eq, and, inArray } from "drizzle-orm";
import {
assets as assetsTable,
sites as sitesTable,
rooms as roomsTable,
rows as rowsTable,
racks as racksTable,
positions as positionsTable,
} from "@sankofa/schema";
/**
 * Capacity reporting routes: rack-unit (RU) utilization, power headroom, and
 * GPU inventory rollups. Sites and assets are org-scoped directly; rooms,
 * rows, racks, and positions are reached by walking down from an org-checked
 * site, so they inherit that scoping.
 */
export async function capacityRoutes(app: FastifyInstance) {
  const db = app.db;

  // RU utilization for one site: usedRu / totalRu across all racks.
  app.get<{ Params: { siteId: string } }>("/sites/:siteId", async (req, reply) => {
    const orgId = app.orgId(req);
    const siteId = req.params.siteId;
    // 404 unless the site exists inside the caller's org.
    const [site] = await db.select().from(sitesTable).where(and(eq(sitesTable.id, siteId), eq(sitesTable.orgId, orgId)));
    if (!site) return reply.notFound();
    // Walk the containment hierarchy site -> rooms -> rows -> racks ->
    // positions; each empty level short-circuits with a zeroed report.
    const rooms = await db.select({ id: roomsTable.id }).from(roomsTable).where(eq(roomsTable.siteId, siteId));
    const roomIds = rooms.map((r) => r.id);
    if (roomIds.length === 0) return reply.send({ siteId, usedRu: 0, totalRu: 0, utilizationPercent: 0 });
    const rows = await db.select({ id: rowsTable.id }).from(rowsTable).where(inArray(rowsTable.roomId, roomIds));
    const rowIds = rows.map((r) => r.id);
    if (rowIds.length === 0) return reply.send({ siteId, usedRu: 0, totalRu: 0, utilizationPercent: 0 });
    const racks = await db.select({ id: racksTable.id, ruTotal: racksTable.ruTotal }).from(racksTable).where(inArray(racksTable.rowId, rowIds));
    const totalRu = racks.reduce((sum, r) => sum + r.ruTotal, 0);
    const rackIds = racks.map((r) => r.id);
    if (rackIds.length === 0) return reply.send({ siteId, usedRu: 0, totalRu: 0, utilizationPercent: 0 });
    const positions = await db.select({ id: positionsTable.id, ruStart: positionsTable.ruStart, ruEnd: positionsTable.ruEnd }).from(positionsTable).where(inArray(positionsTable.rackId, rackIds));
    // A position counts as occupied when some asset in this org+site points
    // at it; null positionIds are filtered out.
    const occupiedPositionIds = await db.select({ positionId: assetsTable.positionId }).from(assetsTable).where(and(eq(assetsTable.orgId, orgId), eq(assetsTable.siteId, siteId)));
    const occupiedSet = new Set(occupiedPositionIds.map((a) => a.positionId).filter(Boolean));
    // RU spans are inclusive on both ends, hence the +1.
    const usedRu = positions.filter((p) => occupiedSet.has(p.id)).reduce((sum, p) => sum + (p.ruEnd - p.ruStart + 1), 0);
    const utilizationPercent = totalRu > 0 ? Math.round((usedRu / totalRu) * 100) : 0;
    return reply.send({ siteId, usedRu, totalRu, utilizationPercent });
  });

  // Power summary for one site: sum of per-feed circuit limits across racks.
  // measuredDrawWatts/headroomWatts are returned as null — no telemetry
  // source is wired up yet.
  app.get<{ Params: { siteId: string } }>("/sites/:siteId/power", async (req, reply) => {
    const orgId = app.orgId(req);
    const siteId = req.params.siteId;
    const [site] = await db.select().from(sitesTable).where(and(eq(sitesTable.id, siteId), eq(sitesTable.orgId, orgId)));
    if (!site) return reply.notFound();
    const rooms = await db.select({ id: roomsTable.id }).from(roomsTable).where(eq(roomsTable.siteId, siteId));
    const roomIds = rooms.map((r) => r.id);
    if (roomIds.length === 0) return reply.send({ siteId, circuitLimitWatts: 0, measuredDrawWatts: null, headroomWatts: null });
    const rows = await db.select({ id: rowsTable.id }).from(rowsTable).where(inArray(rowsTable.roomId, roomIds));
    const rowIds = rows.map((r) => r.id);
    if (rowIds.length === 0) return reply.send({ siteId, circuitLimitWatts: 0, measuredDrawWatts: null, headroomWatts: null });
    const racks = await db.select({ powerFeeds: racksTable.powerFeeds }).from(racksTable).where(inArray(racksTable.rowId, rowIds));
    // powerFeeds is a JSON column; missing/limit-less feeds contribute 0.
    let circuitLimitWatts = 0;
    for (const r of racks) {
      const feeds = (r.powerFeeds as { circuitLimitWatts?: number }[] | null) ?? [];
      for (const f of feeds) circuitLimitWatts += f.circuitLimitWatts ?? 0;
    }
    return reply.send({
      siteId,
      circuitLimitWatts,
      measuredDrawWatts: null,
      headroomWatts: null,
    });
  });

  // Org-wide GPU inventory rollup, grouped by site and by part number.
  app.get("/gpu-inventory", async (req, reply) => {
    const orgId = app.orgId(req);
    const list = await db.select({
      id: assetsTable.id,
      assetId: assetsTable.assetId,
      siteId: assetsTable.siteId,
      status: assetsTable.status,
      partNumber: assetsTable.partNumber,
    }).from(assetsTable).where(and(eq(assetsTable.orgId, orgId), eq(assetsTable.category, "gpu")));
    const bySite: Record<string, number> = {};
    const byType: Record<string, number> = {};
    for (const a of list) {
      // Assets without a site/part number are bucketed explicitly.
      const sid = a.siteId ?? "unassigned";
      bySite[sid] = (bySite[sid] ?? 0) + 1;
      const typeKey = a.partNumber ?? "unknown";
      byType[typeKey] = (byType[typeKey] ?? 0) + 1;
    }
    return reply.send({ total: list.length, bySite, byType });
  });
}

View File

@@ -0,0 +1,77 @@
import type { FastifyInstance } from "fastify";
import { eq, and } from "drizzle-orm";
import { complianceProfiles as profilesTable } from "@sankofa/schema";
/**
 * CRUD for compliance profiles (firmware freeze policy, allowed hardware
 * generations, approved SKUs), all scoped to the caller's org.
 */
export async function complianceProfilesRoutes(app: FastifyInstance) {
  const db = app.db;

  // List every profile in the caller's org.
  app.get("/", async (req, reply) => {
    const org = app.orgId(req);
    const rows = await db.select().from(profilesTable).where(eq(profilesTable.orgId, org));
    return reply.send(rows);
  });

  // Fetch one profile by id (404 outside the caller's org).
  app.get<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const org = app.orgId(req);
    const rows = await db
      .select()
      .from(profilesTable)
      .where(and(eq(profilesTable.id, req.params.id), eq(profilesTable.orgId, org)));
    if (rows.length === 0) return reply.notFound();
    return reply.send(rows[0]);
  });

  // Create a profile; optional policy fields are stored as explicit nulls.
  app.post<{
    Body: {
      name: string;
      firmwareFreezePolicy?: { lockedVersion?: string; minVersion?: string; maxVersion?: string };
      allowedGenerations?: string[];
      approvedSkus?: string[];
      siteId?: string;
    };
  }>("/", async (req, reply) => {
    const org = app.orgId(req);
    const b = req.body;
    const created = await db
      .insert(profilesTable)
      .values({
        orgId: org,
        name: b.name,
        firmwareFreezePolicy: b.firmwareFreezePolicy ?? null,
        allowedGenerations: b.allowedGenerations ?? null,
        approvedSkus: b.approvedSkus ?? null,
        siteId: b.siteId ?? null,
      })
      .returning();
    return reply.code(201).send(created[0]);
  });

  // Partial update; bumps updatedAt on every successful patch.
  app.patch<{
    Params: { id: string };
    Body: Partial<{
      name: string;
      firmwareFreezePolicy: { lockedVersion?: string; minVersion?: string; maxVersion?: string };
      allowedGenerations: string[];
      approvedSkus: string[];
      siteId: string;
    }>;
  }>("/:id", async (req, reply) => {
    const org = app.orgId(req);
    const changed = await db
      .update(profilesTable)
      .set({ ...req.body, updatedAt: new Date() })
      .where(and(eq(profilesTable.id, req.params.id), eq(profilesTable.orgId, org)))
      .returning();
    if (changed.length === 0) return reply.notFound();
    return reply.send(changed[0]);
  });

  // Delete; 204 on success, 404 when absent or in another org.
  app.delete<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const org = app.orgId(req);
    const removed = await db
      .delete(profilesTable)
      .where(and(eq(profilesTable.id, req.params.id), eq(profilesTable.orgId, org)))
      .returning({ id: profilesTable.id });
    if (removed.length === 0) return reply.notFound();
    return reply.code(204).send();
  });
}

View File

@@ -0,0 +1,44 @@
import type { FastifyInstance } from "fastify";
import { authRoutes } from "./auth";
import { vendorsRoutes } from "./vendors";
import { offersRoutes } from "./offers";
import { usersRoutes } from "./users";
import { rolesRoutes } from "./roles";
import { purchaseOrdersRoutes } from "./purchase-orders";
import { assetsRoutes } from "./assets";
import { sitesRoutes } from "./sites";
import { uploadRoutes } from "./upload";
import { workflowRoutes } from "./workflow";
import { inspectionRoutes } from "./inspection";
import { shipmentsRoutes } from "./shipments";
import { assetComponentsRoutes } from "./asset-components";
import { capacityRoutes } from "./capacity";
import { integrationsRoutes } from "./integrations";
import { maintenancesRoutes } from "./maintenances";
import { complianceProfilesRoutes } from "./compliance-profiles";
import { unifiControllersRoutes } from "./unifi-controllers";
import { reportsRoutes } from "./reports";
import { ingestionRoutes } from "./ingestion";
export async function registerV1Routes(app: FastifyInstance) {
await app.register(authRoutes, { prefix: "/auth" });
await app.register(vendorsRoutes, { prefix: "/vendors" });
await app.register(offersRoutes, { prefix: "/offers" });
await app.register(usersRoutes, { prefix: "/users" });
await app.register(rolesRoutes, { prefix: "/roles" });
await app.register(purchaseOrdersRoutes, { prefix: "/purchase-orders" });
await app.register(assetsRoutes, { prefix: "/assets" });
await app.register(sitesRoutes, { prefix: "/sites" });
await app.register(uploadRoutes, { prefix: "/upload" });
await app.register(workflowRoutes, { prefix: "/workflow" });
await app.register(inspectionRoutes, { prefix: "/inspection" });
await app.register(shipmentsRoutes, { prefix: "/shipments" });
await app.register(assetComponentsRoutes, { prefix: "/asset-components" });
await app.register(capacityRoutes, { prefix: "/capacity" });
await app.register(integrationsRoutes, { prefix: "/integrations" });
await app.register(maintenancesRoutes, { prefix: "/maintenances" });
await app.register(complianceProfilesRoutes, { prefix: "/compliance-profiles" });
await app.register(unifiControllersRoutes, { prefix: "/unifi-controllers" });
await app.register(reportsRoutes, { prefix: "/reports" });
await app.register(ingestionRoutes, { prefix: "/ingestion" });
}

View File

@@ -0,0 +1,35 @@
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import { buildApp } from "../../index.js";
// Integration-style tests for the ingestion API-key gate. These exercise the
// 401 path only, so they pass without a configured INGESTION_API_KEY or DB.
describe("ingestion", () => {
  let app: Awaited<ReturnType<typeof buildApp>>;
  beforeAll(async () => {
    app = await buildApp();
  });
  afterAll(async () => {
    await app.close();
  });
  it("POST /api/v1/ingestion/offers returns 401 without x-ingestion-api-key", async () => {
    const res = await app.inject({
      method: "POST",
      url: "/api/v1/ingestion/offers",
      headers: { "content-type": "application/json" },
      payload: { source: "email", quantity: 1, unit_price: "1" },
    });
    expect(res.statusCode).toBe(401);
    expect(JSON.parse(res.payload).error).toContain("ingestion API key");
  });
  it("POST /api/v1/ingestion/offers returns 401 with wrong x-ingestion-api-key", async () => {
    const res = await app.inject({
      method: "POST",
      url: "/api/v1/ingestion/offers",
      headers: { "content-type": "application/json", "x-ingestion-api-key": "wrong" },
      payload: { source: "email", quantity: 1, unit_price: "1" },
    });
    expect(res.statusCode).toBe(401);
  });
});

View File

@@ -0,0 +1,60 @@
import { createHash, timingSafeEqual } from "node:crypto";
import type { FastifyInstance } from "fastify";
import { offers as offersTable } from "@sankofa/schema";
/**
 * Machine-to-machine offer ingestion (scraper and email pipelines).
 *
 * Every route in this plugin is gated by the x-ingestion-api-key header,
 * checked against the INGESTION_API_KEY environment variable. Fails closed
 * (401) when no key is configured.
 */
export async function ingestionRoutes(app: FastifyInstance) {
  const db = app.db;

  // Constant-time comparison of the presented key against the configured one.
  // Hashing both sides yields equal-length buffers for timingSafeEqual and
  // avoids leaking the key's length through the comparison.
  const keysMatch = (presented: string, expected: string): boolean =>
    timingSafeEqual(
      createHash("sha256").update(presented).digest(),
      createHash("sha256").update(expected).digest()
    );

  app.addHook("preHandler", async (req, reply) => {
    // Fix: read the env var per request instead of at module load, so key
    // rotation and test setup do not require a process restart; and compare
    // in constant time instead of with `!==`.
    const expected = process.env.INGESTION_API_KEY;
    const presented = (req.headers["x-ingestion-api-key"] as string) || "";
    if (!expected || !presented || !keysMatch(presented, expected)) {
      return reply.code(401).send({ error: "Invalid or missing ingestion API key" });
    }
  });

  // Accept a normalized offer payload (snake_case wire format) and persist it
  // with the ingestion timestamp; org comes from the x-org-id header.
  app.post<{
    Body: {
      source: "scraped" | "email";
      source_ref?: string;
      source_metadata?: Record<string, unknown>;
      vendor_id?: string | null;
      sku?: string;
      mpn?: string;
      quantity: number;
      unit_price: string;
      incoterms?: string;
      lead_time_days?: number;
      country_of_origin?: string;
      condition?: string;
      warranty?: string;
      evidence_refs?: { key: string; hash?: string }[];
    };
  }>("/offers", async (req, reply) => {
    const orgId = (req.headers["x-org-id"] as string) || "default";
    const body = req.body;
    const now = new Date();
    const [inserted] = await db
      .insert(offersTable)
      .values({
        orgId,
        vendorId: body.vendor_id ?? null,
        sku: body.sku ?? null,
        mpn: body.mpn ?? null,
        quantity: body.quantity,
        unitPrice: body.unit_price,
        incoterms: body.incoterms ?? null,
        leadTimeDays: body.lead_time_days ?? null,
        countryOfOrigin: body.country_of_origin ?? null,
        condition: body.condition ?? null,
        warranty: body.warranty ?? null,
        evidenceRefs: body.evidence_refs ?? null,
        source: body.source,
        sourceRef: body.source_ref ?? null,
        sourceMetadata: body.source_metadata ?? null,
        ingestedAt: now,
      })
      .returning();
    return reply.code(201).send(inserted);
  });
}

View File

@@ -0,0 +1,47 @@
import type { FastifyInstance } from "fastify";
import { eq, and } from "drizzle-orm";
import { inspectionTemplates as tplTable, inspectionRuns as runsTable } from "@sankofa/schema";
/**
 * Inspection workflow routes: reusable templates (one per hardware category)
 * and runs executed against an offer and/or an asset. Org-scoped throughout.
 */
export async function inspectionRoutes(app: FastifyInstance) {
  const db = app.db;
  // List this org's inspection templates.
  app.get("/templates", async (req, reply) => {
    const orgId = app.orgId(req);
    const list = await db.select().from(tplTable).where(eq(tplTable.orgId, orgId));
    return reply.send(list);
  });
  // Create a template: an ordered list of steps, each optionally required.
  app.post<{ Body: { category: string; name: string; steps: { id: string; label: string; required?: boolean }[] } }>("/templates", async (req, reply) => {
    const orgId = app.orgId(req);
    const [inserted] = await db.insert(tplTable).values({ orgId, category: req.body.category, name: req.body.name, steps: req.body.steps }).returning();
    return reply.code(201).send(inserted);
  });
  // List this org's inspection runs.
  app.get("/runs", async (req, reply) => {
    const orgId = app.orgId(req);
    const list = await db.select().from(runsTable).where(eq(runsTable.orgId, orgId));
    return reply.send(list);
  });
  // Start a run from a template, optionally linked to an offer and/or asset.
  app.post<{ Body: { templateId: string; offerId?: string; assetId?: string } }>("/runs", async (req, reply) => {
    const orgId = app.orgId(req);
    const [inserted] = await db.insert(runsTable).values({
      orgId,
      templateId: req.body.templateId,
      offerId: req.body.offerId ?? null,
      assetId: req.body.assetId ?? null,
    }).returning();
    return reply.code(201).send(inserted);
  });
  // Update a run; moving to a terminal status ("pass"/"fail") stamps
  // completedAt (undefined leaves the column untouched otherwise).
  app.patch<{ Params: { id: string }; Body: { status?: string; evidenceRefs?: { key: string; hash?: string }[]; resultNotes?: string } }>("/runs/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const [updated] = await db.update(runsTable).set({
      ...req.body,
      completedAt: req.body.status === "pass" || req.body.status === "fail" ? new Date() : undefined,
      updatedAt: new Date(),
    }).where(and(eq(runsTable.id, req.params.id), eq(runsTable.orgId, orgId))).returning();
    if (!updated) return reply.notFound();
    return reply.send(updated);
  });
}

View File

@@ -0,0 +1,50 @@
import type { FastifyInstance } from "fastify";
import { eq, and } from "drizzle-orm";
import { integrationMappings as mappingsTable, unifiProductCatalog as catalogTable } from "@sankofa/schema";
import { listUnifiDevices, enrichDevicesWithCatalog } from "../../integrations/unifi.js";
import { listProxmoxNodes } from "../../integrations/proxmox.js";
/**
 * Integration routes: UniFi product intelligence, Proxmox inventory, and
 * asset/site ↔ external-id mappings. Controller credentials are supplied
 * per-request via headers, so no integration secrets are persisted here.
 */
export async function integrationsRoutes(app: FastifyInstance) {
  const db = app.db;

  // Live device list from a UniFi controller, enriched with catalog metadata
  // (generation, support horizon) for compliance-aware views.
  app.get<{ Params: { siteId: string } }>("/unifi/sites/:siteId/devices", async (req, reply) => {
    const token = (req.headers["x-unifi-token"] as string) || "";
    const baseUrl = (req.headers["x-unifi-url"] as string) || "";
    // Fail fast with a 400 instead of calling the upstream client with empty
    // credentials and surfacing an opaque network/auth error to the caller.
    if (!baseUrl || !token) throw app.httpErrors.badRequest("x-unifi-url and x-unifi-token headers required");
    const devices = await listUnifiDevices(baseUrl, token);
    const catalog = await db.select().from(catalogTable);
    return reply.send(enrichDevicesWithCatalog(devices, catalog));
  });

  // Catalog listing with optional generation / sovereign-approval filters.
  app.get<{ Querystring: { generation?: string; approved_sovereign?: string } }>("/unifi/product-catalog", async (req, reply) => {
    const { generation, approved_sovereign } = req.query; // typed via route generic; no cast needed
    const conditions = [
      ...(generation ? [eq(catalogTable.generation, generation)] : []),
      ...(approved_sovereign === "true" ? [eq(catalogTable.approvedSovereignDefault, true)] : []),
    ];
    const list = conditions.length
      ? await db.select().from(catalogTable).where(and(...conditions))
      : await db.select().from(catalogTable);
    return reply.send(list);
  });

  // Single catalog entry by SKU.
  app.get<{ Params: { sku: string } }>("/unifi/product-catalog/:sku", async (req, reply) => {
    const [row] = await db.select().from(catalogTable).where(eq(catalogTable.sku, req.params.sku));
    if (!row) return reply.notFound();
    return reply.send(row);
  });

  // Proxmox cluster nodes; per-request credentials, same contract as UniFi above.
  app.get<{ Params: { siteId: string } }>("/proxmox/sites/:siteId/nodes", async (req, reply) => {
    const token = (req.headers["x-proxmox-token"] as string) || "";
    const baseUrl = (req.headers["x-proxmox-url"] as string) || "";
    if (!baseUrl || !token) throw app.httpErrors.badRequest("x-proxmox-url and x-proxmox-token headers required");
    return reply.send(await listProxmoxNodes(baseUrl, token));
  });

  // List asset/site ↔ external-system mappings for the caller's org.
  app.get("/mappings", async (req, reply) => {
    const orgId = app.orgId(req);
    const list = await db.select().from(mappingsTable).where(eq(mappingsTable.orgId, orgId));
    return reply.send(list);
  });

  // Create a mapping tying an asset or site to an external provider id.
  app.post<{ Body: { assetId?: string; siteId?: string; provider: string; externalId: string } }>("/mappings", async (req, reply) => {
    const orgId = app.orgId(req);
    const [inserted] = await db.insert(mappingsTable).values({ orgId, assetId: req.body.assetId ?? null, siteId: req.body.siteId ?? null, provider: req.body.provider, externalId: req.body.externalId }).returning();
    return reply.code(201).send(inserted);
  });
}

View File

@@ -0,0 +1,29 @@
import type { FastifyInstance } from "fastify";
import { eq, and } from "drizzle-orm";
import { maintenances as maintenancesTable } from "@sankofa/schema";
/** Maintenance-record CRUD for assets, scoped to the caller's org. */
export async function maintenancesRoutes(app: FastifyInstance) {
  const db = app.db;

  // List maintenance records for the org.
  app.get("/", async (req, reply) => {
    const orgId = app.orgId(req);
    const list = await db.select().from(maintenancesTable).where(eq(maintenancesTable.orgId, orgId));
    return reply.send(list);
  });

  // Fetch one record; 404 if missing or owned by another org.
  app.get<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const [row] = await db.select().from(maintenancesTable).where(and(eq(maintenancesTable.id, req.params.id), eq(maintenancesTable.orgId, orgId)));
    if (!row) return reply.notFound();
    return reply.send(row);
  });

  // Open a maintenance record against an asset.
  app.post<{ Body: { assetId: string; type: string; vendorTicketRef?: string; description?: string } }>("/", async (req, reply) => {
    const orgId = app.orgId(req);
    const [inserted] = await db.insert(maintenancesTable).values({ orgId, assetId: req.body.assetId, type: req.body.type, vendorTicketRef: req.body.vendorTicketRef ?? null, description: req.body.description ?? null }).returning();
    return reply.code(201).send(inserted);
  });

  // Update status only. Whitelisting (rather than spreading req.body) prevents
  // mass assignment: Fastify does not strip undeclared body properties here.
  app.patch<{ Params: { id: string }; Body: { status?: string } }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const [updated] = await db.update(maintenancesTable).set({ status: req.body.status, updatedAt: new Date() }).where(and(eq(maintenancesTable.id, req.params.id), eq(maintenancesTable.orgId, orgId))).returning();
    if (!updated) return reply.notFound();
    return reply.send(updated);
  });
}

View File

@@ -0,0 +1,57 @@
import type { FastifyInstance } from "fastify";
import { eq, and, sql } from "drizzle-orm";
import { offers as offersTable } from "@sankofa/schema";
/**
 * Offer CRUD. Vendor-scoped users (`app.vendorId`) are transparently limited
 * to rows belonging to their own vendor; org scoping always applies.
 */
export async function offersRoutes(app: FastifyInstance) {
  const db = app.db;
  const listSchema = { querystring: { type: "object", properties: { limit: { type: "integer" }, offset: { type: "integer" } } } };

  // Paginated list; responds with { data, total } for client-side paging.
  app.get("/", { schema: listSchema }, async (req, reply) => {
    const orgId = app.orgId(req);
    const vid = app.vendorId(req);
    // Clamp paging params to sane bounds: Math.min alone previously let a
    // negative limit (e.g. ?limit=-5) pass straight through to SQL.
    const limit = Math.min(Math.max(Number((req.query as { limit?: number }).limit) || 50, 1), 100);
    const offset = Math.max(Number((req.query as { offset?: number }).offset) || 0, 0);
    const conditions = [eq(offersTable.orgId, orgId)] as ReturnType<typeof eq>[];
    if (vid) conditions.push(eq(offersTable.vendorId, vid));
    const list = await db.select().from(offersTable).where(and(...conditions)).limit(limit).offset(offset);
    const [{ total }] = await db.select({ total: sql<number>`count(*)::int` }).from(offersTable).where(and(...conditions));
    return reply.send({ data: list, total });
  });

  // Single offer; vendors see only their own rows.
  app.get<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const vid = app.vendorId(req);
    const conditions = [eq(offersTable.id, req.params.id), eq(offersTable.orgId, orgId)] as ReturnType<typeof eq>[];
    if (vid) conditions.push(eq(offersTable.vendorId, vid));
    const [row] = await db.select().from(offersTable).where(and(...conditions));
    if (!row) return reply.notFound();
    return reply.send(row);
  });

  // Create an offer; vendor users are pinned to their own vendorId, staff must
  // pass vendorId explicitly.
  app.post<{ Body: { vendorId?: string; sku?: string; mpn?: string; quantity: number; unitPrice: string; incoterms?: string; leadTimeDays?: number; countryOfOrigin?: string; condition?: string; warranty?: string; evidenceRefs?: { key: string; hash?: string }[] } }>("/", async (req, reply) => {
    const orgId = app.orgId(req);
    const vid = app.vendorId(req);
    const vendorId = vid ?? req.body.vendorId ?? null;
    if (!vendorId) throw app.httpErrors.badRequest("vendorId required (or login as vendor user)");
    const [inserted] = await db.insert(offersTable).values({
      orgId, vendorId, sku: req.body.sku ?? null, mpn: req.body.mpn ?? null, quantity: req.body.quantity, unitPrice: req.body.unitPrice,
      incoterms: req.body.incoterms ?? null, leadTimeDays: req.body.leadTimeDays ?? null, countryOfOrigin: req.body.countryOfOrigin ?? null, condition: req.body.condition ?? null, warranty: req.body.warranty ?? null, evidenceRefs: req.body.evidenceRefs ?? null,
    }).returning();
    return reply.code(201).send(inserted);
  });

  // Partial update. Columns are whitelisted (not spread from req.body) so
  // undeclared properties cannot overwrite e.g. orgId/vendorId (mass assignment).
  app.patch<{ Params: { id: string }; Body: Partial<{ sku: string; mpn: string; quantity: number; unitPrice: string; status: string; evidenceRefs: { key: string; hash?: string }[] }> }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const vid = app.vendorId(req);
    const conditions = [eq(offersTable.id, req.params.id), eq(offersTable.orgId, orgId)] as ReturnType<typeof eq>[];
    if (vid) conditions.push(eq(offersTable.vendorId, vid));
    const { sku, mpn, quantity, unitPrice, status, evidenceRefs } = req.body;
    const [updated] = await db.update(offersTable).set({ sku, mpn, quantity, unitPrice, status, evidenceRefs, updatedAt: new Date() }).where(and(...conditions)).returning();
    if (!updated) return reply.notFound();
    return reply.send(updated);
  });

  // Delete; vendors may only delete their own offers.
  app.delete<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const vid = app.vendorId(req);
    const conditions = [eq(offersTable.id, req.params.id), eq(offersTable.orgId, orgId)] as ReturnType<typeof eq>[];
    if (vid) conditions.push(eq(offersTable.vendorId, vid));
    const [deleted] = await db.delete(offersTable).where(and(...conditions)).returning({ id: offersTable.id });
    if (!deleted) return reply.notFound();
    return reply.code(204).send();
  });
}

View File

@@ -0,0 +1,74 @@
import type { FastifyInstance } from "fastify";
import { eq, and } from "drizzle-orm";
import { purchaseOrders as poTable } from "@sankofa/schema";
/**
 * Purchase-order CRUD. Vendor users see only their own POs on reads;
 * status transitions are additionally driven through the workflow routes.
 */
export async function purchaseOrdersRoutes(app: FastifyInstance) {
  const db = app.db;

  // List POs; vendor users are restricted to their own vendor's POs.
  app.get("/", async (req, reply) => {
    const orgId = app.orgId(req);
    const vid = app.vendorId(req);
    const conditions = [eq(poTable.orgId, orgId)] as ReturnType<typeof eq>[];
    if (vid) conditions.push(eq(poTable.vendorId, vid));
    const list = await db.select().from(poTable).where(and(...conditions));
    return reply.send(list);
  });

  // Single PO; 404 if missing, foreign-org, or (for vendor users) foreign-vendor.
  app.get<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const vid = app.vendorId(req);
    const conditions = [eq(poTable.id, req.params.id), eq(poTable.orgId, orgId)] as ReturnType<typeof eq>[];
    if (vid) conditions.push(eq(poTable.vendorId, vid));
    const [row] = await db.select().from(poTable).where(and(...conditions));
    if (!row) return reply.notFound();
    return reply.send(row);
  });

  // Create a draft PO with its line items.
  app.post<{
    Body: {
      vendorId: string;
      lineItems: { offerId?: string; sku?: string; quantity: number; unitPrice: string }[];
      escrowTerms?: string;
      inspectionSiteId?: string;
      deliverySiteId?: string;
    };
  }>("/", async (req, reply) => {
    const orgId = app.orgId(req);
    const [inserted] = await db
      .insert(poTable)
      .values({
        orgId,
        vendorId: req.body.vendorId,
        lineItems: req.body.lineItems,
        escrowTerms: req.body.escrowTerms ?? null,
        inspectionSiteId: req.body.inspectionSiteId ?? null,
        deliverySiteId: req.body.deliverySiteId ?? null,
      })
      .returning();
    return reply.code(201).send(inserted);
  });

  // Partial update. Fields are whitelisted rather than spread from req.body so
  // undeclared properties cannot overwrite arbitrary columns (mass assignment).
  // NOTE(review): this still allows setting status/approvalStage directly,
  // bypassing the validated transitions in workflow routes — confirm intended.
  app.patch<{
    Params: { id: string };
    Body: Partial<{ status: string; approvalStage: string; escrowTerms: string }>;
  }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const { status, approvalStage, escrowTerms } = req.body;
    const [updated] = await db
      .update(poTable)
      .set({ status, approvalStage, escrowTerms, updatedAt: new Date() })
      .where(and(eq(poTable.id, req.params.id), eq(poTable.orgId, orgId)))
      .returning();
    if (!updated) return reply.notFound();
    return reply.send(updated);
  });

  // Delete a PO (org-scoped; not vendor-scoped by design of write endpoints).
  app.delete<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const [deleted] = await db
      .delete(poTable)
      .where(and(eq(poTable.id, req.params.id), eq(poTable.orgId, orgId)))
      .returning({ id: poTable.id });
    if (!deleted) return reply.notFound();
    return reply.code(204).send();
  });
}

View File

@@ -0,0 +1,46 @@
import type { FastifyInstance } from "fastify";
import { eq, and } from "drizzle-orm";
import { assets as assetsTable, integrationMappings as mappingsTable, unifiProductCatalog as catalogTable } from "@sankofa/schema";
/**
 * Reporting endpoints: bill-of-materials and UniFi support-horizon risk.
 * The org always comes from the authenticated context — the `org_id`
 * querystring is accepted for backward compatibility but IGNORED: honoring it
 * previously let any caller read another tenant's assets with no permission check.
 */
export async function reportsRoutes(app: FastifyInstance) {
  const db = app.db;

  // BOM: assets joined in-memory to their UniFi mapping and catalog entry.
  app.get<{ Querystring: { org_id?: string; site_id?: string } }>("/bom", async (req, reply) => {
    const orgId = app.orgId(req); // never trust query org_id (cross-tenant read)
    const siteId = req.query.site_id;
    const assetList = siteId
      ? await db.select().from(assetsTable).where(and(eq(assetsTable.orgId, orgId), eq(assetsTable.siteId, siteId)))
      : await db.select().from(assetsTable).where(eq(assetsTable.orgId, orgId));
    const mappings = await db.select().from(mappingsTable).where(eq(mappingsTable.orgId, orgId));
    const catalog = await db.select().from(catalogTable);
    const items = assetList.map((a) => {
      // Mapping.externalId may store either the catalog SKU or the model name.
      const mapping = mappings.find((m) => m.assetId === a.id && m.provider === "unifi");
      const catalogEntry = mapping ? catalog.find((c) => c.sku === mapping.externalId || c.modelName === mapping.externalId) : null;
      return {
        assetId: a.assetId,
        category: a.category,
        siteId: a.siteId,
        catalogSku: catalogEntry?.sku,
        generation: catalogEntry?.generation,
        supportHorizon: catalogEntry?.supportHorizon,
      };
    });
    return reply.send({ orgId, siteId: siteId ?? null, items });
  });

  // Catalog entries whose EOL date falls within the requested horizon (1-24 months).
  app.get<{ Querystring: { org_id?: string; horizon_months?: string } }>("/support-risk", async (req, reply) => {
    const orgId = app.orgId(req); // query org_id ignored — see module doc
    const horizonMonths = Math.min(24, Math.max(1, parseInt(req.query.horizon_months ?? "12", 10) || 12));
    const catalog = await db.select().from(catalogTable);
    const mappings = await db.select().from(mappingsTable).where(and(eq(mappingsTable.orgId, orgId), eq(mappingsTable.provider, "unifi")));
    const cutoff = new Date();
    cutoff.setMonth(cutoff.getMonth() + horizonMonths);
    const atRisk = catalog.filter((c) => {
      if (!c.eolDate) return false;
      const eol = new Date(c.eolDate);
      return eol <= cutoff;
    });
    const bySku = atRisk.map((c) => ({ sku: c.sku, modelName: c.modelName, generation: c.generation, eolDate: c.eolDate, supportHorizon: c.supportHorizon }));
    return reply.send({ orgId, horizonMonths, atRisk: bySku, deviceCount: mappings.length });
  });
}

View File

@@ -0,0 +1,43 @@
import type { FastifyInstance } from "fastify";
import { eq } from "drizzle-orm";
import { roles as rolesTable } from "@sankofa/schema";
/** Role CRUD. Queries carry no org filter — roles appear to be global here. */
export async function rolesRoutes(app: FastifyInstance) {
  const db = app.db;

  // All roles.
  app.get("/", async (_req, reply) => reply.send(await db.select().from(rolesTable)));

  // One role by id, or 404.
  app.get<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const rows = await db.select().from(rolesTable).where(eq(rolesTable.id, req.params.id));
    return rows.length === 0 ? reply.notFound() : reply.send(rows[0]);
  });

  const createBodySchema = {
    type: "object",
    required: ["name"],
    properties: {
      name: { type: "string" },
      description: { type: "string" },
      permissions: { type: "array", items: { type: "string" } },
    },
  };

  // Create a role with an optional permission list.
  app.post<{ Body: { name: string; description?: string; permissions?: string[] } }>(
    "/",
    { schema: { body: createBodySchema } },
    async (req, reply) => {
      const { name, description, permissions } = req.body;
      const [inserted] = await db
        .insert(rolesTable)
        .values({ name, description: description ?? null, permissions: permissions ?? [] })
        .returning();
      return reply.code(201).send(inserted);
    }
  );

  // Partial update; bumps updatedAt.
  app.patch<{ Params: { id: string }; Body: Partial<{ name: string; description: string; permissions: string[] }> }>("/:id", async (req, reply) => {
    const [updated] = await db
      .update(rolesTable)
      .set({ ...req.body, updatedAt: new Date() })
      .where(eq(rolesTable.id, req.params.id))
      .returning();
    return updated ? reply.send(updated) : reply.notFound();
  });

  // Delete; 204 on success, 404 if the id did not exist.
  app.delete<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const [deleted] = await db
      .delete(rolesTable)
      .where(eq(rolesTable.id, req.params.id))
      .returning({ id: rolesTable.id });
    return deleted ? reply.code(204).send() : reply.notFound();
  });
}

View File

@@ -0,0 +1,39 @@
import type { FastifyInstance } from "fastify";
import { eq, and } from "drizzle-orm";
import { shipments as shipmentsTable, assets as assetsTable } from "@sankofa/schema";
/** Shipment CRUD plus a receive action that also marks the contained assets. */
export async function shipmentsRoutes(app: FastifyInstance) {
  const db = app.db;

  // List shipments for the caller's org.
  app.get("/", async (req, reply) => {
    const orgId = app.orgId(req);
    const list = await db.select().from(shipmentsTable).where(eq(shipmentsTable.orgId, orgId));
    return reply.send(list);
  });

  // Single shipment; 404 if missing or foreign-org.
  app.get<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const [row] = await db.select().from(shipmentsTable).where(and(eq(shipmentsTable.id, req.params.id), eq(shipmentsTable.orgId, orgId)));
    if (!row) return reply.notFound();
    return reply.send(row);
  });

  // Create a shipment against a PO.
  app.post<{ Body: { purchaseOrderId: string; tracking?: string } }>("/", async (req, reply) => {
    const orgId = app.orgId(req);
    const [inserted] = await db.insert(shipmentsTable).values({ orgId, purchaseOrderId: req.body.purchaseOrderId, tracking: req.body.tracking ?? null }).returning();
    return reply.code(201).send(inserted);
  });

  // Update tracking/status; fields whitelisted to avoid mass assignment.
  app.patch<{ Params: { id: string }; Body: { tracking?: string; status?: string } }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const { tracking, status } = req.body;
    const [updated] = await db.update(shipmentsTable).set({ tracking, status, updatedAt: new Date() }).where(and(eq(shipmentsTable.id, req.params.id), eq(shipmentsTable.orgId, orgId))).returning();
    if (!updated) return reply.notFound();
    return reply.send(updated);
  });

  // Mark a shipment received and flip the listed assets to "received".
  app.post<{ Params: { id: string }; Body: { assetIds: string[] } }>("/:id/receive", async (req, reply) => {
    const orgId = app.orgId(req);
    const [shipment] = await db.select().from(shipmentsTable).where(and(eq(shipmentsTable.id, req.params.id), eq(shipmentsTable.orgId, orgId)));
    if (!shipment) return reply.notFound();
    // Guard against a non-array body; then run the per-asset updates in
    // parallel instead of one awaited round-trip per asset. Each update is
    // org-scoped, so a foreign asset id is silently ignored rather than
    // mutated cross-tenant.
    const assetIds = Array.isArray(req.body.assetIds) ? req.body.assetIds : [];
    await Promise.all(
      assetIds.map((assetId) =>
        db.update(assetsTable).set({ status: "received", updatedAt: new Date() }).where(and(eq(assetsTable.id, assetId), eq(assetsTable.orgId, orgId)))
      )
    );
    await db.update(shipmentsTable).set({ status: "received", updatedAt: new Date() }).where(and(eq(shipmentsTable.id, req.params.id), eq(shipmentsTable.orgId, orgId)));
    return reply.send({ status: "received" });
  });
}

View File

@@ -0,0 +1,68 @@
import type { FastifyInstance } from "fastify";
import { eq, and } from "drizzle-orm";
import { sites as sitesTable, racks as racksTable, positions as positionsTable, rows as rowsTable, rooms as roomsTable } from "@sankofa/schema";
/**
 * Site CRUD plus a rack-topology view that walks site → rooms → rows → racks
 * → positions.
 */
export async function sitesRoutes(app: FastifyInstance) {
  const db = app.db;

  // List sites for the caller's org.
  app.get("/", async (req, reply) => {
    const orgId = app.orgId(req);
    const list = await db.select().from(sitesTable).where(eq(sitesTable.orgId, orgId));
    return reply.send(list);
  });

  // Single site; 404 if missing or foreign-org.
  app.get<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const [row] = await db.select().from(sitesTable).where(and(eq(sitesTable.id, req.params.id), eq(sitesTable.orgId, orgId)));
    if (!row) return reply.notFound();
    return reply.send(row);
  });

  // Create a site.
  app.post<{ Body: { name: string; regionId?: string; address?: string; networkMetadata?: unknown } }>("/", async (req, reply) => {
    const orgId = app.orgId(req);
    const [inserted] = await db.insert(sitesTable).values({
      orgId, name: req.body.name, regionId: req.body.regionId ?? null, address: req.body.address ?? null, networkMetadata: req.body.networkMetadata ?? null,
    }).returning();
    return reply.code(201).send(inserted);
  });

  // Partial update; undefined fields are skipped by drizzle (column untouched).
  app.patch<{ Params: { id: string }; Body: Partial<{ name: string; address: string; networkMetadata: unknown }> }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const [updated] = await db.update(sitesTable).set({
      name: req.body.name,
      address: req.body.address,
      networkMetadata: req.body.networkMetadata as { uplinks?: string[]; vlans?: string[]; portProfiles?: string[]; ipRanges?: string[] } | undefined,
      updatedAt: new Date(),
    }).where(and(eq(sitesTable.id, req.params.id), eq(sitesTable.orgId, orgId))).returning();
    if (!updated) return reply.notFound();
    return reply.send(updated);
  });

  // Delete a site.
  app.delete<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const [deleted] = await db.delete(sitesTable).where(and(eq(sitesTable.id, req.params.id), eq(sitesTable.orgId, orgId))).returning({ id: sitesTable.id });
    if (!deleted) return reply.notFound();
    return reply.code(204).send();
  });

  // Racks (with positions) for a site. The traversal fans out each level with
  // Promise.all instead of one awaited query per record (the original N+1
  // pattern); Promise.all preserves input order, so output order is unchanged.
  app.get<{ Params: { siteId: string } }>("/:siteId/racks", async (req, reply) => {
    const orgId = app.orgId(req);
    const [site] = await db.select().from(sitesTable).where(and(eq(sitesTable.id, req.params.siteId), eq(sitesTable.orgId, orgId)));
    if (!site) return reply.notFound();
    const roomsList = await db.select().from(roomsTable).where(eq(roomsTable.siteId, req.params.siteId));
    const rowsPerRoom = await Promise.all(
      roomsList.map((room) => db.select({ id: rowsTable.id }).from(rowsTable).where(eq(rowsTable.roomId, room.id)))
    );
    const rowsList = rowsPerRoom.flat();
    const racksPerRow = await Promise.all(
      rowsList.map((row) => db.select().from(racksTable).where(eq(racksTable.rowId, row.id)))
    );
    const racks = racksPerRow.flat();
    const positionsPerRack = await Promise.all(
      racks.map((rack) => db.select().from(positionsTable).where(eq(positionsTable.rackId, rack.id)))
    );
    const rackList = racks.map((rack, i) => ({ ...rack, positions: positionsPerRack[i] ?? [] }));
    return reply.send(rackList);
  });
}

View File

@@ -0,0 +1,59 @@
import type { FastifyInstance } from "fastify";
import { eq, and } from "drizzle-orm";
import { unifiControllers as controllersTable } from "@sankofa/schema";
/** CRUD for registered UniFi controllers (org-scoped). */
export async function unifiControllersRoutes(app: FastifyInstance) {
  const db = app.db;

  // All controllers registered to the caller's org.
  app.get("/", async (req, reply) => {
    const rows = await db.select().from(controllersTable).where(eq(controllersTable.orgId, app.orgId(req)));
    return reply.send(rows);
  });

  // One controller, or 404 if missing / foreign-org.
  app.get<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const scope = and(eq(controllersTable.id, req.params.id), eq(controllersTable.orgId, app.orgId(req)));
    const rows = await db.select().from(controllersTable).where(scope);
    return rows.length === 0 ? reply.notFound() : reply.send(rows[0]);
  });

  // Register a controller; site/region are optional.
  app.post<{ Body: { siteId?: string; baseUrl: string; role: string; region?: string } }>("/", async (req, reply) => {
    const { siteId, baseUrl, role, region } = req.body;
    const [created] = await db
      .insert(controllersTable)
      .values({ orgId: app.orgId(req), siteId: siteId ?? null, baseUrl, role, region: region ?? null })
      .returning();
    return reply.code(201).send(created);
  });

  // Partial update of baseUrl/role/region; bumps updatedAt.
  app.patch<{ Params: { id: string }; Body: Partial<{ baseUrl: string; role: string; region: string }> }>("/:id", async (req, reply) => {
    const scope = and(eq(controllersTable.id, req.params.id), eq(controllersTable.orgId, app.orgId(req)));
    const [changed] = await db.update(controllersTable).set({ ...req.body, updatedAt: new Date() }).where(scope).returning();
    return changed ? reply.send(changed) : reply.notFound();
  });

  // Remove a controller; 204 on success, 404 if nothing matched.
  app.delete<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const scope = and(eq(controllersTable.id, req.params.id), eq(controllersTable.orgId, app.orgId(req)));
    const [removed] = await db.delete(controllersTable).where(scope).returning({ id: controllersTable.id });
    return removed ? reply.code(204).send() : reply.notFound();
  });
}

View File

@@ -0,0 +1,18 @@
import type { FastifyInstance } from "fastify";
import { uploadDocument } from "../../storage";
import { randomUUID } from "crypto";
/**
 * Multipart upload → object storage. Key layout: <prefix>/<uuid>/<filename>.
 */
export async function uploadRoutes(app: FastifyInstance) {
  app.post<{ Querystring: { prefix?: string } }>("/", async (req, reply) => {
    const data = await req.file();
    if (!data) return reply.badRequest("No file");
    const buf = await data.toBuffer();
    // Sanitize caller-controlled values embedded in the object key: the raw
    // filename/prefix previously went in verbatim, letting a client inject
    // "/" or ".." and shape arbitrary key paths in the bucket.
    const rawPrefix = req.query.prefix ?? "documents";
    const prefix = /^[A-Za-z0-9_-]+$/.test(rawPrefix) ? rawPrefix : "documents";
    const safeName = (data.filename || "upload").replace(/[\\/]/g, "_").replace(/\.\./g, "_");
    const key = `${prefix}/${randomUUID()}/${safeName}`;
    const result = await uploadDocument(key, buf, data.mimetype || "application/octet-stream", {
      originalName: data.filename, // original name preserved in metadata only
      orgId: app.orgId(req),
    });
    return reply.send({ key: result.key, bucket: result.bucket, etag: result.etag });
  });
}

View File

@@ -0,0 +1,77 @@
import type { FastifyInstance } from "fastify";
import { eq, and, sql } from "drizzle-orm";
import { users as usersTable, userRoles, roles as rolesTable } from "@sankofa/schema";
/** User CRUD plus role assignment, all scoped to the caller's org. */
export async function usersRoutes(app: FastifyInstance) {
  const db = app.db;
  const listSchema = { querystring: { type: "object", properties: { limit: { type: "integer" }, offset: { type: "integer" } } } };

  // Paginated list; responds with { data, total }.
  app.get("/", { schema: listSchema }, async (req, reply) => {
    const orgId = app.orgId(req);
    // Clamp paging params: Math.min alone previously allowed a negative limit
    // (e.g. ?limit=-5) to reach SQL.
    const limit = Math.min(Math.max(Number((req.query as { limit?: number }).limit) || 50, 1), 100);
    const offset = Math.max(Number((req.query as { offset?: number }).offset) || 0, 0);
    const list = await db.select().from(usersTable).where(eq(usersTable.orgId, orgId)).limit(limit).offset(offset);
    const [{ total }] = await db.select({ total: sql<number>`count(*)::int` }).from(usersTable).where(eq(usersTable.orgId, orgId));
    return reply.send({ data: list, total });
  });

  // Single user with their role ids/names joined in.
  app.get<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const [row] = await db.select().from(usersTable).where(and(eq(usersTable.id, req.params.id), eq(usersTable.orgId, orgId)));
    if (!row) return reply.notFound();
    const ur = await db.select({ roleId: userRoles.roleId, roleName: rolesTable.name }).from(userRoles).innerJoin(rolesTable, eq(userRoles.roleId, rolesTable.id)).where(eq(userRoles.userId, row.id));
    return reply.send({ ...row, roleIds: ur.map((r) => r.roleId), roleNames: ur.map((r) => r.roleName) });
  });

  // Create a user (email required; validated by JSON schema).
  app.post<{
    Body: { email: string; name?: string; orgUnitId?: string; vendorId?: string };
  }>(
    "/",
    {
      schema: {
        body: { type: "object", required: ["email"], properties: { email: { type: "string" }, name: { type: "string" }, orgUnitId: { type: "string" }, vendorId: { type: "string" } } },
      },
    },
    async (req, reply) => {
      const orgId = app.orgId(req);
      const [inserted] = await db.insert(usersTable).values({
        orgId,
        email: req.body.email,
        name: req.body.name ?? null,
        orgUnitId: req.body.orgUnitId ?? null,
        vendorId: req.body.vendorId ?? null,
      }).returning();
      return reply.code(201).send(inserted);
    }
  );

  // Partial update. Columns are whitelisted (not spread from req.body): without
  // a schema, undeclared body properties would otherwise be able to overwrite
  // arbitrary columns such as orgId or email (mass assignment).
  app.patch<{ Params: { id: string }; Body: Partial<{ name: string; orgUnitId: string; vendorId: string }> }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const { name, orgUnitId, vendorId } = req.body;
    const [updated] = await db.update(usersTable).set({ name, orgUnitId, vendorId, updatedAt: new Date() }).where(and(eq(usersTable.id, req.params.id), eq(usersTable.orgId, orgId))).returning();
    if (!updated) return reply.notFound();
    return reply.send(updated);
  });

  // Delete a user.
  app.delete<{ Params: { id: string } }>("/:id", async (req, reply) => {
    const orgId = app.orgId(req);
    const [deleted] = await db.delete(usersTable).where(and(eq(usersTable.id, req.params.id), eq(usersTable.orgId, orgId))).returning({ id: usersTable.id });
    if (!deleted) return reply.notFound();
    return reply.code(204).send();
  });

  // Attach a role (idempotent via onConflictDoNothing).
  app.post<{ Params: { id: string }; Body: { roleId: string } }>("/:id/roles", async (req, reply) => {
    const orgId = app.orgId(req);
    const [user] = await db.select().from(usersTable).where(and(eq(usersTable.id, req.params.id), eq(usersTable.orgId, orgId)));
    if (!user) return reply.notFound();
    await db.insert(userRoles).values({ userId: user.id, roleId: req.body.roleId }).onConflictDoNothing();
    return reply.code(204).send();
  });

  // Detach a role given ?roleId=…
  app.delete<{ Params: { id: string }; Querystring: { roleId: string } }>("/:id/roles", async (req, reply) => {
    const orgId = app.orgId(req);
    const [user] = await db.select().from(usersTable).where(and(eq(usersTable.id, req.params.id), eq(usersTable.orgId, orgId)));
    if (!user) return reply.notFound();
    await db.delete(userRoles).where(and(eq(userRoles.userId, user.id), eq(userRoles.roleId, req.query.roleId)));
    return reply.code(204).send();
  });
}

View File

@@ -0,0 +1,26 @@
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import { buildApp } from "../../index.js";
// Smoke test: unauthenticated vendor listing must be rejected (401) or fail
// loudly (500) when the database is unreachable — never succeed silently.
describe("vendors", () => {
  let server: Awaited<ReturnType<typeof buildApp>>;

  beforeAll(async () => {
    server = await buildApp();
  });

  afterAll(async () => {
    await server.close();
  });

  it("GET /api/v1/vendors without auth returns 401 or 500 when DB unavailable", async () => {
    const res = await server.inject({
      method: "GET",
      url: "/api/v1/vendors",
      headers: { "x-org-id": "default" },
    });
    expect([401, 500]).toContain(res.statusCode);
    const body = JSON.parse(res.payload);
    expect(body.error).toBeDefined();
    if (res.statusCode === 401) {
      expect(body.code).toBe("UNAUTHORIZED");
    }
  });
});

View File

@@ -0,0 +1,58 @@
import type { FastifyInstance } from "fastify";
import { eq, and, sql } from "drizzle-orm";
import { vendors as vendorsTable } from "@sankofa/schema";
import { getActorFromRequest, writeAudit } from "../../audit.js";
/**
 * Vendor CRUD with permission config, vendor-user restrictions, and an audit
 * trail on updates. Vendor-scoped users may only read their own vendor row.
 */
export async function vendorsRoutes(app: FastifyInstance) {
  const db = app.db;

  // Paginated list; a vendor user sees exactly their own vendor.
  app.get("/", {
    config: { permission: "vendors:read" },
    schema: { querystring: { type: "object", properties: { limit: { type: "integer" }, offset: { type: "integer" } } } },
  }, async (req, reply) => {
    const orgId = app.orgId(req);
    const vid = app.vendorId(req);
    const limit = Math.min(Math.max(Number((req.query as { limit?: number }).limit) || 50, 1), 100);
    const offset = Math.max(Number((req.query as { offset?: number }).offset) || 0, 0);
    if (vid) {
      const [row] = await db.select().from(vendorsTable).where(and(eq(vendorsTable.id, vid), eq(vendorsTable.orgId, orgId)));
      return reply.send({ data: row ? [row] : [], total: row ? 1 : 0 });
    }
    const list = await db.select().from(vendorsTable).where(eq(vendorsTable.orgId, orgId)).limit(limit).offset(offset);
    const [{ total }] = await db.select({ total: sql<number>`count(*)::int` }).from(vendorsTable).where(eq(vendorsTable.orgId, orgId));
    return reply.send({ data: list, total });
  });

  // Single vendor; vendor users may only access their own id.
  app.get<{ Params: { id: string } }>("/:id", { config: { permission: "vendors:read" } }, async (req, reply) => {
    const orgId = app.orgId(req);
    const vid = app.vendorId(req);
    if (vid && req.params.id !== vid) throw app.httpErrors.forbidden("Vendor users may only access their own vendor");
    const [row] = await db.select().from(vendorsTable).where(and(eq(vendorsTable.id, req.params.id), eq(vendorsTable.orgId, orgId)));
    if (!row) return reply.notFound();
    return reply.send(row);
  });

  // Create a vendor (staff only).
  app.post<{ Body: { legalName: string; contacts?: unknown; trustTier?: string } }>("/", {
    config: { permission: "vendors:write" },
    schema: { body: { type: "object", required: ["legalName"], properties: { legalName: { type: "string" }, contacts: {}, trustTier: { type: "string" } } } },
  }, async (req, reply) => {
    if (app.vendorId(req)) throw app.httpErrors.forbidden("Vendor users cannot create vendors");
    const orgId = app.orgId(req);
    const [inserted] = await db.insert(vendorsTable).values({ orgId, legalName: req.body.legalName, contacts: (req.body.contacts as { email?: string; phone?: string; name?: string }[] | null) ?? null, trustTier: req.body.trustTier ?? "unknown" }).returning();
    return reply.code(201).send(inserted);
  });

  // Update a vendor (staff only), with before/after audit record.
  app.patch<{ Params: { id: string }; Body: Record<string, unknown> }>("/:id", { config: { permission: "vendors:write" } }, async (req, reply) => {
    if (app.vendorId(req)) throw app.httpErrors.forbidden("Vendor users cannot update vendors");
    const orgId = app.orgId(req);
    const [before] = await db.select().from(vendorsTable).where(and(eq(vendorsTable.id, req.params.id), eq(vendorsTable.orgId, orgId)));
    // Whitelist updatable columns. The previous blind spread of an untyped
    // body let any caller with vendors:write overwrite arbitrary columns,
    // including id and orgId (classic mass assignment).
    const patch: Record<string, unknown> = {};
    if (typeof req.body.legalName === "string") patch.legalName = req.body.legalName;
    if (req.body.contacts !== undefined) patch.contacts = req.body.contacts;
    if (typeof req.body.trustTier === "string") patch.trustTier = req.body.trustTier;
    const [updated] = await db.update(vendorsTable).set({ ...patch, updatedAt: new Date() }).where(and(eq(vendorsTable.id, req.params.id), eq(vendorsTable.orgId, orgId))).returning();
    if (!updated) return reply.notFound();
    const actor = getActorFromRequest(req);
    await writeAudit(db, { orgId, ...actor, action: "vendor.update", resourceType: "vendor", resourceId: req.params.id, beforeState: before ? { ...before } : undefined, afterState: { ...updated } });
    return reply.send(updated);
  });

  // Delete a vendor (staff only).
  app.delete<{ Params: { id: string } }>("/:id", { config: { permission: "vendors:write" } }, async (req, reply) => {
    if (app.vendorId(req)) throw app.httpErrors.forbidden("Vendor users cannot delete vendors");
    const orgId = app.orgId(req);
    const [deleted] = await db.delete(vendorsTable).where(and(eq(vendorsTable.id, req.params.id), eq(vendorsTable.orgId, orgId))).returning({ id: vendorsTable.id });
    if (!deleted) return reply.notFound();
    return reply.code(204).send();
  });
}

View File

@@ -0,0 +1,56 @@
import type { FastifyInstance } from "fastify";
import { eq, and } from "drizzle-orm";
import { purchaseOrders as poTable, offers as offersTable, vendors as vendorsTable } from "@sankofa/schema";
import { nextPOStage, canTransitionPO, computeOfferRiskScore } from "@sankofa/workflow";
/**
 * Workflow endpoints: offer risk scoring and the purchase-order approval
 * state machine (submit -> staged approvals -> approve/reject -> guarded
 * status transitions via canTransitionPO).
 *
 * NOTE(review): unlike vendorRoutes, none of these routes declare a
 * `config: { permission: ... }` — confirm whether route-level permissions
 * should be enforced here, and whether approve/reject should write audit
 * events the way vendor mutations do.
 */
export async function workflowRoutes(app: FastifyInstance) {
  const db = app.db;
  // POST /offers/:id/risk-score — compute and persist a risk score for an offer.
  // Body values override vendor-/offer-derived defaults.
  app.post<{ Params: { id: string }; Body: { trustTier?: string; priceDeviation?: number; conditionAmbiguity?: boolean } }>("/offers/:id/risk-score", async (req, reply) => {
    const orgId = app.orgId(req);
    const [offer] = await db.select().from(offersTable).where(and(eq(offersTable.id, req.params.id), eq(offersTable.orgId, orgId)));
    if (!offer) return reply.notFound();
    // Scope the vendor lookup to the caller's org: vendors are org-scoped rows,
    // and an unscoped lookup could fold another org's trust tier into the score.
    const [vendor] = offer.vendorId ? await db.select().from(vendorsTable).where(and(eq(vendorsTable.id, offer.vendorId), eq(vendorsTable.orgId, orgId))) : [null];
    // Condition ambiguity defaults to "offer has no recorded condition".
    const factors = { trustTier: req.body.trustTier ?? vendor?.trustTier ?? "unknown", priceDeviation: req.body.priceDeviation, conditionAmbiguity: req.body.conditionAmbiguity ?? !offer.condition };
    const { score, factors: outFactors } = computeOfferRiskScore(factors);
    await db.update(offersTable).set({ riskScore: String(score), riskFactors: outFactors as unknown as Record<string, unknown>, updatedAt: new Date() }).where(and(eq(offersTable.id, req.params.id), eq(offersTable.orgId, orgId)));
    return reply.send({ score, factors: outFactors });
  });
  // POST /purchase-orders/:id/submit — draft -> pending_approval at the first stage.
  app.post<{ Params: { id: string } }>("/purchase-orders/:id/submit", async (req, reply) => {
    const orgId = app.orgId(req);
    const [po] = await db.select().from(poTable).where(and(eq(poTable.id, req.params.id), eq(poTable.orgId, orgId)));
    if (!po) return reply.notFound();
    if (po.status !== "draft") return reply.badRequest("PO not in draft");
    await db.update(poTable).set({ status: "pending_approval", approvalStage: "requester", updatedAt: new Date() }).where(and(eq(poTable.id, req.params.id), eq(poTable.orgId, orgId)));
    return reply.send({ status: "pending_approval", approvalStage: "requester" });
  });
  // POST /purchase-orders/:id/approve — advance one approval stage; when the
  // final (executive) stage signs off, the PO becomes approved.
  app.post<{ Params: { id: string } }>("/purchase-orders/:id/approve", async (req, reply) => {
    const orgId = app.orgId(req);
    const [po] = await db.select().from(poTable).where(and(eq(poTable.id, req.params.id), eq(poTable.orgId, orgId)));
    if (!po) return reply.notFound();
    if (po.status !== "pending_approval") return reply.badRequest("PO not pending approval");
    const next = nextPOStage(po.approvalStage as "requester" | "procurement" | "finance" | "executive" | null);
    if (next) {
      await db.update(poTable).set({ approvalStage: next, updatedAt: new Date() }).where(and(eq(poTable.id, req.params.id), eq(poTable.orgId, orgId)));
      return reply.send({ status: "pending_approval", approvalStage: next });
    }
    await db.update(poTable).set({ status: "approved", approvalStage: "executive", updatedAt: new Date() }).where(and(eq(poTable.id, req.params.id), eq(poTable.orgId, orgId)));
    return reply.send({ status: "approved" });
  });
  // POST /purchase-orders/:id/reject — pending_approval -> rejected.
  app.post<{ Params: { id: string } }>("/purchase-orders/:id/reject", async (req, reply) => {
    const orgId = app.orgId(req);
    const [po] = await db.select().from(poTable).where(and(eq(poTable.id, req.params.id), eq(poTable.orgId, orgId)));
    if (!po) return reply.notFound();
    if (po.status !== "pending_approval") return reply.badRequest("PO not pending approval");
    await db.update(poTable).set({ status: "rejected", updatedAt: new Date() }).where(and(eq(poTable.id, req.params.id), eq(poTable.orgId, orgId)));
    return reply.send({ status: "rejected" });
  });
  // PATCH /purchase-orders/:id/status — direct status change, guarded by the
  // canTransitionPO state machine (e.g. approved -> ordered -> received).
  app.patch<{ Params: { id: string }; Body: { status: string } }>("/purchase-orders/:id/status", async (req, reply) => {
    const orgId = app.orgId(req);
    const [po] = await db.select().from(poTable).where(and(eq(poTable.id, req.params.id), eq(poTable.orgId, orgId)));
    if (!po) return reply.notFound();
    const to = req.body.status as "draft" | "pending_approval" | "approved" | "rejected" | "ordered" | "received";
    if (!canTransitionPO(po.status as "draft" | "pending_approval" | "approved" | "rejected" | "ordered" | "received", to)) return reply.badRequest("Invalid status transition");
    await db.update(poTable).set({ status: to, updatedAt: new Date() }).where(and(eq(poTable.id, req.params.id), eq(poTable.orgId, orgId)));
    return reply.send({ status: to });
  });
}

View File

@@ -0,0 +1,14 @@
/** Standard error payload for API responses (see docs/api-error-format.md). */
export interface ApiErrorPayload {
  // Human-readable message for clients and logs.
  error: string;
  // Machine-readable code, e.g. "NOT_FOUND"; see errorCodes below.
  code?: string;
  // Extra data, e.g. validation errors when code is "BAD_REQUEST".
  details?: unknown;
}
// Machine-readable error codes used in ApiErrorPayload.code.
// docs/api-error-format.md documents INTERNAL_ERROR as a valid code; it was
// missing here, so 500 responses could not carry a listed code.
export const errorCodes = {
  BAD_REQUEST: "BAD_REQUEST",
  UNAUTHORIZED: "UNAUTHORIZED",
  FORBIDDEN: "FORBIDDEN",
  NOT_FOUND: "NOT_FOUND",
  CONFLICT: "CONFLICT",
  INTERNAL_ERROR: "INTERNAL_ERROR",
} as const;

67
apps/api/src/storage.ts Normal file
View File

@@ -0,0 +1,67 @@
import {
S3Client,
PutObjectCommand,
GetObjectCommand,
HeadObjectCommand,
} from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
// S3 configuration from the environment. When S3_ENDPOINT is set (e.g. a
// MinIO dev instance), path-style addressing and static credentials are used;
// otherwise the SDK's default AWS resolution chain applies.
const endpoint = process.env.S3_ENDPOINT;
const region = process.env.S3_REGION || "us-east-1";
const bucket = process.env.S3_BUCKET || "sankofa-documents";
// Path-style URLs are required by most non-AWS endpoints (MinIO etc.).
const forcePathStyle = Boolean(endpoint);
export const s3Client = new S3Client({
  region,
  ...(endpoint
    ? {
        endpoint,
        forcePathStyle,
        // Dev-only fallback credentials; real deployments must set S3_ACCESS_KEY / S3_SECRET_KEY.
        credentials: {
          accessKeyId: process.env.S3_ACCESS_KEY || "minioadmin",
          secretAccessKey: process.env.S3_SECRET_KEY || "minioadmin",
        },
      }
    : {}),
});
/** Result of a successful document upload. */
export interface UploadResult {
  // Object key within the bucket.
  key: string;
  // Bucket the object was written to (the configured default bucket).
  bucket: string;
  // ETag returned by S3, when provided.
  etag?: string;
}
/**
 * Upload a document to the default bucket.
 *
 * @param key object key to write
 * @param body raw file contents
 * @param contentType MIME type stored with the object
 * @param metadata optional user metadata attached to the object
 * @returns the key/bucket written plus the ETag S3 reported
 */
export async function uploadDocument(
  key: string,
  body: Buffer | Uint8Array,
  contentType: string,
  metadata?: Record<string, string>
): Promise<UploadResult> {
  const response = await s3Client.send(
    new PutObjectCommand({
      Bucket: bucket,
      Key: key,
      Body: body,
      ContentType: contentType,
      Metadata: metadata,
    })
  );
  return { key, bucket, etag: response.ETag };
}
/**
 * Check whether an object exists at `key` in the default bucket.
 * (Name kept for caller compatibility; reads better as `documentExists`.)
 *
 * Only a 404/NotFound response from HeadObject means "missing". Any other
 * failure (auth, network, misconfigured endpoint) is rethrown — the previous
 * catch-all reported every error as absence, silently masking outages.
 */
export async function getDocumentKey(key: string): Promise<boolean> {
  try {
    await s3Client.send(
      new HeadObjectCommand({ Bucket: bucket, Key: key })
    );
    return true;
  } catch (err: unknown) {
    const status = (err as { $metadata?: { httpStatusCode?: number } })?.$metadata?.httpStatusCode;
    const name = (err as { name?: string })?.name;
    if (status === 404 || name === "NotFound" || name === "NoSuchKey") return false;
    throw err;
  }
}
/**
 * Produce a presigned GET URL for a stored document.
 *
 * @param key object key to download
 * @param expiresIn URL lifetime in seconds (default one hour)
 */
export async function getSignedDownloadUrl(key: string, expiresIn = 3600): Promise<string> {
  return getSignedUrl(
    s3Client,
    new GetObjectCommand({ Bucket: bucket, Key: key }),
    { expiresIn }
  );
}
export { bucket as defaultBucket };

13
apps/api/tsconfig.json Normal file
View File

@@ -0,0 +1,13 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ESNext",
"moduleResolution": "bundler",
"strict": true,
"outDir": "dist",
"rootDir": "src",
"skipLibCheck": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

12
apps/web/index.html Normal file
View File

@@ -0,0 +1,12 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Sankofa HW Infra</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

25
apps/web/package.json Normal file
View File

@@ -0,0 +1,25 @@
{
"name": "@sankofa/web",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
"build": "tsc && vite build",
"dev": "vite",
"preview": "vite preview",
"lint": "eslint src --ext .ts,.tsx"
},
"dependencies": {
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-router-dom": "^7.0.0"
},
"devDependencies": {
"@types/react": "^18.3.0",
"@types/react-dom": "^18.3.0",
"@vitejs/plugin-react": "^4.3.4",
"eslint": "^9.15.0",
"typescript": "^5.7.0",
"vite": "^6.0.0"
}
}

33
apps/web/src/App.tsx Normal file
View File

@@ -0,0 +1,33 @@
import { BrowserRouter, Routes, Route, Navigate } from "react-router-dom";
import { AuthProvider } from "./contexts/AuthContext";
import { Layout } from "./components/Layout";
import { Dashboard } from "./pages/Dashboard";
import { Login } from "./pages/Login";
import { Vendors } from "./pages/Vendors";
import { Offers } from "./pages/Offers";
import { PurchaseOrders } from "./pages/PurchaseOrders";
import { Assets } from "./pages/Assets";
import { Sites } from "./pages/Sites";
import { Capacity } from "./pages/Capacity";
/**
 * Root SPA component: auth context + client-side routing.
 * /login is standalone; every other page renders inside <Layout>, and
 * unknown paths redirect to the dashboard.
 */
export default function App() {
  return (
    <AuthProvider>
      <BrowserRouter>
        <Routes>
          <Route path="/login" element={<Login />} />
          <Route path="/" element={<Layout />}>
            <Route index element={<Dashboard />} />
            <Route path="vendors" element={<Vendors />} />
            <Route path="offers" element={<Offers />} />
            <Route path="purchase-orders" element={<PurchaseOrders />} />
            <Route path="assets" element={<Assets />} />
            <Route path="sites" element={<Sites />} />
            <Route path="capacity" element={<Capacity />} />
            <Route path="*" element={<Navigate to="/" replace />} />
          </Route>
        </Routes>
      </BrowserRouter>
    </AuthProvider>
  );
}

View File

@@ -0,0 +1,58 @@
// API origin from Vite env; empty string means same-origin (the dev server
// proxies /api — see vite.config.ts). The cast avoids needing vite/client types.
const API_BASE = (import.meta as unknown as { env: { VITE_API_URL?: string } }).env?.VITE_API_URL ?? "";
/** Error body shape returned by the API (mirrors the server-side contract). */
export interface ApiErrorPayload {
  // Human-readable message.
  error: string;
  // Machine-readable code such as "NOT_FOUND".
  code?: string;
  // Optional extra data (e.g. validation details).
  details?: unknown;
}
/** Read the persisted JWT, or null when no session is stored. */
function getToken(): string | null {
  const stored = localStorage.getItem("sankofa_token");
  return stored;
}
/** Resolve the active org id, falling back to "default" when none is stored. */
function getOrgId(): string {
  const stored = localStorage.getItem("sankofa_org_id");
  return stored ?? "default";
}
/**
 * Fetch wrapper for the API: attaches JSON content type, the active org id
 * header, and the bearer token when present. Throws Error with the server's
 * `error` message on any non-2xx response.
 *
 * On 401 the stored token is dropped and an "auth:401" event is dispatched
 * (AuthContext listens and clears the user) BEFORE throwing — this ordering
 * is what keeps the UI state consistent with storage, so don't reorder it.
 *
 * NOTE(review): 401 removes only the token, not sankofa_org_id (clearAuth
 * removes both) — confirm the org id is meant to survive re-login.
 */
export async function api<T>(
  path: string,
  options: RequestInit & { params?: Record<string, string> } = {}
): Promise<T> {
  const { params, ...init } = options;
  const url = params ? `${API_BASE}${path}?${new URLSearchParams(params)}` : `${API_BASE}${path}`;
  const token = getToken();
  const orgId = getOrgId();
  // Caller-provided headers are spread last so they can override the defaults.
  const headers: HeadersInit = {
    "Content-Type": "application/json",
    "x-org-id": orgId,
    ...(init.headers as Record<string, string>),
  };
  if (token) (headers as Record<string, string>)["Authorization"] = `Bearer ${token}`;
  const res = await fetch(url, { ...init, headers });
  if (res.status === 401) {
    localStorage.removeItem("sankofa_token");
    window.dispatchEvent(new CustomEvent("auth:401"));
    // Error bodies may be empty or non-JSON; fall back to a generic message.
    const body = (await res.json().catch(() => ({}))) as ApiErrorPayload;
    throw new Error(body.error ?? "Unauthorized");
  }
  if (!res.ok) {
    const body = (await res.json().catch(() => ({}))) as ApiErrorPayload;
    throw new Error(body.error ?? "Request failed: " + res.status);
  }
  // 204 No Content (e.g. DELETE) has no body to parse.
  if (res.status === 204) return undefined as T;
  return res.json() as Promise<T>;
}
/** Persist the session token and, when provided, the active org id. */
export function setAuth(token: string, orgId?: string) {
  localStorage.setItem("sankofa_token", token);
  if (orgId != null) {
    localStorage.setItem("sankofa_org_id", orgId);
  }
}
/** Drop all persisted session state (token and org id). */
export function clearAuth() {
  for (const storageKey of ["sankofa_token", "sankofa_org_id"]) {
    localStorage.removeItem(storageKey);
  }
}
/** True when a non-empty token is stored. Does not validate the token itself. */
export function isAuthenticated(): boolean {
  const token = getToken();
  return token !== null && token.length > 0;
}

View File

@@ -0,0 +1,40 @@
import { Link, Outlet, useNavigate } from "react-router-dom";
import { useAuth } from "../contexts/AuthContext";
// Header navigation entries; array order defines display order.
const nav = [
  { to: "/", label: "Dashboard" },
  { to: "/vendors", label: "Vendors" },
  { to: "/offers", label: "Offers" },
  { to: "/purchase-orders", label: "Purchase orders" },
  { to: "/assets", label: "Assets" },
  { to: "/sites", label: "Sites" },
  { to: "/capacity", label: "Capacity" },
];
/**
 * App shell: dark header with nav links and a sign-in/out control, then the
 * routed page content via <Outlet>. Rendered as the parent route in App.tsx.
 */
export function Layout() {
  const { user, logout } = useAuth();
  const navigate = useNavigate();
  return (
    <div style={{ minHeight: "100vh", display: "flex", flexDirection: "column" }}>
      <header style={{ padding: "1rem 1.5rem", background: "#1a1a1a", color: "#fff", display: "flex", alignItems: "center", gap: "1.5rem" }}>
        <h1 style={{ margin: 0, fontSize: "1.25rem" }}>Sankofa HW Infra</h1>
        <nav style={{ display: "flex", gap: "1rem" }}>
          {nav.map(({ to, label }) => (
            <Link key={to} to={to} style={{ color: "#fff", textDecoration: "none" }}>{label}</Link>
          ))}
        </nav>
        {user ? (
          <span style={{ marginLeft: "auto" }}>
            {user.email}
            <button type="button" onClick={() => { logout(); navigate("/login"); }} style={{ marginLeft: 8 }}>Sign out</button>
          </span>
        ) : (
          <Link to="/login" style={{ marginLeft: "auto", color: "#fff" }}>Sign in</Link>
        )}
      </header>
      <main style={{ flex: 1, padding: "1.5rem" }}>
        <Outlet />
      </main>
    </div>
  );
}

View File

@@ -0,0 +1,70 @@
import { createContext, useContext, useCallback, useState, useEffect, type ReactNode } from "react";
import { api, setAuth as apiSetAuth, clearAuth, isAuthenticated } from "../api/client";
// Authenticated user as returned by POST /api/v1/auth/token.
interface User {
  id: string;
  email: string;
  name: string | null;
  roles: string[];
  // Set for vendor-portal users; null for staff.
  vendorId: string | null;
}
// Shape of the value provided by AuthContext.
interface AuthState {
  user: User | null;
  // True while the initial session check is running.
  loading: boolean;
  login: (email: string, password?: string) => Promise<void>;
  logout: () => void;
  setOrgId: (orgId: string) => void;
}
// null means "rendered outside AuthProvider" — useAuth guards against it.
const AuthContext = createContext<AuthState | null>(null);
/**
 * Provides auth state and actions to the app. Listens for the "auth:401"
 * event dispatched by the API client and clears the user on session expiry.
 *
 * NOTE(review): the password parameter is accepted but never sent — the token
 * endpoint is called with email only. Confirm whether this is the intended
 * (dev-mode) auth flow.
 */
export function AuthProvider({ children }: { children: ReactNode }) {
  const [user, setUser] = useState<User | null>(null);
  const [loading, setLoading] = useState(true);
  const login = useCallback(async (email: string, _password?: string) => {
    const res = await api<{ token: string; user: User }>("/api/v1/auth/token", {
      method: "POST",
      body: JSON.stringify({ email }),
    });
    apiSetAuth(res.token, "default");
    setUser(res.user);
  }, []);
  const logout = useCallback(() => {
    clearAuth();
    setUser(null);
  }, []);
  // NOTE(review): writes localStorage directly instead of going through the
  // api/client helpers — confirm this should stay in sync with setAuth.
  const setOrgId = useCallback((orgId: string) => {
    localStorage.setItem("sankofa_org_id", orgId);
  }, []);
  useEffect(() => {
    if (!isAuthenticated()) {
      setLoading(false);
      return;
    }
    // NOTE(review): a stored token is NOT exchanged for a user here, so a
    // page reload leaves user=null even when authenticated — presumably a
    // /me endpoint is needed to restore the session; confirm.
    setUser(null);
    setLoading(false);
  }, []);
  useEffect(() => {
    // API client signals expired sessions via this window event.
    const on401 = () => setUser(null);
    window.addEventListener("auth:401", on401);
    return () => window.removeEventListener("auth:401", on401);
  }, []);
  return (
    <AuthContext.Provider value={{ user, loading, login, logout, setOrgId }}>
      {children}
    </AuthContext.Provider>
  );
}
/** Access the auth context. Must be called under an <AuthProvider>. */
export function useAuth(): AuthState {
  const value = useContext(AuthContext);
  if (value === null) {
    throw new Error("useAuth must be used within AuthProvider");
  }
  return value;
}

3
apps/web/src/index.css Normal file
View File

@@ -0,0 +1,3 @@
/* Global base styles: system font stack, light background, border-box sizing. */
:root { font-family: system-ui,sans-serif; line-height: 1.5; color: #1a1a1a; background: #f8f9fa; }
* { box-sizing: border-box; }
body { margin: 0; }

10
apps/web/src/main.tsx Normal file
View File

@@ -0,0 +1,10 @@
// SPA entry point: mount <App> at #root. index.html always provides the
// #root node, hence the non-null assertion on getElementById.
import React from "react";
import ReactDOM from "react-dom/client";
import App from "./App";
import "./index.css";
ReactDOM.createRoot(document.getElementById("root")!).render(
  <React.StrictMode>
    <App />
  </React.StrictMode>
);

View File

@@ -0,0 +1,30 @@
import { useEffect, useState } from "react";
import { api } from "../api/client";
type Asset = { id: string; hostname?: string; status: string };
/**
 * Assets list page. The API may return either a bare array or a paginated
 * envelope ({ data, total }) — other pages see both shapes (Offers/Vendors
 * unwrap `data`; Capacity normalizes /api/v1/sites the same way), so
 * normalize here instead of assuming a bare array.
 */
export function Assets() {
  const [list, setList] = useState<Asset[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState("");
  useEffect(() => {
    api<Asset[] | { data?: Asset[] }>("/api/v1/assets")
      .then((res) => setList(Array.isArray(res) ? res : (res.data ?? [])))
      .catch((e) => setError(e.message))
      .finally(() => setLoading(false));
  }, []);
  if (loading) return <p>Loading...</p>;
  if (error) return <p style={{ color: "#c00" }}>{error}</p>;
  return (
    <div>
      <h2>Assets</h2>
      <ul>
        {list.map((a) => (
          <li key={a.id}>{a.hostname ?? a.id} - {a.status}</li>
        ))}
      </ul>
    </div>
  );
}

View File

@@ -0,0 +1,140 @@
import { useEffect, useState } from "react";
import { api } from "../api/client";
type Site = { id: string; name: string };
type SiteCapacity = { siteId: string; usedRu: number; totalRu: number; utilizationPercent: number };
type SitePower = { siteId: string; circuitLimitWatts: number; measuredDrawWatts: number | null; headroomWatts: number | null };
type GpuInventory = { total: number; bySite: Record<string, number>; byType: Record<string, number> };
/**
 * Read-only capacity planning dashboard: RU utilization and power headroom
 * per site, plus GPU inventory by site and type
 * (see docs capacity-planning spec).
 */
export function Capacity() {
  const [sites, setSites] = useState<Site[]>([]);
  const [capacityBySite, setCapacityBySite] = useState<SiteCapacity[]>([]);
  const [powerBySite, setPowerBySite] = useState<SitePower[]>([]);
  const [gpu, setGpu] = useState<GpuInventory | null>(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState("");
  useEffect(() => {
    async function load() {
      try {
        // Sites may come back as a bare array or a { data } envelope.
        const sitesRes = await api<Site[] | { data: Site[] }>("/api/v1/sites");
        const list = Array.isArray(sitesRes) ? sitesRes : (sitesRes.data ?? []);
        setSites(list);
        // Fetch capacity + power for all sites in parallel (the previous
        // per-site sequential loop was O(sites) round trips). A failing site
        // falls back to zeroed rows so one bad site doesn't blank the page.
        const perSite = await Promise.all(
          list.map(async (s): Promise<{ cap: SiteCapacity; pow: SitePower }> => {
            try {
              const [c, p] = await Promise.all([
                api<SiteCapacity>(`/api/v1/capacity/sites/${s.id}`),
                api<SitePower>(`/api/v1/capacity/sites/${s.id}/power`),
              ]);
              return { cap: c, pow: p };
            } catch {
              return {
                cap: { siteId: s.id, usedRu: 0, totalRu: 0, utilizationPercent: 0 },
                pow: { siteId: s.id, circuitLimitWatts: 0, measuredDrawWatts: null, headroomWatts: null },
              };
            }
          })
        );
        setCapacityBySite(perSite.map((r) => r.cap));
        setPowerBySite(perSite.map((r) => r.pow));
        const gpuRes = await api<GpuInventory>("/api/v1/capacity/gpu-inventory");
        setGpu(gpuRes);
      } catch (e) {
        setError(e instanceof Error ? e.message : "Failed to load capacity");
      } finally {
        setLoading(false);
      }
    }
    void load();
  }, []);
  if (loading) return <p>Loading...</p>;
  if (error) return <p style={{ color: "#c00" }}>{error}</p>;
  return (
    <div>
      <h2>Capacity planning</h2>
      <section style={{ marginTop: "1.5rem" }}>
        <h3>RU utilization by site</h3>
        <table style={{ borderCollapse: "collapse", width: "100%", maxWidth: 480 }}>
          <thead>
            <tr style={{ borderBottom: "1px solid #ccc" }}>
              <th style={{ textAlign: "left", padding: "8px" }}>Site</th>
              <th style={{ textAlign: "right", padding: "8px" }}>Used RU</th>
              <th style={{ textAlign: "right", padding: "8px" }}>Total RU</th>
              <th style={{ textAlign: "right", padding: "8px" }}>Utilization</th>
            </tr>
          </thead>
          <tbody>
            {sites.map((s) => {
              const cap = capacityBySite.find((c) => c.siteId === s.id);
              return (
                <tr key={s.id} style={{ borderBottom: "1px solid #eee" }}>
                  <td style={{ padding: "8px" }}>{s.name}</td>
                  <td style={{ textAlign: "right", padding: "8px" }}>{cap?.usedRu ?? 0}</td>
                  <td style={{ textAlign: "right", padding: "8px" }}>{cap?.totalRu ?? 0}</td>
                  <td style={{ textAlign: "right", padding: "8px" }}>{cap?.utilizationPercent ?? 0}%</td>
                </tr>
              );
            })}
          </tbody>
        </table>
      </section>
      <section style={{ marginTop: "1.5rem" }}>
        <h3>Power headroom by site</h3>
        <table style={{ borderCollapse: "collapse", width: "100%", maxWidth: 480 }}>
          <thead>
            <tr style={{ borderBottom: "1px solid #ccc" }}>
              <th style={{ textAlign: "left", padding: "8px" }}>Site</th>
              <th style={{ textAlign: "right", padding: "8px" }}>Circuit limit (W)</th>
              <th style={{ textAlign: "right", padding: "8px" }}>Measured draw</th>
            </tr>
          </thead>
          <tbody>
            {sites.map((s) => {
              const pow = powerBySite.find((p) => p.siteId === s.id);
              return (
                <tr key={s.id} style={{ borderBottom: "1px solid #eee" }}>
                  <td style={{ padding: "8px" }}>{s.name}</td>
                  <td style={{ textAlign: "right", padding: "8px" }}>{pow?.circuitLimitWatts ?? 0}</td>
                  <td style={{ textAlign: "right", padding: "8px" }}>{pow?.measuredDrawWatts != null ? pow.measuredDrawWatts + " W" : "—"}</td>
                </tr>
              );
            })}
          </tbody>
        </table>
      </section>
      <section style={{ marginTop: "1.5rem" }}>
        <h3>GPU inventory</h3>
        {gpu && (
          <>
            <p><strong>Total:</strong> {gpu.total}</p>
            <div style={{ display: "flex", gap: "2rem", flexWrap: "wrap" }}>
              <div>
                <h4 style={{ marginBottom: 8 }}>By site</h4>
                <ul style={{ margin: 0, paddingLeft: "1.25rem" }}>
                  {Object.entries(gpu.bySite).map(([siteId, count]) => (
                    <li key={siteId}>{siteId}: {count}</li>
                  ))}
                  {/* Previously rendered an empty bullet when there was no data. */}
                  {Object.keys(gpu.bySite).length === 0 && <li>None</li>}
                </ul>
              </div>
              <div>
                <h4 style={{ marginBottom: 8 }}>By type</h4>
                <ul style={{ margin: 0, paddingLeft: "1.25rem" }}>
                  {Object.entries(gpu.byType).map(([type, count]) => (
                    <li key={type}>{type}: {count}</li>
                  ))}
                  {Object.keys(gpu.byType).length === 0 && <li>None</li>}
                </ul>
              </div>
            </div>
          </>
        )}
      </section>
    </div>
  );
}

View File

@@ -0,0 +1,8 @@
/** Landing page rendered at the index route; static copy only for now. */
export function Dashboard() {
  return (
    <div>
      <h2>Control Plane</h2>
      <p>Inventory, procurement, sites, and operations.</p>
    </div>
  );
}

View File

@@ -0,0 +1,42 @@
import { useState } from "react";
import { useNavigate } from "react-router-dom";
import { useAuth } from "../contexts/AuthContext";
/**
 * Sign-in page: collects an email and calls AuthContext.login, then redirects
 * to the dashboard.
 *
 * NOTE(review): no password field is rendered and login() is called with the
 * email only — presumably a dev-mode token flow; confirm before production.
 */
export function Login() {
  const [email, setEmail] = useState("");
  const [error, setError] = useState("");
  const { login } = useAuth();
  const navigate = useNavigate();
  async function handleSubmit(e: React.FormEvent) {
    e.preventDefault();
    setError("");
    try {
      await login(email);
      navigate("/", { replace: true });
    } catch (err) {
      setError(err instanceof Error ? err.message : "Login failed");
    }
  }
  return (
    <div style={{ maxWidth: 360, margin: "2rem auto", padding: "1.5rem" }}>
      <h2>Sign in</h2>
      <form onSubmit={handleSubmit}>
        <div style={{ marginBottom: "1rem" }}>
          <label htmlFor="email" style={{ display: "block", marginBottom: 4 }}>Email</label>
          <input
            id="email"
            type="email"
            value={email}
            onChange={(e) => setEmail(e.target.value)}
            required
            style={{ width: "100%", padding: 8 }}
          />
        </div>
        {error && <p style={{ color: "#c00", marginBottom: "1rem" }}>{error}</p>}
        <button type="submit" style={{ padding: "8px 16px" }}>Sign in</button>
      </form>
    </div>
  );
}

View File

@@ -0,0 +1,27 @@
import { useEffect, useState } from "react";
import { api } from "../api/client";
// Offer row as listed by GET /api/v1/offers (paginated { data, total } envelope).
type Offer = { id: string; vendorId: string; quantity: number; unitPrice: string; status: string };
/** Offers list page: loads once on mount and renders a flat list. */
export function Offers() {
  const [list, setList] = useState<Offer[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState("");
  useEffect(() => {
    api<{ data: Offer[]; total: number }>("/api/v1/offers").then((r) => setList(r.data)).catch((e) => setError(e.message)).finally(() => setLoading(false));
  }, []);
  if (loading) return <p>Loading...</p>;
  if (error) return <p style={{ color: "#c00" }}>{error}</p>;
  return (
    <div>
      <h2>Offers</h2>
      <ul>
        {list.map((o) => (
          <li key={o.id}>Qty {o.quantity} at {o.unitPrice} - {o.status}</li>
        ))}
      </ul>
    </div>
  );
}

View File

@@ -0,0 +1,30 @@
import { useEffect, useState } from "react";
import { api } from "../api/client";
type PO = { id: string; vendorId: string; status: string };
/**
 * Purchase orders list page. The API may return a bare array or a paginated
 * { data, total } envelope (both shapes appear elsewhere in the app), so
 * normalize rather than assume a bare array.
 */
export function PurchaseOrders() {
  const [list, setList] = useState<PO[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState("");
  useEffect(() => {
    api<PO[] | { data?: PO[] }>("/api/v1/purchase-orders")
      .then((res) => setList(Array.isArray(res) ? res : (res.data ?? [])))
      .catch((e) => setError(e.message))
      .finally(() => setLoading(false));
  }, []);
  if (loading) return <p>Loading...</p>;
  if (error) return <p style={{ color: "#c00" }}>{error}</p>;
  return (
    <div>
      <h2>Purchase orders</h2>
      <ul>
        {list.map((p) => (
          <li key={p.id}>{p.status}</li>
        ))}
      </ul>
    </div>
  );
}

View File

@@ -0,0 +1,30 @@
import { useEffect, useState } from "react";
import { api } from "../api/client";
type Site = { id: string; name: string; regionId?: string };
/**
 * Sites list page. Capacity.tsx already normalizes /api/v1/sites as either a
 * bare array or a { data } envelope — mirror that handling here instead of
 * assuming a bare array.
 */
export function Sites() {
  const [list, setList] = useState<Site[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState("");
  useEffect(() => {
    api<Site[] | { data?: Site[] }>("/api/v1/sites")
      .then((res) => setList(Array.isArray(res) ? res : (res.data ?? [])))
      .catch((e) => setError(e.message))
      .finally(() => setLoading(false));
  }, []);
  if (loading) return <p>Loading...</p>;
  if (error) return <p style={{ color: "#c00" }}>{error}</p>;
  return (
    <div>
      <h2>Sites</h2>
      <ul>
        {list.map((s) => (
          <li key={s.id}>{s.name}</li>
        ))}
      </ul>
    </div>
  );
}

View File

@@ -0,0 +1,27 @@
import { useEffect, useState } from "react";
import { api } from "../api/client";
// Vendor row as listed by GET /api/v1/vendors (paginated { data, total } envelope).
type Vendor = { id: string; legalName: string; trustTier: string };
/** Vendors list page: loads once on mount and renders name + trust tier. */
export function Vendors() {
  const [list, setList] = useState<Vendor[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState("");
  useEffect(() => {
    api<{ data: Vendor[]; total: number }>("/api/v1/vendors").then((r) => setList(r.data)).catch((e) => setError(e.message)).finally(() => setLoading(false));
  }, []);
  if (loading) return <p>Loading...</p>;
  if (error) return <p style={{ color: "#c00" }}>{error}</p>;
  return (
    <div>
      <h2>Vendors</h2>
      <ul>
        {list.map((v) => (
          <li key={v.id}>{v.legalName} ({v.trustTier})</li>
        ))}
      </ul>
    </div>
  );
}

17
apps/web/tsconfig.json Normal file
View File

@@ -0,0 +1,17 @@
{
"compilerOptions": {
"target": "ES2022",
"useDefineForClassFields": true,
"lib": ["ES2022", "DOM", "DOM.Iterable"],
"module": "ESNext",
"moduleResolution": "bundler",
"strict": true,
"jsx": "react-jsx",
"noEmit": true,
"skipLibCheck": true,
"baseUrl": ".",
"paths": { "@/*": ["src/*"] }
},
"include": ["src"],
"references": [{ "path": "./tsconfig.node.json" }]
}

View File

@@ -0,0 +1,10 @@
{
"compilerOptions": {
"composite": true,
"skipLibCheck": true,
"module": "ESNext",
"moduleResolution": "bundler",
"strict": true
},
"include": ["vite.config.ts"]
}

14
apps/web/vite.config.ts Normal file
View File

@@ -0,0 +1,14 @@
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react";
import path from "path";
export default defineConfig({
plugins: [react()],
resolve: {
alias: { "@": path.resolve(__dirname, "src") },
},
server: {
port: 3000,
proxy: { "/api": { target: "http://localhost:4000", changeOrigin: true } },
},
});

View File

@@ -0,0 +1,24 @@
{
"name": "@sankofa/workflow",
"version": "0.1.0",
"private": true,
"type": "module",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"scripts": {
"build": "tsc",
"dev": "tsx watch src/index.ts",
"test": "vitest run",
"lint": "eslint src --ext .ts"
},
"dependencies": {
"@sankofa/schema": "workspace:*"
},
"devDependencies": {
"@types/node": "^22.10.0",
"eslint": "^9.15.0",
"tsx": "^4.19.0",
"typescript": "^5.7.0",
"vitest": "^2.1.0"
}
}

View File

@@ -0,0 +1,12 @@
import { describe, it, expect } from "vitest";
import { canTransitionPO, computeOfferRiskScore } from "./index";
// Smoke tests for the PO state machine and offer risk scoring.
describe("workflow", () => {
  // draft -> pending_approval is the canonical first transition.
  it("allows draft to pending_approval", () => {
    expect(canTransitionPO("draft", "pending_approval")).toBe(true);
  });
  // Scoring never goes negative; "unknown" trust alone yields a base score.
  it("risk score computed", () => {
    const { score } = computeOfferRiskScore({ trustTier: "unknown" });
    expect(score).toBeGreaterThanOrEqual(0);
  });
});

View File

@@ -0,0 +1,39 @@
/** All purchase-order statuses, in rough lifecycle order. */
export const PO_STATUS = ["draft", "pending_approval", "approved", "rejected", "ordered", "received"] as const;
export type POStatus = (typeof PO_STATUS)[number];
/** Approval chain stages, in the order they must sign off. */
export const APPROVAL_STAGES = ["requester", "procurement", "finance", "executive"] as const;
export type ApprovalStage = (typeof APPROVAL_STAGES)[number];
/**
 * Return the approval stage after `current`; null once the chain is
 * exhausted (i.e. the executive stage has signed off). A null/unknown
 * current stage starts the chain at "requester".
 */
export function nextPOStage(current: ApprovalStage | null): ApprovalStage | null {
  if (!current) return APPROVAL_STAGES[0];
  const position = APPROVAL_STAGES.indexOf(current);
  const following = APPROVAL_STAGES[position + 1];
  return following ?? null;
}
/**
 * Whether a purchase order may move directly from `from` to `to`.
 * Encodes the PO lifecycle: draft -> pending_approval -> approved/rejected,
 * approved -> ordered -> received; rejected (or pending) may return to draft.
 */
export function canTransitionPO(from: POStatus, to: POStatus): boolean {
  switch (from) {
    case "draft":
      return to === "pending_approval" || to === "rejected";
    case "pending_approval":
      return to === "approved" || to === "rejected" || to === "draft";
    case "approved":
      return to === "ordered";
    case "rejected":
      return to === "draft";
    case "ordered":
      return to === "received";
    case "received":
      return false;
    default:
      return false;
  }
}
/** Inputs to offer risk scoring. */
export interface RiskFactors {
  // Vendor trust tier: "unknown" | "low" | "medium" | higher tiers add no risk.
  trustTier: string;
  // Fractional deviation from the reference price; sign indicates direction
  // (positive = above reference, negative = below).
  priceDeviation?: number;
  // True when the offer's condition is unstated or ambiguous.
  conditionAmbiguity?: boolean;
}
/**
 * Score an offer's risk on a 0-100 scale from vendor trust, price deviation,
 * and condition ambiguity; the input factors are echoed back for persistence.
 *
 * Price deviations beyond 20% in EITHER direction add risk — a suspiciously
 * cheap offer is as suspect as an overpriced one (the previous check only
 * penalized positive deviation, so under-priced offers scored 0 for price).
 */
export function computeOfferRiskScore(factors: RiskFactors): { score: number; factors: RiskFactors } {
  let score = 0;
  // Unknown/low-trust vendors carry the largest baseline risk.
  if (factors.trustTier === "unknown") score += 30;
  else if (factors.trustTier === "low") score += 20;
  else if (factors.trustTier === "medium") score += 10;
  if (factors.priceDeviation != null && Math.abs(factors.priceDeviation) > 0.2) score += 25;
  if (factors.conditionAmbiguity) score += 20;
  // Clamp to the 0-100 scale.
  return { score: Math.min(100, score), factors };
}

View File

@@ -0,0 +1,15 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ESNext",
"moduleResolution": "bundler",
"strict": true,
"outDir": "dist",
"rootDir": "src",
"skipLibCheck": true,
"declaration": true,
"declarationMap": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

View File

@@ -0,0 +1 @@
{"category":"gpu","name":"GPU inspection","steps":[{"id":"board_id","label":"Board ID","required":true}]}

View File

@@ -0,0 +1 @@
{"category":"memory","name":"Memory inspection","steps":[{"id":"memtest","label":"Memtest pass","required":true}]}

View File

@@ -0,0 +1,9 @@
{
"category": "nic",
"name": "NIC inspection",
"steps": [
{ "id": "pci_id", "label": "PCI IDs", "required": true },
{ "id": "firmware", "label": "Firmware version", "required": false },
{ "id": "link_test", "label": "Link test", "required": true }
]
}

View File

@@ -0,0 +1 @@
{"category":"server","name":"Server inspection","steps":[{"id":"chassis","label":"Chassis condition","required":true},{"id":"service_tag","label":"Service tag verified","required":true}]}

View File

@@ -0,0 +1,67 @@
[
{
"id": "ml110",
"category": "server",
"model": "HPE ProLiant ML110",
"role": "Core services / management / utility workloads",
"formFactor": "Tower / rack-convertible",
"status": "running",
"quantity": "TBD",
"notes": "Suitable for control-plane services, monitoring, identity, light virtualization"
},
{
"id": "r630",
"category": "server",
"model": "Dell PowerEdge R630",
"role": "General compute / virtualization / legacy workloads",
"formFactor": "1U rackmount",
"status": "running",
"quantity": "TBD",
"notes": "Ideal for Proxmox clusters, utility VMs, staging environments"
},
{
"id": "udm-pro",
"category": "network",
"model": "UniFi Dream Machine Pro",
"role": "Edge gateway, UniFi OS controller, firewall",
"status": "running",
"quantity": "TBD",
"notes": "Per-site edge control; candidate for per-sovereign controller domains"
},
{
"id": "unifi-xg",
"category": "network",
"model": "UniFi XG Switches",
"role": "High-throughput aggregation / core switching",
"status": "running",
"quantity": "TBD",
"notes": "10G/25G backbone for compute and storage traffic"
},
{
"id": "spectrum-modem",
"category": "network",
"model": "Spectrum Business Cable Modem",
"role": "Primary or secondary WAN connectivity",
"status": "installed",
"quantity": "TBD",
"notes": "Business-class internet; typically paired with UDM Pro"
},
{
"id": "apc-cabinet",
"category": "rack",
"model": "APC Equipment Cabinet",
"role": "Secure rack enclosure",
"status": "installed",
"quantity": "TBD",
"notes": "Houses compute, network, and power equipment"
},
{
"id": "apc-ups",
"category": "power",
"model": "APC UPS",
"role": "Power conditioning and battery backup",
"status": "installed",
"quantity": "TBD",
"notes": "Runtime and load to be captured per site for capacity planning"
}
]

17
docs/api-error-format.md Normal file
View File

@@ -0,0 +1,17 @@
# API error response format
All API errors use a consistent JSON body:
```json
{
"error": "Human-readable message",
"code": "UNAUTHORIZED",
"details": {}
}
```
- **error** (string): Message for clients and logs.
- **code** (string, optional): Machine-readable code. One of `BAD_REQUEST`, `UNAUTHORIZED`, `FORBIDDEN`, `NOT_FOUND`, `CONFLICT`, `INTERNAL_ERROR`.
- **details** (object, optional): Extra data (e.g. validation errors under `details` when `code` is `BAD_REQUEST`).
HTTP status matches the error (400, 401, 403, 404, 409, 500). The OpenAPI spec references the `ApiError` schema in `components.schemas`.

20
docs/architecture.md Normal file
View File

@@ -0,0 +1,20 @@
# Sankofa HW Infra — Architecture
## Component diagram
See the plan file for the Mermaid flowchart (Control Plane UI, API, Workflow Engine, PostgreSQL, S3, Integration Layer, IAM, Audit, Logging).
## Components
- **Control Plane UI**: React SPA; inventory, procurement, sites, approvals, audit.
- **API Layer**: REST `/api/v1`; CRUD for core entities; JWT + RBAC/ABAC; file upload to S3.
- **Workflow Engine**: Purchase approvals, inspection checklists (Phase 1+).
- **PostgreSQL**: Transactions, core entities, audit_events (append-only).
- **Object Storage (S3)**: Invoices, packing lists, inspection photos, serial dumps.
- **Integration Layer**: UniFi, Proxmox, Redfish connectors; credentials in Vault.
- **IAM**: Roles, permissions; ABAC attributes (site_id, project_id).
- **Audit Log**: Who/when/what, before/after; WORM retention.
## Sovereign cloud positioning
Sankofa Phoenix operates as a **sovereign cloud services provider**. Multi-tenant isolation is per sovereign (org); UniFi, Proxmox, and hardware inventory form **one source of truth** for determinism and compliance. UniFi telemetry (with product intelligence), rack/power metadata, and Proxmox workloads are synthesized for root-cause analysis, capacity planning, and enforced hardware standards per sovereign profile. See [sovereign-controller-topology.md](sovereign-controller-topology.md), [rbac-sovereign-operations.md](rbac-sovereign-operations.md), and [purchasing-feedback-loop.md](purchasing-feedback-loop.md).

View File

@@ -0,0 +1,7 @@
# Capacity planning dashboard spec
- **RU utilization**: Per site, sum of assigned positions vs total RU (from racks); show percentage. **Implemented.** API: `GET /api/v1/capacity/sites/:siteId` returns `usedRu`, `totalRu`, and `utilizationPercent`.
- **Power headroom**: From rack `power_feeds` (circuit limits). **Implemented.** API: `GET /api/v1/capacity/sites/:siteId/power` returns `circuitLimitWatts`, `measuredDrawWatts` (null until Phase 4), `headroomWatts` (null). Measured draw can be added when telemetry is available.
- **GPU inventory**: By type (part number) and location. **Implemented.** API: `GET /api/v1/capacity/gpu-inventory` returns `total`, `bySite`, and `byType`.
- **Read-only**: All capacity endpoints are read-only; no edits.
- **Web**: Capacity dashboard at `/capacity` shows RU utilization, power headroom, and GPU inventory by site and type.

11
docs/cicd.md Normal file
View File

@@ -0,0 +1,11 @@
# CI/CD pipeline
- Lint: `pnpm run lint` (ESLint over apps and packages)
- Test: `pnpm run test` (Vitest per package)
- Build: `pnpm run build` (all workspace packages)
GitHub Actions: `.github/workflows/ci.yml` runs on push/PR to main: install, lint, test, build.
Environments: Dev (local + docker-compose), Staging/Production (set DATABASE_URL, S3_*, JWT_SECRET).
Runbook: Start Postgres via `infra/docker-compose up -d`. Migrate: `pnpm db:migrate`. API: `pnpm --filter @sankofa/api run dev`. Web: `pnpm --filter @sankofa/web run dev`.

View File

@@ -0,0 +1,23 @@
# Compliance profiles
Compliance profiles define **firmware freeze**, **allowed hardware generations**, and **approved SKUs** per sovereign (org) or per site. They feed purchasing (approved buy lists) and UniFi device approval.
## Purpose
- **Firmware freeze:** Lock to a version or range (e.g. 2024.Q2, or min/max version) so only compliant firmware is allowed.
- **Allowed generations:** Restrict hardware to e.g. Gen2 and Enterprise only (from UniFi product catalog).
- **Approved SKUs:** Explicit list of SKUs that may be purchased or deployed; optional per-site override.
Profiles are attached to `org_id` (sovereign/tenant); optionally `site_id` for site-specific rules.
## API
- `GET /api/v1/compliance-profiles` — list profiles for the current org.
- `GET /api/v1/compliance-profiles/:id` — get one profile.
- `POST /api/v1/compliance-profiles` — create (body: name, firmwareFreezePolicy, allowedGenerations, approvedSkus, siteId).
- `PATCH /api/v1/compliance-profiles/:id` — update.
- `DELETE /api/v1/compliance-profiles/:id` — delete.
## Use in validation
When generating the **approved purchasing catalog** or when syncing UniFi devices, filter or flag by compliance profile: only SKUs in `approved_skus` or in `allowed_generations` (from the UniFi product catalog) are considered approved for that sovereign/site.

70
docs/erd.md Normal file
View File

@@ -0,0 +1,70 @@
# Database ERD
## Entity relationship overview
```mermaid
erDiagram
org_units ||--o{ org_units : parent
org_units ||--o{ users : org_unit
users ||--o{ user_roles : user
roles ||--o{ user_roles : role
sites ||--o{ user_roles : scope_site
vendors ||--o{ vendor_bank_details : vendor
vendors ||--o{ offers : vendor
vendors ||--o{ purchase_orders : vendor
regions ||--o{ sites : region
sites ||--o{ rooms : site
rooms ||--o{ rows : room
rows ||--o{ racks : row
racks ||--o{ positions : rack
sites ||--o{ assets : site
positions ||--o{ assets : position
users ||--o{ assets : owner
assets ||--o{ asset_components : parent
assets ||--o{ asset_components : child
assets ||--o{ provisioning_records : asset
assets ||--o{ maintenances : asset
purchase_orders }o--|| sites : inspection_site
purchase_orders }o--|| sites : delivery_site
purchase_orders ||--o{ shipments : po
users ||--o{ audit_events : actor
org_units { uuid id text name uuid parent_id text org_id }
users { uuid id text email text org_id uuid org_unit_id }
vendors { uuid id text org_id text legal_name text trust_tier }
offers { uuid id text org_id uuid vendor_id int quantity decimal unit_price text status }
purchase_orders { uuid id text org_id uuid vendor_id jsonb line_items text status }
shipments { uuid id uuid purchase_order_id text tracking text status }
regions { uuid id text org_id text name }
sites { uuid id text org_id uuid region_id text name jsonb network_metadata }
rooms { uuid id uuid site_id text name }
rows { uuid id uuid room_id text name }
racks { uuid id uuid row_id text name int ru_total jsonb power_feeds }
positions { uuid id uuid rack_id int ru_start int ru_end uuid asset_id }
assets { uuid id text org_id text asset_id text category text status uuid site_id uuid position_id }
asset_components { uuid id uuid parent_asset_id uuid child_asset_id text role }
provisioning_records { uuid id uuid asset_id text hypervisor_node text cluster_id }
maintenances { uuid id text org_id uuid asset_id text type text status }
audit_events { uuid id text org_id uuid actor_id text action text resource_type text resource_id jsonb before_state jsonb after_state timestamp occurred_at }
roles { uuid id text name jsonb permissions }
user_roles { uuid user_id uuid role_id uuid scope_site_id text scope_project_id }
```
## Core tables
- **org_units**, **users**: Tenancy and org hierarchy.
- **vendors**, **vendor_bank_details**: Vendor master; versioned bank details with dual approval.
- **offers**: SKU/MPN, quantity, price, evidence_refs, risk_score, status.
- **purchase_orders**: Line items, approval_stage, escrow_terms, inspection_site_id, delivery_site_id.
- **shipments**: PO link, tracking, customs_docs_refs.
- **regions**, **sites**, **rooms**, **rows**, **racks**, **positions**: Site hierarchy and RU mapping.
- **assets**: asset_id, category, serials, proof_artifact_refs, site_id, position_id, status, chain_of_custody.
- **asset_components**: parent_asset_id, child_asset_id, role (gpu/cpu/dimm/nic).
- **provisioning_records**: OS image, hypervisor node, cluster_id.
- **maintenances**: RMA/incident/part_swap; vendor_ticket_ref.
- **audit_events**: Append-only; actor_id, action, resource_type, resource_id, before_state, after_state.
- **roles**, **user_roles**: RBAC; scope_site_id, scope_project_id for ABAC.

View File

@@ -0,0 +1,2 @@
# Proxmox integration spec
Use cases: nodes, inventory. Auth: token per site (Vault). Map Asset to node via integration_mappings.

View File

@@ -0,0 +1,2 @@
# Redfish integration spec
Use cases: verify serials, power cycle. Credentials in Vault per site.

View File

@@ -0,0 +1,21 @@
# UniFi integration spec
UniFi is positioned as a **hardware identity and telemetry source**, a **product-line intelligence feed**, and a **procurement and lifecycle signal**—not only as networking gear. The platform integrates UniFi OS, UniFi Network Application, firmware catalogs, device generation, and support-horizon mapping so Sankofa Phoenix can answer: what exact hardware is deployed, what generation and firmware lineage, what support status, and is this infrastructure policy-compliant for this sovereign body?
**Use cases:** Discover devices, map ports, push port profiles; plus hardware identity, EoL/support horizon, and compliance-relevant metadata. Auth: API token per site (Vault). Sync: nightly; store in integration_mappings.
## UniFi Product Intelligence layer
UniFi is used as a **hardware identity and telemetry source**, not only networking. The platform maintains a canonical **UniFi product catalog** (`unifi_product_catalog`) with:
- SKU, model name, generation (Gen1 / Gen2 / Enterprise)
- Performance class, EoL date, support horizon
- `approved_sovereign_default` for purchasing and compliance
**API:** `GET /api/v1/integrations/unifi/product-catalog` (optional `?generation=`, `?approved_sovereign=true`), `GET /api/v1/integrations/unifi/product-catalog/:sku`. Device list `GET .../unifi/sites/:siteId/devices` returns devices enriched with `generation` and `support_horizon` from the catalog when the device model matches.
This layer feeds **purchasing** (approved buy lists, BOMs) and **compliance** (approved SKUs per sovereign, support-risk views).
## Sovereign-safe controller architecture
Per-sovereign UniFi controller domains with no cross-sovereign write. See [sovereign-controller-topology.md](sovereign-controller-topology.md) for the diagram and trust boundaries. Optionally store controller endpoints in the `unifi_controllers` table (org_id, site_id, base_url, role: sovereign_write | oversight_read_only, region); credentials remain in Vault. API: CRUD under `GET/POST/PATCH/DELETE /api/v1/unifi-controllers`, scoped by org_id.

View File

@@ -0,0 +1,111 @@
# Next steps before full Swagger docs and UX/UI
Do these in order so the API contract is stable and the front end has a clear target.
---
## 1. Auth and identity
- **Login / token endpoint**
There is no in-app login. JWTs are assumed to come from an external IdP. Before UI:
- Either add **POST /auth/login** (or /auth/token) that accepts credentials, looks up `users` + `user_roles`, and returns a JWT with `roles` and (for vendor users) `vendorId`, **or**
- Document the exact JWT shape and how your IdP must set `roles` and `vendorId` so the UI can integrate.
- **User and role management (optional)**
Schema has `users`, `roles`, `user_roles`, but no API. For a self-contained product, add **CRUD for users** and **assignment of roles** (and `vendor_id` for vendor users) so admins can onboard users and vendors without touching the DB directly.
---
## 2. API contract and behavior
- **Request validation**
Add JSON Schema (or Zod) for request bodies and path/query params on all routes so invalid input returns **400** with a consistent error shape instead of 500 or undefined behavior.
- **Error response format**
Standardize error payloads (e.g. `{ error: string, code?: string, details?: unknown }`) and document them so Swagger and the UI can show the same errors.
- **Optional: list pagination**
List endpoints (vendors, offers, assets, sites, etc.) return full arrays. Add `limit`/`offset` or `page`/`pageSize` and a total/cursor so the UI and docs can assume a stable list contract.
---
## 3. RBAC enforcement
- **Wire permissions to routes**
`requirePermission` exists but is not used on route handlers. For each route, add the appropriate `requirePermission(...)` (or equivalent) so that missing permission returns **403** with a clear message. This makes the API safe to document and use from the UI.
---
## 4. OpenAPI completeness (prerequisite for Swagger)
- **Document all paths**
OpenAPI currently documents only health, vendors, offers, purchase-orders, and ingestion. Add the rest so Swagger matches the real API:
- **Assets**: GET/POST /assets, GET/PATCH/DELETE /assets/:id
- **Sites**: GET/POST /sites, GET/PATCH/DELETE /sites/:id, and nested (rooms, rows, racks, positions) if exposed
- **Workflow**: POST /workflow/offers/:id/risk-score, POST /workflow/purchase-orders/:id/submit, approve, reject, PATCH status
- **Inspection**: templates and runs
- **Shipments**: CRUD
- **Asset components**: CRUD
- **Capacity**: GET endpoints
- **Integrations**: UniFi, product-catalog, Proxmox, mappings
- **Maintenances**: CRUD
- **Compliance profiles**: CRUD
- **UniFi controllers**: CRUD
- **Reports**: BOM, support-risk
- **Upload**: POST /upload (multipart)
- **Request/response schemas**
For each path, add `requestBody` and `responses` with schema (or $ref to `components/schemas`) so Swagger can show request/response bodies and generate client types.
- **Security per path**
Mark which paths use BearerAuth, which use IngestionApiKey, and which are public (e.g. health).
---
## 5. Environment and config
- **env.example**
Add `INGESTION_API_KEY` (and any OIDC/SSO vars if you add login) so deployers and the docs know what to set.
- **API base URL for web**
Ensure the web app can be configured with the API base URL (e.g. env `VITE_API_URL` or similar) so Swagger and the UI both target the same backend.
---
## 6. Testing
- **Stabilize the contract**
Add or expand API tests for critical paths (e.g. vendors, offers, purchase-orders, workflow, ingestion) so that when you add Swagger and the UI, changes to the API are caught by tests.
- **Optional: contract tests**
Consider testing that responses match a minimal schema (e.g. required fields) so the OpenAPI spec and the implementation stay in sync.
---
## 7. Web app baseline (before full UX/UI)
- **API client**
Add a minimal API client (fetch or axios) that sends the JWT (and `x-org-id` if required) so all UI calls go through one place and can be swapped for generated clients later.
- **Auth in the client**
Implement login (or redirect to IdP), store the token, and attach it to every request; handle 401 (e.g. redirect to login or refresh).
- **Feature flags or minimal nav**
Add a simple nav or list of areas (e.g. Vendors, Offers, Purchase orders, Assets, Sites) so the “full UX/UI” phase can fill in one screen at a time without redoing routing.
---
## 8. Then: full Swagger and UX/UI
After the above:
- **Full Swagger**
Serve the OpenAPI spec (e.g. from `/api/openapi.json` or `/api/docs`) and mount Swagger UI (or Redoc) so all operations and schemas are discoverable and try-it-now works.
- **Full UX/UI**
Build out screens, forms, and flows using the stable API and client; keep OpenAPI and the UI in sync via the same base URL and error format.
---
## Summary checklist
| # | Area | Action |
|---|-------------------|--------|
| 1 | Auth | Login/token endpoint or IdP contract; optional users/roles API |
| 2 | API contract | Request validation; consistent error format; optional pagination |
| 3 | RBAC | Use requirePermission on routes; return 403 where appropriate |
| 4 | OpenAPI | Document all paths, request/response schemas, security |
| 5 | Env | env.example (INGESTION_API_KEY, etc.); web API base URL |
| 6 | Tests | Broader API tests; optional contract/schema tests |
| 7 | Web baseline | API client, auth (token + 401), minimal nav/routes |
| 8 | Swagger + UI | Serve spec + Swagger UI; build out full screens |

2
docs/observability.md Normal file
View File

@@ -0,0 +1,2 @@
# Observability
Central logging: ELK or OpenSearch. Metrics: Prometheus + Grafana. Alerting on API errors and integration sync failures. API uses Fastify logger (structured).

99
docs/offer-ingestion.md Normal file
View File

@@ -0,0 +1,99 @@
# Offer ingestion (scrape and email)
Offers can be ingested from external sources so they appear in the database for potential purchases, without manual data entry.
## Sources
1. **Scraped** — e.g. site content from theserverstore.com (Peter as Manager). A scraper job fetches pages, parses offer-like content, and creates offer records.
2. **Email** — a dedicated mailbox accepts messages (e.g. from Sergio and others); a pipeline parses them and creates offer records.
Ingested offers are stored with:
- `source`: `scraped` or `email`
- `source_ref`: URL (scrape) or email message id (email)
- `source_metadata`: optional JSON (e.g. sender, subject, page title, contact name)
- `ingested_at`: timestamp of ingestion
- `vendor_id`: optional; may be null until procurement assigns the offer to a vendor
## API: ingestion endpoint
Internal or automated callers use a dedicated endpoint, secured by an API key (no user JWT).
**POST** `/api/v1/ingestion/offers`
- **Auth:** Header `x-ingestion-api-key` must equal the environment variable `INGESTION_API_KEY`. If missing or wrong, returns `401`.
- **Org:** Header `x-org-id` (default `default`) specifies the org for the new offer.
**Body (JSON):**
| Field | Type | Required | Description |
|-------|------|----------|-------------|
| `source` | `"scraped"` \| `"email"` | yes | Ingestion source |
| `source_ref` | string | no | URL or message id |
| `source_metadata` | object | no | e.g. `{ "sender": "Sergio", "subject": "...", "page_url": "..." }` |
| `vendor_id` | UUID | no | Vendor to attach; omit for unassigned |
| `sku` | string | no | |
| `mpn` | string | no | |
| `quantity` | number | yes | |
| `unit_price` | string | yes | Decimal |
| `incoterms` | string | no | |
| `lead_time_days` | number | no | |
| `country_of_origin` | string | no | |
| `condition` | string | no | |
| `warranty` | string | no | |
| `evidence_refs` | array | no | `[{ "key": "s3-key", "hash": "..." }]` |
**Response:** `201` with the created offer (including `id`, `source`, `source_ref`, `source_metadata`, `ingested_at`).
Example (scrape):
```json
{
"source": "scraped",
"source_ref": "https://theserverstore.com/...",
"source_metadata": { "contact": "Peter", "site": "theserverstore.com" },
"vendor_id": null,
"sku": "DL380-G9",
"quantity": 2,
"unit_price": "450.00",
"condition": "refurbished"
}
```
Example (email):
```json
{
"source": "email",
"source_ref": "msg-12345",
"source_metadata": { "from": "sergio@example.com", "subject": "Quote for R630" },
"vendor_id": null,
"mpn": "PowerEdge R630",
"quantity": 1,
"unit_price": "320.00"
}
```
## Scraper (e.g. theserverstore.com)
- **Responsibility:** Fetch pages (respecting robots.txt and rate limits), extract product/offer fields, then POST to `POST /api/v1/ingestion/offers` for each offer.
- **Where:** Can run as a scheduled job in `apps/` or `packages/`, or as an external service that calls the API. No scraper implementation is in-repo yet; this doc defines the contract.
- **Vendor:** If the site is known (e.g. The Server Store, Peter as Manager), the scraper can resolve or create a vendor and pass `vendor_id`; otherwise leave null for procurement to assign later.
- **Idempotency:** Use `source_ref` (e.g. canonical product URL) so the same offer is not duplicated; downstream you can upsert by `(org_id, source, source_ref)` if desired.
## Email intake (e.g. Sergio and others)
- **Flow:** Incoming messages to a dedicated mailbox (e.g. `offers@your-org.com`) are read by an IMAP poller or processed via an inbound webhook (SendGrid, Mailgun, etc.). The pipeline parses sender, subject, body, and optional attachments, then POSTs one or more payloads to `POST /api/v1/ingestion/offers`.
- **Storing raw email:** Attachments or full message can be uploaded to object storage (e.g. S3/MinIO) and referenced in `evidence_refs` or `source_metadata` (e.g. `raw_message_key`).
- **Vendor matching:** Match sender address or name to an existing vendor and set `vendor_id` when possible; otherwise leave null and set `source_metadata.sender` / `from` for later assignment.
## Configuration
- Set `INGESTION_API_KEY` in the environment where the API runs. Scraper and email pipeline must use the same value in `x-ingestion-api-key`.
- Use `x-org-id` on each request to target the correct org.
## Procurement workflow
- Ingested offers appear in the offers list with `source` = `scraped` or `email` and optional `vendor_id`.
- Offers with `vendor_id` null are “unassigned”; procurement can assign them to a vendor (PATCH offer or create/link vendor then update offer).
- Existing RBAC and org/site scoping apply; audit can track creation via `ingested_at` and `source_metadata`.

175
docs/openapi.yaml Normal file
View File

@@ -0,0 +1,175 @@
openapi: 3.0.3
info:
title: Sankofa HW Infra API
version: 0.1.0
servers:
- url: /api/v1
security:
- BearerAuth: []
components:
schemas:
ApiError:
type: object
properties:
error: { type: string, description: Human-readable message }
code: { type: string, enum: [BAD_REQUEST, UNAUTHORIZED, FORBIDDEN, NOT_FOUND, CONFLICT, INTERNAL_ERROR] }
details: { type: object, description: Optional validation or extra data }
securitySchemes:
BearerAuth:
type: http
scheme: bearer
bearerFormat: JWT
description: JWT with optional vendorId for vendor users
IngestionApiKey:
type: apiKey
in: header
name: x-ingestion-api-key
description: Required for POST /ingestion/offers (env INGESTION_API_KEY)
paths:
/health:
get:
summary: Health
security: []
/auth/token:
post:
summary: Get JWT token
description: Exchange email (and optional password) for a JWT with roles and vendorId. No auth required.
security: []
requestBody:
required: true
content:
application/json:
schema:
type: object
required: [email]
properties:
email: { type: string, format: email }
password: { type: string }
responses:
"200":
description: Token and user info
"401":
description: Invalid credentials
/vendors:
get:
summary: List vendors
description: If JWT contains vendorId (vendor user), returns only that vendor.
post:
summary: Create vendor
description: Forbidden for vendor users.
/vendors/{id}:
get:
summary: Get vendor
description: Vendor users may only request their own vendor id.
/offers:
get:
summary: List offers
description: If JWT contains vendorId, returns only that vendor's offers.
post:
summary: Create offer
description: Vendor users' vendorId is forced to their vendor.
/offers/{id}:
get:
summary: Get offer
patch:
summary: Update offer
delete:
summary: Delete offer
/purchase-orders:
get:
summary: List purchase orders
description: If JWT contains vendorId, returns only POs for that vendor.
/purchase-orders/{id}:
get:
summary: Get purchase order
/ingestion/offers:
post:
summary: Ingest offer (scrape or email)
description: Creates an offer with source (scraped|email), source_ref, source_metadata. Secured by x-ingestion-api-key only; no JWT. Use x-org-id for target org.
security:
- IngestionApiKey: []
requestBody:
required: true
content:
application/json:
schema:
type: object
required: [source, quantity, unit_price]
properties:
source:
type: string
enum: [scraped, email]
source_ref:
type: string
description: URL or email message id
source_metadata:
type: object
vendor_id:
type: string
format: uuid
nullable: true
sku:
type: string
mpn:
type: string
quantity:
type: integer
unit_price:
type: string
incoterms:
type: string
lead_time_days:
type: integer
country_of_origin:
type: string
condition:
type: string
warranty:
type: string
evidence_refs:
type: array
items:
type: object
properties:
key: { type: string }
hash: { type: string }
responses:
"201":
description: Offer created
"401":
description: Invalid or missing x-ingestion-api-key
/capacity/sites/{siteId}:
get:
summary: RU utilization for a site
description: Returns usedRu, totalRu, utilizationPercent for the site (from racks and assigned positions).
parameters:
- name: siteId
in: path
required: true
schema: { type: string, format: uuid }
responses:
"200":
description: Site capacity (usedRu, totalRu, utilizationPercent)
"404":
description: Site not found
/capacity/sites/{siteId}/power:
get:
summary: Power headroom for a site
description: Returns circuitLimitWatts from rack power_feeds; measuredDrawWatts/headroomWatts null until Phase 4.
parameters:
- name: siteId
in: path
required: true
schema: { type: string, format: uuid }
responses:
"200":
description: Power info (circuitLimitWatts, measuredDrawWatts, headroomWatts)
"404":
description: Site not found
/capacity/gpu-inventory:
get:
summary: GPU inventory
description: Returns total, bySite, and byType (part number) counts.
responses:
"200":
description: GPU counts (total, bySite, byType)

View File

@@ -0,0 +1,95 @@
# Operational baseline — current hardware running / in-hand
Hardware already deployed, active, or physically in-hand (not part of available wholesale inventory). Quantities marked **TBD** are to be confirmed and locked during physical audit. Once confirmed, this document is the **authoritative operational baseline** for Sankofa Phoenix.
---
## A1. Compute servers (operational)
### HPE ProLiant ML110 series
- **Role:** Core services / management / utility workloads
- **Form factor:** Tower / rack-convertible
- **Status:** Running / in-hand
- **Quantity:** TBD
- **Notes:** Suitable for control-plane services, monitoring, identity, light virtualization
### Dell PowerEdge R630
- **Role:** General compute / virtualization / legacy workloads
- **Form factor:** 1U rackmount
- **Status:** Running / in-hand
- **Quantity:** TBD
- **Notes:** Ideal for Proxmox clusters, utility VMs, staging environments
---
## A2. Network and edge infrastructure
### UniFi Dream Machine Pro (UDM Pro)
- **Role:** Edge gateway, UniFi OS controller, firewall
- **Status:** Running / in-hand
- **Quantity:** TBD
- **Notes:** Per-site edge control; candidate for per-sovereign controller domains
### UniFi XG switches
- **Role:** High-throughput aggregation / core switching
- **Status:** Running / in-hand
- **Quantity:** TBD
- **Notes:** 10G/25G backbone for compute and storage traffic
---
## A3. ISP and external connectivity
### Spectrum Business cable modems
- **Role:** Primary or secondary WAN connectivity
- **Status:** Installed / in-hand
- **Quantity:** TBD
- **Notes:** Business-class internet access; typically paired with UDM Pro
---
## A4. Physical infrastructure and power
### APC equipment cabinets
- **Role:** Secure rack enclosure
- **Status:** Installed / in-hand
- **Quantity:** TBD
- **Notes:** Houses compute, network, and power equipment
### APC UPS units
- **Role:** Power conditioning and battery backup
- **Status:** Installed / in-hand
- **Quantity:** TBD
- **Notes:** Runtime and load to be captured per site for capacity planning
---
## A5. Operational classification summary
| Category | Status | Quantity |
| ------------------- | --------- | -------- |
| ML110 servers | Running | TBD |
| Dell R630 servers | Running | TBD |
| UDM Pro | Running | TBD |
| UniFi XG switches | Running | TBD |
| Spectrum modems | Installed | TBD |
| APC cabinets | Installed | TBD |
| APC UPS units | Installed | TBD |
---
## A6. Next actions (to finalize baseline)
1. **Physical audit** — Lock quantities and serials per site/rack.
2. **Import into sankofa-hw-infra** — Create as **Operational Assets** (assets with category, site, rack position).
3. **Attach to sites, racks, power feeds** — Populate site hierarchy and power metadata.
4. **Enable integrations** — UniFi (device mapping), Proxmox (node ↔ server), UPS monitoring where supported.
After quantities and serials are confirmed, this appendix is the authoritative operational baseline for capacity planning, BOM, and compliance.

View File

@@ -0,0 +1,29 @@
# Purchasing feedback loop
How UniFi telemetry and product intelligence drive approved buy lists, BOMs, and support-risk views.
## Data flow
1. **UniFi device sync** — Devices are synced from each sovereign's controller; device list includes model/SKU.
2. **Product catalog lookup** — Each device model/SKU is matched against `unifi_product_catalog` (generation, EoL, support horizon).
3. **Outputs:**
- **SKU-normalized BOM** per sovereign/site: which exact hardware is deployed, with generation and support status.
- **Support-risk heatmap:** devices near EoL or with short support horizon.
- **Firmware divergence alerts:** when firmware versions drift from policy (see compliance profiles).
- **Approved purchasing catalog:** only SKUs that meet the sovereign's compliance profile (allowed generations, approved_skus).
## Approved buy list
The “approved buy list” is the intersection of:
- Devices in use or recommended (from UniFi + catalog), and
- Catalog entries with `approved_sovereign_default` or matching the org's **compliance profile** (allowed_generations, approved_skus).
So operations (what we have and what's supported) drives procurement (what we're allowed to buy), not the other way around.
## Optional API
- `GET /api/v1/reports/bom?org_id=&site_id=` — Aggregate assets + UniFi mappings + catalog for a BOM.
- `GET /api/v1/reports/support-risk?org_id=&horizon_months=12` — Devices with EoL or support horizon within the next N months.
These can be implemented as thin wrappers over existing schema, `unifi_product_catalog`, and `integration_mappings`.

View File

@@ -0,0 +1,36 @@
# RBAC matrix for sovereign operations
Who can **see**, who can **change**, and who can **approve** (by role and by site/sovereign) for UniFi, compliance, and purchasing.
## Permissions
| Permission | Description |
|------------|-------------|
| unifi:read | Read UniFi devices and product catalog within assigned site/org |
| unifi:write | Change UniFi mappings and controller config within assigned site/org |
| unifi_oversight:read | Read-only across sovereigns (central oversight; no write) |
| compliance:read | View compliance profiles |
| compliance:write | Create/update/delete compliance profiles |
| purchasing_catalog:read | View approved buy lists and BOMs |
## Role vs permission (sovereign-relevant)
| Role | unifi:read | unifi:write | unifi_oversight:read | compliance:read | compliance:write | purchasing_catalog:read |
|------|:----------:|:-----------:|:--------------------:|:----------------:|:-----------------:|:------------------------:|
| super_admin | yes | yes | yes | yes | yes | yes |
| security_admin | | | yes | yes | yes | |
| procurement_manager | yes | | | | | yes |
| finance_approver | | | | | | yes |
| site_admin | yes | yes | | yes | | |
| noc_operator | yes | | | | | |
| read_only_auditor | yes | | | yes | | yes |
| partner_inspector | | | | | | |
## Scoping rules
- **unifi:read** and **unifi:write** apply only within the operator's assigned **site** or **org** (via `user_roles.scope_site_id` / org). No cross-sovereign write.
- **unifi_oversight:read** is the only cross-sovereign read; used by central Sankofa Phoenix oversight. No write authority.
- **compliance:read** / **compliance:write** are scoped by org (sovereign); enforce in API so users only see/edit profiles for their org.
- **purchasing_catalog:read** is scoped by org/site so approved lists and BOMs are sovereign-specific.
Existing ABAC (e.g. `scope_site_id` on user_roles) enforces these boundaries; ensure new integration and compliance endpoints check permission and org/site scope.

View File

@@ -0,0 +1,4 @@
# Incident response runbook
1. Triage: check audit log and health endpoints.
2. Isolate affected assets or revoke credentials if compromise.
3. Notify; post-mortem and update runbooks.

View File

@@ -0,0 +1,6 @@
# Runbook: Provisioning and integration checks
- **Proxmox**: Register node; add mapping via POST /api/v1/integrations/mappings (provider=proxmox, externalId=node name). Sync nodes via scheduled job or manual trigger.
- **UniFi**: Map switch/port to rack position; store in integration_mappings with metadata (device id, port index).
- **Redfish**: At receiving, optionally call Redfish to verify serial and firmware; store result in asset proof artifacts.
- **Checks**: Verify mapping exists for asset before provisioning; confirm credentials in Vault for the site.

View File

@@ -0,0 +1,9 @@
# Runbook: Receiving and Inspection
## Inspection
1. Create inspection run from template.
2. Upload evidence; set pass/fail.
3. If fail: claim. If pass: approve release.
## Receiving
1. Reconcile shipment with PO. 2. Assign rack; set asset Received then Staged.

View File

@@ -0,0 +1,13 @@
# Runbook: Receiving and Racking
## Receiving
1. Create shipment for PO; scan items.
2. POST /api/v1/shipments/:id/receive with assetIds to set assets to received.
3. Update shipment status to received.
## Racking
1. Assign asset to position: PATCH /api/v1/assets/:id with positionId.
2. Set asset status to staged.
## Capacity
GET /api/v1/capacity/sites/:siteId and GET /api/v1/capacity/gpu-inventory for dashboards.

2
docs/security.md Normal file
View File

@@ -0,0 +1,2 @@
# Security
Secrets: Vault/KMS; rotate API tokens. MFA for privileged roles. Dual control: vendor bank details and PO final approval (Phase 1). Attachment malware scanning (Phase 4). Data retention policies by doc type.

View File

@@ -0,0 +1,42 @@
# Sovereign controller topology
Per-sovereign UniFi controller domains, regionally isolated management planes, and a central read-only oversight layer. No cross-sovereign write authority.
## Diagram
```mermaid
flowchart TB
subgraph sovereignA [Sovereign A]
CtrlA[UniFi Controller A]
CtrlA -->|write| NetA[Network A]
end
subgraph sovereignB [Sovereign B]
CtrlB[UniFi Controller B]
CtrlB -->|write| NetB[Network B]
end
subgraph oversight [Central oversight]
Phoenix[Sankofa Phoenix]
end
Phoenix -->|read only| CtrlA
Phoenix -->|read only| CtrlB
CtrlA -.->|no write| CtrlB
CtrlB -.->|no write| CtrlA
```
## Architecture
- **Per-sovereign controller domains:** Each sovereign (org/tenant) has its own UniFi controller(s). Write authority stays within that sovereign.
- **Regional isolation:** Controllers and management planes can be deployed per region so data and control stay in-region.
- **Central read-only oversight:** Sankofa Phoenix has a read-only view across controllers for audit, BOM, support-risk, and compliance—no write into any sovereign's controller.
- **Trust boundaries:** No cross-sovereign write; sovereign A cannot change sovereign B's network or config.
This satisfies sovereignty, auditability, compartmentalization, and trust boundaries between different bodies (e.g. governmental).
## Optional: controller registry
If you store controller endpoints in the DB, use a table `unifi_controllers` with: org_id, site_id (optional), base_url, role (sovereign_write | oversight_read_only), region. Credentials remain in Vault; the table only stores topology and role. API: CRUD for controllers scoped by org_id.
See [integration-spec-unifi.md](integration-spec-unifi.md) for the “Sovereign-safe controller architecture” subsection.

49
docs/vendor-portal.md Normal file
View File

@@ -0,0 +1,49 @@
# Vendor portal and vendor users
Selected vendors can log in to assist in fulfilling needs: view and update their offers, and see purchase orders relevant to them.
## Model
- **Vendor user:** A user record with `vendor_id` set is a *vendor user*. That user can be assigned the role `vendor_user` and receive a JWT that includes `vendorId` in the payload.
- **Scoping:** When the API sees `req.user.vendorId`, it restricts:
- **Vendors:** List returns only that vendor; GET/PATCH/DELETE only for that vendor; POST (create vendor) is forbidden.
- **Offers:** List/GET/PATCH/DELETE only offers for that vendor; on create, `vendorId` is forced to the logged-in vendor.
- **Purchase orders:** List/GET only POs for that vendor.
## Onboarding a vendor user
1. Create or select a **Vendor** in the org (e.g. "The Server Store", "Sergio's Hardware").
2. Create a **User** with:
- `org_id` = same as org
- `vendor_id` = that vendor's ID
- `email` / `name` as needed
3. Assign the role **vendor_user** to that user (via your IdP or `user_roles` if you manage roles in-app).
4. At **login**, ensure the issued JWT includes:
- `roles`: e.g. `["vendor_user"]`
- `vendorId`: the vendor's UUID
Then the vendor can call the same API under `/api/v1` with that JWT (and `x-org-id`). They will only see and modify data for their vendor.
## Permissions for vendor_user
The role `vendor_user` has:
- `vendor:read_own` — read own vendor
- `vendor:write_offers_own` — create/update own offers
- `vendor:view_pos_own` — view POs for their vendor
- `offers:read`, `offers:write` — used in combination with `vendorId` scoping above
- `purchase_orders:read` — used with vendor filter
Vendor users cannot create/update/delete vendor records, nor see other vendors' offers or POs.
## API surface (vendor portal)
Vendor users use the same endpoints as procurement, with automatic scoping:
- **GET /api/v1/vendors** — Returns only their vendor.
- **GET /api/v1/vendors/:id** — Allowed only when `:id` is their vendor.
- **GET/POST/PATCH/DELETE /api/v1/offers** — Only their vendor's offers; POST forces their vendorId.
- **GET /api/v1/purchase-orders** — Only POs where vendorId is their vendor.
- **GET /api/v1/purchase-orders/:id** — Allowed only for POs of their vendor.
No changes to URLs or request bodies are required; scoping is derived from the JWT `vendorId`.

30
env.example Normal file
View File

@@ -0,0 +1,30 @@
# API
NODE_ENV=development
API_PORT=4000
API_HOST=0.0.0.0
# Database (match infra/docker-compose.yml for local dev)
DATABASE_URL=postgres://sankofa:sankofa_dev@localhost:5432/sankofa
# Object storage (MinIO for dev when using profile 'full')
S3_ENDPOINT=http://localhost:9000
S3_ACCESS_KEY=sankofa
S3_SECRET_KEY=sankofa_dev_minio
S3_BUCKET=sankofa-documents
S3_REGION=us-east-1
S3_USE_SSL=false
# JWT (generate a secret in production)
JWT_SECRET=change-me-in-production-use-openssl-rand-base64-32
# Ingestion (scraper / email pipeline)
INGESTION_API_KEY=set-a-secret-key-for-ingestion-endpoint
# Web app (optional; dev proxy uses /api -> localhost:4000)
# VITE_API_URL=http://localhost:4000
# Optional: SSO placeholder
# OIDC_ISSUER=
# OIDC_CLIENT_ID=
# OIDC_CLIENT_SECRET=
# SAML_ENTRY_POINT=

17
eslint.config.js Normal file
View File

@@ -0,0 +1,17 @@
import js from "@eslint/js";
import tseslint from "typescript-eslint";

// Global ignore set: an entry containing ONLY `ignores` applies to every
// config object in the array (build output, deps, generated artifacts,
// and the tooling config files themselves).
const globalIgnores = {
  ignores: [
    "**/dist/**",
    "**/node_modules/**",
    "**/build/**",
    "**/coverage/**",
    "**/*.config.js",
    "**/*.config.ts",
  ],
};

// Base JS recommendations, then the typescript-eslint recommended set,
// with the global ignores appended (position does not matter for an
// ignores-only entry).
export default [js.configs.recommended, ...tseslint.configs.recommended, globalIgnores];

40
infra/docker-compose.yml Normal file
View File

@@ -0,0 +1,40 @@
# Development infrastructure: Postgres, S3-compatible (MinIO), optional Vault dev
services:
  postgres:
    image: postgres:16-alpine
    environment:
      POSTGRES_USER: sankofa
      POSTGRES_PASSWORD: sankofa_dev
      POSTGRES_DB: sankofa
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U sankofa -d sankofa"]
      interval: 5s
      timeout: 5s
      retries: 5

  minio:
    image: minio/minio:latest
    # Pin the web console to 9001. Without --console-address, MinIO binds the
    # console to a random port and the "9001:9001" mapping below never reaches it.
    command: server /data --console-address ":9001"
    environment:
      MINIO_ROOT_USER: sankofa
      MINIO_ROOT_PASSWORD: sankofa_dev_minio
    ports:
      - "9000:9000" # S3 API
      - "9001:9001" # web console
    volumes:
      - minio_data:/data
    healthcheck:
      test: ["CMD", "mc", "ready", "local"]
      interval: 5s
      timeout: 5s
      retries: 5
    profiles:
      - full # started only with: docker compose --profile full up

volumes:
  postgres_data:
  minio_data:

23
package.json Normal file
View File

@@ -0,0 +1,23 @@
{
"name": "sankofa-hw-infra",
"version": "0.1.0",
"private": true,
"description": "Hardware procurement, inventory, and operations platform",
"scripts": {
"build": "pnpm -r run build",
"test": "pnpm -r run test",
"lint": "eslint apps packages",
"dev": "pnpm -r run dev --parallel",
"db:migrate": "pnpm --filter @sankofa/schema run db:migrate",
"db:generate": "pnpm --filter @sankofa/schema run db:generate"
},
"engines": {
"node": ">=20"
},
"packageManager": "pnpm@9.14.2",
"devDependencies": {
"@eslint/js": "^9.15.0",
"eslint": "^9.15.0",
"typescript-eslint": "^8.15.0"
}
}

View File

@@ -0,0 +1,22 @@
{
"name": "@sankofa/auth",
"version": "0.1.0",
"private": true,
"type": "module",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"scripts": {
"build": "tsc",
"test": "vitest run",
"lint": "eslint src --ext .ts"
},
"dependencies": {
"@sankofa/schema": "workspace:*"
},
"devDependencies": {
"@types/node": "^22.10.0",
"eslint": "^9.15.0",
"typescript": "^5.7.0",
"vitest": "^2.1.0"
}
}

View File

@@ -0,0 +1,6 @@
import { describe, expect, it } from "vitest";
import { hasPermission, ROLES } from "./index";

// Smoke tests for the RBAC helpers exported by ./index.
describe("auth", () => {
  it("grants super_admin every permission it is asked about", () => {
    expect(hasPermission(["super_admin"], "vendors:write")).toBe(true);
  });

  it("defines at least one role", () => {
    expect(ROLES.length).toBeGreaterThan(0);
  });
});

View File

@@ -0,0 +1,64 @@
/**
 * Role and permission definitions for RBAC, plus a minimal ABAC check.
 *
 * Roles map to static permission lists; `hasPermission` / `hasAnyPermission`
 * answer "may any of these roles do X?". `checkABAC` compares optional
 * resource attributes against the caller's context.
 */

/** All role names known to the platform. */
export const ROLES = [
  "super_admin",
  "security_admin",
  "procurement_manager",
  "finance_approver",
  "site_admin",
  "noc_operator",
  "read_only_auditor",
  "partner_inspector",
  "vendor_user",
] as const;

export type RoleName = (typeof ROLES)[number];

/** Every permission string the API checks against. */
export const PERMISSIONS = [
  "vendors:read", "vendors:write", "offers:read", "offers:write",
  "purchase_orders:read", "purchase_orders:write", "purchase_orders:approve",
  "assets:read", "assets:write", "sites:read", "sites:write",
  "users:read", "users:write", "roles:read", "roles:write",
  "audit:read", "audit:export", "upload:write", "inspection:write",
  "unifi:read", "unifi:write", "unifi_oversight:read",
  "compliance:read", "compliance:write", "purchasing_catalog:read",
  "vendor:read_own", "vendor:write_offers_own", "vendor:view_pos_own",
] as const;

export type Permission = (typeof PERMISSIONS)[number];

// Static role -> permission grant table. super_admin holds every permission;
// all other roles get an explicit subset.
const PERMISSIONS_BY_ROLE: Record<RoleName, Permission[]> = {
  super_admin: [...PERMISSIONS],
  security_admin: ["users:read", "users:write", "roles:read", "roles:write", "audit:read", "audit:export", "vendors:read", "offers:read", "purchase_orders:read", "assets:read", "sites:read", "compliance:read", "compliance:write", "unifi_oversight:read"],
  procurement_manager: ["vendors:read", "vendors:write", "offers:read", "offers:write", "purchase_orders:read", "purchase_orders:write", "assets:read", "sites:read", "upload:write", "unifi:read", "purchasing_catalog:read"],
  finance_approver: ["vendors:read", "offers:read", "purchase_orders:read", "purchase_orders:approve", "assets:read", "sites:read", "purchasing_catalog:read"],
  site_admin: ["vendors:read", "offers:read", "purchase_orders:read", "assets:read", "assets:write", "sites:read", "sites:write", "upload:write", "unifi:read", "unifi:write", "compliance:read"],
  noc_operator: ["assets:read", "assets:write", "sites:read", "upload:write", "unifi:read"],
  read_only_auditor: ["vendors:read", "offers:read", "purchase_orders:read", "assets:read", "sites:read", "audit:read", "audit:export", "unifi:read", "compliance:read", "purchasing_catalog:read"],
  partner_inspector: ["offers:read", "assets:read", "upload:write", "inspection:write"],
  vendor_user: ["vendor:read_own", "vendor:write_offers_own", "vendor:view_pos_own", "vendors:read", "offers:read", "offers:write", "purchase_orders:read"],
};

/**
 * True when at least one of `roleNames` grants `permission`.
 * Role names absent from the grant table are ignored rather than throwing.
 */
export function hasPermission(roleNames: RoleName[], permission: Permission): boolean {
  return roleNames.some((role) => PERMISSIONS_BY_ROLE[role]?.includes(permission) ?? false);
}

/** True when any of `permissions` is granted by any of `roleNames`. */
export function hasAnyPermission(roleNames: RoleName[], permissions: Permission[]): boolean {
  for (const permission of permissions) {
    if (hasPermission(roleNames, permission)) return true;
  }
  return false;
}

/** Optional attributes compared during attribute-based access checks. */
export interface ABACContext {
  site_id?: string;
  project_id?: string;
  asset_category?: string;
  sensitivity_tier?: string;
  vendor_trust_tier?: string;
}

/**
 * Attribute check: access is denied only when BOTH sides define `site_id`
 * (or `project_id`) and the values differ; a missing attribute on either
 * side never denies.
 * NOTE(review): `asset_category`, `sensitivity_tier`, and `vendor_trust_tier`
 * are declared on ABACContext but not enforced here — confirm intentional.
 */
export function checkABAC(resource: ABACContext, context: ABACContext): boolean {
  const conflicts = (a: string | undefined, b: string | undefined): boolean =>
    a != null && b != null && a !== b;

  if (conflicts(resource.site_id, context.site_id)) return false;
  if (conflicts(resource.project_id, context.project_id)) return false;
  return true;
}

View File

@@ -0,0 +1,15 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ESNext",
"moduleResolution": "bundler",
"strict": true,
"outDir": "dist",
"rootDir": "src",
"declaration": true,
"declarationMap": true,
"skipLibCheck": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

View File

@@ -0,0 +1,10 @@
import { defineConfig } from "drizzle-kit";

// Fallback for local development; matches the credentials in
// infra/docker-compose.yml. Real deployments set DATABASE_URL.
const LOCAL_DEV_URL = "postgres://sankofa:sankofa_dev@localhost:5432/sankofa";

export default defineConfig({
  dialect: "postgresql",
  schema: "./src/db/schema.ts",
  out: "./drizzle",
  dbCredentials: {
    url: process.env.DATABASE_URL ?? LOCAL_DEV_URL,
  },
});

Some files were not shown because too many files have changed in this diff Show More