23 commits c17a32de3a ... 36597cedcd

Author SHA1 Message Date
  Code_Uwe 36597cedcd RHL-005-update(docs): enhance authentication documentation with detailed environment variables and configuration guidelines 1 day ago
  Code_Uwe aebfaa7445 RHL-005-add(docs): add comprehensive documentation for the storage module and environment configuration 1 day ago
  Code_Uwe 2bf527fed0 RHL-005-add(docs): create runbook for local development and server deployment 1 day ago
  Code_Uwe ee9ca8aebe RHL-005-add(docs): create API documentation for HTTP endpoints and configuration 1 day ago
  Code_Uwe 1faf7be4d9 RHL-005-remove(docs): delete outdated Lieferscheine documentation 1 day ago
  Code_Uwe 8449820561 RHL-005-fix(env): add local NAS directory and cookies.txt to .gitignore 1 day ago
  Code_Uwe 93e92e35f6 RHL-005-fix(env): add local NAS volume configuration to docker-compose 1 day ago
  Code_Uwe 927e1b8005 RHL-005-fix(auth): add defensive check for missing passwordHash in login endpoint 1 day ago
  Code_Uwe 29ad68afa5 RHL-005-fix(auth): improve session cookie security handling and update tests 1 day ago
  Code_Uwe 039740c869 RHL-005-fix(env): add SESSION_COOKIE_SECURE validation and update tests 1 day ago
  Code_Uwe 8b3112ddf4 RHL-005-fix(env): clarify root auth requirements in Docker environment configuration 1 day ago
  Code_Uwe fca18e4737 RHL-005-fix(env): update MongoDB connection string and clarify root auth requirements 1 day ago
  Code_Uwe 61147c9a96 RHL-005-fix(page): remove unnecessary whitespace and improve code formatting 1 day ago
  Code_Uwe 952d191e48 RHL-005-refactor(auth): update database connection to use getDb and improve test mocks 1 day ago
  Code_Uwe 60920aeffe RHL-005-refactor(db): refactor MongoDB connection to use Mongoose and improve error handling 1 day ago
  Code_Uwe 71824431b3 RHL-005-fix(tests): update SESSION_SECRET to meet environment policy requirements 1 day ago
  Code_Uwe 82b73c21b1 RHL-005-feat(docker): add .dockerignore to exclude env files and build outputs 2 days ago
  Code_Uwe eecc7ed247 RHL-005-fix(env): update SESSION_SECRET placeholder in example environment files 2 days ago
  Code_Uwe 3bf9d986f5 RHL-005-feat(env): add example environment configuration files for Docker and local development 2 days ago
  Code_Uwe e2537daaec RHL-005-refactor(gitignore): allow committing example environment files 2 days ago
  Code_Uwe b0fc3d8fef RHL-005-feat(docker): add healthcheck for MongoDB service and update app dependency condition 2 days ago
  Code_Uwe 7bbd26e23d RHL-005-feat(env): add environment variable validation script 2 days ago
  Code_Uwe 501dede489 RHL-005-feat(env): add environment variable validation and normalization functions with tests 2 days ago

+ 14 - 0
.dockerignore

@@ -0,0 +1,14 @@
+# env files (never bake secrets into images)
+.env*
+!.env*.example
+
+# dependencies / build output
+node_modules
+.next
+coverage
+.git
+.DS_Store
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+.pnpm-debug.log*

+ 23 - 0
.env.docker.example

@@ -0,0 +1,23 @@
+# Docker-based deployment (docker-compose)
+# Copy this file to ".env.docker" and adjust values.
+#
+# NOTE: Never commit real secrets. Use placeholders in this example file.
+
+# MongoDB connection string inside the docker-compose network.
+# "db" should match the MongoDB service name in docker-compose.yml.
+# If root auth is enabled, include credentials.
+MONGODB_URI=mongodb://root:replace-me@db:27017/rhl-lieferscheine?authSource=admin
+
+# Secret used to sign session JWT cookies.
+# Requirements: strong + random, at least 32 characters, never committed.
+# Example generation:
+#   openssl rand -base64 48
+SESSION_SECRET=replace-me-with-a-long-random-secret-string-at-least-32-chars
+
+# Must match the mount target inside the app container (read-only mount recommended).
+NAS_ROOT_PATH=/mnt/niederlassungen
+
+NODE_ENV=production
+
+# Optional. Only needed if you run the app on a non-default port.
+# PORT=3000

+ 24 - 0
.env.local.example

@@ -0,0 +1,24 @@
+# Local development (Next.js dev server)
+# Copy this file to ".env.local" and adjust values.
+#
+# NOTE: Never commit real secrets. Use placeholders in this example file.
+
+# MongoDB connection string (local dev).
+# Expected format: mongodb://... OR mongodb+srv://...
+MONGODB_URI=mongodb://localhost:27017/rhl_lieferscheine
+
+# Secret used to sign session JWT cookies.
+# Requirements: strong + random, at least 32 characters, never committed.
+# Example generation:
+#   openssl rand -base64 48
+SESSION_SECRET=replace-me-with-a-long-random-secret-string-at-least-32-chars
+
+# Absolute Unix path to the NAS root folder that contains branch directories (NLxx/...).
+# For local dev you can point this to a local fixture folder.
+NAS_ROOT_PATH=/mnt/niederlassungen
+
+# Optional. Next.js dev usually sets this, but keeping it explicit is fine.
+NODE_ENV=development
+
+# Optional. Only needed if you want a custom port for your local server.
+# PORT=3000

+ 5 - 0
.gitignore

@@ -32,6 +32,11 @@ yarn-error.log*
 
 # env files (can opt-in for committing if needed)
 .env*
+!.env*.example
+
+# local fixtures / test artifacts
+.local_nas/
+cookies.txt
 
 # vercel
 .vercel

+ 0 - 449
Docs/Lieferscheine.md

@@ -1,449 +0,0 @@
-# Storage Module (`lib/storage`)
-
-The `lib/storage` module is the **single source of truth** for reading files
-from the network file share that contains scanned delivery notes.
-
-All code that needs to read from the NAS must go through this module instead
-of using Node.js `fs` directly. This keeps filesystem logic centralized and
-makes it easier to change paths or conventions later.
-
-## High-Level Responsibilities
-
-- Resolve paths under the NAS root (`NAS_ROOT_PATH`).
-- Provide high-level, intention-revealing helpers:
-
-  - `listBranches()` → `['NL01', 'NL02', ...]`
-  - `listYears(branch)` → `['2023', '2024', ...]`
-  - `listMonths(branch, year)` → `['01', '02', ...]`
-  - `listDays(branch, year, month)` → `['01', '02', ...]`
-  - `listFiles(branch, year, month, day)` → `[{ name, relativePath }, ...]`
-
-- Enforce **read-only** access from the filesystem (no delete/move/write logic
-  here).
-- Use asynchronous filesystem APIs (`fs/promises`) to avoid blocking the
-  event loop when reading from a network filesystem (SMB). Using async I/O is a
-  recommended best practice in Node.js for scalability and performance.
-
-## Environment Configuration
-
-The storage module depends on a single environment variable:
-
-- `NAS_ROOT_PATH`
-
-  Absolute path where the NAS share is mounted on the host.
-
-Typical values:
-
-- **Production (Linux server):**
-
-  ```env
-  NAS_ROOT_PATH=/mnt/niederlassungen
-  ```
-
-- **Local development (optional):**
-
-  ```env
-  # Example: local test folder
-  NAS_ROOT_PATH=/Users/<username>/dev/test/niederlassungen
-  ```
-
-  or, if the NAS is mounted locally (e.g. on macOS):
-
-  ```env
-  NAS_ROOT_PATH=/Volumes/Niederlassungen
-  ```
-
-If `NAS_ROOT_PATH` is not set, the helpers will throw when called. This is
-intentional: configuration issues should fail fast instead of causing
-confusing downstream errors.
-
-## Directory Layout Assumptions
-
-The helpers assume the following structure under `NAS_ROOT_PATH`:
-
-```text
-NAS_ROOT_PATH/
-  @Recently-Snapshot/   # ignored
-  NL01/
-    2024/
-      10/
-        23/
-          file1.pdf
-          file2.pdf
-  NL02/
-    2023/
-      12/
-        01/
-          ...
-  ...
-```
-
-Rules:
-
-- Branch directories follow the pattern `NL<Number>`, e.g. `NL01`, `NL23`.
-- Year directories are 4-digit numeric (`2023`, `2024`, ...).
-- Month and day directories are numeric; the helpers normalize them to
-  two‑digit strings for consistent display in the UI:
-
-  - Months: `"01"` … `"12"`
-  - Days: `"01"` … `"31"`
-
-- Only `.pdf` files are returned by `listFiles`.
-
-If the on-disk structure changes, update the logic in `lib/storage` only.
-API routes and UI components should not need to know about the exact layout.
-
-## Helper Functions
-
-All helper functions are asynchronous and return Promises.
-
-### `listBranches(): Promise<string[]>`
-
-Returns the list of branch directories (`NLxx`) under `NAS_ROOT_PATH`.
-
-- Ignores `@Recently-Snapshot`.
-- Filters for names matching `^NL\d+$` (case-insensitive).
-- Sorts branches numerically by their suffix (`NL1`, `NL2`, …, `NL10`).
-
-Example result:
-
-```json
-["NL01", "NL02", "NL03"]
-```
-
-### `listYears(branch: string): Promise<string[]>`
-
-Reads the year directories for a given branch.
-
-- Path: `${NAS_ROOT_PATH}/${branch}`
-- Filters for directories matching `^\d{4}$`.
-- Returns sorted year strings as `['2023', '2024', ...]`.
-
-### `listMonths(branch: string, year: string): Promise<string[]>`
-
-Reads the month directories for the given `branch` and `year`.
-
-- Path: `${NAS_ROOT_PATH}/${branch}/${year}`
-- Filters for directories matching `^\d{1,2}$`.
-- Normalizes month names to two digits (e.g. `'1' → '01'`).
-- Returns sorted month strings.
-
-Example result:
-
-```json
-["01", "02", "03", "10"]
-```
-
-### `listDays(branch: string, year: string, month: string): Promise<string[]>`
-
-Reads the day directories for the given `branch`, `year`, and `month`.
-
-- Path: `${NAS_ROOT_PATH}/${branch}/${year}/${month}`.
-- Filters for directories matching `^\d{1,2}$`.
-- Normalizes day names to two digits (e.g. `'3' → '03'`).
-- Returns sorted day strings.
-
-Example result:
-
-```json
-["01", "02", "03", "23"]
-```
-
-### `listFiles(branch: string, year: string, month: string, day: string): Promise<{ name: string; relativePath: string }[]>`
-
-Reads all PDF files for the given `branch`, `year`, `month`, and `day`.
-
-- Path: `${NAS_ROOT_PATH}/${branch}/${year}/${month}/${day}`.
-- Filters for files whose names end with `.pdf` (case-insensitive).
-- Sorts filenames alphabetically.
-- Returns an array of objects with:
-
-  - `name`: the raw filename (e.g. `"Stapel-1_Seiten-1_Zeit-1048.pdf"`).
-  - `relativePath`: the relative path from `NAS_ROOT_PATH` (e.g.
-    `"NL01/2024/10/23/Stapel-1_Seiten-1_Zeit-1048.pdf"`).
-
-Example result:
-
-```json
-[
-	{
-		"name": "Stapel-1_Seiten-1_Zeit-1048.pdf",
-		"relativePath": "NL01/2024/10/23/Stapel-1_Seiten-1_Zeit-1048.pdf"
-	},
-	{
-		"name": "Stapel-1_Seiten-2_Zeit-1032.pdf",
-		"relativePath": "NL01/2024/10/23/Stapel-1_Seiten-2_Zeit-1032.pdf"
-	}
-]
-```
-
-## Error Handling
-
-`lib/storage` does **not** swallow errors:
-
-- If a folder does not exist or is not accessible, the underlying
-  `fs.promises.readdir` call will throw (e.g. `ENOENT`, `EACCES`).
-- Callers (API routes, services) are responsible for catching these errors and
-  converting them into appropriate HTTP responses.
-
-This separation keeps responsibilities clear:
-
-- `lib/storage` → _How do we read data from the filesystem?_
-- API layer (`app/api/.../route.js`) → _How do we map errors to HTTP responses?_
-
----
-
-# API Overview
-
-This document describes the HTTP API exposed by the application using Next.js
-**Route Handlers** in the App Router (`app/api/*/route.js`).
-
-> All routes below are served under the `/api` prefix.
-
-> **Note:** Authentication and authorization are not implemented yet. In the
-> final system, branch users should only see their own branch, while admins
-> can access all branches.
-
-## General Conventions
-
-- All endpoints return JSON.
-
-- Successful responses use HTTP status `200`.
-
-- Error responses use `4xx` or `5xx` and have the shape:
-
-  ```json
-  { "error": "Human-readable error message" }
-  ```
-
-- Route Handlers are implemented in `app/api/.../route.js` using the standard
-  Web `Request` / `Response` primitives as described in the Next.js
-  documentation.
-
-- Filesystem access must use `lib/storage` (no direct `fs` calls inside
-  route handlers).
-
----
-
-## Health Check
-
-### `GET /api/health`
-
-**Purpose**
-
-Check whether:
-
-- The database is reachable.
-- The NAS root path (`NAS_ROOT_PATH`) is readable from the app container.
-
-**Response 200 (example)**
-
-```json
-{
-	"db": "OK",
-	"nas": {
-		"path": "/mnt/niederlassungen",
-		"entriesSample": ["@Recently-Snapshot", "NL01", "NL02", "NL03", "NL04"]
-	}
-}
-```
-
-**Error cases**
-
-- If the database is not reachable, the `db` field contains an error message.
-- If the NAS path cannot be read, the `nas` field contains an error string,
-  e.g. `"error: ENOENT: no such file or directory, scandir '/mnt/niederlassungen'"`.
-
-This endpoint is intended for operations/monitoring and quick manual checks.
-
----
-
-## Delivery Notes Hierarchy
-
-The following endpoints reflect the filesystem hierarchy:
-
-> `NAS_ROOT_PATH` → Branch → Year → Month → Day → PDF files
-
-### `GET /api/branches`
-
-List all branch directories based on the names under `NAS_ROOT_PATH`.
-
-**Response 200**
-
-```json
-{
-	"branches": ["NL01", "NL02", "NL03"]
-}
-```
-
-**Errors**
-
-- `500` – Internal error (e.g. filesystem error, missing `NAS_ROOT_PATH`).
-
----
-
-### `GET /api/branches/[branch]/years`
-
-Example: `/api/branches/NL01/years`
-
-Return all year folders for a given branch.
-
-**Response 200**
-
-```json
-{
-	"branch": "NL01",
-	"years": ["2023", "2024"]
-}
-```
-
-**Errors**
-
-- `400` – `branch` parameter is missing (indicates a route/handler bug).
-- `500` – Error while reading year directories.
-
----
-
-### `GET /api/branches/[branch]/[year]/months`
-
-Example: `/api/branches/NL01/2024/months`
-
-Return all month folders for the given branch and year.
-
-**Response 200**
-
-```json
-{
-	"branch": "NL01",
-	"year": "2024",
-	"months": ["01", "02", "03", "10"]
-}
-```
-
-**Notes**
-
-- Months are returned as two‑digit strings (`"01"` … `"12"`) so that UI
-  code does not need to handle formatting.
-
-**Errors**
-
-- `400` – `branch` or `year` parameter is missing.
-- `500` – Filesystem or configuration error.
-
----
-
-### `GET /api/branches/[branch]/[year]/[month]/days`
-
-Example: `/api/NL01/2024/10/days`
-
-Return all day folders for the given branch, year, and month.
-
-**Response 200**
-
-```json
-{
-	"branch": "NL01",
-	"year": "2024",
-	"month": "10",
-	"days": ["01", "02", "03", "23"]
-}
-```
-
-**Notes**
-
-- Days are returned as two‑digit strings (`"01"` … `"31"`).
-
-**Errors**
-
-- `400` – `branch`, `year`, or `month` parameter is missing.
-- `500` – Filesystem or configuration error.
-
----
-
-### `GET /api/files?branch=&year=&month=&day=`
-
-Example:
-
-```text
-/api/files?branch=NL01&year=2024&month=10&day=23
-```
-
-Return the list of PDF files for a specific branch and date.
-
-**Query parameters**
-
-- `branch` – branch identifier (e.g. `NL01`).
-- `year` – four‑digit year (e.g. `2024`).
-- `month` – month (e.g. `10`).
-- `day` – day (e.g. `23`).
-
-**Response 200**
-
-```json
-{
-	"branch": "NL01",
-	"year": "2024",
-	"month": "10",
-	"day": "23",
-	"files": [
-		{
-			"name": "Stapel-1_Seiten-1_Zeit-1048.pdf",
-			"relativePath": "NL01/2024/10/23/Stapel-1_Seiten-1_Zeit-1048.pdf"
-		},
-		{
-			"name": "Stapel-1_Seiten-2_Zeit-1032.pdf",
-			"relativePath": "NL01/2024/10/23/Stapel-1_Seiten-2_Zeit-1032.pdf"
-		}
-	]
-}
-```
-
-**Errors**
-
-- `400` – one or more required query parameters are missing.
-- `500` – filesystem error while reading the day directory or files.
-
----
-
-## Adding New Endpoints
-
-When adding new endpoints:
-
-1. **Define the URL and method first**, e.g.:
-
-   - `GET /api/file?path=...` (download a single PDF)
-   - `GET /api/search?branch=&query=...` (full‑text search via Qsirch)
-
-2. **Create a `route.js` file** in `app/api/...` following Next.js 16 Route
-   Handler conventions. For dynamic routes, use the `(request, ctx)` signature
-   and resolve parameters via `const params = await ctx.params`.
-
-3. **Use `lib/storage` for filesystem access** instead of calling `fs`
-   directly inside route handlers. If needed, add new helpers to
-   `lib/storage`.
-
-4. **Handle errors explicitly** with `try/catch` in the handler and return
-   `4xx/5xx` responses with clear `error` messages.
-
-5. **Update this document** to describe the new endpoint (URL, purpose,
-   parameters, sample responses, error cases).
-
----
-
-## Future Extensions
-
-- **Authentication & Authorization**
-
-  - Enforce branch‑level access control (branch user vs. admin).
-  - Likely implemented using JWT stored in cookies and a shared helper
-    (e.g. `lib/auth`) plus a `middleware.js` or per‑route checks.
-
-- **Search Endpoints (Qsirch)**
-
-  - Integrate with QNAP Qsirch via its HTTP API.
-  - Provide endpoints like `GET /api/search?branch=&query=&from=&to=`.
-
-- **File Download / Preview**
-
-  - Add endpoints for streaming PDF content from the NAS to the browser
-    with appropriate `Content-Type` and `Content-Disposition` headers.

+ 242 - 0
Docs/api.md

@@ -0,0 +1,242 @@
+<!-- --------------------------------------------------------------------------- -->
+
+<!-- Ordner: Docs -->
+
+<!-- Datei: api.md -->
+
+<!-- Relativer Pfad: Docs/api.md -->
+
+<!-- --------------------------------------------------------------------------- -->
+
+# API Overview
+
+This document describes the HTTP API exposed by the application using Next.js **Route Handlers** in the App Router (`app/api/*/route.js`).
+
+All routes below are served under the `/api` prefix.
+
+---
+
+## 1. Configuration Dependencies
+
+The API expects a valid server configuration.
+
+Required environment variables:
+
+- `MONGODB_URI` — database connection string (used by `lib/db.js`).
+- `SESSION_SECRET` — JWT signing secret for session cookies.
+- `NAS_ROOT_PATH` — NAS mount root for storage operations.
+
+Optional environment variables:
+
+- `SESSION_COOKIE_SECURE` — override for the cookie `Secure` flag (`true`/`false`).
+
+The environment can be validated via:
+
+- `lib/config/validateEnv.js`
+- `scripts/validate-env.mjs`
+
+In Docker/production-like runs, execute `node scripts/validate-env.mjs` before starting the server to fail fast.
+
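+A minimal sketch of such a fail-fast check (illustrative only; the actual rules live in `lib/config/validateEnv.js`, and the variable names below match the lists above):
+
+```js
+// Illustrative sketch, not the real scripts/validate-env.mjs.
+const required = ["MONGODB_URI", "SESSION_SECRET", "NAS_ROOT_PATH"];
+
+const missing = required.filter((name) => !(process.env[name] || "").trim());
+
+if (missing.length > 0) {
+	console.error(`Missing required environment variables: ${missing.join(", ")}`);
+	process.exit(1); // fail fast before the server starts
+}
+
+if ((process.env.SESSION_SECRET || "").length < 32) {
+	console.error("SESSION_SECRET must be at least 32 characters long");
+	process.exit(1);
+}
+```
+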
+---
+
+## 2. Authentication & Authorization
+
+### 2.1 Sessions
+
+Authentication uses a signed JWT stored in an HTTP-only cookie (`auth_session`).
+
+To access protected endpoints:
+
+1. `POST /api/auth/login` to obtain the cookie.
+2. Send subsequent requests with that cookie.
+
+Notes:
+
+- In production-like setups, cookies should be `Secure` and the app should run behind HTTPS.
+- For local HTTP testing (`http://localhost:3000`), set `SESSION_COOKIE_SECURE=false` in your local docker env file.
+
+### 2.2 RBAC (Branch-Level)
+
+RBAC is enforced on filesystem-related endpoints.
+
+- **401 Unauthorized**: no valid session
+- **403 Forbidden**: session exists but branch access is not allowed
+
+---
+
+## 3. General Conventions
+
+- All endpoints return JSON.
+
+- Error responses use:
+
+  ```json
+  { "error": "Human-readable error message" }
+  ```
+
+- Route handlers use Web `Request` / `Response` primitives.
+
+- For dynamic routes, Next.js 16 resolves parameters asynchronously via `ctx.params`.
+
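+A hedged sketch of that signature (hypothetical handler for `app/api/branches/[branch]/years/route.js`; the real handlers additionally enforce auth and use `lib/storage`):
+
+```js
+// Sketch only: shows the async params pattern, not the full implementation.
+export async function GET(request, ctx) {
+	// Dynamic route parameters are resolved asynchronously.
+	const { branch } = await ctx.params;
+
+	if (!branch) {
+		return Response.json({ error: "Missing branch parameter" }, { status: 400 });
+	}
+
+	return Response.json({ branch, years: [] });
+}
+```
+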
+---
+
+## 4. Endpoints
+
+### 4.1 `GET /api/health`
+
+**Purpose**
+
+Health check endpoint:
+
+- Verifies database connectivity (`db.command({ ping: 1 })`).
+- Verifies readability of `NAS_ROOT_PATH`.
+
+**Authentication**: not required.
+
+**Response 200 (example)**
+
+```json
+{
+	"db": "ok",
+	"nas": {
+		"path": "/mnt/niederlassungen",
+		"entriesSample": ["@Recently-Snapshot", "NL01", "NL02"]
+	}
+}
+```
+
+---
+
+### 4.2 `POST /api/auth/login`
+
+**Purpose**
+
+Authenticate a user and set the session cookie.
+
+**Authentication**: not required.
+
+**Request body (JSON)**
+
+```json
+{ "username": "example.user", "password": "plain-text-password" }
+```
+
+**Responses**
+
+- `200 { "ok": true }`
+- `400 { "error": "Invalid request body" }`
+- `400 { "error": "Missing username or password" }`
+- `401 { "error": "Invalid credentials" }`
+- `500 { "error": "Internal server error" }`
+
+---
+
+### 4.3 `GET /api/auth/logout`
+
+**Purpose**
+
+Destroy the current session by clearing the cookie.
+
+**Authentication**: recommended (but endpoint is idempotent).
+
+**Response**
+
+- `200 { "ok": true }`
+
+---
+
+### 4.4 `GET /api/branches`
+
+Returns the list of branches (e.g. `["NL01", "NL02"]`).
+
+**Authentication**: required.
+
+**RBAC behavior**
+
+- `branch` role → only own branch
+- `admin`/`dev` → all branches
+
+**Response 200**
+
+```json
+{ "branches": ["NL01", "NL02"] }
+```
+
+---
+
+### 4.5 `GET /api/branches/[branch]/years`
+
+Example: `/api/branches/NL01/years`
+
+**Authentication**: required.
+
+**Response 200**
+
+```json
+{ "branch": "NL01", "years": ["2023", "2024"] }
+```
+
+---
+
+### 4.6 `GET /api/branches/[branch]/[year]/months`
+
+Example: `/api/branches/NL01/2024/months`
+
+**Authentication**: required.
+
+**Response 200**
+
+```json
+{ "branch": "NL01", "year": "2024", "months": ["01", "02", "10"] }
+```
+
+---
+
+### 4.7 `GET /api/branches/[branch]/[year]/[month]/days`
+
+Example: `/api/branches/NL01/2024/10/days`
+
+**Authentication**: required.
+
+**Response 200**
+
+```json
+{ "branch": "NL01", "year": "2024", "month": "10", "days": ["01", "23"] }
+```
+
+---
+
+### 4.8 `GET /api/files?branch=&year=&month=&day=`
+
+Example:
+
+```text
+/api/files?branch=NL01&year=2024&month=10&day=23
+```
+
+**Authentication**: required.
+
+**Response 200**
+
+```json
+{
+	"branch": "NL01",
+	"year": "2024",
+	"month": "10",
+	"day": "23",
+	"files": [{ "name": "test.pdf", "relativePath": "NL01/2024/10/23/test.pdf" }]
+}
+```
+
+---
+
+## 5. Adding New Endpoints
+
+When adding new endpoints:
+
+1. Define URL + method.
+2. Implement a `route.js` under `app/api/...`.
+3. Use `lib/storage` for filesystem access.
+4. Enforce RBAC (`getSession()` + `canAccessBranch()` as needed).
+5. Add route tests (Vitest).
+6. Update this document.
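+
+The RBAC step (4) typically follows the pattern sketched below. The import paths and error strings are assumptions based on the modules described in this document; the real handlers may differ in detail:
+
+```js
+// Sketch of a branch-scoped handler: 401 without a session, 403 without branch access.
+import { getSession } from "@/lib/auth/session";
+import { canAccessBranch } from "@/lib/auth/permissions";
+import { listYears } from "@/lib/storage";
+
+export async function GET(request, ctx) {
+	const session = await getSession();
+	if (!session) {
+		return Response.json({ error: "Unauthorized" }, { status: 401 });
+	}
+
+	const { branch } = await ctx.params;
+	if (!canAccessBranch(session, branch)) {
+		return Response.json({ error: "Forbidden" }, { status: 403 });
+	}
+
+	try {
+		const years = await listYears(branch);
+		return Response.json({ branch, years });
+	} catch {
+		return Response.json({ error: "Internal server error" }, { status: 500 });
+	}
+}
+```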

+ 84 - 547
Docs/auth.md

@@ -1,10 +1,10 @@
 <!-- --------------------------------------------------------------------------- -->
 
-<!-- Folder: Docs -->
+<!-- Ordner: Docs -->
 
-<!-- File: auth.md -->
+<!-- Datei: auth.md -->
 
-<!-- Relative path: Docs/auth.md -->
+<!-- Relativer Pfad: Docs/auth.md -->
 
 <!-- --------------------------------------------------------------------------- -->
 
@@ -14,11 +14,10 @@ This document describes the authentication and authorization model for the inter
 
 The system uses:
 
-- MongoDB to store users.
+- MongoDB to store users (via Mongoose models).
 - Cookie-based sessions with a signed JWT payload.
 - Role-aware access control (`branch`, `admin`, `dev`).
-- Branch-level RBAC enforcement for filesystem APIs.
-- Extensible password management and recovery flows.
+- Branch-level RBAC enforcement for filesystem-related APIs.
 
 > NOTE: This document is a living document. As we extend the auth system (sessions, routes, policies, password flows), we will update this file.
 
@@ -33,183 +32,101 @@ The main goals of the authentication and authorization system are:
 - Admin and dev users can access data across branches.
 - Passwords are never stored in plaintext.
 - Sessions are stored as signed JWTs in HTTP-only cookies.
-- The system is ready for password change and password recovery functionality.
 
 This document covers:
 
-- User model and roles.
 - Environment variables related to auth.
-- RBAC rules and protected filesystem endpoints.
+- Roles and RBAC rules.
 - Session payload and cookie configuration.
 - Login and logout endpoints.
-- Planned endpoints for password management and recovery.
-- Security considerations and implementation guidelines.
 
 ---
 
-## 2. Environment Variables
+## 2. Environment & Configuration
 
-The authentication system depends on the following environment variables:
+### 2.1 Required variables
+
+Auth depends on the following environment variables:
 
 - `SESSION_SECRET` (required)
 
   - Strong, random string used to sign and verify JWT session tokens.
-  - Must be kept secret and should differ between environments (dev, staging, prod).
-
-Example for `.env.local.example`:
-
-```env
-# Session / JWT
-SESSION_SECRET=change-me-to-a-long-random-string
-```
-
-If `SESSION_SECRET` is not set, session utilities will throw an error.
-
----
-
-## 3. User Model
-
-Users are stored in MongoDB using the `User` collection.
-
-### 3.1 Fields
-
-- **username** (`String`, required, unique, lowercased)
-
-  - Human-chosen login name.
-  - Stored in lowercase to enable case-insensitive login.
-  - Trimmed, minimum length 3 characters.
-  - Unique index to enforce one user per username.
-
-- **email** (`String`, required, unique, lowercased)
-
-  - Contact address used for password recovery and notifications.
-  - For branch accounts, this is typically the branch email address.
-  - For individual accounts, this can be the personal work email.
-  - Stored in lowercase.
-  - Unique per user.
-
-- **passwordHash** (`String`, required)
-
-  - Hashed password (e.g. using bcrypt).
-  - Plaintext passwords are never stored.
-  - Always excluded from JSON serialization.
-
-- **role** (`String`, required, enum: `"branch" | "admin" | "dev"`)
-
-  - Controls the type of access a user has.
-  - See **Roles** section below.
-
-- **branchId** (`String | null`)
-
-  - Identifies the branch (e.g. `"NL01"`) that the user belongs to.
-  - Required for `role = "branch"`.
-  - Must be `null` or unused for non-branch users (`admin`, `dev`).
-
-- **mustChangePassword** (`Boolean`, default: `false`)
-
-  - When `true`, the user should be forced to set a new password on the next login.
-  - Useful for first-time login or admin-enforced password resets.
-
-- **passwordResetToken** (`String | null`)
-
-  - Token used for password reset flows.
-  - Generated and validated by the backend.
-  - Not exposed via public APIs.
-  - May be `null` if there is no active reset request.
-
-- **passwordResetExpiresAt** (`Date | null`)
+  - Minimum length: **32 characters**.
+  - Must be kept secret.
+  - Should differ between environments (dev/staging/prod).
 
-  - Expiry timestamp for the `passwordResetToken`.
-  - Used to ensure that reset links are only valid for a limited time.
-  - May be `null` if there is no active reset request.
+Auth endpoints also require DB connectivity:
 
-- **createdAt** (`Date`, auto-generated)
+- `MONGODB_URI` (required)
 
-  - Timestamp when the user record was created.
+### 2.2 Optional variables
 
-- **updatedAt** (`Date`, auto-generated)
+- `SESSION_COOKIE_SECURE` (optional)
 
-  - Timestamp when the user record was last updated.
+  - Overrides the `Secure` cookie flag.
+  - Allowed values: `true` or `false`.
 
-### 3.2 Validation Rules & Invariants
+Default behavior:
 
-- `username` must be unique and is stored in lowercase.
-- `email` must be unique and is stored in lowercase.
-- `passwordHash` must be present for all users.
-- When `role = "branch"`, `branchId` must be a non-empty string.
-- For `role = "admin"` and `role = "dev"`, `branchId` is optional and usually `null`.
-- `passwordResetToken` and `passwordResetExpiresAt` should be consistent:
+- `Secure` cookie is enabled when `NODE_ENV === "production"`.
 
-  - If one is set, the other should also be set.
-  - Once a reset is completed or expired, both should be cleared.
+Local HTTP testing (e.g. `http://localhost:3000` with Docker + `next start`):
 
-### 3.3 Serialization Rules
+- Set `SESSION_COOKIE_SECURE=false` in your local `.env.docker`.
 
-When converting `User` documents to JSON or plain objects (e.g. in API responses), the following fields must be hidden:
+Staging/Production:
 
-- `passwordHash`
-- `passwordResetToken`
+- Keep `SESSION_COOKIE_SECURE` unset (or `true`) and run the app behind HTTPS.
 
-This ensures that sensitive information is not exposed via API responses or logs.
+### 2.3 Fail-fast environment validation
 
-### 3.4 Role Assignment & User Provisioning
+The repo provides centralized env validation:
 
-- Users are **created by an admin** (no public self-registration).
+- `lib/config/validateEnv.js` validates required env vars and basic sanity checks.
+- `scripts/validate-env.mjs` runs validation against `process.env`.
 
-- When a user is created:
+In Docker, run validation before starting the server:
 
-  - `role` is set by the admin.
-  - `branchId` is set by the admin and cannot be chosen or changed by the user.
-
-- For branch accounts, we typically create one or more users per branch with:
-
-  - `role = "branch"`
-  - `branchId` set to the respective branch identifier (e.g. `"NL01"`).
-
-- The user is provided with an initial password and is encouraged (or forced via `mustChangePassword`) to change it after the first login.
+```sh
+node scripts/validate-env.mjs && npm run start
+```
 
 ---
 
-## 4. Roles
+## 3. Roles
 
-### 4.1 `branch`
+### 3.1 `branch`
 
 - Represents a user who belongs to a specific branch/location.
 - Must have a valid `branchId` (e.g. `"NL01"`).
-- Intended access pattern (high-level):
+- Intended access pattern:
 
   - Can only access delivery notes for their own branch.
   - Cannot access other branches.
-  - No global configuration or system-wide administration.
 
-### 4.2 `admin`
+### 3.2 `admin`
 
-- System administrator.
+- Administrator account.
 - Typically not bound to any single branch (`branchId = null`).
-- Intended access pattern (high-level):
+- Intended access pattern:
 
   - Can access delivery notes across all branches.
-  - Can perform user administration (create/update users).
-  - Can perform configuration-level changes.
 
-### 4.3 `dev`
+### 3.3 `dev`
 
 - Development/engineering account.
-- Used for debugging, maintenance, and operational tooling.
 - Typically not bound to any single branch (`branchId = null`).
-- Intended access pattern (high-level):
+- Intended access pattern:
 
-  - Full or near-full access to the system.
-  - Can be used in development/staging environments.
-  - Production use should be limited and auditable.
+  - Full or near-full access.
 
 ---
 
-## 5. Authorization: Branch-Level RBAC
+## 4. Authorization: Branch-Level RBAC
 
-The backend enforces **Role-Based Access Control (RBAC)** on branch-related filesystem APIs.
+RBAC is enforced on branch-related filesystem APIs.
 
-### 5.1 Response Semantics
+### 4.1 Response semantics
 
 - **401 Unauthorized**: no valid session (`getSession()` returns `null`).
 
@@ -223,64 +140,30 @@ The backend enforces **Role-Based Access Control (RBAC)** on branch-related file
   { "error": "Forbidden" }
   ```
 
-> Note: Some legacy `400`/`500` messages are still returned in German (e.g. missing params, filesystem errors). We may normalize these later.
+### 4.2 Permission helpers
 
-### 5.2 Permission Helpers
-
-RBAC rules are implemented in `lib/auth/permissions.js`:
+RBAC rules live in `lib/auth/permissions.js`:
 
 - `canAccessBranch(session, branchId)`
-
-  - No session → `false`
-  - `role = "branch"` → `true` only if `session.branchId === branchId`
-  - `role = "admin" | "dev"` → `true` for any branch
-
 - `filterBranchesForSession(session, branchIds)`
 
-  - `role = "branch"` → returns only the user’s own branch (if present)
-  - `role = "admin" | "dev"` → returns all
+### 4.3 Protected endpoints
 
-### 5.3 Protected Filesystem APIs
-
-The following endpoints are protected and must be called only with a valid session:
+These endpoints require a valid session:
 
 - `GET /api/branches`
-
-  - Requires session (401 otherwise)
-  - `branch` role: returns only `[session.branchId]`
-  - `admin`/`dev`: returns all branches
-
 - `GET /api/branches/[branch]/years`
-
 - `GET /api/branches/[branch]/[year]/months`
-
 - `GET /api/branches/[branch]/[year]/[month]/days`
-
-  - Requires session (401 otherwise)
-  - Requires branch access (403 if not allowed)
-
 - `GET /api/files?branch=&year=&month=&day=`
 
-  - Requires session (401 otherwise)
-  - Requires branch access (403 if not allowed)
-
-Implementation pattern (high-level):
-
-1. `const session = await getSession()`
-2. If `!session` → return 401
-3. Extract requested branch (`params.branch` or `query.branch`)
-4. If `!canAccessBranch(session, requestedBranch)` → return 403
-5. Proceed with storage access and return data
-
 ---
 
-## 6. Sessions & Cookies
+## 5. Sessions & Cookies
 
 Sessions are implemented as signed JWTs stored in HTTP-only cookies.
 
-### 6.1 Session Payload Format
-
-A session payload has the following structure:
+### 5.1 Session payload
 
 ```json
 {
@@ -292,403 +175,57 @@ A session payload has the following structure:
 }
 ```
 
-- `userId` (string): MongoDB `_id` of the user.
-- `role` (string): One of `"branch"`, `"admin"`, `"dev"`.
-- `branchId` (string or `null`): Branch identifier for branch users, or `null` for admin/dev users.
-- `iat` (number): Issued-at timestamp (UNIX time).
-- `exp` (number): Expiration timestamp (UNIX time).
-
-The `iat` and `exp` fields are managed by the JWT library.
-
-### 6.2 JWT Signing
-
-- JWTs are signed using a symmetric secret (`SESSION_SECRET`).
-- Algorithm: `HS256` (HMAC using SHA-256).
-- Secret is defined via environment variable `SESSION_SECRET`.
-- Token lifetime:
-
-  - `SESSION_MAX_AGE_SECONDS = 60 * 60 * 8` (8 hours).
-  - Configured in `lib/auth/session.js`.
-
-### 6.3 Cookie Settings
-
-The session token is stored in an HTTP-only cookie with the following properties:
-
-- **Cookie name**: `auth_session`
-- **Attributes**:
-
-  - `httpOnly: true`
-  - `secure: process.env.NODE_ENV === "production"`
-  - `sameSite: "lax"`
-  - `path: "/"` (cookie is sent for all paths)
-  - `maxAge: 8 hours` (matching `SESSION_MAX_AGE_SECONDS`)
-
-Cookies are written and cleared using Next.js `cookies()` from `next/headers` inside `lib/auth/session.js`:
-
-- `createSession({ userId, role, branchId })`:
-
-  - Creates and signs a JWT.
-  - Sets the `auth_session` cookie.
-
-- `getSession()`:
-
-  - Reads the `auth_session` cookie.
-  - Verifies the JWT and returns `{ userId, role, branchId }` or `null`.
-  - If the token is invalid or expired, clears the cookie and returns `null`.
-
-- `destroySession()`:
-
-  - Clears the `auth_session` cookie by setting an empty value with `maxAge: 0`.
-
----
-
-## 7. Core Auth Endpoints
-
-### 7.1 `POST /api/auth/login`
-
-**Purpose**
-Authenticate a user using `username` and `password`, create a session, and set the session cookie.
-
-**Method & URL**
-
-- `POST /api/auth/login`
-
-**Request Body (JSON)**
-
-```json
-{
-	"username": "example.user",
-	"password": "plain-text-password"
-}
-```
-
-- `username` (string): Login name (case-insensitive).
-- `password` (string): Plaintext password entered by the user.
-
-**Behavior**
-
-1. Normalize `username`:
-
-   - Trim whitespace and convert to lowercase.
-
-2. Parse and validate request body:
-
-   - If body is missing or invalid JSON → `400 { "error": "Invalid request body" }`.
-   - If `username` or `password` is missing or empty → `400 { "error": "Missing username or password" }`.
-
-3. Connect to MongoDB.
-
-4. Look up the user in MongoDB by normalized `username`.
-
-   - If no user is found → `401 { "error": "Invalid credentials" }`.
-
-5. Verify the password using bcrypt:
-
-   - Compare provided `password` with `user.passwordHash`.
-   - If password does not match → `401 { "error": "Invalid credentials" }`.
-
-6. On success:
-
-   - Create a session payload `{ userId, role, branchId }`.
-
-   - Call `createSession({ userId, role, branchId })`:
-
-     - Signs a JWT with the session payload.
-     - Sets the `auth_session` HTTP-only cookie.
-
-   - Return `200 { "ok": true }`.
-
-**Possible Responses**
-
-- `200 OK`:
-
-  ```json
-  {
-  	"ok": true
-  }
-  ```
-
-- `400 Bad Request`:
-
-  ```json
-  {
-  	"error": "Invalid request body"
-  }
-  ```
-
-  or
-
-  ```json
-  {
-  	"error": "Missing username or password"
-  }
-  ```
-
-- `401 Unauthorized`:
-
-  ```json
-  {
-  	"error": "Invalid credentials"
-  }
-  ```
-
-- `500 Internal Server Error`:
-
-  ```json
-  {
-  	"error": "Internal server error"
-  }
-  ```
-
-### 7.2 `GET /api/auth/logout`
-
-**Purpose**
-Destroy the current session by clearing the session cookie.
-
-**Method & URL**
-
-- `GET /api/auth/logout`
-
-**Request**
-
-- No request body.
-- Uses the current session cookie (if present).
-
-**Behavior**
-
-1. Call `destroySession()`:
-
-   - Clears the `auth_session` cookie by setting an empty value with `maxAge: 0`.
-
-2. Return `200 { "ok": true }`.
-
-Logout is **idempotent**:
-
-- If the cookie does not exist, the endpoint still returns `{ "ok": true }`.
-
-**Responses**
-
-- `200 OK`:
-
-  ```json
-  {
-  	"ok": true
-  }
-  ```
-
-- `500 Internal Server Error` (if `destroySession` throws):
-
-  ```json
-  {
-  	"error": "Internal server error"
-  }
-  ```
-
----
-
-## 8. Password Management & Recovery (Planned)
-
-The database model is already prepared for password management and password recovery flows, but the respective endpoints may be implemented in a separate epic.
-
-### 8.1 Change Password
-
-**Endpoint**
-`POST /api/auth/change-password` (planned)
-
-**Purpose**
-Allow logged-in users to change their password by providing the current password and a new password.
-
-**Method & URL**
-
-- `POST /api/auth/change-password`
-
-**Authentication**
-
-- Requires a valid session (user must be logged in).
-
-**Request Body (JSON)**
-
-```json
-{
-	"currentPassword": "old-password",
-	"newPassword": "new-password"
-}
-```
-
-**Planned Behavior**
-
-1. Extract `userId` from the current session (`getSession()`).
-2. Load user from MongoDB.
-3. Verify `currentPassword` against `passwordHash` using bcrypt.
-4. If verification fails → return a generic error (e.g. `400` or `401` with `{ "error": "Invalid password" }`).
-5. Hash `newPassword` with bcrypt.
-6. Update `passwordHash` in the database.
-7. Optionally set `mustChangePassword = false`.
-8. Optionally update a `passwordChangedAt` field if introduced later.
-9. Return `{ "ok": true }`.
-
-### 8.2 Request Password Reset
-
-**Endpoint**
-`POST /api/auth/request-password-reset` (planned)
-
-**Purpose**
-Start the "forgot password" flow by sending a reset link to the user's email address.
-
-**Method & URL**
-
-- `POST /api/auth/request-password-reset`
-
-**Request Body (JSON)**
-
-```json
-{
-	"usernameOrEmail": "nl01@company.com"
-}
-```
-
-**Planned Behavior**
-
-1. Normalize the identifier (trim + lowercase).
-
-2. Try to find a user by `email` (and optionally by `username`).
-
-3. If no user is found:
-
-   - Do **not** reveal this to the caller.
-   - Return a generic success response (e.g. `{ "ok": true }`) to avoid user enumeration.
-
-4. If a user is found:
-
-   - Generate a secure random token (or a signed token).
-   - Store it in `passwordResetToken`.
-   - Set `passwordResetExpiresAt` to a timestamp in the near future (e.g. now + 30 minutes).
-   - Send an email to `user.email` containing a link like:
-
-     ```
-     https://<app-domain>/reset-password?token=<passwordResetToken>
-     ```
-
-5. Always return `{ "ok": true }`.
-
-### 8.3 Reset Password
-
-**Endpoint**
-`POST /api/auth/reset-password` (planned)
-
-**Purpose**
-Complete the password reset process using a valid reset token.
-
-**Method & URL**
-
-- `POST /api/auth/reset-password`
-
-**Request Body (JSON)**
-
-```json
-{
-	"token": "reset-token-from-email",
-	"newPassword": "new-password"
-}
-```
-
-**Planned Behavior**
-
-1. Find user by `passwordResetToken`.
-
-2. If no user is found → return a generic error (e.g. `{ "error": "Invalid or expired token" }`).
-
-3. Check that `passwordResetExpiresAt` is in the future.
-
-4. If the token has expired:
-
-   - Return a generic error.
-   - Clear `passwordResetToken` and `passwordResetExpiresAt`.
-
-5. If the token is valid:
-
-   - Hash `newPassword` with bcrypt.
-   - Update `passwordHash` in the database.
-   - Clear `passwordResetToken` and `passwordResetExpiresAt`.
-   - Optionally set `mustChangePassword = false`.
-
-6. Optionally invalidate other active sessions if a "global logout on password change" is implemented.
+### 5.2 JWT signing
 
-7. Return `{ "ok": true }`.
+- Algorithm: `HS256`.
+- Secret: `SESSION_SECRET`.
+- Token lifetime: `SESSION_MAX_AGE_SECONDS = 8 hours`.
 
-### 8.4 Email Sending
+### 5.3 Cookie settings
 
-Password reset emails will be sent using a mailer library (e.g. `nodemailer`), configured for the environment.
+Cookie name: `auth_session`
 
-Key points:
+Attributes:
 
-- Emails are sent to `user.email`.
+- `httpOnly: true`
+- `secure: resolved via NODE_ENV + optional SESSION_COOKIE_SECURE override`
+- `sameSite: "lax"`
+- `path: "/"`
+- `maxAge: 8 hours`
 
-- The content includes:
+Implementation lives in `lib/auth/session.js`:
 
-  - A short explanation of the password reset process.
-  - A one-time link containing the `passwordResetToken`.
-  - Information about the expiration time.
-
-- No confidential data (like passwords) is ever sent via email.
+- `createSession({ userId, role, branchId })`
+- `getSession()`
+- `destroySession()`
 
 ---
 
-## 9. Security Considerations
-
-1. **Never trust client-provided branch information.**
-
-   - The effective branch authorization is enforced using the **session payload** (`session.branchId`) and RBAC rules.
-   - Even if routes use `branch` parameters for URL structure, the backend enforces branch access based on the session.
+## 6. Core Auth Endpoints
 
-2. **Password handling.**
+### 6.1 `POST /api/auth/login`
 
-   - Always hash passwords using a strong algorithm (e.g. bcrypt with a reasonable cost factor).
-   - Never log plaintext passwords.
-   - Never expose `passwordHash` or `passwordResetToken` in API responses.
+Authenticate a user and set the session cookie.
 
-3. **Session security.**
+Responses:
 
-   - Use `httpOnly` cookies to protect the session token from JavaScript access.
-   - Use `secure` cookies in production.
-   - Use `sameSite: "lax"` or stricter unless cross-site needs are explicitly identified.
-   - Use a strong `SESSION_SECRET`, rotated when necessary.
+- `200 { "ok": true }`
+- `400 { "error": "Invalid request body" }`
+- `400 { "error": "Missing username or password" }`
+- `401 { "error": "Invalid credentials" }`
+- `500 { "error": "Internal server error" }`
 
-4. **Brute force and enumeration.**
+### 6.2 `GET /api/auth/logout`
 
-   - Login and password reset endpoints should:
+Clears the session cookie.
 
-     - Respond with generic error messages (e.g. “Invalid credentials”).
-     - Not leak information on whether a user exists.
-     - Optionally implement rate limiting or throttling.
-
-5. **Auditing and logging.**
-
-   - Sensitive operations (login failures, password changes, password reset requests) should be logged with appropriate details, without exposing secrets.
-   - Logs must not contain plaintext passwords or reset tokens.
+- Returns `200 { "ok": true }` on success.
+- Logout is idempotent.
 
 ---
 
-## 10. Future Work & Integration
-
-- **(Optional)** Add a `middleware.js` for frontend route protection (redirect unauthenticated users to login for certain pages).
-
-- Implement password management endpoints:
-
-  - `POST /api/auth/change-password`
-  - `POST /api/auth/request-password-reset`
-  - `POST /api/auth/reset-password`
-
-- Integrate an email provider using `nodemailer` or similar for password reset.
-
-- Build frontend UI for:
-
-  - Login
-  - Logout
-  - Change password
-  - “Forgot password” / reset password flows.
-
-- Optional improvements:
+## 7. Security Notes
 
-  - Normalize API error messages (language and structure) across all endpoints.
-  - Add auditing for admin actions and branch access.
+- Use HTTPS for real users (staging/prod).
+- Keep `SESSION_SECRET` secret and rotate when needed.
+- Local HTTP testing is supported via `SESSION_COOKIE_SECURE=false`.

+ 250 - 0
Docs/runbook.md

@@ -0,0 +1,250 @@
+<!-- --------------------------------------------------------------------------- -->
+
+<!-- Ordner: Docs -->
+
+<!-- Datei: runbook.md -->
+
+<!-- Relativer Pfad: Docs/runbook.md -->
+
+<!-- --------------------------------------------------------------------------- -->
+
+# Runbook: Local Development vs Server Deployment
+
+This runbook describes how to run the project locally (developer machine) and on the internal server.
+
+The goal is a **clean separation** between:
+
+- Local development (uses local NAS fixtures)
+- Server deployment (uses the real NAS mount at `/mnt/niederlassungen`)
+
+---
+
+## 1. Repository Files and Separation
+
+### 1.1 Compose files
+
+- `docker-compose.yml`
+
+  - Base compose file (server-like)
+  - Mounts the real NAS path: `/mnt/niederlassungen:/mnt/niederlassungen:ro`
+
+- `docker-compose.local.yml`
+
+  - Local override
+  - Mounts local fixtures: `./.local_nas:/mnt/niederlassungen:ro`
+
+### 1.2 Env files
+
+- Committed templates:
+
+  - `.env.docker.example`
+  - `.env.local.example`
+
+- Local runtime env (not committed):
+
+  - `.env.docker`
+
+- Server runtime env (not committed):
+
+  - `.env.server`
+
+The compose file uses:
+
+- `ENV_FILE` to select which env file is loaded into the `app` container.
+
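+For reference, the relevant excerpt from `docker-compose.yml` (falls back to `.env.docker` when `ENV_FILE` is unset):
+
+```yaml
+services:
+  app:
+    env_file:
+      - ${ENV_FILE:-.env.docker}
+```
+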
+---
+
+## 2. Local Development (Windows/macOS/Linux)
+
+### 2.1 Prerequisites
+
+- Docker Desktop installed and running.
+- Project checked out.
+
+### 2.2 Create local env file
+
+Copy the template:
+
+```bash
+cp .env.docker.example .env.docker
+```
+
+Then edit `.env.docker`:
+
+- Set a strong `SESSION_SECRET` (>= 32 characters)
+- For local HTTP testing add:
+
+```env
+SESSION_COOKIE_SECURE=false
+```
+
+### 2.3 Create local NAS fixtures
+
+Create a minimal NAS tree:
+
+```bash
+mkdir -p ./.local_nas/NL01/2024/10/23
+printf "dummy" > ./.local_nas/NL01/2024/10/23/test.pdf
+```
+
+### 2.4 Start the stack (local)
+
+Run with the local override:
+
+```bash
+docker compose -f docker-compose.yml -f docker-compose.local.yml up --build
+```
+
+### 2.5 Verify health
+
+```bash
+curl -s http://localhost:3000/api/health
+```
+
+Expected (example):
+
+- `db` is `ok`
+- `nas.entriesSample` contains `NL01`
+
+### 2.6 Create a test user in Mongo (manual)
+
+Generate a bcrypt hash (local):
+
+```bash
+node -e "const bcrypt=require('bcryptjs'); console.log(bcrypt.hashSync('secret-password', 10))"
+```
+
+Open Mongo shell inside the DB container:
+
+```bash
+docker exec -it rhl-lieferscheine-db mongosh -u root -p supersecret --authenticationDatabase admin
+```
+
+In `mongosh`:
+
+```js
+use rhl-lieferscheine
+
+show collections
+
+db.users.insertOne({
+  username: "branchuser",
+  email: "nl01@example.com",
+  passwordHash: "<PASTE_HASH_HERE>",
+  role: "branch",
+  branchId: "NL01",
+  createdAt: new Date(),
+  updatedAt: new Date()
+})
+```
+
+### 2.7 Login + call protected endpoints
+
+Login (stores cookie in `cookies.txt`):
+
+```bash
+curl -i -c cookies.txt \
+  -H "Content-Type: application/json" \
+  -d '{"username":"BranchUser","password":"secret-password"}' \
+  http://localhost:3000/api/auth/login
+```
+
+Call endpoints with cookie:
+
+```bash
+curl -i -b cookies.txt http://localhost:3000/api/branches
+curl -i -b cookies.txt http://localhost:3000/api/branches/NL01/years
+curl -i -b cookies.txt http://localhost:3000/api/branches/NL01/2024/months
+curl -i -b cookies.txt http://localhost:3000/api/branches/NL01/2024/10/days
+curl -i -b cookies.txt "http://localhost:3000/api/files?branch=NL01&year=2024&month=10&day=23"
+```
+
+RBAC negative test (expected 403):
+
+```bash
+curl -i -b cookies.txt http://localhost:3000/api/branches/NL02/years
+```
+
+Logout:
+
+```bash
+curl -i -b cookies.txt -c cookies.txt http://localhost:3000/api/auth/logout
+```
+
+### 2.8 Notes for Git Bash on Windows
+
+Git Bash may rewrite paths like `/mnt/niederlassungen`.
+
+If you need to run `docker exec ... ls /mnt/niederlassungen`, disable MSYS path conversion:
+
+```bash
+MSYS_NO_PATHCONV=1 docker exec -it rhl-lieferscheine-app ls -la /mnt/niederlassungen
+```
+
+---
+
+## 3. Server Deployment (internal server)
+
+### 3.1 Connect via SSH
+
+```bash
+ssh administrator@192.168.0.23
+```
+
+### 3.2 Prerequisites on the server
+
+- Docker and Docker Compose installed.
+- The real NAS share is mounted at:
+
+  - `/mnt/niederlassungen`
+
+- The mount is readable by Docker.
+
+### 3.3 Create server env file
+
+On the server (in the project folder), create `.env.server` based on the template:
+
+```bash
+cp .env.docker.example .env.server
+```
+
+Edit `.env.server`:
+
+- Set a strong `SESSION_SECRET`.
+- Keep `NODE_ENV=production`.
+- Do **not** set `SESSION_COOKIE_SECURE=false`.
+
+### 3.4 Start the stack on the server
+
+Use the base compose file only (no local override):
+
+```bash
+ENV_FILE=.env.server docker compose -f docker-compose.yml up -d --build
+```
+
+### 3.5 Verify
+
+On the server:
+
+```bash
+curl -s http://localhost:3000/api/health
+```
+
+Expected:
+
+- `db` is `ok`
+- `nas.entriesSample` contains real branch folders (`NLxx`)
+
+### 3.6 Logs and troubleshooting
+
+```bash
+docker compose -f docker-compose.yml logs --tail=200 app
+```
+
+---
+
+## 4. HTTPS Note (Future)
+
+For real users, the application should be served over HTTPS (reverse proxy / TLS termination).
+
+Local HTTP testing is supported via `SESSION_COOKIE_SECURE=false`.

+ 112 - 0
Docs/storage.md

@@ -0,0 +1,112 @@
+<!-- --------------------------------------------------------------------------- -->
+
+<!-- Ordner: Docs -->
+
+<!-- Datei: storage.md -->
+
+<!-- Relativer Pfad: Docs/storage.md -->
+
+<!-- --------------------------------------------------------------------------- -->
+
+# Storage Module (`lib/storage`)
+
+The `lib/storage` module is the **single source of truth** for reading delivery note PDFs from the NAS share.
+
+All code that needs to read from the NAS should go through this module instead of using Node.js `fs` directly. This keeps filesystem logic centralized and makes it easier to change conventions later.
+
+---
+
+## 1. High-Level Responsibilities
+
+- Resolve paths under the NAS root (`NAS_ROOT_PATH`).
+- Provide intention-revealing helpers:
+
+  - `listBranches()` → `['NL01', 'NL02', ...]`
+  - `listYears(branch)` → `['2023', '2024', ...]`
+  - `listMonths(branch, year)` → `['01', '02', ...]`
+  - `listDays(branch, year, month)` → `['01', '02', ...]`
+  - `listFiles(branch, year, month, day)` → `[{ name, relativePath }, ...]`
+
+- Enforce **read-only** behavior.
+- Use async filesystem APIs (`fs/promises`).
+
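+The sketch below illustrates these conventions for `listBranches()`. It is an approximation under the assumptions documented in this file (env read on demand, `@Recently-Snapshot` ignored, `NL<Number>` naming); the actual implementation may differ in detail:
+
+```js
+// Approximate sketch of listBranches(); not a verbatim copy of lib/storage.
+import { readdir } from "fs/promises";
+
+function getNasRoot() {
+	const root = process.env.NAS_ROOT_PATH;
+	if (!root) {
+		throw new Error("NAS_ROOT_PATH is not configured"); // fail fast (see section 2.1)
+	}
+	return root;
+}
+
+export async function listBranches() {
+	const entries = await readdir(getNasRoot(), { withFileTypes: true });
+
+	return entries
+		.filter((entry) => entry.isDirectory())
+		.map((entry) => entry.name)
+		.filter((name) => /^NL\d+$/i.test(name))
+		.sort((a, b) => parseInt(a.slice(2), 10) - parseInt(b.slice(2), 10));
+}
+```
+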
+---
+
+## 2. Environment Configuration
+
+### 2.1 `NAS_ROOT_PATH` (required)
+
+The module depends on:
+
+- `NAS_ROOT_PATH` — absolute Unix path where the NAS share is mounted **inside the app container**.
+
+Default/typical value:
+
+```env
+NAS_ROOT_PATH=/mnt/niederlassungen
+```
+
+Important:
+
+- `lib/storage` reads `process.env.NAS_ROOT_PATH` on demand and does not cache it at module load.
+- If `NAS_ROOT_PATH` is missing, `lib/storage` throws (fail fast).
+
+---
+
+## 3. Docker Mount Strategy (Local vs Server)
+
+The application code always expects the NAS path **inside the container** to be:
+
+- `/mnt/niederlassungen`
+
+Which host folder is mounted there is an environment concern:
+
+- **Server (`docker-compose.yml`)** mounts the real NAS:
+
+  ```yaml
+  volumes:
+    - /mnt/niederlassungen:/mnt/niederlassungen:ro
+  ```
+
+- **Local development (`docker-compose.local.yml`)** mounts a local fixture folder:
+
+  ```yaml
+  volumes:
+    - ./.local_nas:/mnt/niederlassungen:ro
+  ```
+
+This separation keeps code identical across environments while allowing safe local testing.
+
+---
+
+## 4. Directory Layout Assumptions
+
+`lib/storage` assumes the following structure under `NAS_ROOT_PATH`:
+
+```text
+NAS_ROOT_PATH/
+  @Recently-Snapshot/   # ignored
+  NL01/
+    2024/
+      10/
+        23/
+          file1.pdf
+          file2.pdf
+  ...
+```
+
+Rules:
+
+- Branch directories follow `NL<Number>` (e.g. `NL01`).
+- Year directories are 4-digit numeric (`2024`).
+- Month/day directories are numeric and normalized to **two digits**.
+- Only `.pdf` files are returned by `listFiles()`.
+
+---
+
+## 5. Error Handling
+
+`lib/storage` does not swallow errors:
+
+- If a folder does not exist or is not accessible, `fs.promises.readdir` throws.
+- API route handlers catch and convert errors into HTTP responses.

+ 9 - 2
app/api/auth/login/route.js

@@ -1,6 +1,7 @@
+// app/api/auth/login/route.js
 import bcrypt from "bcryptjs";
 import User from "@/models/user";
-import dbConnect from "@/lib/db";
+import { getDb } from "@/lib/db";
 import { createSession } from "@/lib/auth/session";
 
 /**
@@ -39,7 +40,8 @@ export async function POST(request) {
 
 		const normalizedUsername = username.trim().toLowerCase();
 
-		await dbConnect();
+		// Ensure DB (Mongoose) connection is established before using models.
+		await getDb();
 
 		const user = await User.findOne({ username: normalizedUsername }).exec();
 
@@ -47,6 +49,11 @@ export async function POST(request) {
 			return jsonResponse({ error: "Invalid credentials" }, 401);
 		}
 
+		// Defensive: never let missing/invalid passwordHash crash the endpoint.
+		if (typeof user.passwordHash !== "string" || !user.passwordHash) {
+			return jsonResponse({ error: "Invalid credentials" }, 401);
+		}
+
 		const passwordMatches = await bcrypt.compare(password, user.passwordHash);
 
 		if (!passwordMatches) {

+ 32 - 7
app/api/auth/login/route.test.js

@@ -1,9 +1,9 @@
+// app/api/auth/login/route.test.js
 import { describe, it, expect, vi, beforeEach } from "vitest";
 
 // 1) Mocks
-
 vi.mock("@/lib/db", () => ({
-	default: vi.fn(),
+	getDb: vi.fn(),
 }));
 
 vi.mock("@/models/user", () => ({
@@ -24,9 +24,8 @@ vi.mock("bcryptjs", () => {
 	};
 });
 
-// 2) Imports NACH den Mocks
-
-import dbConnect from "@/lib/db";
+// 2) Imports AFTER the mocks
+import { getDb } from "@/lib/db";
 import User from "@/models/user";
 import { createSession } from "@/lib/auth/session";
 import { compare as bcryptCompare } from "bcryptjs";
@@ -43,7 +42,7 @@ function createRequestStub(body) {
 describe("POST /api/auth/login", () => {
 	beforeEach(() => {
 		vi.clearAllMocks();
-		dbConnect.mockResolvedValue(undefined);
+		getDb.mockResolvedValue({}); // we only need it to "connect"
 	});
 
 	it("logs in successfully with correct credentials", async () => {
@@ -72,7 +71,7 @@ describe("POST /api/auth/login", () => {
 		expect(response.status).toBe(200);
 		expect(json).toEqual({ ok: true });
 
-		expect(dbConnect).toHaveBeenCalledTimes(1);
+		expect(getDb).toHaveBeenCalledTimes(1);
 		expect(User.findOne).toHaveBeenCalledWith({ username: "branchuser" });
 
 		expect(bcryptCompare).toHaveBeenCalledWith(
@@ -106,6 +105,32 @@ describe("POST /api/auth/login", () => {
 		expect(createSession).not.toHaveBeenCalled();
 	});
 
+	it("returns 401 when passwordHash is missing (defensive)", async () => {
+		User.findOne.mockReturnValue({
+			exec: vi.fn().mockResolvedValue({
+				_id: "507f1f77bcf86cd799439099",
+				username: "branchuser",
+				// passwordHash missing on purpose
+				role: "branch",
+				branchId: "NL01",
+			}),
+		});
+
+		const request = createRequestStub({
+			username: "branchuser",
+			password: "secret-password",
+		});
+
+		const response = await POST(request);
+		const json = await response.json();
+
+		expect(response.status).toBe(401);
+		expect(json).toEqual({ error: "Invalid credentials" });
+
+		expect(bcryptCompare).not.toHaveBeenCalled();
+		expect(createSession).not.toHaveBeenCalled();
+	});
+
 	it("returns 401 when password is incorrect", async () => {
 		const user = {
 			_id: "507f1f77bcf86cd799439012",

+ 2 - 1
app/api/auth/logout/route.js

@@ -1,3 +1,4 @@
+// app/api/auth/logout/route.js
 import { destroySession } from "@/lib/auth/session";
 
 /**
@@ -8,7 +9,7 @@ import { destroySession } from "@/lib/auth/session";
  */
 export async function GET() {
 	try {
-		destroySession();
+		await destroySession();
 
 		return new Response(JSON.stringify({ ok: true }), {
 			status: 200,

+ 0 - 2
app/page.js

@@ -1,10 +1,8 @@
 import { ModeToggle } from "@/components/ui/mode-toggle";
-import Image from "next/image";
 
 export default function Home() {
 	return (
 		<>
-			{" "}
 			<main className="flex min-h-screen flex-col items-center justify-center gap-4">
 				<h1 className="text-3xl font-bold">RHL Lieferscheine</h1>
 				<p className="text-muted-foreground">

+ 5 - 0
docker-compose.local.yml

@@ -0,0 +1,5 @@
+services:
+  app:
+    volumes:
+      # Local mount: fixture NAS folder
+      - ./.local_nas:/mnt/niederlassungen:ro

+ 19 - 2
docker-compose.yml

@@ -10,19 +10,36 @@ services:
       - db_data:/data/db
     ports:
       - "27017:27017"
+    healthcheck:
+      test:
+        [
+          "CMD",
+          "mongosh",
+          "--quiet",
+          "mongodb://root:supersecret@localhost:27017/admin?authSource=admin",
+          "--eval",
+          "db.adminCommand('ping').ok",
+        ]
+      interval: 10s
+      timeout: 5s
+      retries: 10
+      start_period: 20s
 
   app:
     build: .
     container_name: rhl-lieferscheine-app
     restart: unless-stopped
     env_file:
-      - .env.docker
+      - ${ENV_FILE:-.env.docker}
     depends_on:
-      - db
+      db:
+        condition: service_healthy
     ports:
       - "3000:3000"
     volumes:
+      # Server mount: real NAS
       - /mnt/niederlassungen:/mnt/niederlassungen:ro
+    command: sh -c "node scripts/validate-env.mjs && npm run start"
 
 volumes:
   db_data:

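Both compose files mount the branch folders read-only at /mnt/niederlassungen: the server file binds the real NAS, while docker-compose.local.yml (presumably combined via docker compose -f docker-compose.yml -f docker-compose.local.yml up, with ENV_FILE pointing at a local env file) swaps in the ./.local_nas fixture. Inside the app both cases look the same: a read-only directory addressed through NAS_ROOT_PATH. A minimal sketch, where the per-branch folder layout is an assumption rather than something shown in this diff:

// Sketch: read one branch folder below the read-only NAS mount.
// NAS_ROOT_PATH is the same value validateEnv checks (absolute path, no ".." segments);
// the <root>/<branchId> layout is assumed purely for illustration.
import { readdir } from "node:fs/promises";
import path from "node:path";

export async function listBranchFiles(branchId) {
	const root = process.env.NAS_ROOT_PATH || "/mnt/niederlassungen";
	// The volume is mounted ":ro", so the app can only ever read here.
	return readdir(path.join(root, branchId));
}
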
+ 29 - 19
lib/auth/session.js

@@ -1,3 +1,4 @@
+// lib/auth/session.js
 import { cookies } from "next/headers";
 import { SignJWT, jwtVerify } from "jose";
 
@@ -15,13 +16,25 @@ function getSessionSecretKey() {
 }
 
 /**
- * Create a signed session JWT and store it in a HTTP-only cookie.
+ * Resolve whether the session cookie should be marked as "Secure".
+ *
+ * Default:
+ * - Secure in production (`NODE_ENV=production`)
  *
- * @param {Object} params
- * @param {string} params.userId - MongoDB user id as string.
- * @param {string} params.role - User role ("branch" | "admin" | "dev").
- * @param {string|null} params.branchId - Branch id or null.
- * @returns {Promise<string>} The signed JWT.
+ * Override (useful for local HTTP testing):
+ * - SESSION_COOKIE_SECURE=false
+ * - SESSION_COOKIE_SECURE=true
+ */
+function resolveCookieSecureFlag() {
+	const raw = (process.env.SESSION_COOKIE_SECURE || "").trim().toLowerCase();
+	if (raw === "true") return true;
+	if (raw === "false") return false;
+
+	return process.env.NODE_ENV === "production";
+}
+
+/**
+ * Create a signed session JWT and store it in an HTTP-only cookie.
  */
 export async function createSession({ userId, role, branchId }) {
 	if (!userId || !role) {
@@ -40,11 +53,11 @@ export async function createSession({ userId, role, branchId }) {
 		.setExpirationTime(`${SESSION_MAX_AGE_SECONDS}s`)
 		.sign(getSessionSecretKey());
 
-	const cookieStore = cookies();
+	const cookieStore = await cookies();
 
 	cookieStore.set(SESSION_COOKIE_NAME, jwt, {
 		httpOnly: true,
-		secure: process.env.NODE_ENV === "production",
+		secure: resolveCookieSecureFlag(),
 		sameSite: "lax",
 		path: "/",
 		maxAge: SESSION_MAX_AGE_SECONDS,
@@ -55,12 +68,9 @@ export async function createSession({ userId, role, branchId }) {
 
 /**
  * Read the current session from the HTTP-only cookie.
- *
- * @returns {Promise<{ userId: string, role: string, branchId: string | null } | null>}
- *          The session payload, or null if no valid session exists.
  */
 export async function getSession() {
-	const cookieStore = cookies();
+	const cookieStore = await cookies();
 	const cookie = cookieStore.get(SESSION_COOKIE_NAME);
 
 	if (!cookie?.value) {
@@ -83,12 +93,12 @@ export async function getSession() {
 			role,
 			branchId: typeof branchId === "string" ? branchId : null,
 		};
-	} catch (error) {
-		// Invalid or expired token: clear the cookie for hygiene and return null
-		const store = cookies();
+	} catch {
+		// Invalid or expired token: clear cookie and return null
+		const store = await cookies();
 		store.set(SESSION_COOKIE_NAME, "", {
 			httpOnly: true,
-			secure: process.env.NODE_ENV === "production",
+			secure: resolveCookieSecureFlag(),
 			sameSite: "lax",
 			path: "/",
 			maxAge: 0,
@@ -101,12 +111,12 @@ export async function getSession() {
 /**
  * Destroy the current session by clearing the session cookie.
  */
-export function destroySession() {
-	const cookieStore = cookies();
+export async function destroySession() {
+	const cookieStore = await cookies();
 
 	cookieStore.set(SESSION_COOKIE_NAME, "", {
 		httpOnly: true,
-		secure: process.env.NODE_ENV === "production",
+		secure: resolveCookieSecureFlag(),
 		sameSite: "lax",
 		path: "/",
 		maxAge: 0,

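Because cookies() is now awaited, all three session helpers are async and every caller has to await them (the logout route above already does). A small sketch of the calling side, with handler names chosen for illustration; SESSION_COOKIE_SECURE=false is meant only for local plain-HTTP testing:

// Sketch: consuming the session helpers after the async change.
import { createSession, destroySession } from "@/lib/auth/session";

export async function onLoginSuccess(user) {
	// The cookie is Secure in production unless SESSION_COOKIE_SECURE=false is set
	// explicitly, e.g. when exercising the Docker stack over plain HTTP.
	await createSession({
		userId: String(user._id),
		role: user.role,
		branchId: user.branchId ?? null,
	});
}

export async function onLogout() {
	// Without the await the handler may respond before the cookie is cleared.
	await destroySession();
}
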
+ 4 - 4
lib/auth/session.test.js

@@ -41,7 +41,7 @@ import { __cookieStore } from "next/headers";
 describe("auth session utilities", () => {
 	beforeEach(() => {
 		__cookieStore.clear();
-		process.env.SESSION_SECRET = "test-session-secret";
+		process.env.SESSION_SECRET = "x".repeat(64);
 		process.env.NODE_ENV = "test";
 	});
 
@@ -123,7 +123,7 @@ describe("auth session utilities", () => {
 			branchId: "NL02",
 		});
 
-		destroySession();
+		await destroySession();
 
 		const store = __cookieStore.dump();
 		const cookie = store.get(SESSION_COOKIE_NAME);
@@ -133,8 +133,8 @@ describe("auth session utilities", () => {
 		expect(cookie.options.maxAge).toBe(0);
 	});
 
-	it("destroySession sets an empty cookie even if none existed before", () => {
-		destroySession();
+	it("destroySession sets an empty cookie even if none existed before", async () => {
+		await destroySession();
 
 		const store = __cookieStore.dump();
 		const cookie = store.get(SESSION_COOKIE_NAME);

+ 190 - 0
lib/config/validateEnv.js

@@ -0,0 +1,190 @@
+// lib/config/validateEnv.js
+export const REQUIRED_ENV_VARS = [
+	"MONGODB_URI",
+	"SESSION_SECRET",
+	"NAS_ROOT_PATH",
+];
+
+export const ALLOWED_NODE_ENVS = new Set(["development", "test", "production"]);
+export const MIN_SESSION_SECRET_LENGTH = 32;
+
+function isBlank(value) {
+	return value === undefined || value === null || String(value).trim() === "";
+}
+
+function normalizeString(value) {
+	return String(value).trim();
+}
+
+function normalizeUnixPath(value) {
+	let p = normalizeString(value);
+	if (p.length > 1) p = p.replace(/\/+$/, "");
+	return p;
+}
+
+function containsDotDotSegment(p) {
+	return /(^|\/)\.\.(\/|$)/.test(p);
+}
+
+function looksLikePlaceholderSecret(value) {
+	const s = normalizeString(value).toLowerCase();
+	return (
+		s.includes("change-me") ||
+		s.includes("changeme") ||
+		s.includes("replace-me") ||
+		s.includes("replace_this") ||
+		s === "secret" ||
+		s === "password"
+	);
+}
+
+function validateMongoUri(uri) {
+	return uri.startsWith("mongodb://") || uri.startsWith("mongodb+srv://");
+}
+
+function parsePort(value) {
+	const raw = normalizeString(value);
+	if (!/^\d+$/.test(raw)) return { ok: false, value: null };
+	const n = Number(raw);
+	if (!Number.isInteger(n) || n < 1 || n > 65535)
+		return { ok: false, value: null };
+	return { ok: true, value: n };
+}
+
+function buildEnvError(missing, invalid) {
+	const lines = [];
+	lines.push("Invalid environment configuration.");
+
+	if (missing.length > 0) {
+		lines.push("");
+		lines.push("Missing required environment variables:");
+		for (const key of missing) lines.push(`- ${key}`);
+	}
+
+	if (invalid.length > 0) {
+		lines.push("");
+		lines.push("Invalid environment variables:");
+		for (const item of invalid) lines.push(`- ${item.key}: ${item.message}`);
+	}
+
+	lines.push("");
+	lines.push(
+		"Tip: Copy and adjust the example env files (.env.local.example / .env.docker.example)."
+	);
+
+	const err = new Error(lines.join("\n"));
+	err.code = "ENV_INVALID";
+	err.missing = missing;
+	err.invalid = invalid;
+	return err;
+}
+
+export function validateEnv(env) {
+	const e = env ?? {};
+	const missing = [];
+	const invalid = [];
+
+	for (const key of REQUIRED_ENV_VARS) {
+		if (isBlank(e[key])) missing.push(key);
+	}
+
+	const mongodbUri = !isBlank(e.MONGODB_URI)
+		? normalizeString(e.MONGODB_URI)
+		: "";
+	if (mongodbUri && !validateMongoUri(mongodbUri)) {
+		invalid.push({
+			key: "MONGODB_URI",
+			message: 'must start with "mongodb://" or "mongodb+srv://"',
+		});
+	}
+
+	const sessionSecret = !isBlank(e.SESSION_SECRET)
+		? normalizeString(e.SESSION_SECRET)
+		: "";
+	if (sessionSecret) {
+		if (sessionSecret.length < MIN_SESSION_SECRET_LENGTH) {
+			invalid.push({
+				key: "SESSION_SECRET",
+				message: `must be at least ${MIN_SESSION_SECRET_LENGTH} characters long`,
+			});
+		}
+		if (looksLikePlaceholderSecret(sessionSecret)) {
+			invalid.push({
+				key: "SESSION_SECRET",
+				message:
+					"looks like a placeholder (replace it with a strong random secret)",
+			});
+		}
+	}
+
+	const nasRootPath = !isBlank(e.NAS_ROOT_PATH)
+		? normalizeUnixPath(e.NAS_ROOT_PATH)
+		: "";
+	if (nasRootPath) {
+		if (!nasRootPath.startsWith("/")) {
+			invalid.push({
+				key: "NAS_ROOT_PATH",
+				message: 'must be an absolute Unix path (starts with "/")',
+			});
+		}
+		if (containsDotDotSegment(nasRootPath)) {
+			invalid.push({
+				key: "NAS_ROOT_PATH",
+				message: 'must not contain ".." path segments',
+			});
+		}
+	}
+
+	const nodeEnvRaw = !isBlank(e.NODE_ENV)
+		? normalizeString(e.NODE_ENV)
+		: "development";
+	if (nodeEnvRaw && !ALLOWED_NODE_ENVS.has(nodeEnvRaw)) {
+		invalid.push({
+			key: "NODE_ENV",
+			message: 'must be one of "development", "test", "production"',
+		});
+	}
+
+	// Optional: cookie secure override
+	const cookieSecureRaw = !isBlank(e.SESSION_COOKIE_SECURE)
+		? normalizeString(e.SESSION_COOKIE_SECURE).toLowerCase()
+		: "";
+	if (
+		cookieSecureRaw &&
+		cookieSecureRaw !== "true" &&
+		cookieSecureRaw !== "false"
+	) {
+		invalid.push({
+			key: "SESSION_COOKIE_SECURE",
+			message: 'must be "true" or "false" if provided',
+		});
+	}
+
+	let port;
+	if (!isBlank(e.PORT)) {
+		const parsed = parsePort(e.PORT);
+		if (!parsed.ok) {
+			invalid.push({
+				key: "PORT",
+				message: "must be an integer between 1 and 65535",
+			});
+		} else {
+			port = parsed.value;
+		}
+	}
+
+	if (missing.length > 0 || invalid.length > 0) {
+		throw buildEnvError(missing, invalid);
+	}
+
+	const cfg = {
+		mongodbUri,
+		sessionSecret,
+		nasRootPath,
+		nodeEnv: nodeEnvRaw,
+	};
+
+	if (port !== undefined) cfg.port = port;
+
+	return cfg;
+}

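validateEnv rejects secrets shorter than 32 characters and anything that looks like a placeholder, so the easiest way to produce a compliant SESSION_SECRET is a handful of random bytes from Node's crypto module. A throwaway sketch (the file name is illustrative):

// generate-session-secret.mjs (illustrative name): prints a secret that passes
// the MIN_SESSION_SECRET_LENGTH check and cannot match the placeholder patterns,
// since hex output never contains strings like "change-me" or "secret".
import { randomBytes } from "node:crypto";

console.log(randomBytes(48).toString("hex")); // 96 hex characters
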
+ 148 - 0
lib/config/validateEnv.test.js

@@ -0,0 +1,148 @@
+// lib/config/validateEnv.test.js
+import { describe, it, expect } from "vitest";
+import { validateEnv, MIN_SESSION_SECRET_LENGTH } from "./validateEnv.js";
+
+function validSecret() {
+	return "x".repeat(MIN_SESSION_SECRET_LENGTH);
+}
+
+describe("validateEnv", () => {
+	it("returns normalized config for a valid env", () => {
+		const cfg = validateEnv({
+			MONGODB_URI: "mongodb://localhost:27017/rhl",
+			SESSION_SECRET: validSecret(),
+			NAS_ROOT_PATH: "/mnt/niederlassungen/",
+			PORT: "3000",
+		});
+
+		expect(cfg.mongodbUri).toBe("mongodb://localhost:27017/rhl");
+		expect(cfg.sessionSecret).toBe(validSecret());
+		expect(cfg.nasRootPath).toBe("/mnt/niederlassungen");
+		expect(cfg.nodeEnv).toBe("development");
+		expect(cfg.port).toBe(3000);
+	});
+
+	it("accepts optional SESSION_COOKIE_SECURE=false", () => {
+		expect(() =>
+			validateEnv({
+				MONGODB_URI: "mongodb://localhost:27017/rhl",
+				SESSION_SECRET: validSecret(),
+				NAS_ROOT_PATH: "/mnt/niederlassungen",
+				NODE_ENV: "production",
+				SESSION_COOKIE_SECURE: "false",
+			})
+		).not.toThrow();
+	});
+
+	it("rejects invalid SESSION_COOKIE_SECURE values", () => {
+		expect(() =>
+			validateEnv({
+				MONGODB_URI: "mongodb://localhost:27017/rhl",
+				SESSION_SECRET: validSecret(),
+				NAS_ROOT_PATH: "/mnt/niederlassungen",
+				SESSION_COOKIE_SECURE: "maybe",
+			})
+		).toThrow(/SESSION_COOKIE_SECURE/i);
+	});
+
+	it("throws with a clear error if required vars are missing", () => {
+		try {
+			validateEnv({});
+			throw new Error("Expected validateEnv to throw");
+		} catch (err) {
+			expect(err.code).toBe("ENV_INVALID");
+			expect(err.missing).toEqual([
+				"MONGODB_URI",
+				"SESSION_SECRET",
+				"NAS_ROOT_PATH",
+			]);
+			expect(String(err.message)).toContain(
+				"Missing required environment variables:"
+			);
+		}
+	});
+
+	it("rejects invalid MONGODB_URI schemes", () => {
+		expect(() =>
+			validateEnv({
+				MONGODB_URI: "http://localhost:27017/rhl",
+				SESSION_SECRET: validSecret(),
+				NAS_ROOT_PATH: "/mnt/niederlassungen",
+				NODE_ENV: "production",
+			})
+		).toThrow(/MONGODB_URI/i);
+	});
+
+	it("rejects too-short SESSION_SECRET", () => {
+		expect(() =>
+			validateEnv({
+				MONGODB_URI: "mongodb://localhost:27017/rhl",
+				SESSION_SECRET: "short-secret",
+				NAS_ROOT_PATH: "/mnt/niederlassungen",
+			})
+		).toThrow(/SESSION_SECRET/i);
+	});
+
+	it("rejects placeholder-like SESSION_SECRET even if long enough", () => {
+		const secret = `change-me-${"x".repeat(64)}`;
+
+		expect(() =>
+			validateEnv({
+				MONGODB_URI: "mongodb://localhost:27017/rhl",
+				SESSION_SECRET: secret,
+				NAS_ROOT_PATH: "/mnt/niederlassungen",
+			})
+		).toThrow(/placeholder/i);
+	});
+
+	it("rejects NAS_ROOT_PATH that is not absolute", () => {
+		expect(() =>
+			validateEnv({
+				MONGODB_URI: "mongodb://localhost:27017/rhl",
+				SESSION_SECRET: validSecret(),
+				NAS_ROOT_PATH: "mnt/niederlassungen",
+			})
+		).toThrow(/NAS_ROOT_PATH/i);
+	});
+
+	it('rejects NAS_ROOT_PATH containing ".." segments', () => {
+		expect(() =>
+			validateEnv({
+				MONGODB_URI: "mongodb://localhost:27017/rhl",
+				SESSION_SECRET: validSecret(),
+				NAS_ROOT_PATH: "/mnt/../etc",
+			})
+		).toThrow(/NAS_ROOT_PATH/i);
+	});
+
+	it("rejects invalid NODE_ENV values", () => {
+		expect(() =>
+			validateEnv({
+				MONGODB_URI: "mongodb://localhost:27017/rhl",
+				SESSION_SECRET: validSecret(),
+				NAS_ROOT_PATH: "/mnt/niederlassungen",
+				NODE_ENV: "staging",
+			})
+		).toThrow(/NODE_ENV/i);
+	});
+
+	it("rejects invalid PORT values", () => {
+		expect(() =>
+			validateEnv({
+				MONGODB_URI: "mongodb://localhost:27017/rhl",
+				SESSION_SECRET: validSecret(),
+				NAS_ROOT_PATH: "/mnt/niederlassungen",
+				PORT: "70000",
+			})
+		).toThrow(/PORT/i);
+
+		expect(() =>
+			validateEnv({
+				MONGODB_URI: "mongodb://localhost:27017/rhl",
+				SESSION_SECRET: validSecret(),
+				NAS_ROOT_PATH: "/mnt/niederlassungen",
+				PORT: "abc",
+			})
+		).toThrow(/PORT/i);
+	});
+});

+ 35 - 15
lib/db.js

@@ -1,28 +1,48 @@
 // lib/db.js
-import { MongoClient } from "mongodb";
+import mongoose from "mongoose";
 
-const uri = process.env.MONGODB_URI;
+// Reuse the connection across hot reloads in dev and across route handler invocations.
+const globalForMongoose = globalThis;
 
-let client;
-let clientPromise;
+const cached =
+	globalForMongoose.__mongooseCache ||
+	(globalForMongoose.__mongooseCache = { conn: null, promise: null });
+
+async function connectMongoose() {
+	const uri = process.env.MONGODB_URI;
 
-function getClientPromise() {
 	if (!uri) {
-		// Jetzt meckern wir erst beim tatsächlichen Zugriff auf die DB
-		throw new Error("MONGODB_URI ist nicht gesetzt (Env prüfen)");
+		throw new Error("MONGODB_URI environment variable is not set");
+	}
+
+	if (cached.conn) {
+		return cached.conn;
 	}
 
-	if (!clientPromise) {
-		// In Dev-Umgebungen könnte man global._mongoClientPromise nutzen;
-		// auf dem Server reicht ein einfacher Singleton.
-		client = new MongoClient(uri);
-		clientPromise = client.connect();
+	if (!cached.promise) {
+		cached.promise = mongoose
+			.connect(uri, {
+				// Fail fast if someone queries before we are connected
+				bufferCommands: false,
+			})
+			.then((m) => m);
 	}
 
-	return clientPromise;
+	cached.conn = await cached.promise;
+	return cached.conn;
 }
 
+/**
+ * Returns the native MongoDB db handle (from the active Mongoose connection).
+ * This also ensures the Mongoose connection is established.
+ */
 export async function getDb() {
-	const client = await getClientPromise();
-	return client.db();
+	await connectMongoose();
+
+	const db = mongoose.connection?.db;
+	if (!db) {
+		throw new Error("MongoDB connection is not ready");
+	}
+
+	return db;
 }

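The refactor keeps the public surface at getDb(), which now rides on a cached Mongoose connection instead of a raw MongoClient; Mongoose models such as User share the same underlying connection. A brief sketch of a consumer (the collection name is chosen for illustration):

// Sketch: a consumer of the refactored lib/db.js.
import { getDb } from "@/lib/db";

export async function countUsers() {
	// getDb() connects (or reuses the cached connection) and returns the
	// native MongoDB Db instance behind the Mongoose connection.
	const db = await getDb();
	return db.collection("users").countDocuments();
}
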
+ 2 - 2
lib/utils.js

@@ -1,6 +1,6 @@
 import { clsx } from "clsx";
-import { twMerge } from "tailwind-merge"
+import { twMerge } from "tailwind-merge";
 
 export function cn(...inputs) {
-  return twMerge(clsx(inputs));
+	return twMerge(clsx(inputs));
 }

+ 11 - 0
scripts/validate-env.mjs

@@ -0,0 +1,11 @@
+// scripts/validate-env.mjs
+import { validateEnv } from "../lib/config/validateEnv.js";
+
+try {
+	validateEnv(process.env);
+	// Keep output minimal, but explicit:
+	console.log("✅ Environment variables: OK");
+} catch (err) {
+	console.error(String(err?.message || err));
+	process.exit(1);
+}