Compare commits


No commits in common. "oss" and "v1.3.1" have entirely different histories.
oss ... v1.3.1

300 changed files with 4711 additions and 12595 deletions


@ -1,35 +0,0 @@
version: 2
updates:
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "daily"
groups:
dev-patch-updates:
dependency-type: "development"
update-types:
- "patch"
dev-minor-updates:
dependency-type: "development"
update-types:
- "minor"
prod-patch-updates:
dependency-type: "production"
update-types:
- "patch"
prod-minor-updates:
dependency-type: "production"
update-types:
- "minor"
- package-ecosystem: "docker"
directory: "/"
schedule:
interval: "daily"
groups:
patch-updates:
update-types:
- "patch"
minor-updates:
update-types:
- "minor"


@ -1,49 +0,0 @@
name: Run Tests
on:
pull_request:
branches:
- main
- dev
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: '20'
- name: Copy config file
run: cp config/config.example.yml config/config.yml
- name: Install dependencies
run: npm ci
- name: Generate database migrations
run: npm run db:sqlite:generate
- name: Apply database migrations
run: npm run db:sqlite:push
- name: Start app in background
run: nohup npm run dev &
- name: Wait for app availability
run: |
for i in {1..5}; do
if curl --silent --fail http://localhost:3002/auth/login; then
echo "App is up"
exit 0
fi
echo "Waiting for the app... attempt $i"
sleep 5
done
echo "App failed to start"
exit 1
- name: Build Docker image
run: make build


@ -2,17 +2,14 @@ FROM node:20-alpine AS builder
WORKDIR /app
# COPY package.json package-lock.json ./
COPY package*.json ./
RUN npm ci
COPY package.json package-lock.json ./
RUN npm install
COPY . .
RUN echo 'export * from "./sqlite";' > server/db/index.ts
RUN npx drizzle-kit generate --dialect sqlite --schema ./server/db/schemas/ --out init
RUN npx drizzle-kit generate --dialect sqlite --schema ./server/db/sqlite/schema.ts --out init
RUN npm run build:sqlite
RUN npm run build
FROM node:20-alpine AS runner
@ -22,8 +19,8 @@ WORKDIR /app
RUN apk add --no-cache curl
# COPY package.json package-lock.json ./
COPY package*.json ./
RUN npm ci --omit=dev && npm cache clean --force
COPY package.json ./
RUN npm install --omit=dev && npm cache clean --force
COPY --from=builder /app/.next/standalone ./
COPY --from=builder /app/.next/static ./.next/static
@ -34,4 +31,4 @@ COPY server/db/names.json ./dist/names.json
COPY public ./public
CMD ["npm", "run", "start:sqlite"]
CMD ["npm", "start"]


@ -1,37 +0,0 @@
FROM node:20-alpine AS builder
WORKDIR /app
# COPY package.json package-lock.json ./
COPY package*.json ./
RUN npm ci
COPY . .
RUN echo 'export * from "./pg";' > server/db/index.ts
RUN npx drizzle-kit generate --dialect postgresql --schema ./server/db/pg/schema.ts --out init
RUN npm run build:pg
FROM node:20-alpine AS runner
WORKDIR /app
# Curl used for the health checks
RUN apk add --no-cache curl
# COPY package.json package-lock.json ./
COPY package*.json ./
RUN npm ci --omit=dev && npm cache clean --force
COPY --from=builder /app/.next/standalone ./
COPY --from=builder /app/.next/static ./.next/static
COPY --from=builder /app/dist ./dist
COPY --from=builder /app/init ./dist/init
COPY server/db/names.json ./dist/names.json
COPY public ./public
CMD ["npm", "run", "start:pg"]


@ -1,3 +1,5 @@
Copyright (c) 2025 Fossorial, LLC.
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007


@ -1,14 +1,10 @@
.PHONY: build build-release build-arm build-x86 test clean
build-release:
@if [ -z "$(tag)" ]; then \
echo "Error: tag is required. Usage: make build-release tag=<tag>"; \
echo "Error: tag is required. Usage: make build-all tag=<tag>"; \
exit 1; \
fi
docker buildx build --platform linux/arm64,linux/amd64 -t fosrl/pangolin:latest -f Dockerfile --push .
docker buildx build --platform linux/arm64,linux/amd64 -t fosrl/pangolin:$(tag) -f Dockerfile --push .
docker buildx build --platform linux/arm64,linux/amd64 -t fosrl/pangolin:postgresql-latest -f Dockerfile.pg --push .
docker buildx build --platform linux/arm64,linux/amd64 -t fosrl/pangolin:postgresql-$(tag) -f Dockerfile.pg --push .
build-arm:
docker buildx build --platform linux/arm64 -t fosrl/pangolin:latest .


@ -1,13 +1,15 @@
<div align="center">
<h2>
<picture>
<source media="(prefers-color-scheme: dark)" srcset="public/logo/word_mark_white.png">
<img alt="Pangolin Logo" src="public/logo/word_mark_black.png" width="250">
</picture>
</h2>
<h2 align="center"><a href="https://fossorial.io"><img alt="pangolin" src="public/logo//word_mark.png" width="400" /></a></h2>
[![Documentation](https://img.shields.io/badge/docs-latest-blue.svg?style=flat-square)](https://docs.fossorial.io/)
[![Docker](https://img.shields.io/docker/pulls/fosrl/pangolin?style=flat-square)](https://hub.docker.com/r/fosrl/pangolin)
![Stars](https://img.shields.io/github/stars/fosrl/pangolin?style=flat-square)
[![Discord](https://img.shields.io/discord/1325658630518865980?logo=discord&style=flat-square)](https://discord.gg/HCJR8Xhme4)
[![Youtube](https://img.shields.io/badge/YouTube-red?logo=youtube&logoColor=white&style=flat-square)](https://www.youtube.com/@fossorial-app)
</div>
<h4 align="center">Tunneled Reverse Proxy Server with Access Control</h4>
<h3 align="center">Tunneled Mesh Reverse Proxy Server with Access Control</h3>
<div align="center">
_Your own self-hosted zero trust tunnel._
@ -28,12 +30,6 @@ _Your own self-hosted zero trust tunnel._
Contact Us
</a>
</h5>
[![Docker](https://img.shields.io/docker/pulls/fosrl/pangolin?style=flat-square)](https://hub.docker.com/r/fosrl/pangolin)
![Stars](https://img.shields.io/github/stars/fosrl/pangolin?style=flat-square)
[![Discord](https://img.shields.io/discord/1325658630518865980?logo=discord&style=flat-square)](https://discord.gg/HCJR8Xhme4)
[![Youtube](https://img.shields.io/badge/YouTube-red?logo=youtube&logoColor=white&style=flat-square)](https://www.youtube.com/@fossorial-app)
</div>
Pangolin is a self-hosted tunneled reverse proxy server with identity and access control, designed to securely expose private resources on distributed networks. Acting as a central hub, it connects isolated networks — even those behind restrictive firewalls — through encrypted tunnels, enabling easy access to remote services without opening ports.
@ -90,7 +86,7 @@ will be reimplemented under the AGPL license.
### Modular Design
- Extend functionality with existing [Traefik](https://github.com/traefik/traefik) plugins, such as [CrowdSec](https://plugins.traefik.io/plugins/6335346ca4caa9ddeffda116/crowdsec-bouncer-traefik-plugin) and [Geoblock](https://github.com/PascalMinder/geoblock).
- Extend functionality with existing [Traefik](https://github.com/traefik/traefik) plugins, such as [CrowdSec](https://plugins.traefik.io/plugins/6335346ca4caa9ddeffda116/crowdsec-bouncer-traefik-plugin) and [Geoblock](github.com/PascalMinder/geoblock).
- **Automatically install and configure Crowdsec via Pangolin's installer script.**
- Attach as many sites to the central server as you wish.
@ -103,19 +99,19 @@ will be reimplemented under the AGPL license.
- Deploy the Docker Compose stack onto a VPS hosted on a cloud platform like RackNerd, Amazon EC2, DigitalOcean Droplet, or similar. There are many cheap VPS hosting options available to suit your needs.
> [!TIP]
> Many of our users have had a great experience with [RackNerd](https://my.racknerd.com/aff.php?aff=13788). Depending on promotions, you can get a [**VPS with 1 vCPU, 1GB RAM, and ~20GB SSD for just around $12/year**](https://my.racknerd.com/aff.php?aff=13788&pid=912). That's a great deal!
> Many of our users have had a great experience with [RackNerd](https://my.racknerd.com/aff.php?aff=13788). Depending on promotions, you can likely get a **VPS with 1 vCPU, 1GB RAM, and ~20GB SSD for just around $12/year**. That's a great deal!
> We are part of the [RackNerd](https://my.racknerd.com/aff.php?aff=13788) affiliate program, so if you purchase through [our link](https://my.racknerd.com/aff.php?aff=13788), we receive a small commission which helps us maintain the project and keep it free for everyone.
1. **Domain Configuration**:
2. **Domain Configuration**:
- Point your domain name to the VPS and configure Pangolin with your preferred settings.
2. **Connect Private Sites**:
3. **Connect Private Sites**:
- Install Newt or use another WireGuard client on private sites.
- Automatically establish a connection from these sites to the central server.
3. **Expose Resources**:
4. **Expose Resources**:
- Add resources to the central server and configure access control rules.
- Access these resources securely from anywhere.


@ -35,7 +35,7 @@ services:
- 80:80 # Port for traefik because of the network_mode
traefik:
image: traefik:v3.4.0
image: traefik:v3.3.3
container_name: traefik
restart: unless-stopped
network_mode: service:gerbil # Ports appear on the gerbil service


@ -4,7 +4,7 @@ import path from "path";
export default defineConfig({
dialect: "sqlite",
schema: path.join("server", "db", "sqlite", "schema.ts"),
schema: path.join("server", "db", "schemas"),
out: path.join("server", "migrations"),
verbose: true,
dbCredentials: {


@ -1,12 +0,0 @@
import { defineConfig } from "drizzle-kit";
import path from "path";
export default defineConfig({
dialect: "postgresql",
schema: path.join("server", "db", "pg", "schema.ts"),
out: path.join("server", "migrations"),
verbose: true,
dbCredentials: {
url: process.env.DATABASE_URL as string
}
});


@ -1,12 +1,9 @@
import tseslint from 'typescript-eslint';
export default tseslint.config(
tseslint.configs.recommended,
// eslint.config.js
export default [
{
files: ["**/*.ts", "**/*.tsx", "**/*.js", "**/*.jsx"],
rules: {
semi: "error",
"prefer-const": "error"
}
}
);
];


@ -26,7 +26,7 @@ server:
cors:
origins: ["https://{{.DashboardDomain}}"]
methods: ["GET", "POST", "PUT", "DELETE", "PATCH"]
allowed_headers: ["X-CSRF-Token", "Content-Type"]
headers: ["X-CSRF-Token", "Content-Type"]
credentials: false
traefik:


@ -21,10 +21,6 @@ experimental:
log:
level: "INFO"
format: "json" # Log format changed to json for better parsing
maxSize: 100
maxBackups: 3
maxAge: 3
compress: true
accessLog: # We enable access logs as json
filePath: "/var/log/traefik/access.log"


@ -35,7 +35,7 @@ services:
- 80:80 # Port for traefik because of the network_mode
{{end}}
traefik:
image: traefik:v3.4.1
image: traefik:v3.3.6
container_name: traefik
restart: unless-stopped
{{if .InstallGerbil}}
@ -58,4 +58,4 @@ services:
networks:
default:
driver: bridge
name: pangolin
name: pangolin


@ -18,10 +18,6 @@ experimental:
log:
level: "INFO"
format: "common"
maxSize: 100
maxBackups: 3
maxAge: 3
compress: true
certificatesResolvers:
letsencrypt:


@ -9,7 +9,6 @@ import (
"io/fs"
"os"
"os/exec"
"os/user"
"path/filepath"
"runtime"
"strings"
@ -18,7 +17,6 @@ import (
"time"
"unicode"
"math/rand"
"strconv"
"golang.org/x/term"
)
@ -59,31 +57,22 @@ type Config struct {
func main() {
reader := bufio.NewReader(os.Stdin)
// check if docker is not installed and the user is root
if !isDockerInstalled() {
if os.Geteuid() != 0 {
fmt.Println("Docker is not installed. Please install Docker manually or run this installer as root.")
os.Exit(1)
}
}
// check if the user is in the docker group (linux only)
if !isUserInDockerGroup() {
fmt.Println("You are not in the docker group.")
fmt.Println("The installer will not be able to run docker commands without running it as root.")
// check if the user is root
if os.Geteuid() != 0 {
fmt.Println("This script must be run as root")
os.Exit(1)
}
var config Config
config.DoCrowdsecInstall = false
config.Secret = generateRandomSecretKey()
// check if there is already a config file
if _, err := os.Stat("config/config.yml"); err != nil {
config = collectUserInput(reader)
loadVersions(&config)
config.DoCrowdsecInstall = false
config.Secret = generateRandomSecretKey()
if err := createConfigFiles(config); err != nil {
fmt.Printf("Error creating config files: %v\n", err)
os.Exit(1)
@ -94,27 +83,6 @@ func main() {
if !isDockerInstalled() && runtime.GOOS == "linux" {
if readBool(reader, "Docker is not installed. Would you like to install it?", true) {
installDocker()
// try to start docker service but ignore errors
if err := startDockerService(); err != nil {
fmt.Println("Error starting Docker service:", err)
} else {
fmt.Println("Docker service started successfully!")
}
// wait 10 seconds for docker to start checking if docker is running every 2 seconds
fmt.Println("Waiting for Docker to start...")
for i := 0; i < 5; i++ {
if isDockerRunning() {
fmt.Println("Docker is running!")
break
}
fmt.Println("Docker is not running yet, waiting...")
time.Sleep(2 * time.Second)
}
if !isDockerRunning() {
fmt.Println("Docker is still not running after 10 seconds. Please check the installation.")
os.Exit(1)
}
fmt.Println("Docker installed successfully!")
}
}
@ -429,7 +397,7 @@ func installDocker() error {
return fmt.Errorf("failed to detect Linux distribution: %v", err)
}
osRelease := string(output)
// Detect system architecture
archCmd := exec.Command("uname", "-m")
archOutput, err := archCmd.Output()
@ -437,7 +405,7 @@ func installDocker() error {
return fmt.Errorf("failed to detect system architecture: %v", err)
}
arch := strings.TrimSpace(string(archOutput))
// Map architecture to Docker's architecture naming
var dockerArch string
switch arch {
@ -470,31 +438,11 @@ func installDocker() error {
apt-get install -y docker-ce docker-ce-cli containerd.io docker-compose-plugin
`, dockerArch))
case strings.Contains(osRelease, "ID=fedora"):
// Detect Fedora version to handle DNF 5 changes
versionCmd := exec.Command("bash", "-c", "grep VERSION_ID /etc/os-release | cut -d'=' -f2 | tr -d '\"'")
versionOutput, err := versionCmd.Output()
var fedoraVersion int
if err == nil {
if v, parseErr := strconv.Atoi(strings.TrimSpace(string(versionOutput))); parseErr == nil {
fedoraVersion = v
}
}
// Use appropriate DNF syntax based on version
var repoCmd string
if fedoraVersion >= 41 {
// DNF 5 syntax for Fedora 41+
repoCmd = "dnf config-manager addrepo --from-repofile=https://download.docker.com/linux/fedora/docker-ce.repo"
} else {
// DNF 4 syntax for Fedora < 41
repoCmd = "dnf config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo"
}
installCmd = exec.Command("bash", "-c", fmt.Sprintf(`
installCmd = exec.Command("bash", "-c", `
dnf -y install dnf-plugins-core &&
%s &&
dnf config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo &&
dnf install -y docker-ce docker-ce-cli containerd.io docker-compose-plugin
`, repoCmd))
`)
case strings.Contains(osRelease, "ID=opensuse") || strings.Contains(osRelease, "ID=\"opensuse-"):
installCmd = exec.Command("bash", "-c", `
zypper install -y docker docker-compose &&
@ -518,26 +466,11 @@ func installDocker() error {
default:
return fmt.Errorf("unsupported Linux distribution")
}
installCmd.Stdout = os.Stdout
installCmd.Stderr = os.Stderr
return installCmd.Run()
}
func startDockerService() error {
if runtime.GOOS == "linux" {
cmd := exec.Command("systemctl", "enable", "--now", "docker")
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
return cmd.Run()
} else if runtime.GOOS == "darwin" {
// On macOS, Docker is usually started via the Docker Desktop application
fmt.Println("Please start Docker Desktop manually on macOS.")
return nil
}
return fmt.Errorf("unsupported operating system for starting Docker service")
}
func isDockerInstalled() bool {
cmd := exec.Command("docker", "--version")
if err := cmd.Run(); err != nil {
@ -546,43 +479,6 @@ func isDockerInstalled() bool {
return true
}
func isUserInDockerGroup() bool {
if runtime.GOOS == "darwin" {
// Docker group is not applicable on macOS
// So we assume that the user can run Docker commands
return true
}
if os.Geteuid() == 0 {
return true // Root user can run Docker commands anyway
}
// Check if the current user is in the docker group
if dockerGroup, err := user.LookupGroup("docker"); err == nil {
if currentUser, err := user.Current(); err == nil {
if currentUserGroupIds, err := currentUser.GroupIds(); err == nil {
for _, groupId := range currentUserGroupIds {
if groupId == dockerGroup.Gid {
return true
}
}
}
}
}
// Eventually, if any of the checks fail, we assume the user cannot run Docker commands
return false
}
// isDockerRunning checks if the Docker daemon is running by using the `docker info` command.
func isDockerRunning() bool {
cmd := exec.Command("docker", "info")
if err := cmd.Run(); err != nil {
return false
}
return true
}
// executeDockerComposeCommandWithArgs executes the appropriate docker command with arguments supplied
func executeDockerComposeCommandWithArgs(args ...string) error {
var cmd *exec.Cmd
@ -723,4 +619,4 @@ func generateRandomSecretKey() string {
b[i] = charset[seededRand.Intn(len(charset))]
}
return string(b)
}
}


@ -1,7 +1,7 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
eslint: {
ignoreDuringBuilds: true
ignoreDuringBuilds: true,
},
output: "standalone"
};

package-lock.json (generated, 6126 lines changed): file diff suppressed because it is too large.


@ -12,108 +12,100 @@
"license": "SEE LICENSE IN LICENSE AND README.md",
"scripts": {
"dev": "NODE_ENV=development ENVIRONMENT=dev tsx watch server/index.ts",
"db:pg:generate": "drizzle-kit generate --config=./drizzle.pg.config.ts",
"db:sqlite:generate": "drizzle-kit generate --config=./drizzle.sqlite.config.ts",
"db:pg:push": "npx tsx server/db/pg/migrate.ts",
"db:sqlite:push": "npx tsx server/db/sqlite/migrate.ts",
"db:sqlite:studio": "drizzle-kit studio --config=./drizzle.sqlite.config.ts",
"db:pg:studio": "drizzle-kit studio --config=./drizzle.pg.config.ts",
"db:clear-migrations": "rm -rf server/migrations",
"build:sqlite": "mkdir -p dist && next build && node esbuild.mjs -e server/index.ts -o dist/server.mjs && node esbuild.mjs -e server/setup/migrationsSqlite.ts -o dist/migrations.mjs",
"build:pg": "mkdir -p dist && next build && node esbuild.mjs -e server/index.ts -o dist/server.mjs && node esbuild.mjs -e server/setup/migrationsPg.ts -o dist/migrations.mjs",
"start:sqlite": "DB_TYPE=sqlite NODE_OPTIONS=--enable-source-maps NODE_ENV=development ENVIRONMENT=prod sh -c 'node dist/migrations.mjs && node dist/server.mjs'",
"start:pg": "DB_TYPE=pg NODE_OPTIONS=--enable-source-maps NODE_ENV=development ENVIRONMENT=prod sh -c 'node dist/migrations.mjs && node dist/server.mjs'",
"db:generate": "drizzle-kit generate",
"db:push": "npx tsx server/db/migrate.ts",
"db:studio": "drizzle-kit studio",
"build": "mkdir -p dist && next build && node esbuild.mjs -e server/index.ts -o dist/server.mjs && node esbuild.mjs -e server/setup/migrations.ts -o dist/migrations.mjs",
"start": "NODE_OPTIONS=--enable-source-maps NODE_ENV=development ENVIRONMENT=prod sh -c 'node dist/migrations.mjs && node dist/server.mjs'",
"email": "email dev --dir server/emails/templates --port 3005"
},
"dependencies": {
"@asteasolutions/zod-to-openapi": "^7.3.2",
"@asteasolutions/zod-to-openapi": "^7.3.0",
"@hookform/resolvers": "3.9.1",
"@node-rs/argon2": "^2.0.2",
"@node-rs/argon2": "2.0.2",
"@oslojs/crypto": "1.0.1",
"@oslojs/encoding": "1.1.0",
"@radix-ui/react-avatar": "1.1.10",
"@radix-ui/react-checkbox": "1.3.2",
"@radix-ui/react-collapsible": "1.1.11",
"@radix-ui/react-dialog": "1.1.14",
"@radix-ui/react-dropdown-menu": "2.1.15",
"@radix-ui/react-avatar": "1.1.2",
"@radix-ui/react-checkbox": "1.1.3",
"@radix-ui/react-collapsible": "1.1.2",
"@radix-ui/react-dialog": "1.1.4",
"@radix-ui/react-dropdown-menu": "2.1.4",
"@radix-ui/react-icons": "1.3.2",
"@radix-ui/react-label": "2.1.7",
"@radix-ui/react-popover": "1.1.14",
"@radix-ui/react-progress": "^1.1.7",
"@radix-ui/react-radio-group": "1.3.7",
"@radix-ui/react-scroll-area": "^1.2.9",
"@radix-ui/react-select": "2.2.5",
"@radix-ui/react-separator": "1.1.7",
"@radix-ui/react-slot": "1.2.3",
"@radix-ui/react-switch": "1.2.5",
"@radix-ui/react-tabs": "1.1.12",
"@radix-ui/react-toast": "1.2.14",
"@react-email/components": "0.0.41",
"@react-email/render": "^1.1.2",
"@react-email/tailwind": "1.0.5",
"@radix-ui/react-label": "2.1.1",
"@radix-ui/react-popover": "1.1.4",
"@radix-ui/react-progress": "^1.1.4",
"@radix-ui/react-radio-group": "1.2.2",
"@radix-ui/react-select": "2.1.4",
"@radix-ui/react-separator": "1.1.1",
"@radix-ui/react-slot": "1.1.1",
"@radix-ui/react-switch": "1.1.2",
"@radix-ui/react-tabs": "1.1.2",
"@radix-ui/react-toast": "1.2.4",
"@react-email/components": "0.0.36",
"@react-email/render": "^1.0.6",
"@react-email/tailwind": "1.0.4",
"@tailwindcss/forms": "^0.5.10",
"@tanstack/react-table": "8.21.3",
"arctic": "^3.7.0",
"axios": "1.9.0",
"@tanstack/react-table": "8.20.6",
"arctic": "^3.6.0",
"axios": "1.8.4",
"better-sqlite3": "11.7.0",
"canvas-confetti": "1.9.3",
"class-variance-authority": "0.7.1",
"clsx": "2.1.1",
"cmdk": "1.1.1",
"cmdk": "1.0.4",
"cookie": "^1.0.2",
"cookie-parser": "1.4.7",
"cookies": "^0.9.1",
"cors": "2.8.5",
"crypto-js": "^4.2.0",
"drizzle-orm": "0.38.3",
"eslint": "9.28.0",
"eslint-config-next": "15.3.3",
"eslint": "9.17.0",
"eslint-config-next": "15.1.3",
"express": "4.21.2",
"express-rate-limit": "7.5.0",
"glob": "11.0.2",
"helmet": "8.1.0",
"glob": "11.0.0",
"helmet": "8.0.0",
"http-errors": "2.0.0",
"i": "^0.3.7",
"input-otp": "1.4.2",
"input-otp": "1.4.1",
"jmespath": "^0.16.0",
"js-yaml": "4.1.0",
"jsonwebtoken": "^9.0.2",
"lucide-react": "0.511.0",
"lucide-react": "0.469.0",
"moment": "2.30.1",
"next": "15.3.3",
"next-themes": "0.4.6",
"next": "15.2.4",
"next-themes": "0.4.4",
"node-cache": "5.1.2",
"node-fetch": "3.3.2",
"nodemailer": "6.9.16",
"npm": "^11.4.1",
"npm": "^11.2.0",
"oslo": "1.2.1",
"pg": "^8.16.0",
"qrcode.react": "4.2.0",
"react": "19.1.0",
"react-dom": "19.1.0",
"react": "19.0.0",
"react-dom": "19.0.0",
"react-easy-sort": "^1.6.0",
"react-hook-form": "7.56.4",
"react-hook-form": "7.54.2",
"react-icons": "^5.5.0",
"rebuild": "0.1.2",
"semver": "7.7.2",
"semver": "7.6.3",
"swagger-ui-express": "^5.0.1",
"tailwind-merge": "2.6.0",
"tw-animate-css": "^1.3.3",
"tw-animate-css": "^1.2.5",
"uuid": "^11.1.0",
"vaul": "1.1.2",
"winston": "3.17.0",
"winston-daily-rotate-file": "5.0.0",
"ws": "8.18.2",
"zod": "3.25.56",
"zod-validation-error": "3.4.1"
"ws": "8.18.0",
"zod": "3.24.1",
"zod-validation-error": "3.4.0"
},
"devDependencies": {
"@dotenvx/dotenvx": "1.44.2",
"@dotenvx/dotenvx": "1.32.0",
"@esbuild-plugins/tsconfig-paths": "0.1.2",
"@tailwindcss/postcss": "^4.1.8",
"@tailwindcss/postcss": "^4.1.3",
"@types/better-sqlite3": "7.6.12",
"@types/cookie-parser": "1.4.9",
"@types/cors": "2.8.19",
"@types/cookie-parser": "1.4.8",
"@types/cors": "2.8.17",
"@types/crypto-js": "^4.2.2",
"@types/express": "5.0.0",
"@types/jmespath": "^0.15.2",
@ -121,23 +113,22 @@
"@types/jsonwebtoken": "^9.0.9",
"@types/node": "^22",
"@types/nodemailer": "6.4.17",
"@types/react": "19.1.7",
"@types/react-dom": "19.1.6",
"@types/semver": "7.7.0",
"@types/react": "19.1.1",
"@types/react-dom": "19.1.2",
"@types/semver": "7.5.8",
"@types/swagger-ui-express": "^4.1.8",
"@types/ws": "8.18.1",
"@types/ws": "8.5.13",
"@types/yargs": "17.0.33",
"drizzle-kit": "0.31.1",
"esbuild": "0.25.5",
"drizzle-kit": "0.30.6",
"esbuild": "0.25.2",
"esbuild-node-externals": "1.18.0",
"postcss": "^8",
"react-email": "4.0.16",
"react-email": "4.0.6",
"tailwindcss": "^4.1.4",
"tsc-alias": "1.8.16",
"tsx": "4.19.4",
"tsc-alias": "1.8.10",
"tsx": "4.19.3",
"typescript": "^5",
"typescript-eslint": "^8.34.0",
"yargs": "18.0.0"
"yargs": "17.7.2"
},
"overrides": {
"emblor": {


@ -1,21 +1,22 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="900.82861"
height="955.20648"
viewBox="0 0 238.34422 252.7317"
version="1.1"
id="svg420"
inkscape:export-filename="logo.svg"
inkscape:export-xdpi="221.14999"
inkscape:export-ydpi="221.14999"
x="0px"
y="0px"
viewBox="0 0 399.99999 400.00002"
enable-background="new 0 0 419.528 419.528"
xml:space="preserve"
id="svg52"
sodipodi:docname="noun-pangolin-1798092.svg"
width="400"
height="400"
inkscape:version="1.2.2 (b0a8486541, 2022-12-01)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview422"
xmlns:svg="http://www.w3.org/2000/svg"><defs
id="defs56" /><sodipodi:namedview
id="namedview54"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
@ -23,18 +24,15 @@
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="false" />
<defs
id="defs417" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1"
transform="translate(-13.119542,-5.9258171)">
<path
d="m 213.66176,90.072122 c 4.95655,0 8.97383,4.018046 8.97383,8.973827 0,4.956581 -4.01728,8.974621 -8.97383,8.974621 -4.95657,0 -8.97462,-4.01804 -8.97462,-8.974621 0,-4.955781 4.01805,-8.973827 8.97462,-8.973827 z m 35.2316,37.450998 c -0.90048,29.80928 -23.66033,69.21262 -54.51292,79.34466 -36.04206,11.836 -63.40991,-5.92226 -72.08409,-26.74061 -6.75754,-16.21966 -1.65117,-35.62363 10.96266,-43.83669 10.6506,-6.93533 30.48543,-8.76736 47.15454,2.19144 -5.85627,-15.34246 -21.62491,-25.4256 -35.59101,-28.49424 -13.96613,-3.06867 -28.38324,0.43858 -38.74504,5.69946 13.29071,-14.68572 44.40801,-28.946049 78.24077,-10.95958 22.67676,12.05491 32.43775,28.93208 42.0489,51.72763 C 251.59637,117.87858 234.026,71.411066 203.39074,43.794029 172.15544,15.636686 129.95516,4.340214 97.668803,6.103155 108.32483,12.678273 120.84625,22.06586 132.41209,33.053363 81.298533,26.697169 39.174705,38.314245 13.119542,73.749217 27.67508,70.878527 46.868833,69.073666 65.974711,70.016861 28.737658,96.252107 7.1124298,140.38147 18.105298,186.43137 c 6.718497,-11.74129 16.767711,-25.84558 28.726275,-38.62863 -3.677175,34.36994 1.42836,80.83745 45.62293,110.85478 -2.25587,-9.42394 -4.08014,-20.88443 -4.91466,-33.0154 20.673197,16.1282 50.685067,29.42205 87.917917,20.24096 65.77679,-16.21975 83.34719,-79.78335 73.4356,-118.35996"
style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0776283"
id="path32" />
</g>
</svg>
showgrid="false"
inkscape:zoom="1.9583914"
inkscape:cx="209.86611"
inkscape:cy="262.20499"
inkscape:window-width="3840"
inkscape:window-height="2136"
inkscape:window-x="0"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="svg52" /><path
d="m 62.232921,184.91974 c 0,2.431 -1.97,4.402 -4.399,4.402 -2.429,0 -4.399,-1.972 -4.399,-4.402 0,-2.429 1.97,-4.399 4.399,-4.399 2.429,-10e-4 4.399,1.97 4.399,4.399 z m 58.993999,-4.821 c -25.943999,-2.826 -38.978999,7.453 -71.181999,31.357 -27.572,20.467 -32.767,4.381 -31.748,-2.614 1.499,-10.282 25.222,-58.573 48.079,-88.461 28.273,7.34 49.869999,30.727 54.850999,59.718 z m -55.915999,4.821 c 0,-4.131 -3.349,-7.478 -7.478,-7.478 -4.129,0 -7.478,3.347 -7.478,7.478 0,4.131 3.349,7.481 7.478,7.481 4.13,0 7.478,-3.35 7.478,-7.481 z m -15.032,48.424 -0.234,14.041 20.413,22.687 -9.818,7.353 33.306,27.492 -11.759,8.124 42.631999,19.939 -10.825,9.747 48.291,8.078 -7.526,10.307 48.758,-4.531 -3.997,11.725 53.916,-18.153 -2.76,13.357 48.077,-34.345 1.479,13.562 34.087,-48.576 7.478,14.206 15.187,-58.89 10.391,8.533 -2.14,-57.884 13.814,5.13 -21.082,-51.204 13.404,0.048 -33.696,-42.131 15.312,-1.366 -47.026,-32.831002 14.255,-8.399 -54.817,-14.682 9.257,-11.695 -49.625,0.352 0.6,-13.337 -38.537,14.084 -1.597,-12.689 -29.984,21.429 -6.446,-10.852 -22.59,26.504 -7.021,-9.572 -18.923,30.294 -9.595999,-8.744 -16.754,30.138002 c 31.509999,10.197 54.979999,37.951 59.126999,71.547 0.404,0.087 -22.37,31.257 10.955,57.85 -0.576,-2.985 -6.113,-53.902 47.496,-57.61 26.668,-1.844 48.4,21.666 48.4,48.399 0,8.184 -2.05,15.883 -5.636,22.64 -15.927,29.611 -64.858,30.755 -80.429,30.596 -45.154,-0.459 -104.051999,-51.521 -104.051999,-51.521 z"
id="path46" /></svg>

(Before: 2.5 KiB, After: 2.6 KiB)


@ -1,22 +1,39 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="900.82861"
height="955.20648"
viewBox="0 0 238.34422 252.7317"
version="1.1"
id="svg420"
x="0px"
y="0px"
viewBox="0 0 399.99999 400.00002"
enable-background="new 0 0 419.528 419.528"
xml:space="preserve"
id="svg52"
sodipodi:docname="pangolin_orange.svg"
width="400"
height="400"
inkscape:version="1.2.2 (b0a8486541, 2022-12-01)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs417" />
<g
id="layer1"
transform="translate(-13.119542,-5.9258171)">
<path
d="m 213.66176,90.072122 c 4.95655,0 8.97383,4.018046 8.97383,8.973827 0,4.956581 -4.01728,8.974621 -8.97383,8.974621 -4.95657,0 -8.97462,-4.01804 -8.97462,-8.974621 0,-4.955781 4.01805,-8.973827 8.97462,-8.973827 z m 35.2316,37.450998 c -0.90048,29.80928 -23.66033,69.21262 -54.51292,79.34466 -36.04206,11.836 -63.40991,-5.92226 -72.08409,-26.74061 -6.75754,-16.21966 -1.65117,-35.62363 10.96266,-43.83669 10.6506,-6.93533 30.48543,-8.76736 47.15454,2.19144 -5.85627,-15.34246 -21.62491,-25.4256 -35.59101,-28.49424 -13.96613,-3.06867 -28.38324,0.43858 -38.74504,5.69946 13.29071,-14.68572 44.40801,-28.946049 78.24077,-10.95958 22.67676,12.05491 32.43775,28.93208 42.0489,51.72763 C 251.59637,117.87858 234.026,71.411066 203.39074,43.794029 172.15544,15.636686 129.95516,4.340214 97.668803,6.103155 108.32483,12.678273 120.84625,22.06586 132.41209,33.053363 81.298533,26.697169 39.174705,38.314245 13.119542,73.749217 27.67508,70.878527 46.868833,69.073666 65.974711,70.016861 28.737658,96.252107 7.1124298,140.38147 18.105298,186.43137 c 6.718497,-11.74129 16.767711,-25.84558 28.726275,-38.62863 -3.677175,34.36994 1.42836,80.83745 45.62293,110.85478 -2.25587,-9.42394 -4.08014,-20.88443 -4.91466,-33.0154 20.673197,16.1282 50.685067,29.42205 87.917917,20.24096 65.77679,-16.21975 83.34719,-79.78335 73.4356,-118.35996"
style="fill:#f36118;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0776283"
id="path32" />
</g>
</svg>
xmlns:svg="http://www.w3.org/2000/svg"><defs
id="defs56" /><sodipodi:namedview
id="namedview54"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
showgrid="false"
inkscape:zoom="1.9583914"
inkscape:cx="127.40048"
inkscape:cy="262.71561"
inkscape:window-width="1436"
inkscape:window-height="1236"
inkscape:window-x="2208"
inkscape:window-y="511"
inkscape:window-maximized="0"
inkscape:current-layer="svg52" /><path
d="m 62.232921,184.91974 c 0,2.431 -1.97,4.402 -4.399,4.402 -2.429,0 -4.399,-1.972 -4.399,-4.402 0,-2.429 1.97,-4.399 4.399,-4.399 2.429,-10e-4 4.399,1.97 4.399,4.399 z m 58.993999,-4.821 c -25.943999,-2.826 -38.978999,7.453 -71.181999,31.357 -27.572,20.467 -32.767,4.381 -31.748,-2.614 1.499,-10.282 25.222,-58.573 48.079,-88.461 28.273,7.34 49.869999,30.727 54.850999,59.718 z m -55.915999,4.821 c 0,-4.131 -3.349,-7.478 -7.478,-7.478 -4.129,0 -7.478,3.347 -7.478,7.478 0,4.131 3.349,7.481 7.478,7.481 4.13,0 7.478,-3.35 7.478,-7.481 z m -15.032,48.424 -0.234,14.041 20.413,22.687 -9.818,7.353 33.306,27.492 -11.759,8.124 42.631999,19.939 -10.825,9.747 48.291,8.078 -7.526,10.307 48.758,-4.531 -3.997,11.725 53.916,-18.153 -2.76,13.357 48.077,-34.345 1.479,13.562 34.087,-48.576 7.478,14.206 15.187,-58.89 10.391,8.533 -2.14,-57.884 13.814,5.13 -21.082,-51.204 13.404,0.048 -33.696,-42.131 15.312,-1.366 -47.026,-32.831002 14.255,-8.399 -54.817,-14.682 9.257,-11.695 -49.625,0.352 0.6,-13.337 -38.537,14.084 -1.597,-12.689 -29.984,21.429 -6.446,-10.852 -22.59,26.504 -7.021,-9.572 -18.923,30.294 -9.595999,-8.744 -16.754,30.138002 c 31.509999,10.197 54.979999,37.951 59.126999,71.547 0.404,0.087 -22.37,31.257 10.955,57.85 -0.576,-2.985 -6.113,-53.902 47.496,-57.61 26.668,-1.844 48.4,21.666 48.4,48.399 0,8.184 -2.05,15.883 -5.636,22.64 -15.927,29.611 -64.858,30.755 -80.429,30.596 -45.154,-0.459 -104.051999,-51.521 -104.051999,-51.521 z"
id="path46"
style="fill:#f97315;fill-opacity:1" /></svg>

(Before: 1.8 KiB, After: 2.6 KiB)

Binary file not shown. (Before: 11 KiB, After: 7.8 KiB)

Binary file not shown. (Before: 24 KiB, After: 27 KiB)

Binary file not shown. (Before: 7.4 KiB, After: 3.7 KiB)

Binary file not shown. (Before: 24 KiB)

Binary file not shown. (Before: 36 KiB, After: 62 KiB)

Binary file not shown. (Before: 34 KiB)

Binary file not shown. (Before: 33 KiB)


@ -1,6 +1,6 @@
import { Request } from "express";
import { db } from "@server/db";
import { userActions, roleActions, userOrgs } from "@server/db";
import { userActions, roleActions, userOrgs } from "@server/db/schemas";
import { and, eq, inArray } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
@ -109,7 +109,7 @@ export async function checkUserActionPermission(
try {
let userRoleIds = req.userRoleIds;
// If userRoleIds is not available on the request, fetch it
// If userOrgRoleId is not available on the request, fetch it
if (userRoleIds === undefined) {
const userOrgRoles = await db
.select({ roleId: userOrgs.roleId })


@ -1,6 +1,6 @@
import { db } from "@server/db";
import db from "@server/db";
import { and, eq, inArray } from "drizzle-orm";
import { roleResources, userResources } from "@server/db";
import { roleResources, userResources } from "@server/db/schemas";
export async function canUserAccessResource({
userId,


@ -1,5 +1,5 @@
import { db } from "@server/db";
import { UserInvite, userInvites } from "@server/db";
import db from "@server/db";
import { UserInvite, userInvites } from "@server/db/schemas";
import { isWithinExpirationDate } from "oslo";
import { verifyPassword } from "./password";
import { eq } from "drizzle-orm";


@ -1,5 +1,5 @@
import { db } from '@server/db';
import { limitsTable } from '@server/db';
import { limitsTable } from '@server/db/schemas';
import { and, eq } from 'drizzle-orm';
import createHttpError from 'http-errors';
import HttpCode from '@server/types/HttpCode';


@ -1,5 +1,5 @@
import { db } from "@server/db";
import { resourceOtp } from "@server/db";
import db from "@server/db";
import { resourceOtp } from "@server/db/schemas";
import { and, eq } from "drizzle-orm";
import { createDate, isWithinExpirationDate, TimeSpan } from "oslo";
import { alphabet, generateRandomString, sha256 } from "oslo/crypto";


@ -1,7 +1,7 @@
import { TimeSpan, createDate } from "oslo";
import { generateRandomString, alphabet } from "oslo/crypto";
import { db } from "@server/db";
import { users, emailVerificationCodes } from "@server/db";
import db from "@server/db";
import { users, emailVerificationCodes } from "@server/db/schemas";
import { eq } from "drizzle-orm";
import { sendEmail } from "@server/emails";
import config from "@server/lib/config";


@ -9,8 +9,8 @@ import {
sessions,
User,
users
} from "@server/db";
import { db } from "@server/db";
} from "@server/db/schemas";
import db from "@server/db";
import { eq, inArray } from "drizzle-orm";
import config from "@server/lib/config";
import type { RandomReader } from "@oslojs/crypto/random";


@ -2,8 +2,8 @@ import {
encodeHexLowerCase,
} from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";
import { Newt, newts, newtSessions, NewtSession } from "@server/db";
import { db } from "@server/db";
import { Newt, newts, newtSessions, NewtSession } from "@server/db/schemas";
import db from "@server/db";
import { eq } from "drizzle-orm";
export const EXPIRES = 1000 * 60 * 60 * 24 * 30;


@ -1,7 +1,7 @@
import { encodeHexLowerCase } from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";
import { resourceSessions, ResourceSession } from "@server/db";
import { db } from "@server/db";
import { resourceSessions, ResourceSession } from "@server/db/schemas";
import db from "@server/db";
import { eq, and } from "drizzle-orm";
import config from "@server/lib/config";


@ -1,6 +1,6 @@
import { verify } from "@node-rs/argon2";
import { db } from "@server/db";
import { twoFactorBackupCodes } from "@server/db";
import db from "@server/db";
import { twoFactorBackupCodes } from "@server/db/schemas";
import { eq } from "drizzle-orm";
import { decodeHex } from "oslo/encoding";
import { TOTPController } from "oslo/otp";


@ -1,10 +1,10 @@
import { db } from "@server/db";
import db from "@server/db";
import {
Resource,
ResourceAccessToken,
resourceAccessToken,
resources
} from "@server/db";
} from "@server/db/schemas";
import { and, eq } from "drizzle-orm";
import { isWithinExpirationDate } from "oslo";
import { verifyPassword } from "./password";


@ -1,72 +0,0 @@
# Database
Pangolin can use a Postgres or SQLite database to store its data.
## Development
### Postgres
To use Postgres, edit `server/db/index.ts` to export all from `server/db/pg/index.ts`:
```typescript
export * from "./pg";
```
Make sure you have a valid config file with a connection string:
```yaml
postgres:
connection_string: postgresql://postgres:postgres@localhost:5432
```
You can run an ephemeral Postgres database for local development using Docker:
```bash
docker run -d \
--name postgres \
--rm \
-p 5432:5432 \
-e POSTGRES_PASSWORD=postgres \
-v $(mktemp -d):/var/lib/postgresql/data \
postgres:17
```
### Schema
`server/db/pg/schema.ts` and `server/db/sqlite/schema.ts` contain the database schema definitions. These need to be kept in sync with each other.
Stick to common data types and avoid Postgres-specific features to ensure compatibility with SQLite.
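For illustration, "in sync" means the same table is declared once per dialect with equivalent column types. Below is a condensed sketch based on the `domains` table from the two schema files (not a verbatim copy):
```typescript
// server/db/pg/schema.ts (sketch)
import { pgTable, varchar, boolean } from "drizzle-orm/pg-core";

export const domains = pgTable("domains", {
    domainId: varchar("domainId").primaryKey(),
    baseDomain: varchar("baseDomain").notNull(),
    configManaged: boolean("configManaged").notNull().default(false)
});
```
```typescript
// server/db/sqlite/schema.ts (sketch): same shape, SQLite column types
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core";

export const domains = sqliteTable("domains", {
    domainId: text("domainId").primaryKey(),
    baseDomain: text("baseDomain").notNull(),
    configManaged: integer("configManaged", { mode: "boolean" })
        .notNull()
        .default(false)
});
```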
### SQLite
To use SQLite, edit `server/db/index.ts` to export all from `server/db/sqlite/index.ts`:
```typescript
export * from "./sqlite";
```
No edits to the config are needed. If you keep the Postgres config, it will be ignored.
## Generate and Push Migrations
Ensure drizzle-kit is installed.
### Postgres
You must have a connection string in your config file, as shown above.
```bash
npm run db:pg:generate
npm run db:pg:push
```
### SQLite
```bash
npm run db:sqlite:generate
npm run db:sqlite:push
```
## Build Time
There is a dockerfile for each database type. The dockerfile swaps out the `server/db/index.ts` file to use the correct database type.
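As a rough sketch of that swap, each image build overwrites the barrel file before the Next.js build runs, so the rest of the code resolves the matching driver:
```typescript
// server/db/index.ts as overwritten by the SQLite Dockerfile
// (the Postgres Dockerfile writes `export * from "./pg";` instead)
export * from "./sqlite";
```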


@ -1,2 +1,52 @@
export * from "./sqlite";
// export * from "./pg";
import { drizzle } from "drizzle-orm/better-sqlite3";
import Database from "better-sqlite3";
import * as schema from "@server/db/schemas";
import path from "path";
import fs from "fs/promises";
import { APP_PATH } from "@server/lib/consts";
import { existsSync, mkdirSync } from "fs";
export const location = path.join(APP_PATH, "db", "db.sqlite");
export const exists = await checkFileExists(location);
bootstrapVolume();
const sqlite = new Database(location);
export const db = drizzle(sqlite, { schema });
export default db;
async function checkFileExists(filePath: string): Promise<boolean> {
try {
await fs.access(filePath);
return true;
} catch {
return false;
}
}
function bootstrapVolume() {
const appPath = APP_PATH;
const dbDir = path.join(appPath, "db");
const logsDir = path.join(appPath, "logs");
// check if the db directory exists and create it if it doesn't
if (!existsSync(dbDir)) {
mkdirSync(dbDir, { recursive: true });
}
// check if the logs directory exists and create it if it doesn't
if (!existsSync(logsDir)) {
mkdirSync(logsDir, { recursive: true });
}
// THIS IS FOR TRAEFIK; NOT REALLY NEEDED, BUT JUST IN CASE
const traefikDir = path.join(appPath, "traefik");
// check if the traefik directory exists and create it if it doesn't
if (!existsSync(traefikDir)) {
mkdirSync(traefikDir, { recursive: true });
}
}


@ -1,5 +1,5 @@
import { migrate } from "drizzle-orm/better-sqlite3/migrator";
import db from "./driver";
import db from "@server/db";
import path from "path";
const migrationsFolder = path.join("server/migrations");
@ -7,7 +7,7 @@ const migrationsFolder = path.join("server/migrations");
const runMigrations = async () => {
console.log("Running migrations...");
try {
migrate(db as any, {
migrate(db, {
migrationsFolder: migrationsFolder,
});
console.log("Migrations completed successfully.");


@ -1,7 +1,7 @@
import { join } from "path";
import { readFileSync } from "fs";
import { db } from "@server/db";
import { exitNodes, sites } from "@server/db";
import { exitNodes, sites } from "./schemas/schema";
import { eq, and } from "drizzle-orm";
import { __DIRNAME } from "@server/lib/consts";


@ -1,17 +0,0 @@
import { drizzle as DrizzlePostgres } from "drizzle-orm/node-postgres";
import { readConfigFile } from "@server/lib/readConfigFile";
function createDb() {
const config = readConfigFile();
const connectionString = config.postgres?.connection_string;
if (!connectionString) {
throw new Error("Postgres connection string is not defined in the configuration file.");
}
return DrizzlePostgres(connectionString);
}
export const db = createDb();
export default db;


@ -1,20 +0,0 @@
import { migrate } from "drizzle-orm/node-postgres/migrator";
import db from "./driver";
import path from "path";
const migrationsFolder = path.join("server/migrations");
const runMigrations = async () => {
console.log("Running migrations...");
try {
await migrate(db as any, {
migrationsFolder: migrationsFolder
});
console.log("Migrations completed successfully.");
} catch (error) {
console.error("Error running migrations:", error);
process.exit(1);
}
};
runMigrations();


@ -1,532 +0,0 @@
import {
pgTable,
serial,
varchar,
boolean,
integer,
bigint,
real
} from "drizzle-orm/pg-core";
import { InferSelectModel } from "drizzle-orm";
export const domains = pgTable("domains", {
domainId: varchar("domainId").primaryKey(),
baseDomain: varchar("baseDomain").notNull(),
configManaged: boolean("configManaged").notNull().default(false)
});
export const orgs = pgTable("orgs", {
orgId: varchar("orgId").primaryKey(),
name: varchar("name").notNull()
});
export const orgDomains = pgTable("orgDomains", {
orgId: varchar("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }),
domainId: varchar("domainId")
.notNull()
.references(() => domains.domainId, { onDelete: "cascade" })
});
export const sites = pgTable("sites", {
siteId: serial("siteId").primaryKey(),
orgId: varchar("orgId")
.references(() => orgs.orgId, {
onDelete: "cascade"
})
.notNull(),
niceId: varchar("niceId").notNull(),
exitNodeId: integer("exitNode").references(() => exitNodes.exitNodeId, {
onDelete: "set null"
}),
name: varchar("name").notNull(),
pubKey: varchar("pubKey"),
subnet: varchar("subnet").notNull(),
megabytesIn: real("bytesIn"),
megabytesOut: real("bytesOut"),
lastBandwidthUpdate: varchar("lastBandwidthUpdate"),
type: varchar("type").notNull(), // "newt" or "wireguard"
online: boolean("online").notNull().default(false),
dockerSocketEnabled: boolean("dockerSocketEnabled").notNull().default(true)
});
export const resources = pgTable("resources", {
resourceId: serial("resourceId").primaryKey(),
siteId: integer("siteId")
.references(() => sites.siteId, {
onDelete: "cascade"
})
.notNull(),
orgId: varchar("orgId")
.references(() => orgs.orgId, {
onDelete: "cascade"
})
.notNull(),
name: varchar("name").notNull(),
subdomain: varchar("subdomain"),
fullDomain: varchar("fullDomain"),
domainId: varchar("domainId").references(() => domains.domainId, {
onDelete: "set null"
}),
ssl: boolean("ssl").notNull().default(false),
blockAccess: boolean("blockAccess").notNull().default(false),
sso: boolean("sso").notNull().default(true),
http: boolean("http").notNull().default(true),
protocol: varchar("protocol").notNull(),
proxyPort: integer("proxyPort"),
emailWhitelistEnabled: boolean("emailWhitelistEnabled")
.notNull()
.default(false),
isBaseDomain: boolean("isBaseDomain"),
applyRules: boolean("applyRules").notNull().default(false),
enabled: boolean("enabled").notNull().default(true),
stickySession: boolean("stickySession").notNull().default(false),
tlsServerName: varchar("tlsServerName"),
setHostHeader: varchar("setHostHeader")
});
export const targets = pgTable("targets", {
targetId: serial("targetId").primaryKey(),
resourceId: integer("resourceId")
.references(() => resources.resourceId, {
onDelete: "cascade"
})
.notNull(),
ip: varchar("ip").notNull(),
method: varchar("method"),
port: integer("port").notNull(),
internalPort: integer("internalPort"),
enabled: boolean("enabled").notNull().default(true)
});
export const exitNodes = pgTable("exitNodes", {
exitNodeId: serial("exitNodeId").primaryKey(),
name: varchar("name").notNull(),
address: varchar("address").notNull(),
endpoint: varchar("endpoint").notNull(),
publicKey: varchar("publicKey").notNull(),
listenPort: integer("listenPort").notNull(),
reachableAt: varchar("reachableAt")
});
export const users = pgTable("user", {
userId: varchar("id").primaryKey(),
email: varchar("email"),
username: varchar("username").notNull(),
name: varchar("name"),
type: varchar("type").notNull(), // "internal", "oidc"
idpId: integer("idpId").references(() => idp.idpId, {
onDelete: "cascade"
}),
passwordHash: varchar("passwordHash"),
twoFactorEnabled: boolean("twoFactorEnabled").notNull().default(false),
twoFactorSecret: varchar("twoFactorSecret"),
emailVerified: boolean("emailVerified").notNull().default(false),
dateCreated: varchar("dateCreated").notNull(),
serverAdmin: boolean("serverAdmin").notNull().default(false)
});
export const newts = pgTable("newt", {
newtId: varchar("id").primaryKey(),
secretHash: varchar("secretHash").notNull(),
dateCreated: varchar("dateCreated").notNull(),
siteId: integer("siteId").references(() => sites.siteId, {
onDelete: "cascade"
})
});
export const twoFactorBackupCodes = pgTable("twoFactorBackupCodes", {
codeId: serial("id").primaryKey(),
userId: varchar("userId")
.notNull()
.references(() => users.userId, { onDelete: "cascade" }),
codeHash: varchar("codeHash").notNull()
});
export const sessions = pgTable("session", {
sessionId: varchar("id").primaryKey(),
userId: varchar("userId")
.notNull()
.references(() => users.userId, { onDelete: "cascade" }),
expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
});
export const newtSessions = pgTable("newtSession", {
sessionId: varchar("id").primaryKey(),
newtId: varchar("newtId")
.notNull()
.references(() => newts.newtId, { onDelete: "cascade" }),
expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
});
export const userOrgs = pgTable("userOrgs", {
userId: varchar("userId")
.notNull()
.references(() => users.userId, { onDelete: "cascade" }),
orgId: varchar("orgId")
.references(() => orgs.orgId, {
onDelete: "cascade"
})
.notNull(),
roleId: integer("roleId")
.notNull()
.references(() => roles.roleId),
isOwner: boolean("isOwner").notNull().default(false)
});
export const emailVerificationCodes = pgTable("emailVerificationCodes", {
codeId: serial("id").primaryKey(),
userId: varchar("userId")
.notNull()
.references(() => users.userId, { onDelete: "cascade" }),
email: varchar("email").notNull(),
code: varchar("code").notNull(),
expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
});
export const passwordResetTokens = pgTable("passwordResetTokens", {
tokenId: serial("id").primaryKey(),
email: varchar("email").notNull(),
userId: varchar("userId")
.notNull()
.references(() => users.userId, { onDelete: "cascade" }),
tokenHash: varchar("tokenHash").notNull(),
expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
});
export const actions = pgTable("actions", {
actionId: varchar("actionId").primaryKey(),
name: varchar("name"),
description: varchar("description")
});
export const roles = pgTable("roles", {
roleId: serial("roleId").primaryKey(),
orgId: varchar("orgId")
.references(() => orgs.orgId, {
onDelete: "cascade"
})
.notNull(),
isAdmin: boolean("isAdmin"),
name: varchar("name").notNull(),
description: varchar("description")
});
export const roleActions = pgTable("roleActions", {
roleId: integer("roleId")
.notNull()
.references(() => roles.roleId, { onDelete: "cascade" }),
actionId: varchar("actionId")
.notNull()
.references(() => actions.actionId, { onDelete: "cascade" }),
orgId: varchar("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" })
});
export const userActions = pgTable("userActions", {
userId: varchar("userId")
.notNull()
.references(() => users.userId, { onDelete: "cascade" }),
actionId: varchar("actionId")
.notNull()
.references(() => actions.actionId, { onDelete: "cascade" }),
orgId: varchar("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" })
});
export const roleSites = pgTable("roleSites", {
roleId: integer("roleId")
.notNull()
.references(() => roles.roleId, { onDelete: "cascade" }),
siteId: integer("siteId")
.notNull()
.references(() => sites.siteId, { onDelete: "cascade" })
});
export const userSites = pgTable("userSites", {
userId: varchar("userId")
.notNull()
.references(() => users.userId, { onDelete: "cascade" }),
siteId: integer("siteId")
.notNull()
.references(() => sites.siteId, { onDelete: "cascade" })
});
export const roleResources = pgTable("roleResources", {
roleId: integer("roleId")
.notNull()
.references(() => roles.roleId, { onDelete: "cascade" }),
resourceId: integer("resourceId")
.notNull()
.references(() => resources.resourceId, { onDelete: "cascade" })
});
export const userResources = pgTable("userResources", {
userId: varchar("userId")
.notNull()
.references(() => users.userId, { onDelete: "cascade" }),
resourceId: integer("resourceId")
.notNull()
.references(() => resources.resourceId, { onDelete: "cascade" })
});
export const limitsTable = pgTable("limits", {
limitId: serial("limitId").primaryKey(),
orgId: varchar("orgId")
.references(() => orgs.orgId, {
onDelete: "cascade"
})
.notNull(),
name: varchar("name").notNull(),
value: bigint("value", { mode: "number" }).notNull(),
description: varchar("description")
});
export const userInvites = pgTable("userInvites", {
inviteId: varchar("inviteId").primaryKey(),
orgId: varchar("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }),
email: varchar("email").notNull(),
expiresAt: bigint("expiresAt", { mode: "number" }).notNull(),
tokenHash: varchar("token").notNull(),
roleId: integer("roleId")
.notNull()
.references(() => roles.roleId, { onDelete: "cascade" })
});
export const resourcePincode = pgTable("resourcePincode", {
pincodeId: serial("pincodeId").primaryKey(),
resourceId: integer("resourceId")
.notNull()
.references(() => resources.resourceId, { onDelete: "cascade" }),
pincodeHash: varchar("pincodeHash").notNull(),
digitLength: integer("digitLength").notNull()
});
export const resourcePassword = pgTable("resourcePassword", {
passwordId: serial("passwordId").primaryKey(),
resourceId: integer("resourceId")
.notNull()
.references(() => resources.resourceId, { onDelete: "cascade" }),
passwordHash: varchar("passwordHash").notNull()
});
export const resourceAccessToken = pgTable("resourceAccessToken", {
accessTokenId: varchar("accessTokenId").primaryKey(),
orgId: varchar("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }),
resourceId: integer("resourceId")
.notNull()
.references(() => resources.resourceId, { onDelete: "cascade" }),
tokenHash: varchar("tokenHash").notNull(),
sessionLength: bigint("sessionLength", { mode: "number" }).notNull(),
expiresAt: bigint("expiresAt", { mode: "number" }),
title: varchar("title"),
description: varchar("description"),
createdAt: bigint("createdAt", { mode: "number" }).notNull()
});
export const resourceSessions = pgTable("resourceSessions", {
sessionId: varchar("id").primaryKey(),
resourceId: integer("resourceId")
.notNull()
.references(() => resources.resourceId, { onDelete: "cascade" }),
expiresAt: bigint("expiresAt", { mode: "number" }).notNull(),
sessionLength: bigint("sessionLength", { mode: "number" }).notNull(),
doNotExtend: boolean("doNotExtend").notNull().default(false),
isRequestToken: boolean("isRequestToken"),
userSessionId: varchar("userSessionId").references(
() => sessions.sessionId,
{
onDelete: "cascade"
}
),
passwordId: integer("passwordId").references(
() => resourcePassword.passwordId,
{
onDelete: "cascade"
}
),
pincodeId: integer("pincodeId").references(
() => resourcePincode.pincodeId,
{
onDelete: "cascade"
}
),
whitelistId: integer("whitelistId").references(
() => resourceWhitelist.whitelistId,
{
onDelete: "cascade"
}
),
accessTokenId: varchar("accessTokenId").references(
() => resourceAccessToken.accessTokenId,
{
onDelete: "cascade"
}
)
});
export const resourceWhitelist = pgTable("resourceWhitelist", {
whitelistId: serial("id").primaryKey(),
email: varchar("email").notNull(),
resourceId: integer("resourceId")
.notNull()
.references(() => resources.resourceId, { onDelete: "cascade" })
});
export const resourceOtp = pgTable("resourceOtp", {
otpId: serial("otpId").primaryKey(),
resourceId: integer("resourceId")
.notNull()
.references(() => resources.resourceId, { onDelete: "cascade" }),
email: varchar("email").notNull(),
otpHash: varchar("otpHash").notNull(),
expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
});
export const versionMigrations = pgTable("versionMigrations", {
version: varchar("version").primaryKey(),
executedAt: bigint("executedAt", { mode: "number" }).notNull()
});
export const resourceRules = pgTable("resourceRules", {
ruleId: serial("ruleId").primaryKey(),
resourceId: integer("resourceId")
.notNull()
.references(() => resources.resourceId, { onDelete: "cascade" }),
enabled: boolean("enabled").notNull().default(true),
priority: integer("priority").notNull(),
action: varchar("action").notNull(), // ACCEPT, DROP
match: varchar("match").notNull(), // CIDR, PATH, IP
value: varchar("value").notNull()
});
export const supporterKey = pgTable("supporterKey", {
keyId: serial("keyId").primaryKey(),
key: varchar("key").notNull(),
githubUsername: varchar("githubUsername").notNull(),
phrase: varchar("phrase"),
tier: varchar("tier"),
valid: boolean("valid").notNull().default(false)
});
export const idp = pgTable("idp", {
idpId: serial("idpId").primaryKey(),
name: varchar("name").notNull(),
type: varchar("type").notNull(),
defaultRoleMapping: varchar("defaultRoleMapping"),
defaultOrgMapping: varchar("defaultOrgMapping"),
autoProvision: boolean("autoProvision").notNull().default(false)
});
export const idpOidcConfig = pgTable("idpOidcConfig", {
idpOauthConfigId: serial("idpOauthConfigId").primaryKey(),
idpId: integer("idpId")
.notNull()
.references(() => idp.idpId, { onDelete: "cascade" }),
clientId: varchar("clientId").notNull(),
clientSecret: varchar("clientSecret").notNull(),
authUrl: varchar("authUrl").notNull(),
tokenUrl: varchar("tokenUrl").notNull(),
identifierPath: varchar("identifierPath").notNull(),
emailPath: varchar("emailPath"),
namePath: varchar("namePath"),
scopes: varchar("scopes").notNull()
});
export const licenseKey = pgTable("licenseKey", {
licenseKeyId: varchar("licenseKeyId").primaryKey().notNull(),
instanceId: varchar("instanceId").notNull(),
token: varchar("token").notNull()
});
export const hostMeta = pgTable("hostMeta", {
hostMetaId: varchar("hostMetaId").primaryKey().notNull(),
createdAt: bigint("createdAt", { mode: "number" }).notNull()
});
export const apiKeys = pgTable("apiKeys", {
apiKeyId: varchar("apiKeyId").primaryKey(),
name: varchar("name").notNull(),
apiKeyHash: varchar("apiKeyHash").notNull(),
lastChars: varchar("lastChars").notNull(),
createdAt: varchar("dateCreated").notNull(),
isRoot: boolean("isRoot").notNull().default(false)
});
export const apiKeyActions = pgTable("apiKeyActions", {
apiKeyId: varchar("apiKeyId")
.notNull()
.references(() => apiKeys.apiKeyId, { onDelete: "cascade" }),
actionId: varchar("actionId")
.notNull()
.references(() => actions.actionId, { onDelete: "cascade" })
});
export const apiKeyOrg = pgTable("apiKeyOrg", {
apiKeyId: varchar("apiKeyId")
.notNull()
.references(() => apiKeys.apiKeyId, { onDelete: "cascade" }),
orgId: varchar("orgId")
.references(() => orgs.orgId, {
onDelete: "cascade"
})
.notNull()
});
export const idpOrg = pgTable("idpOrg", {
idpId: integer("idpId")
.notNull()
.references(() => idp.idpId, { onDelete: "cascade" }),
orgId: varchar("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }),
roleMapping: varchar("roleMapping"),
orgMapping: varchar("orgMapping")
});
export type Org = InferSelectModel<typeof orgs>;
export type User = InferSelectModel<typeof users>;
export type Site = InferSelectModel<typeof sites>;
export type Resource = InferSelectModel<typeof resources>;
export type ExitNode = InferSelectModel<typeof exitNodes>;
export type Target = InferSelectModel<typeof targets>;
export type Session = InferSelectModel<typeof sessions>;
export type Newt = InferSelectModel<typeof newts>;
export type NewtSession = InferSelectModel<typeof newtSessions>;
export type EmailVerificationCode = InferSelectModel<
typeof emailVerificationCodes
>;
export type TwoFactorBackupCode = InferSelectModel<typeof twoFactorBackupCodes>;
export type PasswordResetToken = InferSelectModel<typeof passwordResetTokens>;
export type Role = InferSelectModel<typeof roles>;
export type Action = InferSelectModel<typeof actions>;
export type RoleAction = InferSelectModel<typeof roleActions>;
export type UserAction = InferSelectModel<typeof userActions>;
export type RoleSite = InferSelectModel<typeof roleSites>;
export type UserSite = InferSelectModel<typeof userSites>;
export type RoleResource = InferSelectModel<typeof roleResources>;
export type UserResource = InferSelectModel<typeof userResources>;
export type Limit = InferSelectModel<typeof limitsTable>;
export type UserInvite = InferSelectModel<typeof userInvites>;
export type UserOrg = InferSelectModel<typeof userOrgs>;
export type ResourceSession = InferSelectModel<typeof resourceSessions>;
export type ResourcePincode = InferSelectModel<typeof resourcePincode>;
export type ResourcePassword = InferSelectModel<typeof resourcePassword>;
export type ResourceOtp = InferSelectModel<typeof resourceOtp>;
export type ResourceAccessToken = InferSelectModel<typeof resourceAccessToken>;
export type ResourceWhitelist = InferSelectModel<typeof resourceWhitelist>;
export type VersionMigration = InferSelectModel<typeof versionMigrations>;
export type ResourceRule = InferSelectModel<typeof resourceRules>;
export type Domain = InferSelectModel<typeof domains>;
export type SupporterKey = InferSelectModel<typeof supporterKey>;
export type Idp = InferSelectModel<typeof idp>;
export type ApiKey = InferSelectModel<typeof apiKeys>;
export type ApiKeyAction = InferSelectModel<typeof apiKeyActions>;
export type ApiKeyOrg = InferSelectModel<typeof apiKeyOrg>;
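The InferSelectModel aliases above give every table a plain row type, so query helpers can be typed without hand-written interfaces. A minimal sketch, assuming the pg driver exports a Drizzle db instance alongside these tables:
import { db, resources } from "@server/db";
import type { Resource } from "@server/db";
import { eq } from "drizzle-orm";

// Returns the typed row for a resource, or undefined when it does not exist.
async function getResourceById(resourceId: number): Promise<Resource | undefined> {
    const [resource] = await db
        .select()
        .from(resources)
        .where(eq(resources.resourceId, resourceId))
        .limit(1);
    return resource;
}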

View file

View file

@ -1,2 +1 @@
export * from "./driver";
export * from "./schema";

View file

@ -41,10 +41,7 @@ export const sites = sqliteTable("sites", {
megabytesOut: integer("bytesOut"),
lastBandwidthUpdate: text("lastBandwidthUpdate"),
type: text("type").notNull(), // "newt" or "wireguard"
online: integer("online", { mode: "boolean" }).notNull().default(false),
dockerSocketEnabled: integer("dockerSocketEnabled", { mode: "boolean" })
.notNull()
.default(true)
online: integer("online", { mode: "boolean" }).notNull().default(false)
});
export const resources = sqliteTable("resources", {

View file

@ -1,58 +0,0 @@
import { drizzle as DrizzleSqlite } from "drizzle-orm/better-sqlite3";
import Database from "better-sqlite3";
import * as schema from "./schema";
import path from "path";
import fs from "fs/promises";
import { APP_PATH } from "@server/lib/consts";
import { existsSync, mkdirSync } from "fs";
import { readConfigFile } from "@server/lib/readConfigFile";
export const location = path.join(APP_PATH, "db", "db.sqlite");
export const exists = await checkFileExists(location);
bootstrapVolume();
function createDb() {
const config = readConfigFile();
const sqlite = new Database(location);
return DrizzleSqlite(sqlite, { schema });
}
export const db = createDb();
export default db;
async function checkFileExists(filePath: string): Promise<boolean> {
try {
await fs.access(filePath);
return true;
} catch {
return false;
}
}
function bootstrapVolume() {
const appPath = APP_PATH;
const dbDir = path.join(appPath, "db");
const logsDir = path.join(appPath, "logs");
// check if the db directory exists and create it if it doesn't
if (!existsSync(dbDir)) {
mkdirSync(dbDir, { recursive: true });
}
// check if the logs directory exists and create it if it doesn't
if (!existsSync(logsDir)) {
mkdirSync(logsDir, { recursive: true });
}
// THIS IS FOR TRAEFIK; NOT REALLY NEEDED, BUT JUST IN CASE
const traefikDir = path.join(appPath, "traefik");
// check if the traefik directory exists and create it if it doesn't
if (!existsSync(traefikDir)) {
mkdirSync(traefikDir, { recursive: true });
}
}

View file

@ -1,2 +0,0 @@
export * from "./driver";
export * from "./schema";

View file

@ -4,9 +4,8 @@ import { runSetupFunctions } from "./setup";
import { createApiServer } from "./apiServer";
import { createNextServer } from "./nextServer";
import { createInternalServer } from "./internalServer";
import { ApiKey, ApiKeyOrg, Session, User, UserOrg } from "@server/db";
import { createIntegrationApiServer } from "./integrationApiServer";
import config from "@server/lib/config";
import { ApiKey, ApiKeyOrg, Session, User, UserOrg } from "./db/schemas";
// import { createIntegrationApiServer } from "./integrationApiServer";
async function startServers() {
await runSetupFunctions();
@ -17,9 +16,7 @@ async function startServers() {
const nextServer = await createNextServer();
let integrationServer;
if (config.getRawConfig().flags?.enable_integration_api) {
integrationServer = createIntegrationApiServer();
}
// integrationServer = createIntegrationApiServer();
return {
apiServer,

View file

@ -1,102 +0,0 @@
import express from "express";
import cors from "cors";
import cookieParser from "cookie-parser";
import config from "@server/lib/config";
import logger from "@server/logger";
import {
errorHandlerMiddleware,
notFoundMiddleware,
} from "@server/middlewares";
import { authenticated, unauthenticated } from "@server/routers/integration";
import { logIncomingMiddleware } from "./middlewares/logIncoming";
import helmet from "helmet";
import swaggerUi from "swagger-ui-express";
import { OpenApiGeneratorV3 } from "@asteasolutions/zod-to-openapi";
import { registry } from "./openApi";
const dev = process.env.ENVIRONMENT !== "prod";
const externalPort = config.getRawConfig().server.integration_port;
export function createIntegrationApiServer() {
const apiServer = express();
if (config.getRawConfig().server.trust_proxy) {
apiServer.set("trust proxy", 1);
}
apiServer.use(cors());
if (!dev) {
apiServer.use(helmet());
}
apiServer.use(cookieParser());
apiServer.use(express.json());
apiServer.use(
"/v1/docs",
swaggerUi.serve,
swaggerUi.setup(getOpenApiDocumentation())
);
// API routes
const prefix = `/v1`;
apiServer.use(logIncomingMiddleware);
apiServer.use(prefix, unauthenticated);
apiServer.use(prefix, authenticated);
// Error handling
apiServer.use(notFoundMiddleware);
apiServer.use(errorHandlerMiddleware);
// Create HTTP server
const httpServer = apiServer.listen(externalPort, (err?: any) => {
if (err) throw err;
logger.info(
`Integration API server is running on http://localhost:${externalPort}`
);
});
return httpServer;
}
function getOpenApiDocumentation() {
const bearerAuth = registry.registerComponent(
"securitySchemes",
"Bearer Auth",
{
type: "http",
scheme: "bearer"
}
);
for (const def of registry.definitions) {
if (def.type === "route") {
def.route.security = [
{
[bearerAuth.name]: []
}
];
}
}
registry.registerPath({
method: "get",
path: "/",
description: "Health check",
tags: [],
request: {},
responses: {}
});
const generator = new OpenApiGeneratorV3(registry.definitions);
return generator.generateDocument({
openapi: "3.0.0",
info: {
version: "v1",
title: "Pangolin Integration API"
},
servers: [{ url: "/v1" }]
});
}

View file

@ -1,6 +1,6 @@
import { db } from "@server/db";
import db from "@server/db";
import { and, eq, inArray } from "drizzle-orm";
import { roleResources, userResources } from "@server/db";
import { roleResources, userResources } from "@server/db/schemas";
export async function canUserAccessResource({
userId,

View file

@ -1,6 +1,169 @@
import fs from "fs";
import yaml from "js-yaml";
import { z } from "zod";
import { __DIRNAME, APP_VERSION } from "@server/lib/consts";
import { configSchema, readConfigFile } from "./readConfigFile";
import { fromError } from "zod-validation-error";
import {
__DIRNAME,
APP_VERSION,
configFilePath1,
configFilePath2
} from "@server/lib/consts";
import { passwordSchema } from "@server/auth/passwordSchema";
import stoi from "./stoi";
const portSchema = z.number().positive().gt(0).lte(65535);
const getEnvOrYaml = (envVar: string) => (valFromYaml: any) => {
return process.env[envVar] ?? valFromYaml;
};
const configSchema = z.object({
app: z.object({
dashboard_url: z
.string()
.url()
.optional()
.pipe(z.string().url())
.transform((url) => url.toLowerCase()),
log_level: z.enum(["debug", "info", "warn", "error"]),
save_logs: z.boolean(),
log_failed_attempts: z.boolean().optional()
}),
domains: z
.record(
z.string(),
z.object({
base_domain: z
.string()
.nonempty("base_domain must not be empty")
.transform((url) => url.toLowerCase()),
cert_resolver: z.string().optional(),
prefer_wildcard_cert: z.boolean().optional()
})
)
.refine(
(domains) => {
const keys = Object.keys(domains);
if (keys.length === 0) {
return false;
}
return true;
},
{
message: "At least one domain must be defined"
}
),
server: z.object({
integration_port: portSchema
.optional()
.transform(stoi)
.pipe(portSchema.optional()),
external_port: portSchema.optional().transform(stoi).pipe(portSchema),
internal_port: portSchema.optional().transform(stoi).pipe(portSchema),
next_port: portSchema.optional().transform(stoi).pipe(portSchema),
internal_hostname: z.string().transform((url) => url.toLowerCase()),
session_cookie_name: z.string(),
resource_access_token_param: z.string(),
resource_access_token_headers: z.object({
id: z.string(),
token: z.string()
}),
resource_session_request_param: z.string(),
dashboard_session_length_hours: z
.number()
.positive()
.gt(0)
.optional()
.default(720),
resource_session_length_hours: z
.number()
.positive()
.gt(0)
.optional()
.default(720),
cors: z
.object({
origins: z.array(z.string()).optional(),
methods: z.array(z.string()).optional(),
allowed_headers: z.array(z.string()).optional(),
credentials: z.boolean().optional()
})
.optional(),
trust_proxy: z.boolean().optional().default(true),
secret: z
.string()
.optional()
.transform(getEnvOrYaml("SERVER_SECRET"))
.pipe(z.string().min(8))
}),
traefik: z.object({
http_entrypoint: z.string(),
https_entrypoint: z.string().optional(),
additional_middlewares: z.array(z.string()).optional()
}),
gerbil: z.object({
start_port: portSchema.optional().transform(stoi).pipe(portSchema),
base_endpoint: z
.string()
.optional()
.pipe(z.string())
.transform((url) => url.toLowerCase()),
use_subdomain: z.boolean(),
subnet_group: z.string(),
block_size: z.number().positive().gt(0),
site_block_size: z.number().positive().gt(0)
}),
rate_limits: z.object({
global: z.object({
window_minutes: z.number().positive().gt(0),
max_requests: z.number().positive().gt(0)
}),
auth: z
.object({
window_minutes: z.number().positive().gt(0),
max_requests: z.number().positive().gt(0)
})
.optional()
}),
email: z
.object({
smtp_host: z.string().optional(),
smtp_port: portSchema.optional(),
smtp_user: z.string().optional(),
smtp_pass: z.string().optional(),
smtp_secure: z.boolean().optional(),
smtp_tls_reject_unauthorized: z.boolean().optional(),
no_reply: z.string().email().optional()
})
.optional(),
users: z.object({
server_admin: z.object({
email: z
.string()
.email()
.optional()
.transform(getEnvOrYaml("USERS_SERVERADMIN_EMAIL"))
.pipe(z.string().email())
.transform((v) => v.toLowerCase()),
password: passwordSchema
.optional()
.transform(getEnvOrYaml("USERS_SERVERADMIN_PASSWORD"))
.pipe(passwordSchema)
})
}),
flags: z
.object({
require_email_verification: z.boolean().optional(),
disable_signup_without_invite: z.boolean().optional(),
disable_user_create_org: z.boolean().optional(),
allow_raw_resources: z.boolean().optional(),
allow_base_domain_resources: z.boolean().optional(),
allow_local_sites: z.boolean().optional()
})
.optional()
});
export class Config {
private rawConfig!: z.infer<typeof configSchema>;
@ -8,53 +171,92 @@ export class Config {
isDev: boolean = process.env.ENVIRONMENT !== "prod";
constructor() {
this.load();
this.loadConfig();
}
public load() {
const parsedConfig = readConfigFile();
public loadConfig() {
const loadConfig = (configPath: string) => {
try {
const yamlContent = fs.readFileSync(configPath, "utf8");
const config = yaml.load(yamlContent);
return config;
} catch (error) {
if (error instanceof Error) {
throw new Error(
`Error loading configuration file: ${error.message}`
);
}
throw error;
}
};
let environment: any;
if (fs.existsSync(configFilePath1)) {
environment = loadConfig(configFilePath1);
} else if (fs.existsSync(configFilePath2)) {
environment = loadConfig(configFilePath2);
}
if (process.env.APP_BASE_DOMAIN) {
console.log(
"You're using deprecated environment variables. Transition to the configuration file. https://docs.fossorial.io/"
);
}
if (!environment) {
throw new Error(
"No configuration file found. Please create one. https://docs.fossorial.io/"
);
}
const parsedConfig = configSchema.safeParse(environment);
if (!parsedConfig.success) {
const errors = fromError(parsedConfig.error);
throw new Error(`Invalid configuration file: ${errors}`);
}
process.env.APP_VERSION = APP_VERSION;
process.env.NEXT_PORT = parsedConfig.server.next_port.toString();
process.env.NEXT_PORT = parsedConfig.data.server.next_port.toString();
process.env.SERVER_EXTERNAL_PORT =
parsedConfig.server.external_port.toString();
parsedConfig.data.server.external_port.toString();
process.env.SERVER_INTERNAL_PORT =
parsedConfig.server.internal_port.toString();
process.env.FLAGS_EMAIL_VERIFICATION_REQUIRED = parsedConfig.flags
parsedConfig.data.server.internal_port.toString();
process.env.FLAGS_EMAIL_VERIFICATION_REQUIRED = parsedConfig.data.flags
?.require_email_verification
? "true"
: "false";
process.env.FLAGS_ALLOW_RAW_RESOURCES = parsedConfig.flags
process.env.FLAGS_ALLOW_RAW_RESOURCES = parsedConfig.data.flags
?.allow_raw_resources
? "true"
: "false";
process.env.SESSION_COOKIE_NAME =
parsedConfig.server.session_cookie_name;
process.env.EMAIL_ENABLED = parsedConfig.email ? "true" : "false";
process.env.DISABLE_SIGNUP_WITHOUT_INVITE = parsedConfig.flags
parsedConfig.data.server.session_cookie_name;
process.env.EMAIL_ENABLED = parsedConfig.data.email ? "true" : "false";
process.env.DISABLE_SIGNUP_WITHOUT_INVITE = parsedConfig.data.flags
?.disable_signup_without_invite
? "true"
: "false";
process.env.DISABLE_USER_CREATE_ORG = parsedConfig.flags
process.env.DISABLE_USER_CREATE_ORG = parsedConfig.data.flags
?.disable_user_create_org
? "true"
: "false";
process.env.RESOURCE_ACCESS_TOKEN_PARAM =
parsedConfig.server.resource_access_token_param;
parsedConfig.data.server.resource_access_token_param;
process.env.RESOURCE_ACCESS_TOKEN_HEADERS_ID =
parsedConfig.server.resource_access_token_headers.id;
parsedConfig.data.server.resource_access_token_headers.id;
process.env.RESOURCE_ACCESS_TOKEN_HEADERS_TOKEN =
parsedConfig.server.resource_access_token_headers.token;
parsedConfig.data.server.resource_access_token_headers.token;
process.env.RESOURCE_SESSION_REQUEST_PARAM =
parsedConfig.server.resource_session_request_param;
process.env.FLAGS_ALLOW_BASE_DOMAIN_RESOURCES = parsedConfig.flags
parsedConfig.data.server.resource_session_request_param;
process.env.FLAGS_ALLOW_BASE_DOMAIN_RESOURCES = parsedConfig.data.flags
?.allow_base_domain_resources
? "true"
: "false";
process.env.DASHBOARD_URL = parsedConfig.app.dashboard_url;
process.env.DASHBOARD_URL = parsedConfig.data.app.dashboard_url;
this.rawConfig = parsedConfig;
this.rawConfig = parsedConfig.data;
}
public getRawConfig() {

View file

@ -2,7 +2,7 @@ import path from "path";
import { fileURLToPath } from "url";
// This is a placeholder value replaced by the build process
export const APP_VERSION = "1.5.1";
export const APP_VERSION = "1.3.0";
export const __FILENAME = fileURLToPath(import.meta.url);
export const __DIRNAME = path.dirname(__FILENAME);

View file

@ -1,264 +0,0 @@
import fs from "fs";
import yaml from "js-yaml";
import { configFilePath1, configFilePath2 } from "./consts";
import { z } from "zod";
import stoi from "./stoi";
import { passwordSchema } from "@server/auth/passwordSchema";
import { fromError } from "zod-validation-error";
const portSchema = z.number().positive().gt(0).lte(65535);
const getEnvOrYaml = (envVar: string) => (valFromYaml: any) => {
return process.env[envVar] ?? valFromYaml;
};
export const configSchema = z.object({
app: z.object({
dashboard_url: z
.string()
.url()
.optional()
.pipe(z.string().url())
.transform((url) => url.toLowerCase()),
log_level: z
.enum(["debug", "info", "warn", "error"])
.optional()
.default("info"),
save_logs: z.boolean().optional().default(false),
log_failed_attempts: z.boolean().optional().default(false)
}),
domains: z
.record(
z.string(),
z.object({
base_domain: z
.string()
.nonempty("base_domain must not be empty")
.transform((url) => url.toLowerCase()),
cert_resolver: z.string().optional().default("letsencrypt"),
prefer_wildcard_cert: z.boolean().optional().default(false)
})
)
.refine(
(domains) => {
const keys = Object.keys(domains);
if (keys.length === 0) {
return false;
}
return true;
},
{
message: "At least one domain must be defined"
}
),
server: z.object({
integration_port: portSchema
.optional()
.default(3003)
.transform(stoi)
.pipe(portSchema.optional()),
external_port: portSchema
.optional()
.default(3000)
.transform(stoi)
.pipe(portSchema),
internal_port: portSchema
.optional()
.default(3001)
.transform(stoi)
.pipe(portSchema),
next_port: portSchema
.optional()
.default(3002)
.transform(stoi)
.pipe(portSchema),
internal_hostname: z
.string()
.optional()
.default("pangolin")
.transform((url) => url.toLowerCase()),
session_cookie_name: z.string().optional().default("p_session_token"),
resource_access_token_param: z.string().optional().default("p_token"),
resource_access_token_headers: z
.object({
id: z.string().optional().default("P-Access-Token-Id"),
token: z.string().optional().default("P-Access-Token")
})
.optional()
.default({}),
resource_session_request_param: z
.string()
.optional()
.default("resource_session_request_param"),
dashboard_session_length_hours: z
.number()
.positive()
.gt(0)
.optional()
.default(720),
resource_session_length_hours: z
.number()
.positive()
.gt(0)
.optional()
.default(720),
cors: z
.object({
origins: z.array(z.string()).optional(),
methods: z.array(z.string()).optional(),
allowed_headers: z.array(z.string()).optional(),
credentials: z.boolean().optional()
})
.optional(),
trust_proxy: z.boolean().optional().default(true),
secret: z
.string()
.optional()
.transform(getEnvOrYaml("SERVER_SECRET"))
.pipe(z.string().min(8))
}),
postgres: z
.object({
connection_string: z.string().optional()
})
.default({}),
traefik: z
.object({
http_entrypoint: z.string().optional().default("web"),
https_entrypoint: z.string().optional().default("websecure"),
additional_middlewares: z.array(z.string()).optional()
})
.optional()
.default({}),
gerbil: z
.object({
start_port: portSchema
.optional()
.default(51820)
.transform(stoi)
.pipe(portSchema),
base_endpoint: z
.string()
.optional()
.pipe(z.string())
.transform((url) => url.toLowerCase()),
use_subdomain: z.boolean().optional().default(false),
subnet_group: z.string().optional().default("100.89.137.0/20"),
block_size: z.number().positive().gt(0).optional().default(24),
site_block_size: z.number().positive().gt(0).optional().default(30)
})
.optional()
.default({}),
rate_limits: z
.object({
global: z
.object({
window_minutes: z
.number()
.positive()
.gt(0)
.optional()
.default(1),
max_requests: z
.number()
.positive()
.gt(0)
.optional()
.default(500)
})
.optional()
.default({}),
auth: z
.object({
window_minutes: z.number().positive().gt(0),
max_requests: z.number().positive().gt(0)
})
.optional()
})
.optional()
.default({}),
email: z
.object({
smtp_host: z.string().optional(),
smtp_port: portSchema.optional(),
smtp_user: z.string().optional(),
smtp_pass: z.string().optional(),
smtp_secure: z.boolean().optional(),
smtp_tls_reject_unauthorized: z.boolean().optional(),
no_reply: z.string().email().optional()
})
.optional(),
users: z.object({
server_admin: z.object({
email: z
.string()
.email()
.optional()
.transform(getEnvOrYaml("USERS_SERVERADMIN_EMAIL"))
.pipe(z.string().email())
.transform((v) => v.toLowerCase()),
password: passwordSchema
.optional()
.transform(getEnvOrYaml("USERS_SERVERADMIN_PASSWORD"))
.pipe(passwordSchema)
})
}),
flags: z
.object({
require_email_verification: z.boolean().optional(),
disable_signup_without_invite: z.boolean().optional(),
disable_user_create_org: z.boolean().optional(),
allow_raw_resources: z.boolean().optional(),
allow_base_domain_resources: z.boolean().optional(),
allow_local_sites: z.boolean().optional(),
enable_integration_api: z.boolean().optional()
})
.optional()
});
export function readConfigFile() {
const loadConfig = (configPath: string) => {
try {
const yamlContent = fs.readFileSync(configPath, "utf8");
const config = yaml.load(yamlContent);
return config;
} catch (error) {
if (error instanceof Error) {
throw new Error(
`Error loading configuration file: ${error.message}`
);
}
throw error;
}
};
let environment: any;
if (fs.existsSync(configFilePath1)) {
environment = loadConfig(configFilePath1);
} else if (fs.existsSync(configFilePath2)) {
environment = loadConfig(configFilePath2);
}
if (process.env.APP_BASE_DOMAIN) {
console.log(
"You're using deprecated environment variables. Transition to the configuration file. https://docs.fossorial.io/"
);
}
if (!environment) {
throw new Error(
"No configuration file found. Please create one. https://docs.fossorial.io/"
);
}
const parsedConfig = configSchema.safeParse(environment);
if (!parsedConfig.success) {
const errors = fromError(parsedConfig.error);
throw new Error(`Invalid configuration file: ${errors}`);
}
return parsedConfig.data;
}
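Because most fields above carry defaults, a valid configuration only needs the handful of values that have none. A minimal sketch of an object that should pass the configSchema defined above; every value is a placeholder, and server.secret plus the server admin credentials can instead come from the SERVER_SECRET, USERS_SERVERADMIN_EMAIL, and USERS_SERVERADMIN_PASSWORD environment variables:
// Sketch only: the smallest config that should satisfy configSchema, assuming the
// defaults above cover everything omitted. The password must also meet passwordSchema.
const minimalConfig = {
    app: { dashboard_url: "https://pangolin.example.com" },
    domains: { main: { base_domain: "example.com" } },
    server: { secret: "change-me-please-now" },
    gerbil: { base_endpoint: "example.com" },
    users: {
        server_admin: {
            email: "admin@example.com",
            password: "S0me-Strong-Passw0rd!"
        }
    }
};
const parsedExample = configSchema.parse(minimalConfig); // throws if anything required is missing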

View file

@ -9,10 +9,6 @@ export function isValidIP(ip: string): boolean {
}
export function isValidUrlGlobPattern(pattern: string): boolean {
if (pattern === "/") {
return true;
}
// Remove leading slash if present
pattern = pattern.startsWith("/") ? pattern.slice(1) : pattern;

View file

@ -1,5 +1,6 @@
import { Request, Response, NextFunction } from "express";
import { db, userOrgs } from "@server/db";
import { db } from "@server/db";
import { userOrgs, orgs } from "@server/db/schemas";
import { eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";

View file

@ -16,6 +16,6 @@ export * from "./verifyUserInRole";
export * from "./verifyAccessTokenAccess";
export * from "./verifyUserIsServerAdmin";
export * from "./verifyIsLoggedInUser";
export * from "./integration";
// export * from "./integration";
export * from "./verifyUserHasAction";
export * from "./verifyApiKeyAccess";
// export * from "./verifyApiKeyAccess";

View file

@ -1,12 +0,0 @@
export * from "./verifyApiKey";
export * from "./verifyApiKeyOrgAccess";
export * from "./verifyApiKeyHasAction";
export * from "./verifyApiKeySiteAccess";
export * from "./verifyApiKeyResourceAccess";
export * from "./verifyApiKeyTargetAccess";
export * from "./verifyApiKeyRoleAccess";
export * from "./verifyApiKeyUserAccess";
export * from "./verifyApiKeySetResourceUsers";
export * from "./verifyAccessTokenAccess";
export * from "./verifyApiKeyIsRoot";
export * from "./verifyApiKeyApiKeyAccess";

View file

@ -1,110 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { resourceAccessToken, resources, apiKeyOrg } from "@server/db";
import { and, eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
export async function verifyApiKeyAccessTokenAccess(
req: Request,
res: Response,
next: NextFunction
) {
try {
const apiKey = req.apiKey;
const accessTokenId = req.params.accessTokenId;
if (!apiKey) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Key not authenticated")
);
}
const [accessToken] = await db
.select()
.from(resourceAccessToken)
.where(eq(resourceAccessToken.accessTokenId, accessTokenId))
.limit(1);
if (!accessToken) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
`Access token with ID ${accessTokenId} not found`
)
);
}
const resourceId = accessToken.resourceId;
if (!resourceId) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
`Access token with ID ${accessTokenId} does not have a resource ID`
)
);
}
const [resource] = await db
.select()
.from(resources)
.where(eq(resources.resourceId, resourceId))
.limit(1);
if (!resource) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
`Resource with ID ${resourceId} not found`
)
);
}
if (!resource.orgId) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
`Resource with ID ${resourceId} does not have an organization ID`
)
);
}
// Verify that the API key is linked to the resource's organization
if (!req.apiKeyOrg) {
const apiKeyOrgResult = await db
.select()
.from(apiKeyOrg)
.where(
and(
eq(apiKeyOrg.apiKeyId, apiKey.apiKeyId),
eq(apiKeyOrg.orgId, resource.orgId)
)
)
.limit(1);
if (apiKeyOrgResult.length > 0) {
req.apiKeyOrg = apiKeyOrgResult[0];
}
}
if (!req.apiKeyOrg) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Key does not have access to this organization"
)
);
}
return next();
} catch (e) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Error verifying access token access"
)
);
}
}

View file

@ -1,60 +0,0 @@
import { verifyPassword } from "@server/auth/password";
import { db } from "@server/db";
import { apiKeys } from "@server/db";
import logger from "@server/logger";
import HttpCode from "@server/types/HttpCode";
import { eq } from "drizzle-orm";
import { Request, Response, NextFunction } from "express";
import createHttpError from "http-errors";
export async function verifyApiKey(
req: Request,
res: Response,
next: NextFunction
): Promise<void> {
try {
const authHeader = req.headers["authorization"];
if (!authHeader || !authHeader.startsWith("Bearer ")) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "API key required")
);
}
const key = authHeader.split(" ")[1]; // Get the token part after "Bearer"
const [apiKeyId, apiKeySecret] = key.split(".");
const [apiKey] = await db
.select()
.from(apiKeys)
.where(eq(apiKeys.apiKeyId, apiKeyId))
.limit(1);
if (!apiKey) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Invalid API key")
);
}
const secretHash = apiKey.apiKeyHash;
const valid = await verifyPassword(apiKeySecret, secretHash);
if (!valid) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Invalid API key")
);
}
req.apiKey = apiKey;
return next();
} catch (error) {
logger.error(error);
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"An error occurred checking API key"
)
);
}
}
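A client authenticates by sending the key ID and the secret joined by a dot, matching the split performed above. A minimal sketch of a call against the integration API; the port (the integration_port default) and the endpoint path are illustrative:
// Sketch only: both values are returned once when the key is created.
const exampleKeyId = "d3b0c1e2f4a5";                   // placeholder key ID
const exampleKeySecret = "secret-material-shown-once"; // placeholder secret
const res = await fetch("http://localhost:3003/v1/org/my-org/sites", {
    headers: { Authorization: `Bearer ${exampleKeyId}.${exampleKeySecret}` }
});
console.log(res.status); // 401 when the key is missing or invalid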

View file

@ -1,81 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { apiKeys, apiKeyOrg } from "@server/db";
import { and, eq, or } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
export async function verifyApiKeyApiKeyAccess(
req: Request,
res: Response,
next: NextFunction
) {
try {
const { apiKey: callerApiKey } = req;
const apiKeyId =
req.params.apiKeyId || req.body.apiKeyId || req.query.apiKeyId;
const orgId = req.params.orgId;
if (!callerApiKey) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Key not authenticated")
);
}
if (!orgId) {
return next(
createHttpError(HttpCode.BAD_REQUEST, "Invalid organization ID")
);
}
if (!apiKeyId) {
return next(
createHttpError(HttpCode.BAD_REQUEST, "Invalid key ID")
);
}
const [callerApiKeyOrg] = await db
.select()
.from(apiKeyOrg)
.where(
and(eq(apiKeyOrg.apiKeyId, callerApiKey.apiKeyId), eq(apiKeyOrg.orgId, orgId))
)
.limit(1);
if (!callerApiKeyOrg) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
`Caller API key does not have access to organization with ID ${orgId}`
)
);
}
const [otherApiKeyOrg] = await db
.select()
.from(apiKeyOrg)
.where(
and(eq(apiKeyOrg.apiKeyId, apiKeyId), eq(apiKeyOrg.orgId, orgId))
)
.limit(1);
if (!otherApiKeyOrg) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
`API key with ID ${apiKeyId} does not have access to organization with ID ${orgId}`
)
);
}
return next();
} catch (error) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Error verifying key access"
)
);
}
}

View file

@ -1,56 +0,0 @@
import { Request, Response, NextFunction } from "express";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import logger from "@server/logger";
import { ActionsEnum } from "@server/auth/actions";
import { db } from "@server/db";
import { apiKeyActions } from "@server/db";
import { and, eq } from "drizzle-orm";
export function verifyApiKeyHasAction(action: ActionsEnum) {
return async function (
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
try {
if (!req.apiKey) {
return next(
createHttpError(
HttpCode.UNAUTHORIZED,
"API Key not authenticated"
)
);
}
const [actionRes] = await db
.select()
.from(apiKeyActions)
.where(
and(
eq(apiKeyActions.apiKeyId, req.apiKey.apiKeyId),
eq(apiKeyActions.actionId, action)
)
);
if (!actionRes) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Key does not have permission perform this action"
)
);
}
return next();
} catch (error) {
logger.error("Error verifying key action access:", error);
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Error verifying key action access"
)
);
}
};
}
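On a route, this factory sits after the key and organization checks, so the handler only runs for a key that exists, belongs to the organization, and holds the named action. A sketch of the wiring, assuming an Express router and the integration middlewares re-exported from @server/middlewares; the path and the action name are illustrative:
import { Router } from "express";
import {
    verifyApiKey,
    verifyApiKeyOrgAccess,
    verifyApiKeyHasAction
} from "@server/middlewares";
import { ActionsEnum } from "@server/auth/actions";

const router = Router();
router.get(
    "/org/:orgId/sites",
    verifyApiKey,
    verifyApiKeyOrgAccess,
    verifyApiKeyHasAction(ActionsEnum.listSites), // hypothetical action name
    (req, res) => res.json({ ok: true })          // placeholder handler
);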

View file

@ -1,39 +0,0 @@
import logger from "@server/logger";
import HttpCode from "@server/types/HttpCode";
import { Request, Response, NextFunction } from "express";
import createHttpError from "http-errors";
export async function verifyApiKeyIsRoot(
req: Request,
res: Response,
next: NextFunction
): Promise<void> {
try {
const { apiKey } = req;
if (!apiKey) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Key not authenticated")
);
}
if (!apiKey.isRoot) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Key does not have root access"
)
);
}
return next();
} catch (error) {
logger.error(error);
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"An error occurred checking API key"
)
);
}
}

View file

@ -1,61 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { apiKeyOrg } from "@server/db";
import { and, eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import logger from "@server/logger";
export async function verifyApiKeyOrgAccess(
req: Request,
res: Response,
next: NextFunction
) {
try {
const apiKeyId = req.apiKey?.apiKeyId;
const orgId = req.params.orgId;
if (!apiKeyId) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Key not authenticated")
);
}
if (!orgId) {
return next(
createHttpError(HttpCode.BAD_REQUEST, "Invalid organization ID")
);
}
if (!req.apiKeyOrg) {
const apiKeyOrgRes = await db
.select()
.from(apiKeyOrg)
.where(
and(
eq(apiKeyOrg.apiKeyId, apiKeyId),
eq(apiKeyOrg.orgId, orgId)
)
);
req.apiKeyOrg = apiKeyOrgRes[0];
}
if (!req.apiKeyOrg) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Key does not have access to this organization"
)
);
}
return next();
} catch (e) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Error verifying organization access"
)
);
}
}

View file

@ -1,85 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { resources, apiKeyOrg } from "@server/db";
import { eq, and } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
export async function verifyApiKeyResourceAccess(
req: Request,
res: Response,
next: NextFunction
) {
const apiKey = req.apiKey;
const resourceId =
req.params.resourceId || req.body.resourceId || req.query.resourceId;
if (!apiKey) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Key not authenticated")
);
}
try {
// Retrieve the resource
const [resource] = await db
.select()
.from(resources)
.where(eq(resources.resourceId, resourceId))
.limit(1);
if (!resource) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
`Resource with ID ${resourceId} not found`
)
);
}
if (!resource.orgId) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
`Resource with ID ${resourceId} does not have an organization ID`
)
);
}
// Verify that the API key is linked to the resource's organization
if (!req.apiKeyOrg) {
const apiKeyOrgResult = await db
.select()
.from(apiKeyOrg)
.where(
and(
eq(apiKeyOrg.apiKeyId, apiKey.apiKeyId),
eq(apiKeyOrg.orgId, resource.orgId)
)
)
.limit(1);
if (apiKeyOrgResult.length > 0) {
req.apiKeyOrg = apiKeyOrgResult[0];
}
}
if (!req.apiKeyOrg) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Key does not have access to this organization"
)
);
}
return next();
} catch (error) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Error verifying resource access"
)
);
}
}

View file

@ -1,127 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { roles, apiKeyOrg } from "@server/db";
import { and, eq, inArray } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import logger from "@server/logger";
export async function verifyApiKeyRoleAccess(
req: Request,
res: Response,
next: NextFunction
) {
try {
const apiKey = req.apiKey;
const singleRoleId = parseInt(
req.params.roleId || req.body.roleId || req.query.roleId
);
if (!apiKey) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Key not authenticated")
);
}
const { roleIds } = req.body;
const allRoleIds =
roleIds || (isNaN(singleRoleId) ? [] : [singleRoleId]);
if (allRoleIds.length === 0) {
return next();
}
const rolesData = await db
.select()
.from(roles)
.where(inArray(roles.roleId, allRoleIds));
if (rolesData.length !== allRoleIds.length) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
"One or more roles not found"
)
);
}
const orgIds = new Set(rolesData.map((role) => role.orgId));
for (const role of rolesData) {
const apiKeyOrgAccess = await db
.select()
.from(apiKeyOrg)
.where(
and(
eq(apiKeyOrg.apiKeyId, apiKey.apiKeyId),
eq(apiKeyOrg.orgId, role.orgId!)
)
)
.limit(1);
if (apiKeyOrgAccess.length === 0) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
`Key does not have access to organization for role ID ${role.roleId}`
)
);
}
}
if (orgIds.size > 1) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Roles must belong to the same organization"
)
);
}
const orgId = orgIds.values().next().value;
if (!orgId) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Roles do not have an organization ID"
)
);
}
if (!req.apiKeyOrg) {
// Retrieve the API key's organization link if not already set
const apiKeyOrgRes = await db
.select()
.from(apiKeyOrg)
.where(
and(
eq(apiKeyOrg.apiKeyId, apiKey.apiKeyId),
eq(apiKeyOrg.orgId, orgId)
)
)
.limit(1);
if (apiKeyOrgRes.length === 0) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Key does not have access to this organization"
)
);
}
req.apiKeyOrg = apiKeyOrgRes[0];
}
return next();
} catch (error) {
logger.error("Error verifying role access:", error);
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Error verifying role access"
)
);
}
}

View file

@ -1,69 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { userOrgs } from "@server/db";
import { and, eq, inArray } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
export async function verifyApiKeySetResourceUsers(
req: Request,
res: Response,
next: NextFunction
) {
const apiKey = req.apiKey;
const userIds = req.body.userIds;
if (!apiKey) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Key not authenticated")
);
}
if (!req.apiKeyOrg) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Key does not have access to this organization"
)
);
}
if (!userIds) {
return next(createHttpError(HttpCode.BAD_REQUEST, "Invalid user IDs"));
}
if (userIds.length === 0) {
return next();
}
try {
const orgId = req.apiKeyOrg.orgId;
const userOrgsData = await db
.select()
.from(userOrgs)
.where(
and(
inArray(userOrgs.userId, userIds),
eq(userOrgs.orgId, orgId)
)
);
if (userOrgsData.length !== userIds.length) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Key does not have access to one or more specified users"
)
);
}
return next();
} catch (error) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Error checking if key has access to the specified users"
)
);
}
}

View file

@ -1,89 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import {
sites,
apiKeyOrg
} from "@server/db";
import { and, eq, or } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
export async function verifyApiKeySiteAccess(
req: Request,
res: Response,
next: NextFunction
) {
try {
const apiKey = req.apiKey;
const siteId = parseInt(
req.params.siteId || req.body.siteId || req.query.siteId
);
if (!apiKey) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Key not authenticated")
);
}
if (isNaN(siteId)) {
return next(
createHttpError(HttpCode.BAD_REQUEST, "Invalid site ID")
);
}
const site = await db
.select()
.from(sites)
.where(eq(sites.siteId, siteId))
.limit(1);
if (site.length === 0) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
`Site with ID ${siteId} not found`
)
);
}
if (!site[0].orgId) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
`Site with ID ${siteId} does not have an organization ID`
)
);
}
if (!req.apiKeyOrg) {
const apiKeyOrgRes = await db
.select()
.from(apiKeyOrg)
.where(
and(
eq(apiKeyOrg.apiKeyId, apiKey.apiKeyId),
eq(apiKeyOrg.orgId, site[0].orgId)
)
);
req.apiKeyOrg = apiKeyOrgRes[0];
}
if (!req.apiKeyOrg) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Key does not have access to this organization"
)
);
}
return next();
} catch (error) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Error verifying site access"
)
);
}
}

View file

@ -1,112 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { resources, targets, apiKeyOrg } from "@server/db";
import { and, eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
export async function verifyApiKeyTargetAccess(
req: Request,
res: Response,
next: NextFunction
) {
try {
const apiKey = req.apiKey;
const targetId = parseInt(req.params.targetId);
if (!apiKey) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Key not authenticated")
);
}
if (isNaN(targetId)) {
return next(
createHttpError(HttpCode.BAD_REQUEST, "Invalid target ID")
);
}
const [target] = await db
.select()
.from(targets)
.where(eq(targets.targetId, targetId))
.limit(1);
if (!target) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
`Target with ID ${targetId} not found`
)
);
}
const resourceId = target.resourceId;
if (!resourceId) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
`Target with ID ${targetId} does not have a resource ID`
)
);
}
const [resource] = await db
.select()
.from(resources)
.where(eq(resources.resourceId, resourceId))
.limit(1);
if (!resource) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
`Resource with ID ${resourceId} not found`
)
);
}
if (!resource.orgId) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
`Resource with ID ${resourceId} does not have an organization ID`
)
);
}
if (!req.apiKeyOrg) {
const apiKeyOrgResult = await db
.select()
.from(apiKeyOrg)
.where(
and(
eq(apiKeyOrg.apiKeyId, apiKey.apiKeyId),
eq(apiKeyOrg.orgId, resource.orgId)
)
)
.limit(1);
if (apiKeyOrgResult.length > 0) {
req.apiKeyOrg = apiKeyOrgResult[0];
}
}
if (!req.apiKeyOrg) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Key does not have access to this organization"
)
);
}
return next();
} catch (error) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Error verifying target access"
)
);
}
}

View file

@ -1,67 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { userOrgs } from "@server/db";
import { and, eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
export async function verifyApiKeyUserAccess(
req: Request,
res: Response,
next: NextFunction
) {
try {
const apiKey = req.apiKey;
const reqUserId =
req.params.userId || req.body.userId || req.query.userId;
if (!apiKey) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Key not authenticated")
);
}
if (!reqUserId) {
return next(
createHttpError(HttpCode.BAD_REQUEST, "Invalid user ID")
);
}
if (!req.apiKeyOrg || !req.apiKeyOrg.orgId) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Key does not have organization access"
)
);
}
const orgId = req.apiKeyOrg.orgId;
const [userOrgRecord] = await db
.select()
.from(userOrgs)
.where(
and(eq(userOrgs.userId, reqUserId), eq(userOrgs.orgId, orgId))
)
.limit(1);
if (!userOrgRecord) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Key does not have access to this user"
)
);
}
return next();
} catch (error) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Error checking if key has access to this user"
)
);
}
}

View file

@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { resourceAccessToken, resources, userOrgs } from "@server/db";
import { resourceAccessToken, resources, userOrgs } from "@server/db/schemas";
import { and, eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";

View file

@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { roles, userOrgs } from "@server/db";
import { roles, userOrgs } from "@server/db/schemas";
import { and, eq, inArray } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";

View file

@ -1,96 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { userOrgs, apiKeys, apiKeyOrg } from "@server/db";
import { and, eq, or } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
export async function verifyApiKeyAccess(
req: Request,
res: Response,
next: NextFunction
) {
try {
const userId = req.user!.userId;
const apiKeyId =
req.params.apiKeyId || req.body.apiKeyId || req.query.apiKeyId;
const orgId = req.params.orgId;
if (!userId) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "User not authenticated")
);
}
if (!orgId) {
return next(
createHttpError(HttpCode.BAD_REQUEST, "Invalid organization ID")
);
}
if (!apiKeyId) {
return next(
createHttpError(HttpCode.BAD_REQUEST, "Invalid key ID")
);
}
const [apiKey] = await db
.select()
.from(apiKeys)
.innerJoin(apiKeyOrg, eq(apiKeys.apiKeyId, apiKeyOrg.apiKeyId))
.where(
and(eq(apiKeys.apiKeyId, apiKeyId), eq(apiKeyOrg.orgId, orgId))
)
.limit(1);
if (!apiKey?.apiKeys) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
`API key with ID ${apiKeyId} not found`
)
);
}
if (!apiKey.apiKeyOrg.orgId) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
`API key with ID ${apiKeyId} does not have an organization ID`
)
);
}
if (!req.userOrg) {
const userOrgRole = await db
.select()
.from(userOrgs)
.where(
and(
eq(userOrgs.userId, userId),
eq(userOrgs.orgId, apiKey.apiKeyOrg.orgId)
)
);
req.userOrg = userOrgRole[0];
req.userRoleIds = userOrgRole.map((r) => r.roleId);
}
if (!req.userOrg) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"User does not have access to this organization"
)
);
}
return next();
} catch (error) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Error verifying key access"
)
);
}
}

View file

@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { userOrgs } from "@server/db";
import { userOrgs } from "@server/db/schemas";
import { and, eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";

View file

@ -4,8 +4,8 @@ import {
resources,
userOrgs,
userResources,
roleResources,
} from "@server/db";
roleResources
} from "@server/db/schemas";
import { and, eq, inArray } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";

View file

@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { roles, userOrgs } from "@server/db";
import { roles, userOrgs } from "@server/db/schemas";
import { and, eq, inArray } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";

View file

@ -1,7 +1,7 @@
import { NextFunction, Response } from "express";
import ErrorResponse from "@server/types/ErrorResponse";
import { db } from "@server/db";
import { users } from "@server/db";
import { users } from "@server/db/schemas";
import { eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";

View file

@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { userOrgs } from "@server/db";
import { userOrgs } from "@server/db/schemas";
import { and, eq, inArray, or } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";

View file

@ -5,8 +5,8 @@ import {
userOrgs,
userSites,
roleSites,
roles,
} from "@server/db";
roles
} from "@server/db/schemas";
import { and, eq, inArray } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";

View file

@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { resources, targets, userOrgs } from "@server/db";
import { resources, targets, userOrgs } from "@server/db/schemas";
import { and, eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";

View file

@ -1,7 +1,7 @@
import { NextFunction, Response } from "express";
import ErrorResponse from "@server/types/ErrorResponse";
import { db } from "@server/db";
import { users } from "@server/db";
import { users } from "@server/db/schemas";
import { eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";

View file

@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { userOrgs } from "@server/db";
import { userOrgs } from "@server/db/schemas";
import { and, eq, or } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";

View file

@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { userOrgs } from "@server/db";
import { userOrgs } from "@server/db/schemas";
import { and, eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";

View file

@ -5,9 +5,9 @@ import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { fromError } from "zod-validation-error";
import { resourceAccessToken } from "@server/db";
import { resourceAccessToken } from "@server/db/schemas";
import { and, eq } from "drizzle-orm";
import { db } from "@server/db";
import db from "@server/db";
import { OpenAPITags, registry } from "@server/openApi";
const deleteAccessTokenParamsSchema = z

View file

@ -4,12 +4,12 @@ import {
generateIdFromEntropySize,
SESSION_COOKIE_EXPIRES
} from "@server/auth/sessions/app";
import { db } from "@server/db";
import db from "@server/db";
import {
ResourceAccessToken,
resourceAccessToken,
resources
} from "@server/db";
} from "@server/db/schemas";
import HttpCode from "@server/types/HttpCode";
import response from "@server/lib/response";
import { eq } from "drizzle-orm";

View file

@ -7,7 +7,7 @@ import {
roleResources,
resourceAccessToken,
sites
} from "@server/db";
} from "@server/db/schemas";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";

View file

@ -1,128 +0,0 @@
import { NextFunction, Request, Response } from "express";
import { db } from "@server/db";
import HttpCode from "@server/types/HttpCode";
import { z } from "zod";
import { apiKeyOrg, apiKeys } from "@server/db";
import { fromError } from "zod-validation-error";
import createHttpError from "http-errors";
import response from "@server/lib/response";
import moment from "moment";
import {
generateId,
generateIdFromEntropySize
} from "@server/auth/sessions/app";
import logger from "@server/logger";
import { hashPassword } from "@server/auth/password";
import { OpenAPITags, registry } from "@server/openApi";
const paramsSchema = z.object({
orgId: z.string().nonempty()
});
const bodySchema = z.object({
name: z.string().min(1).max(255)
});
export type CreateOrgApiKeyBody = z.infer<typeof bodySchema>;
export type CreateOrgApiKeyResponse = {
apiKeyId: string;
name: string;
apiKey: string;
lastChars: string;
createdAt: string;
};
registry.registerPath({
method: "put",
path: "/org/{orgId}/api-key",
description: "Create a new API key scoped to the organization.",
tags: [OpenAPITags.Org, OpenAPITags.ApiKey],
request: {
params: paramsSchema,
body: {
content: {
"application/json": {
schema: bodySchema
}
}
}
},
responses: {}
});
export async function createOrgApiKey(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
const parsedParams = paramsSchema.safeParse(req.params);
if (!parsedParams.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedParams.error).toString()
)
);
}
const parsedBody = bodySchema.safeParse(req.body);
if (!parsedBody.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedBody.error).toString()
)
);
}
const { orgId } = parsedParams.data;
const { name } = parsedBody.data;
const apiKeyId = generateId(15);
const apiKey = generateIdFromEntropySize(25);
const apiKeyHash = await hashPassword(apiKey);
const lastChars = apiKey.slice(-4);
const createdAt = moment().toISOString();
await db.transaction(async (trx) => {
await trx.insert(apiKeys).values({
name,
apiKeyId,
apiKeyHash,
createdAt,
lastChars
});
await trx.insert(apiKeyOrg).values({
apiKeyId,
orgId
});
});
try {
return response<CreateOrgApiKeyResponse>(res, {
data: {
apiKeyId,
apiKey,
name,
lastChars,
createdAt
},
success: true,
error: false,
message: "API key created",
status: HttpCode.CREATED
});
} catch (e) {
logger.error(e);
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Failed to create API key"
)
);
}
}
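On the client, the usable bearer token is the returned apiKeyId and apiKey joined by a dot, mirroring the split in verifyApiKey, and the secret is only available in this one response. A sketch of calling the endpoint with an illustrative base URL (dashboard session authentication omitted):
// Sketch only: create a key for an org, then assemble the bearer token.
const resp = await fetch("https://pangolin.example.com/api/v1/org/my-org/api-key", {
    method: "PUT",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ name: "ci-pipeline" })
});
const { data } = await resp.json();
const bearerToken = `${data.apiKeyId}.${data.apiKey}`; // store securely; not retrievable later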

View file

@ -1,100 +0,0 @@
import { NextFunction, Request, Response } from "express";
import { db } from "@server/db";
import HttpCode from "@server/types/HttpCode";
import { z } from "zod";
import { apiKeyOrg, apiKeys, orgs } from "@server/db";
import { fromError } from "zod-validation-error";
import createHttpError from "http-errors";
import response from "@server/lib/response";
import moment from "moment";
import {
generateId,
generateIdFromEntropySize
} from "@server/auth/sessions/app";
import logger from "@server/logger";
import { hashPassword } from "@server/auth/password";
const bodySchema = z
.object({
name: z.string().min(1).max(255)
})
.strict();
export type CreateRootApiKeyBody = z.infer<typeof bodySchema>;
export type CreateRootApiKeyResponse = {
apiKeyId: string;
name: string;
apiKey: string;
lastChars: string;
createdAt: string;
};
export async function createRootApiKey(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
const parsedBody = bodySchema.safeParse(req.body);
if (!parsedBody.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedBody.error).toString()
)
);
}
const { name } = parsedBody.data;
const apiKeyId = generateId(15);
const apiKey = generateIdFromEntropySize(25);
const apiKeyHash = await hashPassword(apiKey);
const lastChars = apiKey.slice(-4);
const createdAt = moment().toISOString();
await db.transaction(async (trx) => {
await trx.insert(apiKeys).values({
apiKeyId,
name,
apiKeyHash,
createdAt,
lastChars,
isRoot: true
});
const allOrgs = await trx.select().from(orgs);
for (const org of allOrgs) {
await trx.insert(apiKeyOrg).values({
apiKeyId,
orgId: org.orgId
});
}
});
try {
return response<CreateRootApiKeyResponse>(res, {
data: {
apiKeyId,
name,
apiKey,
lastChars,
createdAt
},
success: true,
error: false,
message: "API key created",
status: HttpCode.CREATED
});
} catch (e) {
logger.error(e);
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Failed to create API key"
)
);
}
}

View file

@ -1,76 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db } from "@server/db";
import { apiKeys } from "@server/db";
import { eq } from "drizzle-orm";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { fromError } from "zod-validation-error";
import { OpenAPITags, registry } from "@server/openApi";
const paramsSchema = z.object({
apiKeyId: z.string().nonempty()
});
registry.registerPath({
method: "delete",
path: "/org/{orgId}/api-key/{apiKeyId}",
description: "Delete an API key.",
tags: [OpenAPITags.Org, OpenAPITags.ApiKey],
request: {
params: paramsSchema
},
responses: {}
});
export async function deleteApiKey(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
try {
const parsedParams = paramsSchema.safeParse(req.params);
if (!parsedParams.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedParams.error).toString()
)
);
}
const { apiKeyId } = parsedParams.data;
const [apiKey] = await db
.select()
.from(apiKeys)
.where(eq(apiKeys.apiKeyId, apiKeyId))
.limit(1);
if (!apiKey) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
`API Key with ID ${apiKeyId} not found`
)
);
}
await db.delete(apiKeys).where(eq(apiKeys.apiKeyId, apiKeyId));
return response(res, {
data: null,
success: true,
error: false,
message: "API key deleted successfully",
status: HttpCode.OK
});
} catch (error) {
logger.error(error);
return next(
createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
);
}
}

View file

@ -1,99 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db } from "@server/db";
import { apiKeyOrg, apiKeys } from "@server/db";
import { and, eq } from "drizzle-orm";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { fromError } from "zod-validation-error";
const paramsSchema = z.object({
apiKeyId: z.string().nonempty(),
orgId: z.string().nonempty()
});
export async function deleteOrgApiKey(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
try {
const parsedParams = paramsSchema.safeParse(req.params);
if (!parsedParams.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedParams.error).toString()
)
);
}
const { apiKeyId, orgId } = parsedParams.data;
const [apiKey] = await db
.select()
.from(apiKeys)
.where(eq(apiKeys.apiKeyId, apiKeyId))
.innerJoin(
apiKeyOrg,
and(
eq(apiKeys.apiKeyId, apiKeyOrg.apiKeyId),
eq(apiKeyOrg.orgId, orgId)
)
)
.limit(1);
if (!apiKey) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
`API Key with ID ${apiKeyId} not found`
)
);
}
if (apiKey.apiKeys.isRoot) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Cannot delete root API key"
)
);
}
await db.transaction(async (trx) => {
await trx
.delete(apiKeyOrg)
.where(
and(
eq(apiKeyOrg.apiKeyId, apiKeyId),
eq(apiKeyOrg.orgId, orgId)
)
);
const apiKeyOrgs = await trx
.select()
.from(apiKeyOrg)
.where(eq(apiKeyOrg.apiKeyId, apiKeyId));
if (apiKeyOrgs.length === 0) {
await trx.delete(apiKeys).where(eq(apiKeys.apiKeyId, apiKeyId));
}
});
return response(res, {
data: null,
success: true,
error: false,
message: "API removed from organization",
status: HttpCode.OK
});
} catch (error) {
logger.error(error);
return next(
createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
);
}
}

View file

@ -1,76 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db } from "@server/db";
import { apiKeys } from "@server/db";
import { eq } from "drizzle-orm";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { fromError } from "zod-validation-error";
const paramsSchema = z.object({
apiKeyId: z.string().nonempty()
});
async function query(apiKeyId: string) {
return await db
.select({
apiKeyId: apiKeys.apiKeyId,
lastChars: apiKeys.lastChars,
createdAt: apiKeys.createdAt,
isRoot: apiKeys.isRoot,
name: apiKeys.name
})
.from(apiKeys)
.where(eq(apiKeys.apiKeyId, apiKeyId))
.limit(1);
}
export type GetApiKeyResponse = NonNullable<
Awaited<ReturnType<typeof query>>[0]
>;
export async function getApiKey(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
try {
const parsedParams = paramsSchema.safeParse(req.params);
if (!parsedParams.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedParams.error).toString()
)
);
}
const { apiKeyId } = parsedParams.data;
const [apiKey] = await query(apiKeyId);
if (!apiKey) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
`API Key with ID ${apiKeyId} not found`
)
);
}
return response<GetApiKeyResponse>(res, {
data: apiKey,
success: true,
error: false,
message: "API key deleted successfully",
status: HttpCode.OK
});
} catch (error) {
logger.error(error);
return next(
createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
);
}
}
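
GetApiKeyResponse above is derived from the Drizzle query's row type, so consumers get apiKeyId, lastChars, createdAt, isRoot, and name without redeclaring them. A small sketch of a typed consumer — the import path is an assumption about how this router module is exposed:

import type { GetApiKeyResponse } from "@server/routers/apiKey"; // assumed path

function describeKey(key: GetApiKeyResponse): string {
    const kind = key.isRoot ? "root" : "org";
    return `${key.name} (${kind}, ****${key.lastChars}, created ${key.createdAt})`;
}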

View file

@ -1,11 +0,0 @@
export * from "./createRootApiKey";
export * from "./deleteApiKey";
export * from "./getApiKey";
export * from "./listApiKeyActions";
export * from "./listOrgApiKeys";
export * from "./listApiKeyActions";
export * from "./listRootApiKeys";
export * from "./setApiKeyActions";
export * from "./setApiKeyOrgs";
export * from "./createOrgApiKey";
export * from "./deleteOrgApiKey";

View file

@ -1,113 +0,0 @@
import { db } from "@server/db";
import { actions, apiKeyActions, apiKeyOrg, apiKeys } from "@server/db";
import logger from "@server/logger";
import HttpCode from "@server/types/HttpCode";
import response from "@server/lib/response";
import { NextFunction, Request, Response } from "express";
import createHttpError from "http-errors";
import { z } from "zod";
import { fromError } from "zod-validation-error";
import { eq } from "drizzle-orm";
import { OpenAPITags, registry } from "@server/openApi";
const paramsSchema = z.object({
apiKeyId: z.string().nonempty()
});
const querySchema = z.object({
limit: z
.string()
.optional()
.default("1000")
.transform(Number)
.pipe(z.number().int().positive()),
offset: z
.string()
.optional()
.default("0")
.transform(Number)
.pipe(z.number().int().nonnegative())
});
function queryActions(apiKeyId: string) {
return db
.select({
actionId: actions.actionId
})
.from(apiKeyActions)
.where(eq(apiKeyActions.apiKeyId, apiKeyId))
.innerJoin(actions, eq(actions.actionId, apiKeyActions.actionId));
}
export type ListApiKeyActionsResponse = {
actions: Awaited<ReturnType<typeof queryActions>>;
pagination: { total: number; limit: number; offset: number };
};
registry.registerPath({
method: "get",
path: "/org/{orgId}/api-key/{apiKeyId}/actions",
description:
"List all actions set for an API key.",
tags: [OpenAPITags.Org, OpenAPITags.ApiKey],
request: {
params: paramsSchema,
query: querySchema
},
responses: {}
});
export async function listApiKeyActions(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
try {
const parsedQuery = querySchema.safeParse(req.query);
if (!parsedQuery.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedQuery.error)
)
);
}
const parsedParams = paramsSchema.safeParse(req.params);
if (!parsedParams.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedParams.error)
)
);
}
const { limit, offset } = parsedQuery.data;
const { apiKeyId } = parsedParams.data;
const baseQuery = queryActions(apiKeyId);
const actionsList = await baseQuery.limit(limit).offset(offset);
return response<ListApiKeyActionsResponse>(res, {
data: {
actions: actionsList,
pagination: {
total: actionsList.length,
limit,
offset
}
},
success: true,
error: false,
message: "API keys retrieved successfully",
status: HttpCode.OK
});
} catch (error) {
logger.error(error);
return next(
createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
);
}
}
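
The querySchema above accepts limit and offset as strings, applies the "1000" and "0" defaults, converts with transform(Number), and then enforces positive/nonnegative integers. A small standalone sketch of that same validation behavior:

// Mirrors the pagination schema above in isolation.
import { z } from "zod";

const pagination = z.object({
    limit: z.string().optional().default("1000").transform(Number).pipe(z.number().int().positive()),
    offset: z.string().optional().default("0").transform(Number).pipe(z.number().int().nonnegative())
});

console.log(pagination.parse({}));                            // { limit: 1000, offset: 0 }
console.log(pagination.parse({ limit: "25", offset: "50" })); // { limit: 25, offset: 50 }
console.log(pagination.safeParse({ limit: "-5" }).success);   // false (limit must be positive)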

View file

@ -1,116 +0,0 @@
import { db } from "@server/db";
import { apiKeyOrg, apiKeys } from "@server/db";
import logger from "@server/logger";
import HttpCode from "@server/types/HttpCode";
import response from "@server/lib/response";
import { NextFunction, Request, Response } from "express";
import createHttpError from "http-errors";
import { z } from "zod";
import { fromError } from "zod-validation-error";
import { eq, and } from "drizzle-orm";
import { OpenAPITags, registry } from "@server/openApi";
const querySchema = z.object({
limit: z
.string()
.optional()
.default("1000")
.transform(Number)
.pipe(z.number().int().positive()),
offset: z
.string()
.optional()
.default("0")
.transform(Number)
.pipe(z.number().int().nonnegative())
});
const paramsSchema = z.object({
orgId: z.string()
});
function queryApiKeys(orgId: string) {
return db
.select({
apiKeyId: apiKeys.apiKeyId,
orgId: apiKeyOrg.orgId,
lastChars: apiKeys.lastChars,
createdAt: apiKeys.createdAt,
name: apiKeys.name
})
.from(apiKeyOrg)
.where(and(eq(apiKeyOrg.orgId, orgId), eq(apiKeys.isRoot, false)))
.innerJoin(apiKeys, eq(apiKeys.apiKeyId, apiKeyOrg.apiKeyId));
}
export type ListOrgApiKeysResponse = {
apiKeys: Awaited<ReturnType<typeof queryApiKeys>>;
pagination: { total: number; limit: number; offset: number };
};
registry.registerPath({
method: "get",
path: "/org/{orgId}/api-keys",
description: "List all API keys for an organization",
tags: [OpenAPITags.Org, OpenAPITags.ApiKey],
request: {
params: paramsSchema,
query: querySchema
},
responses: {}
});
export async function listOrgApiKeys(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
try {
const parsedQuery = querySchema.safeParse(req.query);
if (!parsedQuery.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedQuery.error)
)
);
}
const parsedParams = paramsSchema.safeParse(req.params);
if (!parsedParams.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedParams.error)
)
);
}
const { limit, offset } = parsedQuery.data;
const { orgId } = parsedParams.data;
const baseQuery = queryApiKeys(orgId);
const apiKeysList = await baseQuery.limit(limit).offset(offset);
return response<ListOrgApiKeysResponse>(res, {
data: {
apiKeys: apiKeysList,
pagination: {
total: apiKeysList.length,
limit,
offset
}
},
success: true,
error: false,
message: "API keys retrieved successfully",
status: HttpCode.OK
});
} catch (error) {
logger.error(error);
return next(
createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
);
}
}
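
Note that pagination.total in this handler is apiKeysList.length, i.e. the size of the returned page rather than the overall row count. A hedged client-side paging sketch that works with that behavior — baseUrl and token are assumptions — stops when a page comes back shorter than the requested limit:

async function fetchAllOrgKeys(baseUrl: string, orgId: string, token: string) {
    const limit = 100;
    const all: unknown[] = [];
    for (let offset = 0; ; offset += limit) {
        const res = await fetch(
            `${baseUrl}/org/${orgId}/api-keys?limit=${limit}&offset=${offset}`,
            { headers: { Authorization: `Bearer ${token}` } } // assumed auth scheme
        );
        const body = await res.json();
        const page = body.data.apiKeys as unknown[];
        all.push(...page);
        if (page.length < limit) break; // last page reached
    }
    return all;
}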

View file

@ -1,85 +0,0 @@
import { db } from "@server/db";
import { apiKeys } from "@server/db";
import logger from "@server/logger";
import HttpCode from "@server/types/HttpCode";
import response from "@server/lib/response";
import { NextFunction, Request, Response } from "express";
import createHttpError from "http-errors";
import { z } from "zod";
import { fromError } from "zod-validation-error";
import { eq } from "drizzle-orm";
const querySchema = z.object({
limit: z
.string()
.optional()
.default("1000")
.transform(Number)
.pipe(z.number().int().positive()),
offset: z
.string()
.optional()
.default("0")
.transform(Number)
.pipe(z.number().int().nonnegative())
});
function queryApiKeys() {
return db
.select({
apiKeyId: apiKeys.apiKeyId,
lastChars: apiKeys.lastChars,
createdAt: apiKeys.createdAt,
name: apiKeys.name
})
.from(apiKeys)
.where(eq(apiKeys.isRoot, true));
}
export type ListRootApiKeysResponse = {
apiKeys: Awaited<ReturnType<typeof queryApiKeys>>;
pagination: { total: number; limit: number; offset: number };
};
export async function listRootApiKeys(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
try {
const parsedQuery = querySchema.safeParse(req.query);
if (!parsedQuery.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedQuery.error)
)
);
}
const { limit, offset } = parsedQuery.data;
const baseQuery = queryApiKeys();
const apiKeysList = await baseQuery.limit(limit).offset(offset);
return response<ListRootApiKeysResponse>(res, {
data: {
apiKeys: apiKeysList,
pagination: {
total: apiKeysList.length,
limit,
offset
}
},
success: true,
error: false,
message: "API keys retrieved successfully",
status: HttpCode.OK
});
} catch (error) {
logger.error(error);
return next(
createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
);
}
}

View file

@ -1,136 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db } from "@server/db";
import { actions, apiKeyActions } from "@server/db";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { fromError } from "zod-validation-error";
import { eq, and, inArray } from "drizzle-orm";
import { OpenAPITags, registry } from "@server/openApi";
const bodySchema = z
.object({
actionIds: z
.array(z.string().nonempty())
.transform((v) => Array.from(new Set(v)))
})
.strict();
const paramsSchema = z.object({
apiKeyId: z.string().nonempty()
});
registry.registerPath({
method: "post",
path: "/org/{orgId}/api-key/{apiKeyId}/actions",
description:
"Set actions for an API key. This will replace any existing actions.",
tags: [OpenAPITags.Org, OpenAPITags.ApiKey],
request: {
params: paramsSchema,
body: {
content: {
"application/json": {
schema: bodySchema
}
}
}
},
responses: {}
});
export async function setApiKeyActions(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
try {
const parsedBody = bodySchema.safeParse(req.body);
if (!parsedBody.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedBody.error).toString()
)
);
}
const { actionIds: newActionIds } = parsedBody.data;
const parsedParams = paramsSchema.safeParse(req.params);
if (!parsedParams.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedParams.error).toString()
)
);
}
const { apiKeyId } = parsedParams.data;
const actionsExist = await db
.select()
.from(actions)
.where(inArray(actions.actionId, newActionIds));
if (actionsExist.length !== newActionIds.length) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"One or more actions do not exist"
)
);
}
await db.transaction(async (trx) => {
const existingActions = await trx
.select()
.from(apiKeyActions)
.where(eq(apiKeyActions.apiKeyId, apiKeyId));
const existingActionIds = existingActions.map((a) => a.actionId);
const actionIdsToAdd = newActionIds.filter(
(id) => !existingActionIds.includes(id)
);
const actionIdsToRemove = existingActionIds.filter(
(id) => !newActionIds.includes(id)
);
if (actionIdsToRemove.length > 0) {
await trx
.delete(apiKeyActions)
.where(
and(
eq(apiKeyActions.apiKeyId, apiKeyId),
inArray(apiKeyActions.actionId, actionIdsToRemove)
)
);
}
if (actionIdsToAdd.length > 0) {
const insertValues = actionIdsToAdd.map((actionId) => ({
apiKeyId,
actionId
}));
await trx.insert(apiKeyActions).values(insertValues);
}
});
return response(res, {
data: {},
success: true,
error: false,
message: "API key actions updated successfully",
status: HttpCode.OK
});
} catch (error) {
logger.error(error);
return next(
createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
);
}
}
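
The route registered above (POST /org/{orgId}/api-key/{apiKeyId}/actions) replaces the key's entire action set, and the body schema dedupes actionIds before they are validated against the actions table. A minimal call sketch; baseUrl, token, and the action IDs are placeholders:

async function setKeyActions(baseUrl: string, orgId: string, apiKeyId: string, token: string) {
    const res = await fetch(`${baseUrl}/org/${orgId}/api-key/${apiKeyId}/actions`, {
        method: "POST",
        headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
        // Duplicate IDs are harmless; the schema dedupes them server-side.
        body: JSON.stringify({ actionIds: ["exampleActionA", "exampleActionA", "exampleActionB"] })
    });
    if (res.status === 400) {
        throw new Error("One or more actions do not exist");
    }
    return res.json();
}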

View file

@ -1,117 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db } from "@server/db";
import { apiKeyOrg, orgs } from "@server/db";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { fromError } from "zod-validation-error";
import { eq, and, inArray } from "drizzle-orm";
const bodySchema = z
.object({
orgIds: z
.array(z.string().nonempty())
.transform((v) => Array.from(new Set(v)))
})
.strict();
const paramsSchema = z.object({
apiKeyId: z.string().nonempty()
});
export async function setApiKeyOrgs(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
try {
const parsedBody = bodySchema.safeParse(req.body);
if (!parsedBody.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedBody.error).toString()
)
);
}
const { orgIds: newOrgIds } = parsedBody.data;
const parsedParams = paramsSchema.safeParse(req.params);
if (!parsedParams.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedParams.error).toString()
)
);
}
const { apiKeyId } = parsedParams.data;
// make sure all orgs exist
const allOrgs = await db
.select()
.from(orgs)
.where(inArray(orgs.orgId, newOrgIds));
if (allOrgs.length !== newOrgIds.length) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"One or more orgs do not exist"
)
);
}
await db.transaction(async (trx) => {
const existingOrgs = await trx
.select({ orgId: apiKeyOrg.orgId })
.from(apiKeyOrg)
.where(eq(apiKeyOrg.apiKeyId, apiKeyId));
const existingOrgIds = existingOrgs.map((a) => a.orgId);
const orgIdsToAdd = newOrgIds.filter(
(id) => !existingOrgIds.includes(id)
);
const orgIdsToRemove = existingOrgIds.filter(
(id) => !newOrgIds.includes(id)
);
if (orgIdsToRemove.length > 0) {
await trx
.delete(apiKeyOrg)
.where(
and(
eq(apiKeyOrg.apiKeyId, apiKeyId),
inArray(apiKeyOrg.orgId, orgIdsToRemove)
)
);
}
if (orgIdsToAdd.length > 0) {
const insertValues = orgIdsToAdd.map((orgId) => ({
apiKeyId,
orgId
}));
await trx.insert(apiKeyOrg).values(insertValues);
}
        });

        return response(res, {
            data: {},
            success: true,
            error: false,
            message: "API key orgs updated successfully",
            status: HttpCode.OK
        });
} catch (error) {
logger.error(error);
return next(
createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
);
}
}

Some files were not shown because too many files have changed in this diff.