checkin

parent 042e74c744
commit f9bcc11bab
9  API/.dockerignore  Normal file
@@ -0,0 +1,9 @@
*

# Explicitly include desired files
!./src
!./Cargo.toml
!./Cargo.lock
!./sqlx-data.json
!./configuration.yaml
!./migrations
25  API/.envrc
@@ -1,23 +1,2 @@
#!/usr/bin/env zsh

layout_poetry() {
    PYPROJECT_TOML="${PYPROJECT_TOML:-pyproject.toml}"
    if [[ ! -f "$PYPROJECT_TOML" ]]; then
        log_status "No pyproject.toml found. Executing \`poetry init\` to create a \`$PYPROJECT_TOML\` first."
        poetry init
    fi

    VIRTUAL_ENV=$(poetry env info --path 2>/dev/null ; true)

    if [[ -z $VIRTUAL_ENV || ! -d $VIRTUAL_ENV ]]; then
        log_status "No virtual environment exists. Executing \`poetry install\` to create one."
        poetry install
        VIRTUAL_ENV=$(poetry env info --path)
    fi

    PATH_add "$VIRTUAL_ENV/bin"
    export POETRY_ACTIVE=1
    export VIRTUAL_ENV
}

layout_poetry
# vim: set ft=bash:
source ./scripts/dev-setup.bash
3  API/.gitignore  vendored
@@ -1,2 +1 @@
__pycache__/
*.ignore
/target
34  API/.gitlab-ci.yml  Normal file
@@ -0,0 +1,34 @@
image: "rust:latest"

default:
  before_script:
    - rustc --version
    - cargo --version

stages:
  - test

test-code:
  stage: test
  script:
    - cargo test
    - cargo install cargo-tarpaulin
    - cargo tarpaulin --ignore-tests

lint-code:
  stage: test
  script:
    - rustup component add clippy
    - cargo clippy -- -D warnings

format-code:
  stage: test
  script:
    - rustup component add rustfmt
    - cargo fmt -- --check

audit-code:
  stage: test
  script:
    - cargo install cargo-audit
    - cargo audit
@@ -0,0 +1,15 @@
{
  "db_name": "PostgreSQL",
  "query": "UPDATE users SET superuser = $1 WHERE id = $2;",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Bool",
        "Int8"
      ]
    },
    "nullable": []
  },
  "hash": "3a29bf9da31e1477e629f636f0233b05cbca54a1681e2c2e4e229e0e60d250c2"
}
@@ -0,0 +1,34 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT * FROM users WHERE id = $1",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id",
        "type_info": "Int8"
      },
      {
        "ordinal": 1,
        "name": "discord_id",
        "type_info": "Int8"
      },
      {
        "ordinal": 2,
        "name": "superuser",
        "type_info": "Bool"
      }
    ],
    "parameters": {
      "Left": [
        "Int8"
      ]
    },
    "nullable": [
      false,
      true,
      false
    ]
  },
  "hash": "843923b9a0257cf80f1dff554e7dc8fdfc05f489328e8376513124dfb42996e3"
}
@@ -0,0 +1,46 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT * FROM orgs WHERE owner_id = $1",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id",
        "type_info": "Uuid"
      },
      {
        "ordinal": 1,
        "name": "name",
        "type_info": "Varchar"
      },
      {
        "ordinal": 2,
        "name": "owner_id",
        "type_info": "Int8"
      },
      {
        "ordinal": 3,
        "name": "discord_server_id",
        "type_info": "Int8"
      },
      {
        "ordinal": 4,
        "name": "active",
        "type_info": "Bool"
      }
    ],
    "parameters": {
      "Left": [
        "Int8"
      ]
    },
    "nullable": [
      false,
      false,
      true,
      true,
      false
    ]
  },
  "hash": "9c52bdefb3f03a8f78b6e218c54da00a3f9c485b8a239daca6efffd224151e64"
}
@@ -0,0 +1,46 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT * FROM orgs WHERE EXISTS (SELECT * FROM org_group_members WHERE user_id = $1)",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id",
        "type_info": "Uuid"
      },
      {
        "ordinal": 1,
        "name": "name",
        "type_info": "Varchar"
      },
      {
        "ordinal": 2,
        "name": "owner_id",
        "type_info": "Int8"
      },
      {
        "ordinal": 3,
        "name": "discord_server_id",
        "type_info": "Int8"
      },
      {
        "ordinal": 4,
        "name": "active",
        "type_info": "Bool"
      }
    ],
    "parameters": {
      "Left": [
        "Int8"
      ]
    },
    "nullable": [
      false,
      false,
      true,
      true,
      false
    ]
  },
  "hash": "b45023d484289474ccf749ece5be7aeee395aa73a5b1214826f270482471c3e8"
}
3671  API/Cargo.lock  generated  Normal file
File diff suppressed because it is too large
72  API/Cargo.toml  Normal file
@@ -0,0 +1,72 @@
[package]
name = "whitelist_api"
version = "0.1.0"
edition = "2021"

[lib]
path = "src/lib.rs"

[profile.release]
lto = true

[[bin]]
path = "src/main.rs"
name = "api"

[lints.rust]
unsafe_code = "forbid"

[dependencies]
axum = { version = "0.6.20", features = [ "tokio" ] }
utoipa = { version = "4.1.0", features = ["axum_extras", "uuid" ] }
utoipa-swagger-ui = { version = "4.0.0", features = ["axum"] }
anyhow = "1.0.75"
json-patch = "1.2.0"
thiserror = "1.0.50"
hyper = "0.14.27"
tower = "0.4.13"
claims = "0.7.1"
validator = { version = "0.16.1", features = [ "derive" ] }
unicode-segmentation = "1.10.1"
tower-http = { version = "0.4.4", features = ["tracing", "trace", "request-id"] }
config = "0.13.3"
uuid = { version = "1.5.0", features = ["v4", "fast-rng", "macro-diagnostics", "serde"] }
chrono = "0.4.31"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.108"
serde-aux = "4.2.0"
serde_with = "3.4.0"
tokio = { version = "1.33.0", features = ["full"] }
tracing = { version = "0.1.40", features = ["log"] }
tracing-subscriber = { version = "0.3.17", features = [
    "env-filter",
    "registry",
    "fmt",
    "json",
] }
tracing-bunyan-formatter = "0.3.9"
tracing-log = "0.2.0"
secrecy = { version = "0.8.0", features = ["serde"] }
derive_more = "0.99.17"
lazy_static = "1.4.0"

[dependencies.sqlx]
version = "0.7.2"
default-features = false
features = [
    "macros",
    "uuid",
    "chrono",
    "migrate",
    "postgres",
    "runtime-tokio-rustls",
    "rust_decimal"
]

[dev-dependencies]
reqwest = "0.11.22"
once_cell = "1.18.0"
fake = "2.9.1"
quickcheck = "1.0.3"
quickcheck_macros = "1.0.0"
rand = "0.8.5"
65  API/Dockerfile  Normal file
@@ -0,0 +1,65 @@
# syntax=docker/dockerfile:1.4
FROM rust:1.73 as base


FROM base as test-builder
RUN rm -f /etc/apt/apt.conf.d/docker-clean
RUN --mount=type=cache,target="/var/cache/apt" \
    --mount=type=cache,target="/usr/local/cargo/registry" \
    apt-get update && \
    apt-get -y install postgresql libpq5 --no-install-recommends && \
    apt-get update && \
    cargo install sqlx-cli --version="^0.5" --features="postgres" && \
    cargo install cargo-tarpaulin cargo-audit && \
    rustup component add clippy rustfmt && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*


FROM base AS prod-builder
WORKDIR /app
COPY ./ .
# Make sqlx use saved metadata instead of querying a live database
ENV SQLX_OFFLINE=true

# Our user argument for unprivileged use
ENV USER=api
ENV UID=10001
ENV APP_ENVIRONMENT=production
RUN --mount=type=cache,target="/usr/local/cargo/registry" \
    --mount=type=cache,sharing=private,target="/app/target" \
    adduser \
    --disabled-password \
    --gecos "" \
    --home "/nonexistent" \
    --shell "/sbin/nologin" \
    --no-create-home \
    --uid "${UID}" \
    "${USER}" && \
    cargo build --release && \
    cp target/release/api ./release-executable


FROM debian:buster-slim as prod
WORKDIR /app
# Synchronize users
ENV APP_ENVIRONMENT=production
COPY --from=prod-builder /etc/passwd /etc/passwd
COPY --from=prod-builder /etc/group /etc/group

# Copy the build over
COPY --from=prod-builder /app/configuration/ ./configuration/
COPY --from=prod-builder /app/.env ./.env
COPY --from=prod-builder /app/release-executable ./api

RUN --mount=type=cache,target="/var/cache/apt" \
    apt-get update -y && \
    apt-get install -y --no-install-recommends openssl ca-certificates && \
    apt-get autoremove -y && \
    apt-get clean -y && \
    rm -rf /var/lib/apt/lists/*

# Use our unprivileged user
USER api:api

ENTRYPOINT ["/app/api"]
42  API/Makefile  Normal file
@@ -0,0 +1,42 @@
.DEFAULT_GOAL := help

SHELL := /bin/bash

help:
	$(info The following targets are available:)
	$(info ------------------------------------)
	$(info build - Build with `cargo build`)
	$(info dev - Setup full dev environment)
	$(info help - Shows this message)
	$(info test - Test without any trace output)
	$(info test-error - Test with trace error output)
	$(info test-warn - Test with trace warning output)
	$(info test-info - Test with trace info output)
	$(info test-trace - Test with trace trace output)
	$(info test-debug - Test with trace debug output)
	$(info )

build:
	cargo build

dev:
	source ./scripts/dev-setup.bash

test:
	cargo test

test-info:
	TEST_LOG=info $(MAKE) test

test-error:
	TEST_LOG=error $(MAKE) test

test-warn:
	TEST_LOG=warn $(MAKE) test

test-trace:
	TEST_LOG=trace $(MAKE) test

test-debug:
	TEST_LOG=debug $(MAKE) test
@@ -1,18 +0,0 @@
from litestar.response.redirect import Redirect
import uvicorn
from litestar import Litestar, get


@get("/")
async def index() -> Redirect:
    return Redirect("/schema/swagger")


@get("/books/{book_id:int}")
async def get_book(book_id: int) -> dict[str, int]:
    return {"book_id": book_id}


if __name__ == "__main__":
    app = Litestar([index, get_book])
    uvicorn.run(__name__ + ":app", port=5000, log_level="info")
@@ -1,13 +1,35 @@
version: '3.8'

services:
  db:
    image: postgres
    restart: always
    ports:
      - "5432:5432"
  database:
    image: postgres:latest
    restart: unless-stopped
    container_name: whitelist-database
    environment:
      POSTGRES_PASSWORD: example
      POSTGRES_USER: "${APP__DATABASE__USERNAME:-postgres}"
      POSTGRES_PASSWORD: "${APP__DATABASE__PASSWORD:-password}"
      POSTGRES_DB: "${APP__DATABASE__NAME:-whitelist}"
    ports:
      - 5432:5432
    volumes:
      - ./docker-entrypoint-sql:/docker-entrypoint-initdb.d
      - ./postgres-data:/var/lib/postgresql/data
      - database-data:/var/lib/postgresql/data

  api:
    build: .
    container_name: "whitelist-api"
    links:
      - "database:db"
    restart: unless-stopped
    ports:
      - 8000:8000
    environment:
      APP__APPLICATION__HOST: "0.0.0.0"
      APP__APPLICATION__PORT: "8000"
      APP_DATABASE_REQUIRE_SSL: true
      APP__DATABASE__HOST: "db"
      APP__DATABASE__PORT: "${APP__DATABASE__PORT:-5432}"
      APP__DATABASE__USERNAME: "${APP__DATABASE__USERNAME:-postgres}"
      APP__DATABASE__PASSWORD: "${APP__DATABASE__PASSWORD:-password}"
      APP__DATABASE__NAME: "${APP__DATABASE__NAME:-whitelist}"
      APP__DATABASE__REQUIRE__SSL: false

volumes:
  database-data:
@@ -1,17 +0,0 @@
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

CREATE USER api WITH LOGIN PASSWORD 'api';
CREATE DATABASE api OWNER api;
GRANT ALL PRIVILEGES ON DATABASE api TO api;
\connect api api;

CREATE TABLE IF NOT EXISTS roles (
    id PRIMARY KEY DEFAULT uuid_generate_v4()
    title varchar(40),
    summary varchar(160),
    content text,
    published date,
    updated date,
    tags varchar(10)[],
    UNIQUE(title)
);
4  API/hurl/create-user.hurl  Normal file
@@ -0,0 +1,4 @@
POST http://localhost:3000/users/76561197960287931
HTTP 204

GET http://localhost:3000/users/all
86  API/migrations/20231116212316_tables.sql  Normal file
@@ -0,0 +1,86 @@
-- For UUIDS
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

CREATE TABLE IF NOT EXISTS users (
    id BIGINT PRIMARY KEY,
    discord_id BIGINT,
    superuser BOOL NOT NULL DEFAULT false
);

CREATE TABLE IF NOT EXISTS orgs (
    id UUID DEFAULT uuid_generate_v4() PRIMARY KEY,
    name VARCHAR(32) UNIQUE NOT NULL,
    owner_id BIGINT REFERENCES users(id),
    discord_server_id BIGINT,
    active BOOLEAN NOT NULL DEFAULT true
);

CREATE TABLE IF NOT EXISTS org_managers (
    id UUID DEFAULT uuid_generate_v4() PRIMARY KEY,
    user_id BIGINT REFERENCES users(id) ON DELETE CASCADE,
    org_id UUID REFERENCES orgs(id) ON DELETE CASCADE,
    active BOOLEAN NOT NULL DEFAULT true
);

CREATE TABLE IF NOT EXISTS org_tags (
    id UUID DEFAULT uuid_generate_v4() PRIMARY KEY,
    tag VARCHAR(32) NOT NULL,
    org_id UUID REFERENCES orgs(id) ON DELETE CASCADE,
    UNIQUE (tag, org_id) -- A tag can only appear in a organization once
);

CREATE TABLE IF NOT EXISTS org_groups (
    id UUID DEFAULT uuid_generate_v4() PRIMARY KEY,
    name VARCHAR(32) NOT NULL,
    org_id UUID REFERENCES orgs(id) ON DELETE CASCADE,
    active BOOLEAN NOT NULL DEFAULT true,
    UNIQUE (name, org_id) -- A group may only have one name
);

CREATE TABLE IF NOT EXISTS org_group_tags (
    id UUID DEFAULT uuid_generate_v4() PRIMARY KEY,
    group_id UUID REFERENCES org_groups(id) ON DELETE CASCADE,
    tag_id UUID REFERENCES org_tags(id) ON DELETE CASCADE,
    UNIQUE (tag_id, group_id) -- A tag can only appear in a group once
);

CREATE TABLE IF NOT EXISTS org_group_notes (
    id UUID DEFAULT uuid_generate_v4() PRIMARY KEY,
    group_id UUID REFERENCES org_groups(id) ON DELETE CASCADE,
    note VARCHAR(512) NOT NULL
);

CREATE TABLE IF NOT EXISTS org_group_members (
    id UUID DEFAULT uuid_generate_v4() PRIMARY KEY,
    user_id BIGINT REFERENCES users(id) ON DELETE CASCADE,
    group_id UUID REFERENCES org_groups(id) ON DELETE CASCADE,
    owner BOOLEAN NOT NULL DEFAULT false,
    UNIQUE (user_id, group_id) -- A user can only appear in a group once
);

CREATE TABLE IF NOT EXISTS org_group_squad_details (
    id UUID DEFAULT uuid_generate_v4() PRIMARY KEY,
    group_id UUID REFERENCES org_groups(id) ON DELETE CASCADE,
    remote_admin_list VARCHAR(256),
    remote_admin_list_group VARCHAR(32),
    perm_changemap BOOLEAN NOT NULL DEFAULT false,
    perm_pause BOOLEAN NOT NULL DEFAULT false,
    perm_cheat BOOLEAN NOT NULL DEFAULT false,
    perm_private BOOLEAN NOT NULL DEFAULT false,
    perm_balance BOOLEAN NOT NULL DEFAULT false,
    perm_chat BOOLEAN NOT NULL DEFAULT false,
    perm_kick BOOLEAN NOT NULL DEFAULT false,
    perm_ban BOOLEAN NOT NULL DEFAULT false,
    perm_config BOOLEAN NOT NULL DEFAULT false,
    perm_cameraman BOOLEAN NOT NULL DEFAULT false,
    perm_immune BOOLEAN NOT NULL DEFAULT false,
    perm_manageserver BOOLEAN NOT NULL DEFAULT false,
    perm_featuretest BOOLEAN NOT NULL DEFAULT false,
    perm_reserve BOOLEAN NOT NULL DEFAULT false,
    perm_demos BOOLEAN NOT NULL DEFAULT false,
    perm_clientdemos BOOLEAN NOT NULL DEFAULT false,
    perm_debug BOOLEAN NOT NULL DEFAULT false,
    perm_teamchange BOOLEAN NOT NULL DEFAULT false,
    perm_forceteamchange BOOLEAN NOT NULL DEFAULT false,
    perm_canseeadminchat BOOLEAN NOT NULL DEFAULT false
);
1515  API/poetry.lock  generated
File diff suppressed because it is too large
@@ -1,16 +0,0 @@
[tool.poetry]
name = "whitelist-api"
version = "0.1.0"
description = "A whitelist API for the Asgard Eternal Squad Server"
authors = ["Price Hiller <price@orion-technologies.io>"]
license = "GPLv3"
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.11"
psycopg = {extras = ["binary"], version = "^3.1.12"}
litestar = {extras = ["jwt", "opentelemetry", "standard"], version = "^2.2.1"}

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
227  API/scripts/dev-setup.bash  Executable file
@@ -0,0 +1,227 @@
#!/usr/bin/env bash

set -eou pipefail

check-deps() {
    local err=false
    if ! [[ -x "$(command -v psql)" ]]; then
        cat >&2 <<-__EOF__
	Error: psql is not executable!
	Ensure it is installed and executable!
	__EOF__
        err=true
    fi

    if ! [[ -x "$(command -v sqlx)" ]]; then
        cat >&2 <<-__EOF__
	Error: sqlx is not executable!
	Install it with:
	    cargo install --version=0.7.2 sqlx-cli --features postgres

	Then ensure it can be ran by you!
	__EOF__
        err=true
    fi

    if [[ "${err}" == true ]]; then
        return 1
    fi
    return 0

}

init-db() {
    local container_name="${1}"
    local db_user="${2}"
    local db_password="${3}"
    local db_name="${4}"
    local db_port="${5}"
    local skip_docker="${6}"

    local check_deps_ret
    check_deps_ret=$(check-deps)
    if ((check_deps_ret != 0)); then
        printf "Missing some required dependencies, unable to setup a dev environment! Exiting.\n" >&2
        exit "${check_deps_ret}"
    fi

    if ((skip_docker == 0)); then
        if docker container inspect "${container_name}" >/dev/null 2>&1; then
            printf "Container %s exists. Removing it.\n" "${container_name}" >&2
            docker container stop "${container_name}" >/dev/null 2>&1 || true
            docker container rm "${container_name}" >/dev/null 2>&1
            printf "Stopped container %s\n" "${container_name}" >&2
        fi
        printf "Starting container %s\n" "${container_name}"
        docker run \
            --name "${container_name}" \
            -e POSTGRES_USER="${db_user}" \
            -e POSTGRES_PASSWORD="${db_password}" \
            -e POSTGRES_DB="${db_name}" \
            -p "${db_port}":5432 \
            -d postgres \
            postgres -N 1000 >/dev/null
        printf "Container %s started\n" "${container_name}"
    fi

    local max_wait=10
    local curr_attempts=0
    until PGPASSWORD="${db_password}" psql -h "0.0.0.0" -U "${db_user}" -p "${db_port}" -d "postgres" -c '\q' >/dev/null 2>&1; do
        printf "Postgress is still unavailable -- waiting\n" >&2
        curr_attempts=$((curr_attempts + 1))
        if ((curr_attempts == max_wait)); then
            printf "Unable to contact the postgres container after %s seconds! Exiting.\n" "${max_wait}" >&2
            exit 1
        fi
        sleep 1
    done

    printf "Postgres is up and running on port %s\n" "${db_port}" >&2

    local db_url="postgres://${db_user}:${db_password}@0.0.0.0:${db_port}/${db_name}"
    printf "Creating database with sqlx\n"
    DATABASE_URL="${db_url}" sqlx database create
    printf "Database created\n" >&2

    printf "Migrating database with sqlx\n"
    DATABASE_URL="${db_url}" sqlx migrate run
    printf "Finished migration\n" >&2

    printf "Exporting Environment Variables\n" >&2
    printf -- "-------------------------------\n" >&2
    export DATABASE_URL="${db_url}"
    export APP__APPLICATION__HOST="localhost"
    export APP__APPLICATION__PORT="3000"
    export APP__DATABASE__HOST="localhost"
    export APP__DATABASE__PORT="${db_port}"
    export APP__DATABASE__USERNAME="${db_user}"
    export APP__DATABASE__PASSWORD="${db_password}"
    export APP__DATABASE__NAME="${db_name}"
    export APP__DATABASE__REQUIRE_SSL="false"

    for var in \
        DATABASE_URL \
        APP__APPLICATION__HOST \
        APP__APPLICATION__PORT \
        APP__DATABASE__HOST \
        APP__DATABASE__PORT \
        APP__DATABASE__USERNAME \
        APP__DATABASE__PASSWORD \
        APP__DATABASE__NAME \
        APP__DATABASE__REQUIRE_SSL; do
        printf " Exported %s = %s\n" "${var}" "${!var}"
    done

    cat >&2 <<- __EOF__
	================================
	Done Setting up Dev Environment!
	================================
	__EOF__
}

usage() {
    cat <<-__EOF__
	Usage: $(basename "${0}") -c Container-Name -u User -p DBPass -d DBName -P DBPort

	-c <string> | --container-name <string>
	    Sets the name of the docker container to be ran.

	    Example:
	        --container-name "My New Container Name"

	-u <string> | --db-user <string>
	    The username to use for the database.

	    Example:
	        --db-user "User"

	-p <string> | --db-password <string>
	    The database user's password. As a note, do not use a live database with this.
	    This is meant for a dev environment where exposing this password has little to no security impact.

	    Example:
	        --db-password "password"

	-d <string> | --db-name <string>
	    The database name to use.

	    Example:
	        --db-name "My Database Name"

	-P <int> | --db-port <int>
	    The port used to access the database.

	    Example:
	        --db-port 1337

	-s | --skip-docker
	    If this flag is used, all docker container setup will be skipped.

	    Example:
	        --skip-docker

	-h | --help | -?
	    Shows this usage menu.

	    Example:
	        --help
	__EOF__
}
main() {
    local container_name="${CONTAINER_NAME:="Whitelist-API-DB"}"
    local db_user="${DB_USER:=postgres}"
    local db_password="${DB_PASSWORD:=password}"
    local db_name="${DB_NAME:=whitelist}"
    local db_port="${DB_PORT:=5432}"
    local skip_docker="${SKIP_DOCKER:=0}"

    set +u
    while :; do
        case "${1}" in
            -h | -\? | --help)
                usage # Display a usage synopsis.
                exit
                ;;
            --) # End of all options.
                break
                ;;
            -c | --container-name)
                shift
                container_name="${1}"
                ;;
            -u | --db-user)
                shift
                db_user="${1}"
                ;;
            -p | --db-password)
                shift
                db_password="${1}"
                ;;
            -d | --db-name)
                shift
                db_name="${1}"
                ;;
            -P | --db-port)
                shift
                db_port="${1}"
                ;;
            -s | --skip-docker)
                shift
                skip_docker=1
                ;;
            -?*)
                printf 'Unknown option: %s\n' "$1" >&2
                usage
                exit 1
                ;;
            *) # Default case: No more options, so break out of the loop.
                break ;;
        esac
        shift
    done
    set -u

    init-db "${container_name}" "${db_user}" "${db_password}" "${db_name}" "${db_port}" "${skip_docker}"
}

main "${@}"
88  API/src/configuration.rs  Normal file
@@ -0,0 +1,88 @@
use secrecy::ExposeSecret;
use secrecy::Secret;
use serde_aux::field_attributes::deserialize_bool_from_anything;
use serde_aux::field_attributes::deserialize_number_from_string;
use sqlx::postgres::PgConnectOptions;
use sqlx::postgres::PgSslMode;
use sqlx::ConnectOptions;

#[derive(serde::Deserialize, Debug)]
pub struct Settings {
    pub database: DatabaseSettings,
    pub application: AppConfig,
}

#[derive(serde::Deserialize, Debug)]
pub struct AppConfig {
    #[serde(deserialize_with = "deserialize_number_from_string")]
    pub port: u16,
    pub host: String,
}

#[derive(serde::Deserialize, Debug)]
pub struct DatabaseSettings {
    pub username: String,
    pub password: Secret<String>,
    #[serde(deserialize_with = "deserialize_number_from_string")]
    pub port: u16,
    pub host: String,
    pub name: String,
    #[serde(deserialize_with = "deserialize_bool_from_anything")]
    pub require_ssl: bool,
}

impl DatabaseSettings {
    pub fn without_db(&self) -> PgConnectOptions {
        let ssl_mode = if self.require_ssl {
            PgSslMode::Require
        } else {
            PgSslMode::Prefer
        };

        PgConnectOptions::new()
            .host(&self.host)
            .username(&self.username)
            .password(&self.password.expose_secret())
            .port(self.port)
            .ssl_mode(ssl_mode)
    }

    pub fn with_db(&self) -> PgConnectOptions {
        self.without_db()
            .database(&self.name)
            .log_statements(tracing::log::LevelFilter::Trace)
    }

    pub fn connection_string(&self) -> Secret<String> {
        Secret::new(format!(
            "postgres://{}:{}@{}:{}/{}",
            self.username,
            self.password.expose_secret(),
            self.host,
            self.port,
            self.name
        ))
    }

    pub fn connection_string_without_db(&self) -> Secret<String> {
        Secret::new(format!(
            "postgres://{}:{}@{}:{}",
            self.username,
            self.password.expose_secret(),
            self.host,
            self.port
        ))
    }
}

pub fn get_configuration() -> Result<Settings, config::ConfigError> {
    let settings = config::Config::builder()
        .add_source(
            config::Environment::with_prefix("APP")
                .try_parsing(true)
                .separator("__"),
        )
        .build()?;

    settings.try_deserialize::<Settings>()
}
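get_configuration() above reads everything from APP-prefixed environment variables with a double underscore as the separator. A minimal usage sketch, assuming the same variable names and default values that docker-compose.yml and scripts/dev-setup.bash in this commit export (the values below are illustrative):

    // Sketch only: set APP__* variables, then let the config crate map them onto
    // Settings { application, database } via the "__" separator.
    std::env::set_var("APP__APPLICATION__HOST", "localhost");
    std::env::set_var("APP__APPLICATION__PORT", "3000");
    std::env::set_var("APP__DATABASE__HOST", "localhost");
    std::env::set_var("APP__DATABASE__PORT", "5432");
    std::env::set_var("APP__DATABASE__USERNAME", "postgres");
    std::env::set_var("APP__DATABASE__PASSWORD", "password");
    std::env::set_var("APP__DATABASE__NAME", "whitelist");
    std::env::set_var("APP__DATABASE__REQUIRE_SSL", "false");

    let settings = whitelist_api::configuration::get_configuration()
        .expect("configuration should deserialize from the APP__* variables");
    assert_eq!(settings.application.port, 3000);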
51  API/src/domain/mod.rs  Normal file
@@ -0,0 +1,51 @@
use derive_more::{Deref, Display, From, Into};
use utoipa::ToSchema;
use std::option::Option;

pub mod org;
pub mod org_group;
pub mod user;

#[derive(
    Debug,
    serde::Deserialize,
    serde::Serialize,
    Eq,
    PartialEq,
    Clone,
    Copy,
    Into,
    Display,
    From,
    Deref,
    sqlx::Type,
    ToSchema
)]
pub struct DiscordID(i64);

#[derive(
    Debug,
    serde::Deserialize,
    serde::Serialize,
    Eq,
    PartialEq,
    Deref,
    sqlx::Type,
    ToSchema
)]
pub struct NullableDiscordID(Option<DiscordID>);

impl Into<std::option::Option<i64>> for NullableDiscordID {
    fn into(self) -> std::option::Option<i64> {
        self.0.map(|num| *num)
    }
}

impl Into<NullableDiscordID> for std::option::Option<i64> {
    fn into(self) -> NullableDiscordID {
        match self {
            Some(num) => NullableDiscordID(Some(DiscordID(num))),
            None => NullableDiscordID(None)
        }
    }
}
55  API/src/domain/org.rs  Normal file
@@ -0,0 +1,55 @@
use super::{user::UserID, DiscordID};
use derive_more::{Display, From, Into};
use uuid::Uuid;

#[derive(
    Debug, serde::Deserialize, Eq, PartialEq, Ord, PartialOrd, Clone, From, Into, Display,
)]
pub struct OrgID(Uuid);

#[derive(Debug, serde::Deserialize)]
pub struct Org {
    id: OrgID,
    owner_id: Option<UserID>,
    discord_id: Option<DiscordID>,
    active: bool,
}

impl Org {
    pub fn new(
        id: impl Into<OrgID>,
        owner_id: Option<impl Into<UserID>>,
        discord_id: Option<impl Into<DiscordID>>,
        active: bool,
    ) -> Org {
        Org {
            id: id.into(),
            owner_id: owner_id.map(|owner| owner.into()),
            discord_id: discord_id.map(|discord_id| discord_id.into()),
            active,
        }
    }
}

#[derive(
    Debug, serde::Deserialize, Eq, PartialEq, Ord, PartialOrd, Clone, From, Into, Display,
)]
pub struct OrgManagerID(Uuid);

#[derive(Debug, serde::Deserialize)]
pub struct OrgManager {
    id: OrgManagerID,
    user_id: UserID,
    org_id: Uuid,
    active: bool,
}

#[derive(Debug, serde::Deserialize, Eq, PartialEq, Ord, PartialOrd, Clone)]
pub struct OrgTagID(Uuid);

#[derive(Debug, serde::Deserialize)]
pub struct OrgTag {
    id: Uuid,
    name: String,
    org_id: OrgID,
}
79  API/src/domain/org_group.rs  Normal file
@@ -0,0 +1,79 @@
use super::{user::UserID, DiscordID, org::OrgTagID};
use uuid::Uuid;

#[derive(Debug, serde::Deserialize)]
pub struct OrgGroupID(Uuid);

// TODO: Use a varchar or some equivalent type to limit the length of the name
#[derive(Debug, serde::Deserialize)]
pub struct OrgGroup {
    id: Uuid,
    name: String,
    org_id: Uuid,
    active: bool,
}

#[derive(Debug, serde::Deserialize)]
pub struct OrgGroupTagID(Uuid);

#[derive(Debug, serde::Deserialize)]
pub struct OrgGroupTag {
    id: OrgGroupTagID,
    group_id: OrgGroupID,
    tag_id: OrgTagID,
}

#[derive(Debug, serde::Deserialize)]
pub struct OrgGroupNoteID(Uuid);

// TODO: Use a varchar or some equivalent type to limit the length of the note
#[derive(Debug, serde::Deserialize)]
pub struct OrgGroupNote {
    id: OrgGroupNoteID,
    group_id: OrgGroupID,
    note: String,
}

#[derive(Debug, serde::Deserialize)]
pub struct OrgGroupMemberID(Uuid);

#[derive(Debug, serde::Deserialize)]
pub struct OrgGroupMember {
    id: OrgGroupMemberID,
    user_id: UserID,
    group_id: OrgGroupID,
    owner: bool,
}

#[derive(Debug, serde::Deserialize)]
pub struct OrgGroupSquadDetailsID(Uuid);


// TODO: Use a varchar or some equivalent type to limit the length of the remote_admin_list & remote_admin_list_group
#[derive(Debug, serde::Deserialize)]
pub struct OrgGroupSquadDetails {
    id: OrgGroupSquadDetailsID,
    group_id: OrgGroupID,
    remote_admin_list: Option<String>,
    remote_admin_list_group: Option<String>,
    perm_changemap: bool,
    perm_pause: bool,
    perm_cheat: bool,
    perm_private: bool,
    perm_balance: bool,
    perm_chat: bool,
    perm_kick: bool,
    perm_ban: bool,
    perm_config: bool,
    perm_cameraman: bool,
    perm_immune: bool,
    perm_manageserver: bool,
    perm_featuretest: bool,
    perm_reserve: bool,
    perm_demos: bool,
    perm_clientdemos: bool,
    perm_debug: bool,
    perm_teamchange: bool,
    perm_forceteamchange: bool,
    perm_canseeadminchat: bool,
}
193  API/src/domain/user.rs  Normal file
@@ -0,0 +1,193 @@
use derive_more::{Deref, Display, From, Into};
use sqlx::PgPool;
use tracing::Level;
use utoipa::ToSchema;
use validator::ValidationError;

use super::{org::Org, NullableDiscordID};

#[derive(
    Debug,
    serde::Deserialize,
    serde::Serialize,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Clone,
    From,
    Into,
    Display,
    Deref,
    ToSchema,
    sqlx::Type,
)]
pub struct Steam64(i64);

impl Steam64 {
    pub fn new(num: i64) -> Steam64 {
        Steam64(num)
    }

    pub fn validate(&self) -> Result<(), ValidationError> {
        // TODO: Validate the Steam64 represents an individual user account
        // See https://developer.valvesoftware.com/wiki/SteamID#Types_of_Steam_Accounts for details
        if self.0 <= 0 || self.0.ilog10() + 1 != 17 {
            return Err(ValidationError::new(
                "Steam64 has incorrect number of digits",
            ));
        }
        Ok(())
    }
}

#[derive(
    Debug,
    serde::Deserialize,
    serde::Serialize,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Clone,
    Display,
    ToSchema,
    sqlx::Type,
)]
pub struct UserID(Steam64);

impl std::ops::Deref for UserID {
    type Target = i64;

    fn deref(&self) -> &Self::Target {
        &*self.0
    }
}

impl From<i64> for UserID {
    fn from(value: i64) -> Self {
        Self::new(value)
    }
}

impl UserID {
    pub fn new(user_id: i64) -> UserID {
        UserID(Steam64::new(user_id))
    }

    /// Creates a user from the id
    #[tracing::instrument(name = "DB: Insert user into database", skip(pool), ret(level = Level::DEBUG))]
    pub async fn create(&self, pool: &PgPool) -> Result<(), sqlx::Error> {
        sqlx::query!(r#"INSERT INTO users (id) VALUES ($1);"#, **self)
            .execute(pool)
            .await?;
        Ok(())
    }
    /// Gets the full user struct for the given user id
    #[tracing::instrument(name = "DB: Get user from user id", skip(pool), ret(level = Level::DEBUG))]
    pub async fn get_user(&self, pool: &PgPool) -> Result<Option<User>, sqlx::Error> {
        let result = sqlx::query_as!(User, "SELECT * FROM users WHERE id = $1", **self)
            .fetch_optional(pool)
            .await
            .map_err(|e| {
                tracing::error!("Failed to execute query: {:?}", e);
                e
            })?;

        if let Some(result) = result {
            Ok(Some(User {
                id: UserID::new(*result.id),
                discord_id: result.discord_id,
                superuser: result.superuser,
            }))
        } else {
            Ok(None)
        }
    }

    /// Gets all Organizations this user id is an owner of
    #[tracing::instrument(name = "DB: Get owned orgs for user id", skip(pool), ret(level = Level::DEBUG))]
    pub async fn get_owned_orgs(&self, pool: &PgPool) -> Result<Vec<Org>, sqlx::Error> {
        let result: Vec<_> = sqlx::query!("SELECT * FROM orgs WHERE owner_id = $1", **self)
            .fetch_all(pool)
            .await?
            .into_iter()
            .map(|record| {
                Org::new(
                    record.id,
                    record.owner_id,
                    record.discord_server_id,
                    record.active,
                )
            })
            .collect();
        Ok(result)
    }

    /// Gets all Organizations this user id exists in
    #[tracing::instrument(name = "DB: Get orgs for user id", skip(pool), ret(level = Level::DEBUG))]
    pub async fn get_orgs(&self, pool: &PgPool) -> Result<Vec<Org>, sqlx::Error> {
        let result: Vec<_> = sqlx::query!(
            "SELECT * FROM orgs WHERE EXISTS (SELECT * FROM org_group_members WHERE user_id = $1)",
            **self
        )
        .fetch_all(pool)
        .await?
        .into_iter()
        .map(|record| {
            Org::new(
                record.id,
                record.owner_id,
                record.discord_server_id,
                record.active,
            )
        })
        .collect();
        Ok(result)
    }
}

#[derive(Debug, serde::Deserialize, serde::Serialize, sqlx::Type, ToSchema)]
pub struct User {
    id: UserID,
    pub discord_id: NullableDiscordID,
    pub superuser: bool,
}

impl User {
    pub fn new(
        id: impl Into<UserID>,
        discord_id: impl Into<NullableDiscordID>,
        superuser: bool,
    ) -> User {
        User {
            id: id.into(),
            discord_id: discord_id.into(),
            superuser,
        }
    }

    pub fn id(&self) -> &UserID {
        &self.id
    }

    #[tracing::instrument(name = "DB: Update current user with new data", skip(pool), ret(level = Level::DEBUG))]
    pub async fn update(&self, pool: &PgPool) -> Result<(), sqlx::Error> {
        sqlx::query!(
            r#"UPDATE users SET discord_id = $1, superuser = $2 WHERE id = $3;"#,
            self.discord_id.map(|num| *num),
            self.superuser,
            *self.id
        )
        .execute(pool)
        .await?;
        Ok(())
    }

    #[tracing::instrument(name = "DB: Get all users", skip(pool), ret(level = Level::DEBUG))]
    pub async fn get_all(pool: &PgPool) -> Result<Vec<User>, sqlx::Error> {
        Ok(sqlx::query_as!(User, r#"SELECT * FROM users;"#)
            .fetch_all(pool)
            .await?)
    }
}
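Steam64::validate above only checks the sign and the digit count for now; a quick illustrative sketch of that behaviour (the first value is the test Steam64 used in API/tests/routes/user.rs, the second is an arbitrary short number):

    use whitelist_api::domain::user::Steam64;

    // 17 digits: passes the current digit-count check.
    assert!(Steam64::new(76561197960287930).validate().is_ok());
    // Too short: rejected with "Steam64 has incorrect number of digits".
    assert!(Steam64::new(1234).validate().is_err());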
62  API/src/errors.rs  Normal file
@@ -0,0 +1,62 @@
use std::fmt::Display;

use axum::{http::StatusCode, response::IntoResponse};
use serde::Serialize;
use serde_json::Value;
use tracing::Level;
use utoipa::ToSchema;
use uuid::Uuid;

/// A default error response for most API errors.
#[derive(Debug, Serialize, ToSchema)]
pub struct AppError {
    /// An error message.
    message: Option<String>,
    /// A unique error ID.
    id: Uuid,
    #[serde(skip)]
    status: StatusCode,
    /// Optional Additional error details.
    details: Option<Value>,
}

impl AppError {
    pub fn new() -> Self {
        Self {
            message: None,
            id: Uuid::new_v4(),
            status: StatusCode::INTERNAL_SERVER_ERROR,
            details: None,
        }
    }

    pub fn message(mut self, message: impl Into<String>) -> Self {
        self.message = Some(message.into());
        self
    }

    pub fn status(mut self, status: StatusCode) -> Self {
        self.status = status;
        self
    }

    pub fn details(mut self, details: Value) -> Self {
        self.details = Some(details);
        self
    }

    #[tracing::instrument(name = "Uncaught Internal Server Error", ret(level = Level::ERROR))]
    pub fn anyhow(err: impl std::error::Error) -> Self {
        tracing::error!(name: "Unhandled Error", "Error: {}", anyhow::anyhow!("{err}"));
        Self::new().status(StatusCode::INTERNAL_SERVER_ERROR).message("INTERNAL SERVER ERROR! Please copy your `id` and submit it for troubleshooting.")
    }
}

impl IntoResponse for AppError {
    fn into_response(self) -> axum::response::Response {
        let status = self.status;
        let mut res = axum::Json(self).into_response();
        *res.status_mut() = status;
        res
    }
}
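AppError is a small builder; the user routes later in this commit chain it roughly like this (a sketch, with an illustrative message):

    use axum::http::StatusCode;
    use whitelist_api::errors::AppError;

    // Build a 404 response body; IntoResponse serializes message/id/details as JSON
    // and applies the serde-skipped status code to the HTTP response.
    let not_found = AppError::new()
        .message("Could not find a user with id: 76561197960287930!")
        .status(StatusCode::NOT_FOUND);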
66  API/src/extractors.rs  Normal file
@@ -0,0 +1,66 @@
use std::str::FromStr;

use axum::{
    async_trait,
    extract::{FromRequestParts, Query},
    http::request::Parts,
    response::Response
};
use serde::{Deserialize, Serialize};
pub enum Format {
    Json,
    SquadAdminList,
}

impl FromStr for Format {
    type Err = ();

    fn from_str(input: &str) -> Result<Format, ()> {
        match input.to_lowercase().as_str() {
            "json" => Ok(Format::Json),
            "squadadminlist" => Ok(Format::SquadAdminList),
            _ => Err(()),
        }
    }
}

impl Format {
    fn render<T>(self, data: T) -> Response
    where
        T: Serialize,
    {
        match self {
            Format::Json => {
                todo!("render json...")
            }
            Format::SquadAdminList => {
                todo!("render xml...")
            }
        }
    }
}

#[async_trait]
impl<S> FromRequestParts<S> for Format
where
    S: Send + Sync,
{
    type Rejection = String;

    async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
        #[derive(Deserialize)]
        struct FormatQuery {
            format: String,
        }

        let Query(query) = match Query::<FormatQuery>::from_request_parts(parts, state).await {
            Ok(query) => query,
            Err(_) => return Ok(Self::Json),
        };

        match Format::from_str(&query.format) {
            Ok(format) => Ok(format),
            Err(_) => Err(format!("Invalid format: {}", query.format))
        }
    }
}
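Format parsing is case-insensitive and the extractor falls back to Json when no ?format= query parameter is present; a small sketch of the FromStr half (inputs are illustrative):

    use std::str::FromStr;
    use whitelist_api::extractors::Format;

    assert!(matches!(Format::from_str("JSON"), Ok(Format::Json)));
    assert!(matches!(Format::from_str("SquadAdminList"), Ok(Format::SquadAdminList)));
    // Anything else is rejected, which the extractor reports as "Invalid format: ...".
    assert!(Format::from_str("xml").is_err());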
9  API/src/lib.rs  Normal file
@@ -0,0 +1,9 @@
pub mod configuration;
pub mod domain;
pub mod routes;
pub mod startup;
pub mod telemetry;
pub mod extractors;
pub mod errors;

pub use startup::run;
29  API/src/main.rs  Normal file
@@ -0,0 +1,29 @@
use sqlx::postgres::PgPoolOptions;
use std::net::TcpListener;
use whitelist_api::{
    configuration::get_configuration,
    startup::run,
    telemetry::{get_subscriber, init_subscriber},
};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let subscriber = get_subscriber("api".into(), "info".into(), std::io::stdout);
    init_subscriber(subscriber);

    let config = get_configuration().expect("Failed to read configuration!");

    let db_pool = PgPoolOptions::new()
        .max_connections(5)
        .connect_with(config.database.with_db())
        .await
        .expect("Failed to connect to database!");

    let address = format!("{}:{}", config.application.host, config.application.port);
    let listener = TcpListener::bind(&address).expect(&format!("Failed to bind to {}!", &address));

    tracing::info!("Starting application on {}", &address);
    Ok(run(listener, db_pool)
        .expect("Failed to run application!")
        .await?)
}
31  API/src/routes/health_check.rs  Normal file
@@ -0,0 +1,31 @@
use axum::{http::StatusCode, Router, routing};
use tracing::Level;
use utoipa::OpenApi;

pub fn router() -> Router {
    Router::new().route("/health", routing::get(health_check))
}

pub fn docs() -> utoipa::openapi::OpenApi {
    #[derive(OpenApi)]
    #[openapi(
        paths(
            health_check
        )
    )]
    struct ApiDoc;

    ApiDoc::openapi()
}

#[utoipa::path(
    get,
    path = "/health",
    responses(
        (status = StatusCode::NO_CONTENT, description = "Get health check success. If this ever fails then something has gone drastically wrong.")
    )
)]
#[tracing::instrument(name = "Get health" ret(level = Level::DEBUG))]
pub async fn health_check() -> StatusCode {
    StatusCode::NO_CONTENT
}
2  API/src/routes/mod.rs  Normal file
@@ -0,0 +1,2 @@
pub mod user;
pub mod health_check;
95  API/src/routes/user.rs  Normal file
@@ -0,0 +1,95 @@
use crate::{
    domain::user::{User, UserID, Steam64},
    errors::AppError,
};
use axum::{
    extract::{Path, Query},
    http::StatusCode,
    routing::{get, post},
    Extension, Json, Router,
};
use sqlx::PgPool;
use tracing::Level;
use utoipa::OpenApi;

pub fn router() -> Router {
    Router::new().nest(
        "/users/",
        Router::new().nest(
            "/:user_id",
            Router::new()
                .route("/", get(get_user_by_id))
                .route("/", post(create_user_from_id)),
        ),
    )
}

pub fn docs() -> utoipa::openapi::OpenApi {
    #[derive(OpenApi)]
    #[openapi(
        paths(
            get_user_by_id,
            create_user_from_id
        ),
        components(
            schemas(User, UserID, Steam64)
        )
    )]
    struct ApiDoc;

    ApiDoc::openapi()
}

#[tracing::instrument(name = "Getting user by id", skip(pool), ret(level = Level::DEBUG))]
#[utoipa::path(
    get,
    path = "/users/{user_id}",
    params(
        ("user_id" = UserID, description = "A given user id")
    ),
    responses(
        (status = StatusCode::OK, description = "Successfully returned a user from the database"),
        (status = StatusCode::NOT_FOUND, description = "Failed to find a user in the database with the given id")
    )
)]
pub async fn get_user_by_id(
    Extension(pool): Extension<PgPool>,
    Path(user_id): Path<UserID>,
) -> Result<Json<User>, AppError> {
    match user_id.get_user(&pool).await {
        Ok(user) => match user {
            Some(user) => Ok(Json(user)),
            None => Err(AppError::new().message(format!("Could not find a user with id: {user_id}!")).status(StatusCode::NOT_FOUND))
        },
        Err(e) => {
            Err(AppError::anyhow(e))
        }
    }
}

#[tracing::instrument(name = "Creating a new user from id", skip(pool), ret(level = Level::DEBUG))]
#[utoipa::path(
    post,
    path = "/users/{user_id}",
    params(
        ("user_id" = UserID, description = "A given user id")
    ),
    responses(
        (status = StatusCode::NO_CONTENT, description = "Successfully created the given user"),
        (status = StatusCode::CONFLICT, description = "User already exists in the database")
    )
)]
pub async fn create_user_from_id(
    Extension(pool): Extension<PgPool>,
    Path(user_id): Path<UserID>,
) -> Result<StatusCode, AppError> {
    match user_id.create(&pool).await {
        Ok(()) => Ok(StatusCode::NO_CONTENT),
        Err(err) => match err {
            sqlx::Error::Database(db) if db.constraint() == Some("users_pkey") => Err(
                AppError::new().message(format!("A user with id '{user_id}' already exists!")).status(StatusCode::CONFLICT)
            ),
            e => Err(AppError::anyhow(e))
        },
    }
}
85  API/src/startup.rs  Normal file
@@ -0,0 +1,85 @@
use crate::errors::AppError;
use axum::{
    body::Body,
    http::Request,
    response::Response,
    routing::get,
    Extension, Router,
};
use sqlx::PgPool;
use std::{net::TcpListener, time::Duration};
use tower::ServiceBuilder;
use tower_http::trace::TraceLayer;
use tracing::{Level, Span};
use utoipa::OpenApi;
use utoipa_swagger_ui::SwaggerUi;
use uuid::Uuid;

pub fn run(
    listener: TcpListener,
    connection: PgPool,
) -> anyhow::Result<
    axum::Server<hyper::server::conn::AddrIncoming, axum::routing::IntoMakeService<Router>>,
> {
    #[derive(OpenApi)]
    #[openapi(
        components(
            schemas(AppError)
        ),
        tags(
            (name = "Squad Whitelist", description = "Squad Whitelist management API")
        )
    )]
    struct ApiDoc;

    let mut doc = ApiDoc::openapi();
    doc.merge(crate::routes::health_check::docs());
    doc.merge(crate::routes::user::docs());

    let router = Router::new()
        .route("/", get(|| async { "API Online" }))
        .merge(SwaggerUi::new("/docs").url(
            "/docs/openapi.json",
            doc
        ))
        .merge(crate::routes::health_check::router())
        .merge(crate::routes::user::router())
        .layer(Extension(connection))
        .layer(
            ServiceBuilder::new().layer(
                TraceLayer::new_for_http()
                    .make_span_with(|_request: &Request<Body>| {
                        tracing::span!(
                            tracing::Level::INFO,
                            "HTTP-Request",
                            request_id = format!("{}", Uuid::new_v4())
                        )
                    })
                    .on_request(|request: &Request<_>, _span: &Span| {
                        let request_method = request.method().as_str();
                        let request_path = request.uri().path();
                        tracing::event!(
                            Level::INFO,
                            method = request_method,
                            uri = request_path,
                            "Started {} {}",
                            request_method,
                            request_path
                        )
                    })
                    .on_response(|response: &Response, latency: Duration, _span: &Span| {
                        let status = response.status().as_u16();
                        let latency = &format!("{:?}", latency)[..];
                        tracing::event!(
                            Level::INFO,
                            response_latency = latency,
                            status = status,
                            "Response status code: {}",
                            status
                        )
                    }),
            ),
        );

    Ok(axum::Server::from_tcp(listener)?.serve(router.into_make_service()))
}
42  API/src/telemetry.rs  Normal file
@@ -0,0 +1,42 @@
use tracing::{subscriber::set_global_default, Subscriber};
use tracing_bunyan_formatter::{BunyanFormattingLayer, JsonStorageLayer};
use tracing_log::LogTracer;
use tracing_subscriber::{
    fmt::MakeWriter, prelude::__tracing_subscriber_SubscriberExt, EnvFilter, Registry,
};

/// Compose multiple layers into a `tracing`'s subscriber.
///
/// # Implementation Notes
///
/// We are using `impl Subscriber` as return type to avoid having to
/// spell out the actual type of the returned subscriber, which is
/// indeed quite complex.
/// We need to explicitly call out that the returned subscriber is
/// `Send` and `Sync` to make it possible to pass it to `init_subscriber`
/// later on.
pub fn get_subscriber<Sink>(
    name: String,
    env_filter: String,
    sink: Sink,
) -> impl Subscriber + Send + Sync
where
    Sink: for<'a> MakeWriter<'a> + Send + Sync + 'static,
{
    let env_filter =
        EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(env_filter));
    let formatting_layer = BunyanFormattingLayer::new(name, sink);

    Registry::default()
        .with(env_filter)
        .with(JsonStorageLayer)
        .with(formatting_layer)
}

/// Register a subscriber as global default to process span data.
///
/// It should only be called once!
pub fn init_subscriber(subscriber: impl Subscriber + Send + Sync) {
    LogTracer::init().expect("Failed to set logger");
    set_global_default(subscriber).expect("Failed to set subscriber");
}
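The doc comments spell out the intended call pattern; src/main.rs in this commit wires it up as follows, repeated here as a short sketch:

    use whitelist_api::telemetry::{get_subscriber, init_subscriber};

    // Build a bunyan-formatted subscriber filtered at "info" and install it once.
    let subscriber = get_subscriber("api".into(), "info".into(), std::io::stdout);
    init_subscriber(subscriber);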
59  API/tests/helpers/mod.rs  Normal file
@@ -0,0 +1,59 @@
use once_cell::sync::Lazy;
use sqlx::PgPool;
use std::net::TcpListener;
use tracing::Level;
use whitelist_api::{
    configuration::{get_configuration, AppConfig},
    telemetry::{get_subscriber, init_subscriber},
};

static TRACING: Lazy<()> = Lazy::new(|| {
    if std::env::var("TEST_LOG").is_ok() {
        let subscriber = match std::env::var("TEST_LOG") {
            Ok(log_level) => get_subscriber(
                "test".into(),
                log_level.to_lowercase().into(),
                std::io::stdout,
            ),
            Err(e) => {
                tracing::error!("Failed to get log level from environment variable `TEST_LOG`! Error: {e}");
                get_subscriber("test".into(), "debug".into(), std::io::stdout)
            }
        };
        init_subscriber(subscriber);
    } else {
        let subscriber = get_subscriber("test".into(), "debug".into(), std::io::sink);
        init_subscriber(subscriber);
    }
});

#[derive(Debug)]
pub struct TestApp {
    pub address: String,
    pub config: AppConfig,
    pub client: reqwest::Client,
}

impl TestApp {
    #[tracing::instrument(name = "Spawning test application" level = Level::DEBUG)]
    pub async fn spawn(connection_pool: &PgPool) -> TestApp {
        Lazy::force(&TRACING);

        let listener = TcpListener::bind("127.0.0.1:0").expect("Failed to bind to random port!");
        let port = listener.local_addr().unwrap().port();
        let address = format!("http://127.0.0.1:{}", port);

        let configuration = get_configuration().expect("Failed to read configuration!");

        let server =
            whitelist_api::run(listener, connection_pool.clone()).expect("Failed to bind address!");

        let _ = tokio::spawn(server);

        TestApp {
            address,
            config: configuration.application,
            client: reqwest::Client::new(),
        }
    }
}
2  API/tests/main.rs  Normal file
@@ -0,0 +1,2 @@
mod helpers;
mod routes;
18  API/tests/routes/health_check.rs  Normal file
@@ -0,0 +1,18 @@
use crate::helpers::TestApp;

#[sqlx::test]
async fn health_check_works(pool: sqlx::PgPool) {
    let app = TestApp::spawn(&pool).await;

    // Act
    let response = app
        .client
        .get(String::from(&app.address) + "/health")
        .send()
        .await
        .expect("Failed to execute request.");

    // Check
    assert_eq!(response.status(), reqwest::StatusCode::NO_CONTENT);
    assert_eq!(Some(0), response.content_length());
}
2  API/tests/routes/mod.rs  Normal file
@@ -0,0 +1,2 @@
mod health_check;
mod user;
151  API/tests/routes/user.rs  Normal file
@@ -0,0 +1,151 @@
use crate::helpers::TestApp;
use sqlx::PgPool;
use whitelist_api::domain::user::{User, UserID};

#[sqlx::test]
async fn gets_404_for_missing_user(pool: PgPool) {
    // Arrange
    let app = TestApp::spawn(&pool).await;

    // Act
    let user_id: i64 = 76561197960287930;
    let response = app
        .client
        .get(format!("{}/user/{}", &app.address, &user_id))
        .send()
        .await
        .expect("Failed to execute request.");

    // Check
    assert_eq!(response.status(), reqwest::StatusCode::NOT_FOUND);
}

#[sqlx::test]
async fn can_get_existing_user_from_db(pool: PgPool) {
    // Arrange
    let app = TestApp::spawn(&pool).await;
    let user_id: i64 = 76561197960287930;
    sqlx::query!("INSERT INTO users (id) VALUES ($1);", &user_id)
        .execute(&pool)
        .await
        .expect("Failed to insert user into database!");

    // Act
    let response = app
        .client
        .get(format!("{}/user/{}", &app.address, &user_id))
        .send()
        .await
        .expect("Failed to execute request.");

    // Check
    assert_eq!(response.status(), reqwest::StatusCode::OK);
    assert_eq!(
        response
            .headers()
            .get("Content-Type")
            .expect("No content type was returned to the request."),
        "application/json"
    );
    assert!(response.content_length() > Some(0));
    let user: User = serde_json::from_str(
        &response
            .text()
            .await
            .expect("Failed to get returned text from the request."),
    )
    .expect("Failed to deserialize response as a User.");
    assert_eq!(user.id(), &UserID::new(user_id));
    // Check that the user is not a superuser by default
    assert_eq!(user.superuser, false);
    // Check that the user has a null discord_id by default
    assert_eq!(user.discord_id, None.into());
}


#[sqlx::test]
async fn can_create_a_new_user(pool: PgPool) {
    // Arrange
    let app = TestApp::spawn(&pool).await;
    let user_id: i64 = 76561197960287930;

    // Act
    let response = app
        .client
        .post(format!("{}/user/{}", &app.address, &user_id))
        .send()
        .await
        .expect("Failed to execute request.");

    // Check
    assert_eq!(response.status(), reqwest::StatusCode::NO_CONTENT);
    assert_eq!(response.content_length(), Some(0));

    // Check that the user was actually created
    let response = app
        .client
        .get(format!("{}/user/{}", &app.address, &user_id))
        .send()
        .await
        .expect("Failed to execute request.");

    // Check
    assert_eq!(response.status(), reqwest::StatusCode::OK);
    assert_eq!(
        response
            .headers()
            .get("Content-Type")
            .expect("No content type was returned to the request."),
        "application/json"
    );
    assert!(response.content_length() > Some(0));
    let user: User = serde_json::from_str(
        &response
            .text()
            .await
            .expect("Failed to get returned text from the request."),
    )
    .expect("Failed to deserialize response as a User.");
    assert_eq!(user.id(), &UserID::new(user_id));
    // Check that the user is not a superuser by default
    assert_eq!(user.superuser, false);
    // Check that the user has a null discord_id by default
    assert_eq!(user.discord_id, None.into());
}

#[sqlx::test]
async fn attempting_to_create_user_twice_results_in_conflict(pool: PgPool) {
    // Arrange
    let app = TestApp::spawn(&pool).await;
    let user_id: i64 = 76561197960287930;

    // Act
    let response = app
        .client
        .post(format!("{}/user/{}", &app.address, &user_id))
        .send()
        .await
        .expect("Failed to execute request.");

    // Check
    assert_eq!(response.status(), reqwest::StatusCode::NO_CONTENT);
    assert_eq!(response.content_length(), Some(0));

    // Act Pt.2
    let response = app
        .client
        .post(format!("{}/user/{}", &app.address, &user_id))
        .send()
        .await
        .expect("Failed to execute request.");

    // Check
    assert_eq!(response.status(), reqwest::StatusCode::CONFLICT);
    assert_eq!(
        response
            .headers()
            .get("Content-Type")
            .expect("No content type was returned to the request."),
        "application/json"
    );
}
322
README.org
@ -1,3 +1,4 @@
#+OPTIONS: H:99 ^:nil
* Squad Whitelist Management

@ -6,185 +7,196 @@

*** Commands

- */link*
  - Example: ~/link 7312312312395~
  - Links user's discord ID with their provided steam64
  - If linked to another discord id, reject
  - Use the user's discord username for their username in the API
- */link*
  - Example: =/link 7312312312395=
  - Links user's discord ID with their provided steam64
  - If linked to another discord id, reject
  - Use the user's discord username for their username in the API

- */group-link*
  - Example: ~/group-link @Odin {group-uuid}~
  - Links a Group on the API with a given discord ID for the role
  - If the Discord ID already exists, inform about it but allow
- */group-link*
  - Example: =/group-link @Odin {group-uuid}=
  - Links a Group on the API with a given discord ID for the role
  - If the Discord ID already exists, inform about it but allow

- */list-groups*
  - Example: ~/list-groups~
  - Outputs:
    - Groups with Associated Discord Role
      - Example: ~@Odin | https://my-api.com/org/asgard/group-uuid~
- */list-groups*
  - Example: =/list-groups=
  - Outputs:
    - Groups with Associated Discord Role
      - Example: =@Odin | https://my-api.com/org/asgard/group-uuid=

*** Events

- ~on-member-update~
  - For each member, look at the change in discord roles
  - If they had a role added to them that's a linked group, add them to the Group
  - If they don't have a linked Steam64, ping them in a channel to do so
- *on-member-update*
  - For each member, look at the change in discord roles
  - If they had a role added to them that's a linked group, add them to the Group
  - If they don't have a linked Steam64, ping them in a channel to do so

- ~on-startup~
  - Hit up API for all members of linked roles
  - Check if the members are in those roles and modify as needed

** API

*** Functionality

**** Generate Remote Admin List

- Capability to export as remote admin list
- Can be applied to an entire Org
  - Example: ~https://api-url.com/org/asgard?reserved~
  - Example: ~https://api-url.com/group/group-uuid?startvote&changemap&pause~
- Query params
  - startvote
  - changemap
  - pause
  - cheat
  - private
  - balance
  - chat
  - kick
  - ban
  - config
  - cameraman
  - immune
  - manageserver
  - featuretest
  - reserve
  - demos
  - clientdemos
  - debug
  - teamchange
  - forceteamchange
  - canseeadminchat

- Example:
  - Query: ~https://api-url.com/org/{org-name}/{group-uuid}~
  - Output: ~Group={group-uuid}:{query-params} // {group-name}~
- *on-startup*
  - Hit up API for all members of linked roles
  - Check if the members are in those roles and modify as needed

** Data

- Org
  - Tags
  - Single Owner
    - All permissions Managers have
    - Org Perms
      - Toggle the active state
      - Delete
      - Rename
      - Add and remove org managers
      - Transfer ownership (member must be a manager of the org)
  - Managers
    - Group Perms
      - Delete
      - Create
      - Rename
      - Toggle active state
      - Manage group users
      - Manage group moderators
      - Manage group tags
      - Manage group names
    - Tag Perms
      - Delete
      - Create
      - Rename
  - Groups
    - Group: whitelist-{group-id}
    - Notes
    - Members
      - Group Perms
        - Manage Users (Add or Remove Users)
        - Manage Moderators (Allow other users to manage the group)
        - Manage Remote Import (Allowed to set a remote import)
          - This does not use the perms from the remote import, only pulls the members from the remote import list
          - Can specify a group in the remote (only pull members in that remote admin list)
        - Owner (All group perms)
    - Tags
      - Must be a tag that exists in the Org (reference a tag table)
    - Game Perms
      - Squad
        - Reserve
        - Demos
        - Kick
        - Ban
        - Etc.

*** Users

- Table Name: ~USERS~
  | Key | Type | Constraints | Description |
  |============|=============|=============|=============|
  | id | Primary Key | | Steam64 |
  | username | String | | |
  | password | String? | | Salted Hash, don't allow login if null. Owning tenant can set Password. |
  | discord_id | Int? | | |
  | superuser | bool | | |
- Table Name: =users=
  | Key | Type | Constraints | Description |
  |--------------|-----------|--------------------------|-------------|
  | =id= | ~DECIMAL~ | ~PRIMARY KEY~ | Steam64 |
  | =discord_id= | ~DECIMAL~ | ~UNIQUE~ | |
  | =superuser= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | |

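As a rough illustration, the =users= table above could be created with DDL along these lines (a sketch only, assuming PostgreSQL; the actual migration files are the source of truth):

#+begin_src sql
-- Sketch only: id is the player's Steam64, so no default is generated here.
CREATE TABLE users (
    id         DECIMAL PRIMARY KEY,
    discord_id DECIMAL UNIQUE,
    superuser  BOOLEAN NOT NULL DEFAULT false
);
#+end_src
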
*** Organizations

- Table Name: ~ORGS~
  | Key | Type | Constraints | Description |
  |===================|============|=============|=============|
  | id | UUID4 | | |
  | name | String | Unique | |
  | owner_id | ~USERS.ID~ | | |
  | discord_server_id | Int? | | |
- Table Name: =orgs=
  | Key | Type | Constraints | Description |
  |---------------------|---------------|-------------------------|-------------|
  | =id= | ~UUID4~ | ~PRIMARY KEY~ | |
  | =name= | ~VARCHAR(32)~ | ~UNIQUE NOT NULL~ | |
  | =owner_id= | ~UUID4~ | ~REFERENCES users(id)~ | |
  | =discord_server_id= | ~DECIMAL~ | | |
  | =active= | ~BOOLEAN~ | ~NOT NULL DEFAULT true~ | |

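A matching sketch for =orgs=. Note that the table above lists ~UUID4~ for =owner_id=, but a foreign key to =users(id)= has to share that column's ~DECIMAL~ type, so the sketch assumes ~DECIMAL~ (Steam64); ~gen_random_uuid()~ assumes PostgreSQL 13+ or the pgcrypto extension:

#+begin_src sql
CREATE TABLE orgs (
    id                UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name              VARCHAR(32) UNIQUE NOT NULL,
    owner_id          DECIMAL REFERENCES users(id),  -- must match users.id (Steam64)
    discord_server_id DECIMAL,
    active            BOOLEAN NOT NULL DEFAULT true
);
#+end_src
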
*** Group Members
**** Org Managers

- Table Name: ~GROUP_MEMBERS~
  | Key | Type | Constraints | Description |
  |====================|=======================|=============|=============|
  | user | ~ORG_MEMBERS.user_id~ | | |
  | org_id | ~ORGS.ID~ | | |
  | group | ~GROUPS.ID~ | | |
  | nick_name | String | | |
  | member | bool | | |
  | owner | bool | | |
  | perm_manage_mods | bool | | |
  | perm_manage_perms | bool | | |
  | perm_manage_users | bool | | |
  | perm_manage_tags | bool | | |
  | perm_manage_import | bool | | Allow user to specify a remote admin list to use for the Group |
- Table Name: =org_managers=
  | Key | Type | Constraints | Description |
  |-----------|-----------|------------------------------------------|-------------|
  | =id= | ~UUID4~ | ~PRIMARY KEY~ | |
  | =user_id= | ~DECIMAL~ | ~REFERENCES users(id) ON DELETE CASCADE~ | |
  | =org_id= | ~UUID4~ | ~REFERENCES orgs(id) ON DELETE CASCADE~ | |
  | =active= | ~BOOLEAN~ | ~NOT NULL DEFAULT true~ | |

*** Group Squad Perms
Additional Constraints:
- ~UNIQUE(user_id, org_id)~

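Sketch for =org_managers=, folding the composite constraint above into the table definition (same assumptions as the earlier sketches):

#+begin_src sql
CREATE TABLE org_managers (
    id      UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    user_id DECIMAL REFERENCES users(id) ON DELETE CASCADE,
    org_id  UUID REFERENCES orgs(id) ON DELETE CASCADE,
    active  BOOLEAN NOT NULL DEFAULT true,
    UNIQUE (user_id, org_id)  -- a user manages a given org at most once
);
#+end_src
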
- Table Name: ~GROUP_SQUAD_PERMS~
  | Key | Type | Constraints | Description |
  |======================|============|=============|=============|
  | group_id | ~GROUP.ID~ | Primary Key | |
  | perm_changemap | bool | | Allowed to change the current map |
  | perm_pause | bool | | Allowed to pause server gameplay |
  | perm_cheat | bool | | Allowed to use server cheat commands |
  | perm_private | bool | | Allowed to password protect the server |
  | perm_balance | bool | | Allowed to ignore team balance |
  | perm_chat | bool | | Allowed to access admin chat and make server broadcasts |
  | perm_kick | bool | | Allowed to kick players |
  | perm_ban | bool | | Allowed to ban players |
  | perm_config | bool | | Allowed to change the server config |
  | perm_cameraman | bool | | Allowed to use admin spectate mode |
  | perm_immune | bool | | Cannot be kicked / banned |
  | perm_manageserver | bool | | Allowed to shut down the server |
  | perm_featuretest | bool | | Allowed to use any features added for testing by the dev team |
  | perm_reserve | bool | | Allowed to use a reserved slot |
  | perm_demos | bool | | Allowed to record demos on the server side via admin commands |
  | perm_clientdemos | bool | | Allowed to record demos on the client side via commands or the replay UI |
  | perm_debug | bool | | Allowed to use the admin stats command and other debugging info |
  | perm_teamchange | bool | | No timer limit on team change |
  | perm_forceteamchange | bool | | Allowed to issue the ForceTeamChange command |
  | perm_canseeadminchat | bool | | Can see the admin chat and teamkill / admin-join notifications |
**** Tags

*** TAGS
- Table Name: =org_tags=
  | Key | Type | Constraints | Description |
  |----------|---------------|----------------------------------|-------------|
  | =id= | ~UUID4~ | ~PRIMARY KEY~ | |
  | =name= | ~VARCHAR(32)~ | ~UNIQUE (name, org_id) NOT NULL~ | |
  | =org_id= | ~UUID4~ | ~REFERENCES orgs(id)~ | |

- Table Name: ~TAGS~
  | Key | Type | Constraints | Description |
  |========|==========|============================================|=============|
  | id | UUID4 | Primary Key | |
  | name | String | Must be unique when combined with ~org_id~ | |
  | org_id | ~ORG.ID~ | Must be unique when combined with ~name~ | |

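Sketch for =org_tags=, expressing the per-org name uniqueness as a composite constraint (same assumptions as above):

#+begin_src sql
CREATE TABLE org_tags (
    id     UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name   VARCHAR(32) NOT NULL,
    org_id UUID REFERENCES orgs(id),
    UNIQUE (name, org_id)  -- tag names only need to be unique within an org
);
#+end_src
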
**** Groups

- Both ~Org~ & ~Name~ together must be unique, the combo of them is unique
- Table Name: =org_groups=
  | Key | Type | Constraints | Description |
  |----------|---------------|-----------------------------------------|-------------|
  | =id= | ~UUID4~ | ~PRIMARY KEY~ | |
  | =name= | ~VARCHAR(32)~ | ~NOT NULL UNIQUE(name,org_id)~ | |
  | =org_id= | ~UUID4~ | ~REFERENCES orgs(id) ON DELETE CASCADE~ | |
  | =active= | ~BOOLEAN~ | ~NOT NULL DEFAULT true~ | |

*** Group Tags
- Additional Constraints
  - ~UNIQUE (name, org_id)~

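Sketch for =org_groups=, again with the ~(name, org_id)~ uniqueness as a table-level constraint:

#+begin_src sql
CREATE TABLE org_groups (
    id     UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name   VARCHAR(32) NOT NULL,
    org_id UUID REFERENCES orgs(id) ON DELETE CASCADE,
    active BOOLEAN NOT NULL DEFAULT true,
    UNIQUE (name, org_id)  -- group names only need to be unique within an org
);
#+end_src
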
- Table Name: ~GROUP_TAGS~
  | Key | Type | Constraints | Description |
  |==========|============|===========================================|=============|
  | id | UUID4 | Primary Key | |
  | group_id | ~GROUP.ID~ | Must be unique when combined with ~tag~ | |
  | tag_id | ~TAGS.ID~ | Must be unique when combined with ~group~ | |
***** Group Tags

- Together both ~GROUP~ & ~TAG~ create primary key
- ~SELECT * FROM GROUP_TAGS WHERE GROUP = "GROUP.ID" AND TAG = "TAG.ID"~
- Table Name: =org_group_tags=
  | Key | Type | Constraints | Description |
  |------------|---------|-----------------------------------------------|-------------|
  | =id= | ~UUID4~ | ~PRIMARY KEY~ | |
  | =group_id= | ~UUID4~ | ~REFERENCES org_groups(id) ON DELETE CASCADE~ | |
  | =tag_id= | ~UUID4~ | ~REFERENCES org_tags(id) ON DELETE CASCADE~ | |

- Org
  - Single Owner
  - Groups
    - Group: whitelist-{group-id}
    - Tags: vanilla
    - Single Owner
    - Perms
      - Group
        - Manage Users (Add or Remove Users)
        - Manage Moderators (Allow other users to manage the group)
        - Manage Remote Import (Allowed to set a remote import)
        - Manage Tags (Allowed to manage tags)
      - Squad
        - Reserve
        - Demos
        - Kick
        - Ban
        - Etc.
- Additional Constraints
  - ~UNIQUE (group_id, tag_id)~

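Sketch for =org_group_tags=, with the ~(group_id, tag_id)~ constraint above:

#+begin_src sql
CREATE TABLE org_group_tags (
    id       UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    group_id UUID REFERENCES org_groups(id) ON DELETE CASCADE,
    tag_id   UUID REFERENCES org_tags(id) ON DELETE CASCADE,
    UNIQUE (group_id, tag_id)  -- a tag can be attached to a group at most once
);
#+end_src
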
- Accessing URLs
  - ~https://api-url.com/export/squad-admin-list/org/asgard/groups/group-uuid~
  - ~https://api-url.com/export/squad-admin-list/org/asgard~
  - ~https://api-url.com/export/squad-admin-list/org/asgard?tag=tactical~
  - ~https://api-url.com/export/squad-admin-list/org/asgard?tag=tactical&tag=vanilla~
***** Group Notes

- Table Name: =org_group_notes=
  | Key | Type | Constraints | Description |
  |------------|----------------|------------------------------------------------------|-------------|
  | =id= | ~UUID4~ | ~PRIMARY KEY~ | |
  | =group_id= | ~UUID4~ | ~UNIQUE REFERENCES org_groups(id) ON DELETE CASCADE~ | |
  | =note= | ~VARCHAR(512)~ | ~NOT NULL~ | |

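Sketch for =org_group_notes=; the unique reference keeps it to one note row per group:

#+begin_src sql
CREATE TABLE org_group_notes (
    id       UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    group_id UUID UNIQUE REFERENCES org_groups(id) ON DELETE CASCADE,
    note     VARCHAR(512) NOT NULL
);
#+end_src
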
***** Group Members

- Table Name: =org_group_members=
  | Key | Type | Constraints | Description |
  |------------|-----------|-----------------------------------------------|-------------|
  | =id= | ~UUID4~ | ~PRIMARY KEY~ | |
  | =user_id= | ~DECIMAL~ | ~REFERENCES users(id) ON DELETE CASCADE~ | |
  | =group_id= | ~UUID4~ | ~REFERENCES org_groups(id) ON DELETE CASCADE~ | |
  | =owner= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | |

- Additional Constraints
  - ~UNIQUE (user_id, group_id)~

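Sketch for =org_group_members=:

#+begin_src sql
CREATE TABLE org_group_members (
    id       UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    user_id  DECIMAL REFERENCES users(id) ON DELETE CASCADE,
    group_id UUID REFERENCES org_groups(id) ON DELETE CASCADE,
    owner    BOOLEAN NOT NULL DEFAULT false,
    UNIQUE (user_id, group_id)  -- a user appears in a group at most once
);
#+end_src
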
***** Group Squad Details

- Table Name: =org_group_squad_details=
  | Key | Type | Constraints | Description |
  |---------------------------|----------------|------------------------------------------------------|-------------|
  | =id= | ~UUID4~ | ~PRIMARY KEY~ | |
  | =group_id= | ~UUID4~ | ~REFERENCES org_groups(id) UNIQUE ON DELETE CASCADE~ | |
  | =remote_admin_list= | ~VARCHAR(256)~ | | |
  | =remote_admin_list_group= | ~VARCHAR(32)~ | | |
  | =perm_changemap= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to change the current map |
  | =perm_pause= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to pause server gameplay |
  | =perm_cheat= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to use server cheat commands |
  | =perm_private= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to password protect the server |
  | =perm_balance= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to ignore team balance |
  | =perm_chat= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to access admin chat and make server broadcasts |
  | =perm_kick= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to kick players |
  | =perm_ban= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to ban players |
  | =perm_config= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to change the server config |
  | =perm_cameraman= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to use admin spectate mode |
  | =perm_immune= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Cannot be kicked / banned |
  | =perm_manageserver= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to shut down the server |
  | =perm_featuretest= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to use any features added for testing by the dev team |
  | =perm_reserve= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to use a reserved slot |
  | =perm_demos= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to record demos on the server side via admin commands |
  | =perm_clientdemos= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to record demos on the client side via commands or the replay UI |
  | =perm_debug= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to use the admin stats command and other debugging info |
  | =perm_teamchange= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | No timer limit on team change |
  | =perm_forceteamchange= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Allowed to issue the ForceTeamChange command |
  | =perm_canseeadminchat= | ~BOOLEAN~ | ~NOT NULL DEFAULT false~ | Can see the admin chat and teamkill / admin-join notifications |

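Sketch for =org_group_squad_details=, one row per group with every Squad permission defaulting to off (columns mirror the table above; the real migration may differ):

#+begin_src sql
CREATE TABLE org_group_squad_details (
    id                      UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    group_id                UUID UNIQUE REFERENCES org_groups(id) ON DELETE CASCADE,
    remote_admin_list       VARCHAR(256),
    remote_admin_list_group VARCHAR(32),
    perm_changemap          BOOLEAN NOT NULL DEFAULT false,
    perm_pause              BOOLEAN NOT NULL DEFAULT false,
    perm_cheat              BOOLEAN NOT NULL DEFAULT false,
    perm_private            BOOLEAN NOT NULL DEFAULT false,
    perm_balance            BOOLEAN NOT NULL DEFAULT false,
    perm_chat               BOOLEAN NOT NULL DEFAULT false,
    perm_kick               BOOLEAN NOT NULL DEFAULT false,
    perm_ban                BOOLEAN NOT NULL DEFAULT false,
    perm_config             BOOLEAN NOT NULL DEFAULT false,
    perm_cameraman          BOOLEAN NOT NULL DEFAULT false,
    perm_immune             BOOLEAN NOT NULL DEFAULT false,
    perm_manageserver       BOOLEAN NOT NULL DEFAULT false,
    perm_featuretest        BOOLEAN NOT NULL DEFAULT false,
    perm_reserve            BOOLEAN NOT NULL DEFAULT false,
    perm_demos              BOOLEAN NOT NULL DEFAULT false,
    perm_clientdemos        BOOLEAN NOT NULL DEFAULT false,
    perm_debug              BOOLEAN NOT NULL DEFAULT false,
    perm_teamchange         BOOLEAN NOT NULL DEFAULT false,
    perm_forceteamchange    BOOLEAN NOT NULL DEFAULT false,
    perm_canseeadminchat    BOOLEAN NOT NULL DEFAULT false
);
#+end_src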