diff options
author | sadbeast <sadbeast@sadbeast.com> | 2024-07-16 18:16:29 -0700 |
---|---|---|
committer | sadbeast <sadbeast@sadbeast.com> | 2024-10-05 16:40:55 -0700 |
commit | 6bd24af2ffbea91db1b10a5d5258980ce2068c7f (patch) | |
tree | 66634833f2d45260be5fcaf9111400eda12f03cc | |
download | teamdraft-6bd24af2ffbea91db1b10a5d5258980ce2068c7f.tar.gz teamdraft-6bd24af2ffbea91db1b10a5d5258980ce2068c7f.tar.bz2 |
43 files changed, 2679 insertions, 0 deletions
@@ -0,0 +1,9 @@ +PGHOST=/tmp/db +PGUSER=teamdraft +PGPASSWORD=$PGUSER +PGDATABASE=$PGUSER + +# used by dadbod.vim +DATABASE_URL=postgres: + +PGSOCKET=$PGHOST/.s.PGSQL.5432 @@ -0,0 +1,5 @@ +dotenv +if ! has nix_direnv_version || ! nix_direnv_version 3.0.5; then + source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/3.0.5/direnvrc" "sha256-RuwIS+QKFj/T9M2TFXScjBsLR6V3A17YVoEW/Q6AZ1w=" +fi +use flake diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..cc56785 --- /dev/null +++ b/.gitignore @@ -0,0 +1,6 @@ +data +.direnv +logs + +.zig-cache +zig-out diff --git a/.proverc b/.proverc new file mode 100644 index 0000000..ee929dd --- /dev/null +++ b/.proverc @@ -0,0 +1,2 @@ +test/db/ +--ext .sql diff --git a/bin/deploy.sh b/bin/deploy.sh new file mode 100755 index 0000000..803ab64 --- /dev/null +++ b/bin/deploy.sh @@ -0,0 +1,3 @@ +zig build -Doptimize=ReleaseFast +scp -r zig-out/bin/* teamdraft: +ssh teamdraft -C "sudo systemctl stop teamdraft && sudo cp ./{teamdraft,scorecorder} /home/teamdraft/ && sudo systemctl start teamdraft" diff --git a/build.zig b/build.zig new file mode 100644 index 0000000..14aa19f --- /dev/null +++ b/build.zig @@ -0,0 +1,115 @@ +const std = @import("std"); + +// Although this function looks imperative, note that its job is to +// declaratively construct a build graph that will be executed by an external +// runner. +pub fn build(b: *std.Build) void { + // Standard target options allows the person running `zig build` to choose + // what target to build for. Here we do not override the defaults, which + // means any target is allowed, and the default is native. Other options + // for restricting supported target set are available. + const target = b.standardTargetOptions(.{}); + + // Standard optimization options allow the person running `zig build` to select + // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall. 
Here we do not + // set a preferred release mode, allowing the user to decide how to optimize. + const optimize = b.standardOptimizeOption(.{}); + + const teamdraft = b.addExecutable(.{ + .name = "teamdraft", + .root_source_file = b.path("src/main.zig"), + .target = target, + .optimize = optimize, + }); + + const scorecorder = b.addExecutable(.{ + .name = "scorecorder", + .root_source_file = b.path("src/scorecorder.zig"), + .target = target, + .optimize = optimize, + }); + + const zul = b.dependency("zul", .{ .target = target, .optimize = optimize }); + scorecorder.root_module.addImport("zul", zul.module("zul")); + teamdraft.root_module.addImport("zul", zul.module("zul")); + + const logz = b.dependency("logz", .{ .target = target, .optimize = optimize }); + scorecorder.root_module.addImport("logz", logz.module("logz")); + teamdraft.root_module.addImport("logz", logz.module("logz")); + + const httpz = b.dependency("httpz", .{ .target = target, .optimize = optimize }); + teamdraft.root_module.addImport("httpz", httpz.module("httpz")); + + const zmpl = b.dependency("zmpl", .{ .target = target, .optimize = optimize }); + teamdraft.root_module.addImport("zmpl", zmpl.module("zmpl")); + + const pg = b.dependency("pg", .{ .target = target, .optimize = optimize }); + teamdraft.root_module.addImport("pg", pg.module("pg")); + scorecorder.root_module.addImport("pg", pg.module("pg")); + + // This declares intent for the executable to be installed into the + // standard location when the user invokes the "install" step (the default + // step when running `zig build`). + b.installArtifact(teamdraft); + b.installArtifact(scorecorder); + + // This *creates* a Run step in the build graph, to be executed when another + // step is evaluated that depends on it. The next line below will establish + // such a dependency. 
+ const run_cmd = b.addRunArtifact(teamdraft); + + // By making the run step depend on the install step, it will be run from the + // installation directory rather than directly from within the cache directory. + // This is not necessary, however, if the application depends on other installed + // files, this ensures they will be present and in the expected location. + run_cmd.step.dependOn(b.getInstallStep()); + + // This allows the user to pass arguments to the application in the build + // command itself, like this: `zig build run -- arg1 arg2 etc` + if (b.args) |args| { + run_cmd.addArgs(args); + } + + // This creates a build step. It will be visible in the `zig build --help` menu, + // and can be selected like this: `zig build run` + // This will evaluate the `run` step rather than the default, which is "install". + const run_step = b.step("run", "Run the app"); + run_step.dependOn(&run_cmd.step); + + // Creates a step for unit testing. This only builds the test executable + // but does not run it. + const exe_unit_tests = b.addTest(.{ + .root_source_file = b.path("src/main.zig"), + .target = target, + .optimize = optimize, + }); + + const run_exe_unit_tests = b.addRunArtifact(exe_unit_tests); + + // Similar to creating the run step earlier, this exposes a `test` step to + // the `zig build --help` menu, providing a way for the user to request + // running the unit tests. 
+ const test_step = b.step("test", "Run unit tests"); + test_step.dependOn(&run_exe_unit_tests.step); + + const coverage_step = b.step("coverage", "Generate a coverage report with kcov"); + + const merge_step = std.Build.Step.Run.create(b, "merge coverage"); + merge_step.addArgs(&.{ "kcov", "--merge" }); + merge_step.rename_step_with_output_arg = false; + const merged_coverage_output = merge_step.addOutputFileArg("."); + + const kcov_collect = std.Build.Step.Run.create(b, "collect coverage"); + kcov_collect.addArgs(&.{ "kcov", "--collect-only" }); + kcov_collect.addPrefixedDirectoryArg("--include-pattern=", b.path("src")); + merge_step.addDirectoryArg(kcov_collect.addOutputFileArg(exe_unit_tests.name)); + kcov_collect.addArtifactArg(exe_unit_tests); + kcov_collect.enableTestRunnerMode(); + + const install_coverage = b.addInstallDirectory(.{ + .source_dir = merged_coverage_output, + .install_dir = .{ .custom = "coverage" }, + .install_subdir = "", + }); + coverage_step.dependOn(&install_coverage.step); +} diff --git a/build.zig.zon b/build.zig.zon new file mode 100644 index 0000000..c8fdd9e --- /dev/null +++ b/build.zig.zon @@ -0,0 +1,54 @@ +.{ + // This is the default name used by packages depending on this one. For + // example, when a user runs `zig fetch --save <url>`, this field is used + // as the key in the `dependencies` table. Although the user can choose a + // different name, most users will stick with this provided value. + // + // It is redundant to include "zig" in this name because it is already + // within the Zig package namespace. + .name = "teamdraft", + + // This is a [Semantic Version](https://semver.org/). + // In a future version of Zig it will be used for package deduplication. + .version = "0.1.0", + + // This field is optional. + // This is currently advisory only; Zig does not yet do anything + // with this value. + //.minimum_zig_version = "0.11.0", + + // This field is optional. 
+ // Each dependency must either provide a `url` and `hash`, or a `path`. + // `zig build --fetch` can be used to fetch all dependencies of a package, recursively. + // Once all dependencies are fetched, `zig build` no longer requires + // internet connectivity. + .dependencies = .{ + .pg = .{ + // .url = "https://github.com/karlseguin/pg.zig/archive/1491270ac43c7eba91992bb06b3758254c36e39a.tar.gz", + .url = "http://localhost:8000/pg.zig.tar.gz", + .hash = "12202a0658f93c5a881d18d43ae0ebb1f4028f9221b8a3cbfba634855a5cf42e6877", + }, + .httpz = .{ + // .url = "https://github.com/karlseguin/http.zig/archive/792083d158a70850cb009f5528720ae6dcd77cd0.tar.gz", + .url = "http://localhost:8000/http.zig.tar.gz", + .hash = "1220a1c15dfe2d529275edccc6339337bfc19d4b32595f6bbf31e165f48627bf1a50", + }, + .zul = .{ + .url = "https://github.com/karlseguin/zul/archive/ae0c27350c0db6b460f22cba30b6b0c4a02d1ffd.tar.gz", + .hash = "1220457e2c8867f6734520d9b335f01e1d851d6fe7adaa7f6f0756158acaf6c5e87f", + }, + .logz = .{ + .url = "https://github.com/karlseguin/log.zig/archive/0a8d45fe70f15615ee4251b0a84afa243f728d21.tar.gz", + .hash = "1220a267bbb2165749fd6574cd2729aa70dd3cbbf74c8825b97334b22d9b2385ec3f", + }, + .zmpl = .{ + .url = "https://github.com/jetzig-framework/zmpl/archive/7c2e599807fe8d28ce45b8b3be1829e3d704422e.tar.gz", + .hash = "12207c30c6fbcb8c7519719fc47ff9d0acca72a3557ec671984d16260bdf1c832740", + }, + }, + .paths = .{ + "build.zig", + "build.zig.zon", + "src", + }, +} diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..2e726db --- /dev/null +++ b/flake.lock @@ -0,0 +1,160 @@ +{ + "nodes": { + "devshell": { + "inputs": { + "nixpkgs": "nixpkgs" + }, + "locked": { + "lastModified": 1722113426, + "narHash": "sha256-Yo/3loq572A8Su6aY5GP56knpuKYRvM2a1meP9oJZCw=", + "owner": "numtide", + "repo": "devshell", + "rev": "67cce7359e4cd3c45296fb4aaf6a19e2a9c757ae", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "devshell", + "type": 
"github" + } + }, + "flake-parts": { + "inputs": { + "nixpkgs-lib": "nixpkgs-lib" + }, + "locked": { + "lastModified": 1725234343, + "narHash": "sha256-+ebgonl3NbiKD2UD0x4BszCZQ6sTfL4xioaM49o5B3Y=", + "owner": "hercules-ci", + "repo": "flake-parts", + "rev": "567b938d64d4b4112ee253b9274472dc3a346eb6", + "type": "github" + }, + "original": { + "owner": "hercules-ci", + "repo": "flake-parts", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1722073938, + "narHash": "sha256-OpX0StkL8vpXyWOGUD6G+MA26wAXK6SpT94kLJXo6B4=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "e36e9f57337d0ff0cf77aceb58af4c805472bfae", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixpkgs-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs-lib": { + "locked": { + "lastModified": 1725233747, + "narHash": "sha256-Ss8QWLXdr2JCBPcYChJhz4xJm+h/xjl4G0c0XlP6a74=", + "type": "tarball", + "url": "https://github.com/NixOS/nixpkgs/archive/356624c12086a18f2ea2825fed34523d60ccc4e3.tar.gz" + }, + "original": { + "type": "tarball", + "url": "https://github.com/NixOS/nixpkgs/archive/356624c12086a18f2ea2825fed34523d60ccc4e3.tar.gz" + } + }, + "nixpkgs_2": { + "locked": { + "lastModified": 1725634671, + "narHash": "sha256-v3rIhsJBOMLR8e/RNWxr828tB+WywYIoajrZKFM+0Gg=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "574d1eac1c200690e27b8eb4e24887f8df7ac27c", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs_3": { + "locked": { + "lastModified": 1725103162, + "narHash": "sha256-Ym04C5+qovuQDYL/rKWSR+WESseQBbNAe5DsXNx5trY=", + "owner": "nixos", + "repo": "nixpkgs", + "rev": "12228ff1752d7b7624a54e9c1af4b222b3c1073b", + "type": "github" + }, + "original": { + "owner": "nixos", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "process-compose-flake": { + "locked": { + "lastModified": 1724606023, + "narHash": 
"sha256-rdGeNa/lCS8E1lXzPqgl+vZUUvnbEZT11Bqkx5jfYug=", + "owner": "Platonic-Systems", + "repo": "process-compose-flake", + "rev": "f6ce9481df9aec739e4e06b67492401a5bb4f0b1", + "type": "github" + }, + "original": { + "owner": "Platonic-Systems", + "repo": "process-compose-flake", + "type": "github" + } + }, + "root": { + "inputs": { + "devshell": "devshell", + "flake-parts": "flake-parts", + "nixpkgs": "nixpkgs_2", + "process-compose-flake": "process-compose-flake", + "services-flake": "services-flake", + "treefmt-nix": "treefmt-nix" + } + }, + "services-flake": { + "locked": { + "lastModified": 1725722005, + "narHash": "sha256-05jRFTaESYbUb6+C1/pstarDGZLCrrwtM8+NJwSL4Us=", + "owner": "juspay", + "repo": "services-flake", + "rev": "fbede4cbfd0f432ddb41e4cbe34acf93b76851a0", + "type": "github" + }, + "original": { + "owner": "juspay", + "repo": "services-flake", + "type": "github" + } + }, + "treefmt-nix": { + "inputs": { + "nixpkgs": "nixpkgs_3" + }, + "locked": { + "lastModified": 1725271838, + "narHash": "sha256-VcqxWT0O/gMaeWTTjf1r4MOyG49NaNxW4GHTO3xuThE=", + "owner": "numtide", + "repo": "treefmt-nix", + "rev": "9fb342d14b69aefdf46187f6bb80a4a0d97007cd", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "treefmt-nix", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..f59fbae --- /dev/null +++ b/flake.nix @@ -0,0 +1,96 @@ +{ + description = "teamdraft.net"; + + inputs = { + devshell.url = "github:numtide/devshell"; + flake-parts.url = "github:hercules-ci/flake-parts"; + nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + process-compose-flake.url = "github:Platonic-Systems/process-compose-flake"; + services-flake.url = "github:juspay/services-flake"; + treefmt-nix.url = "github:numtide/treefmt-nix"; + }; + + outputs = inputs @ { + flake-parts, + ... 
+ }: + flake-parts.lib.mkFlake {inherit inputs;} { + imports = [ + inputs.devshell.flakeModule + inputs.process-compose-flake.flakeModule + inputs.treefmt-nix.flakeModule + ]; + systems = ["x86_64-linux" "aarch64-linux" "aarch64-darwin" "x86_64-darwin"]; + perSystem = { + config, + self', + inputs', + pkgs, + system, + ... + }: + with builtins; rec { + process-compose."services" = { + imports = [ + inputs.services-flake.processComposeModules.default + ]; + services.postgres."db" = { + enable = true; + package = pkgs.postgresql_16; + extensions = extensions: [ + extensions.pgtap + extensions.pgsodium + ]; + + initialScript.before = readFile ./src/db/init.sql; + initialScript.after = readFile ./src/db/seed.sql; + + socketDir = "/tmp/db"; + superuser = "teamdraft"; + }; + settings = { + log_location = "./logs/services.log"; + }; + }; + + devshells.default = { + commands = [ + { + name = "db"; + help = "run postgresql"; + command = "nix run .#services"; + } + { + name = "reset"; + help = "recreate db and run postgresql"; + command = "rm -rf data && nix run .#services"; + } + { + name = "ci"; + help = "run all tests"; + command = "pg_prove"; + } + ]; + packages = with pkgs; [ + gdb + kcov + perl538Packages.TAPParserSourceHandlerpgTAP + pgcli + pgformatter + postgresql_16 + postgresql16Packages.pgtap + sops + zig + zls + ]; + }; + + treefmt.config = { + projectRootFile = "flake.nix"; + + flakeCheck = true; + flakeFormatter = true; + }; + }; + }; +} diff --git a/src/App.zig b/src/App.zig new file mode 100644 index 0000000..494ee8f --- /dev/null +++ b/src/App.zig @@ -0,0 +1,105 @@ +const App = @This(); + +allocator: std.mem.Allocator, + +pool: *pg.Pool, + +pub fn init(allocator: std.mem.Allocator) !App { + var env_map = try std.process.getEnvMap(allocator); + defer env_map.deinit(); + + const pool = try pg.Pool.init(allocator, .{ + .size = 5, + .connect = .{ + // .port = 5432, + .unix_socket = env_map.get("PGSOCKET") orelse "/tmp/db/.s.PGSQL.5432", + }, + .auth = .{ + 
.database = env_map.get("PGDATABASE") orelse "teamdraft", + .username = env_map.get("PGUSER") orelse "teamdraft_rw", + .password = env_map.get("PGPASSWORD") orelse "teamdraft", + .timeout = 10_000, + }, + }); + + return .{ + .allocator = allocator, + .pool = pool, + }; +} + +pub fn deinit(self: *App) void { + self.pool.deinit(); +} + +pub fn dispatch(self: *App, action: httpz.Action(*RequestContext), req: *httpz.Request, res: *httpz.Response) !void { + const start_time = std.time.milliTimestamp(); + + var logger = logz.logger().multiuse(); + + var ctx = RequestContext{ + .app = self, + .user = try self.loadUser(req), + }; + // defer env.deinit(); + + logger + .stringSafe("@l", "REQ") + .stringSafe("method", @tagName(req.method)) + .string("path", req.url.path) + .string("query", req.url.query) + .int("status", res.status) + .int("uid", if (ctx.user) |u| u.id else 0) + .int("ms", std.time.milliTimestamp() - start_time) + .log(); + + return action(&ctx, req, res); +} + +pub fn notFound(_: *App, req: *httpz.Request, res: *httpz.Response) !void { + try web.render("404", req, res); +} + +pub fn uncaughtError(_: *App, _: *httpz.Request, res: *httpz.Response, err: anyerror) void { + logz.info().err(err).log(); + + res.status = 500; + res.body = "sorry"; +} + +const Account = struct { + account_id: i64, +}; + +fn loadUser(self: *App, req: *httpz.Request) !?User { + if (req.header("cookie")) |cookie_header| { + var cookies = Cookie.init(req.arena, cookie_header); + defer cookies.deinit(); + + try cookies.parse(); + if (cookies.get("s")) |session_id| { + logz.info().string("session", session_id.value).log(); + var row = (try self.pool.row("SELECT account_id FROM get_account_by_session_id($1)", .{session_id.value})) orelse return null; + defer row.deinit() catch {}; + + const account = try row.to(Account, .{}); + + return try User.init(req.arena, account.account_id, ""); + } + // var it = req.headers.iterator(); + // while (it.next()) |kv| { + // logz.info().string("header", 
kv.key).string("value", kv.value).log(); + // } + } + return null; +} + +const std = @import("std"); +const pg = @import("pg"); +const httpz = @import("httpz"); +const logz = @import("logz"); + +const RequestContext = @import("RequestContext.zig"); +const User = @import("User.zig"); +const Cookie = @import("web/Cookie.zig"); +const web = @import("web/web.zig"); diff --git a/src/RequestContext.zig b/src/RequestContext.zig new file mode 100644 index 0000000..7d88294 --- /dev/null +++ b/src/RequestContext.zig @@ -0,0 +1,5 @@ +app: *App, +user: ?User = null, + +const App = @import("App.zig"); +const User = @import("User.zig"); diff --git a/src/User.zig b/src/User.zig new file mode 100644 index 0000000..7178a34 --- /dev/null +++ b/src/User.zig @@ -0,0 +1,13 @@ +id: i64, +name: []const u8, + +pub const User = @This(); + +pub fn init(allocator: Allocator, id: i64, name: []const u8) !User { + return .{ + .id = id, + .name = try allocator.dupe(u8, name), + }; +} + +const Allocator = @import("std").mem.Allocator; diff --git a/src/db/auth.sql b/src/db/auth.sql new file mode 100644 index 0000000..b3e4c50 --- /dev/null +++ b/src/db/auth.sql @@ -0,0 +1,120 @@ +-- https://github.com/jeremyevans/rodauth +GRANT CREATE ON SCHEMA :schema TO :password_role; + +CREATE COLLATION ignore_accents (provider = icu, locale = 'und-u-ks-level1-kc-true', deterministic = false); + +CREATE TABLE account_statuses( + account_status_id smallint, + name text NOT NULL, + + UNIQUE (name), + CONSTRAINT account_statuses_pk PRIMARY KEY (account_status_id) +); +INSERT INTO account_statuses (account_status_id, name) VALUES (1, 'Unverified'); +INSERT INTO account_statuses (account_status_id, name) VALUES (2, 'Verified'); +INSERT INTO account_statuses (account_status_id, name) VALUES (3, 'Closed'); + +CREATE TABLE accounts( + account_id bigint NOT NULL GENERATED ALWAYS AS IDENTITY, + status_id smallint NOT NULL DEFAULT 1, + -- email citext UNIQUE, + email text UNIQUE, + name text, + created_at timestamptz NOT 
NULL DEFAULT now(), + modified_at timestamptz NOT NULL DEFAULT now(), + + UNIQUE (email), + CONSTRAINT accounts_pk PRIMARY KEY (account_id), + CONSTRAINT accounts_account_statuses_fk FOREIGN KEY (status_id) REFERENCES account_statuses(account_status_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION, + + CONSTRAINT accounts_email_ck CHECK (char_length(email) <= 255) +); + +CREATE INDEX accounts_email_uniq ON accounts (email) WHERE status_id IN (1, 2); + +CREATE TABLE account_active_session_keys( + session_id text NOT NULL, + account_id bigint NOT NULL, + created_at timestamptz NOT NULL DEFAULT now(), + last_use timestamptz NOT NULL DEFAULT now(), + + CONSTRAINT account_active_session_keys_pk PRIMARY KEY (session_id, account_id), + CONSTRAINT account_active_session_keys_accounts_fk FOREIGN KEY (account_id) REFERENCES accounts(account_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION + +); + +CREATE OR REPLACE FUNCTION create_account(_email text, _password text) +RETURNS int +AS $$ + INSERT INTO accounts (email) VALUES (lower(_email)) RETURNING account_id; +$$ +LANGUAGE SQL; + +CREATE OR REPLACE FUNCTION create_session(account_id bigint) +RETURNS text +AS $$ + INSERT INTO account_active_session_keys (session_id, account_id) VALUES + (pgsodium.crypto_auth_hmacsha256_keygen(), account_id) RETURNING session_id; +$$ +LANGUAGE SQL; + +CREATE OR REPLACE FUNCTION get_account_by_session_id(_session_id text) +RETURNS SETOF accounts +AS $$ +SELECT accounts.* FROM account_active_session_keys JOIN accounts USING(account_id) WHERE session_id = _session_id; +$$ +LANGUAGE SQL; + +GRANT SELECT, INSERT, UPDATE, DELETE ON account_statuses TO :password_role; +GRANT SELECT, INSERT, UPDATE, DELETE ON accounts TO :password_role; + +SET ROLE teamdraft_password; +\c :database_name + +DROP TABLE IF EXISTS account_password_hashes; +CREATE TABLE account_password_hashes( + account_id bigint NOT NULL, + password_hash int NOT NULL, + + CONSTRAINT account_password_hashes_accounts_fk 
FOREIGN KEY (account_id) REFERENCES accounts(account_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION +); + +CREATE OR REPLACE FUNCTION get_salt(_account_id int8) RETURNS text AS $$ +DECLARE salt text; +BEGIN +SELECT +CASE + WHEN password_hash ~ '^\\$argon2id' + THEN substring(password_hash from '\\$argon2id\\$v=\\d+\\$m=\\d+,t=\\d+,p=\\d+\\$.+\\$') + ELSE substr(password_hash, 0, 30) + END INTO salt +END +FROM account_password_hashes +WHERE _account_id = account_id; +RETURN salt; +END; +$$ LANGUAGE plpgsql +SECURITY DEFINER +SET search_path = :schema, public; + +CREATE OR REPLACE FUNCTION valid_password_hash(_account_id int8, hash text) RETURNS boolean AS $$ +DECLARE valid boolean; +BEGIN +SELECT password_hash = hash INTO valid +FROM account_password_hashes +WHERE _account_id = account_id; +RETURN valid; +END; +$$ LANGUAGE plpgsql +SECURITY DEFINER +SET search_path = :schema, public; + +REVOKE ALL ON account_password_hashes FROM :schema; +REVOKE ALL ON FUNCTION get_salt(int8) FROM :schema; +REVOKE ALL ON FUNCTION valid_password_hash(int8, text) FROM :schema; +GRANT INSERT, UPDATE, DELETE ON account_password_hashes TO :password_role; +GRANT SELECT(account_id) ON account_password_hashes TO :password_role; +GRANT EXECUTE ON FUNCTION get_salt(int8) TO :password_role; +GRANT EXECUTE ON FUNCTION valid_password_hash(int8, text) TO :password_role; + +SET ROLE teamdraft; diff --git a/src/db/database.sql b/src/db/database.sql new file mode 100644 index 0000000..91ca755 --- /dev/null +++ b/src/db/database.sql @@ -0,0 +1,64 @@ +\set rw_name :database_name '_rw' +\set ro_name :database_name '_ro' +\set password_role :database_name '_password' + +DROP SCHEMA IF EXISTS :schema CASCADE; +DROP DATABASE IF EXISTS :database_name; + +-- Uncomment for non-development environments +-- DROP ROLE IF EXISTS :owner; +-- CREATE ROLE :owner WITH LOGIN PASSWORD :'owner'; + +DROP ROLE IF EXISTS :password_role; +CREATE ROLE :password_role; +ALTER ROLE :password_role SET search_path TO 
:schema, pgsodium, public; + +GRANT ddl TO :owner; + +DROP ROLE IF EXISTS :rw_name; +CREATE ROLE :rw_name WITH LOGIN PASSWORD :'rw_name'; + +GRANT dml TO :rw_name; + +DROP ROLE IF EXISTS :ro_name; +CREATE ROLE :ro_name WITH LOGIN PASSWORD :'ro_name'; + +GRANT read_only TO :ro_name; + +ALTER ROLE :owner SET search_path TO :schema, public; +ALTER ROLE :rw_name SET search_path TO :schema, pgsodium, public; +ALTER ROLE :ro_name SET search_path TO :schema, public; + +CREATE DATABASE :database_name OWNER :owner; + +REVOKE CREATE ON SCHEMA public FROM PUBLIC; + +REVOKE ALL ON DATABASE :database_name FROM PUBLIC; + +GRANT CONNECT, TEMPORARY ON DATABASE :database_name TO ddl, dml, read_only; + +\c :database_name +CREATE EXTENSION pgsodium; +GRANT ALL PRIVILEGES ON ALL FUNCTIONS IN SCHEMA pgsodium TO dml; + +SET ROLE :owner; + +CREATE SCHEMA :schema AUTHORIZATION :schema; + +GRANT USAGE, CREATE ON SCHEMA :schema TO ddl; + +GRANT USAGE ON SCHEMA :schema TO dml, read_only; + +REVOKE ALL ON DATABASE :database_name FROM PUBLIC; + +GRANT CONNECT, TEMPORARY ON DATABASE :database_name TO ddl, dml, read_only; + +ALTER DEFAULT PRIVILEGES IN SCHEMA :schema GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO dml; + +ALTER DEFAULT PRIVILEGES IN SCHEMA :schema GRANT SELECT ON TABLES TO read_only; + +ALTER DEFAULT PRIVILEGES IN SCHEMA :schema GRANT USAGE, SELECT ON SEQUENCES TO dml, read_only; + +ALTER DEFAULT PRIVILEGES IN SCHEMA :schema GRANT UPDATE ON SEQUENCES TO dml; + +ALTER DEFAULT PRIVILEGES IN SCHEMA :schema GRANT EXECUTE ON ROUTINES TO dml, read_only; diff --git a/src/db/drafts.sql b/src/db/drafts.sql new file mode 100644 index 0000000..4fff19f --- /dev/null +++ b/src/db/drafts.sql @@ -0,0 +1,385 @@ +CREATE TABLE draft_statuses( + draft_status_id smallint NOT NULL, + name text NOT NULL, + UNIQUE (name), + CONSTRAINT draft_statuses_pk PRIMARY KEY (draft_status_id) +); + +INSERT INTO draft_statuses(draft_status_id, name) + VALUES (1, 'Scheduled'); + +INSERT INTO 
draft_statuses(draft_status_id, name) + VALUES (2, 'Running'); + +INSERT INTO draft_statuses(draft_status_id, name) + VALUES (3, 'Complete'); + +INSERT INTO draft_statuses(draft_status_id, name) + VALUES (4, 'Manual'); + +CREATE TABLE drafts ( + draft_id bigint NOT NULL GENERATED ALWAYS AS IDENTITY, + season_id bigint NOT NULL, + draft_status_id smallint NOT NULL DEFAULT 1, + name text, + account_id bigint NOT NULL, + started_at timestamptz, + created_at timestamptz NOT NULL DEFAULT now(), + modified_at timestamptz NOT NULL DEFAULT now(), + + CONSTRAINT drafts_pk PRIMARY KEY (draft_id), + CONSTRAINT drafts_seasons_fk FOREIGN KEY (season_id) REFERENCES seasons(season_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION, + CONSTRAINT drafts_name_ck CHECK (char_length(name) <= 255), + CONSTRAINT drafts_accounts_fk FOREIGN KEY (account_id) REFERENCES accounts(account_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION +); + +CREATE TABLE draft_users ( + draft_user_id bigint NOT NULL GENERATED ALWAYS AS IDENTITY, + draft_id bigint NOT NULL, + name text NOT NULL, + account_id bigint, + created_at timestamptz NOT NULL DEFAULT now(), + modified_at timestamptz NOT NULL DEFAULT now(), + + CONSTRAINT draft_users_pk PRIMARY KEY (draft_user_id), + CONSTRAINT draft_users_drafts_fk FOREIGN KEY (draft_id) REFERENCES drafts(draft_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION, + CONSTRAINT draft_users_accounts_fk FOREIGN KEY (account_id) REFERENCES accounts(account_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION +); + +CREATE OR REPLACE FUNCTION gen_invite_code() + RETURNS varchar + LANGUAGE plpgsql +AS $$ +DECLARE + base10 bigint; + base36 varchar := ''; + intval bigint; + char0z char[] := regexp_split_to_array('0123456789abcdefghijklmnopqrstuvwxyz', ''); +BEGIN + SELECT floor(random() * 78364164095 + 2176782336) INTO base10; + intval := abs(base10); + WHILE intval != 0 LOOP + base36 := char0z[(intval % 36)+1] || base36; + intval := intval / 36; + END 
LOOP; + + IF base10 = 0 THEN base36 := '0'; END IF; + RETURN base36; +END; +$$; + +CREATE TABLE draft_user_invites ( + draft_user_invite_id bigint NOT NULL GENERATED ALWAYS AS IDENTITY, + draft_user_id bigint NOT NULL, + name text NOT NULL, + code text NOT NULL DEFAULT gen_invite_code(), + created_at timestamptz NOT NULL DEFAULT now(), + + CONSTRAINT draft_user_invites_pk PRIMARY KEY (draft_user_id), + CONSTRAINT draft_user_invites_draft_users_fk FOREIGN KEY (draft_user_id) REFERENCES draft_users(draft_user_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION, + CONSTRAINT draft_user_invites_name_ck CHECK (char_length(name) <= 255) +); + +CREATE TABLE picks ( + pick_id integer NOT NULL GENERATED ALWAYS AS IDENTITY, + draft_id integer NOT NULL, + draft_user_id integer NOT NULL, + team_id int NOT NULL, + auto boolean NOT NULL DEFAULT FALSE, + picked_at timestamptz NOT NULL DEFAULT now(), + created_at timestamptz NOT NULL DEFAULT now(), + modified_at timestamptz NOT NULL DEFAULT now(), + + UNIQUE (draft_id, team_id), + CONSTRAINT picks_pk PRIMARY KEY (pick_id), + CONSTRAINT picks_drafts_fk FOREIGN KEY (draft_id) REFERENCES drafts(draft_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION, + CONSTRAINT picks_draft_users_fk FOREIGN KEY (draft_user_id) REFERENCES draft_users(draft_user_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION, + CONSTRAINT picks_teams_fk FOREIGN KEY (team_id) REFERENCES teams(team_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION +); + +-- TODO: support more than 2 players by persisting draft order and current_picker +CREATE OR REPLACE FUNCTION current_picker_name(_draft_id bigint) + RETURNS text + LANGUAGE plpgsql + SECURITY INVOKER + AS $$ +BEGIN + RETURN (SELECT name FROM draft_users WHERE draft_user_id = current_picker_id(_draft_id)); +END; +$$; + +CREATE OR REPLACE FUNCTION current_picker_account_id(_draft_id bigint) + RETURNS bigint + LANGUAGE plpgsql + SECURITY INVOKER + AS $$ +DECLARE + pick_count smallint; + 
player_1 integer; + player_2 integer; +BEGIN + SELECT count(1) FROM picks WHERE draft_id = _draft_id INTO pick_count; + + SELECT account_id + FROM draft_users + WHERE draft_id = _draft_id + ORDER BY draft_user_id + LIMIT 1 INTO player_1; + + SELECT account_id + FROM draft_users + WHERE draft_id = _draft_id + ORDER BY draft_user_id DESC + LIMIT 1 INTO player_2; + + IF pick_count % 2 = 0 THEN + RETURN player_1; + ELSE + RETURN player_2; + END IF; +END; +$$; + +CREATE OR REPLACE FUNCTION current_picker_id(_draft_id bigint) + RETURNS integer + LANGUAGE plpgsql + SECURITY INVOKER + AS $$ +DECLARE + pick_count smallint; + player_1 integer; + player_2 integer; +BEGIN + SELECT count(1) FROM picks WHERE draft_id = _draft_id INTO pick_count; + + SELECT draft_user_id + FROM draft_users + WHERE draft_id = _draft_id + ORDER BY draft_user_id + LIMIT 1 INTO player_1; + + SELECT draft_user_id + FROM draft_users + WHERE draft_id = _draft_id + ORDER BY draft_user_id DESC + LIMIT 1 INTO player_2; + + IF pick_count % 2 = 0 THEN + RETURN player_1; + ELSE + RETURN player_2; + END IF; +END; +$$; + +CREATE OR REPLACE FUNCTION pick(_draft_id integer, _team_id integer) + RETURNS boolean + LANGUAGE plpgsql + SECURITY INVOKER + AS $$ +BEGIN + INSERT INTO picks (draft_id, league_user_id, team_id) VALUES (_draft_id, current_picker_id(_draft_id), _team_id); +END; +$$; + +-- create missing draft picks if any +CREATE OR REPLACE PROCEDURE auto_draft(_draft_id integer) +LANGUAGE plpgsql +SECURITY INVOKER +AS $$ +DECLARE + latest_pick_time timestamptz; + latest_pick picks%ROWTYPE; + pick_count integer; + next_auto_draft_team integer; + draft drafts%ROWTYPE; + draft_league_id integer; +BEGIN + SELECT * FROM drafts WHERE draft_id = _draft_id INTO draft; + SELECT league_id FROM seasons WHERE season_id = draft.season_id INTO draft_league_id; + + latest_pick_time = draft.started_at; + + IF now() >= latest_pick_time AND draft.draft_status_id = 1 THEN + UPDATE drafts SET draft_status_id = 2, modified_at = 
now() WHERE draft_id = _draft_id; + END IF; + + LOOP + SELECT count(1) FROM picks WHERE draft_id = _draft_id INTO pick_count; + + IF pick_count = (SELECT count(1) FROM teams JOIN league_divisions USING(league_division_id) WHERE league_id = draft_league_id) THEN + UPDATE drafts SET draft_status_id = 3, modified_at = now() WHERE draft_id = _draft_id; + RETURN; + END IF; + + SELECT * FROM picks WHERE draft_id = _draft_id ORDER BY picked_at DESC LIMIT 1 INTO latest_pick; + + IF latest_pick IS NOT NULL THEN + latest_pick_time = latest_pick.picked_at; + END IF; + + IF (date_part('epoch', now())::integer - date_part('epoch', latest_pick_time)::integer) / 30 >= 1 THEN + SELECT + current_rankings.team_id + FROM + current_rankings + LEFT JOIN picks ON picks.team_id = current_rankings.team_id + WHERE + pick_id IS NULL + AND league_id = draft_league_id + ORDER BY + rank ASC + LIMIT 1 INTO next_auto_draft_team; + + INSERT INTO picks(draft_id, draft_user_id, team_id, auto, picked_at) + VALUES (_draft_id, current_picker_id(_draft_id), + (SELECT + current_rankings.team_id + FROM + current_rankings + LEFT JOIN picks ON picks.team_id = current_rankings.team_id + AND picks.draft_id = _draft_id + WHERE + pick_id IS NULL + ORDER BY + rank ASC + LIMIT 1), + TRUE, + latest_pick_time + interval '30 seconds'); + ELSE + EXIT; + END IF; + END LOOP; +END; +$$; + +CREATE OR REPLACE VIEW draft_picks AS +SELECT + picks.draft_id, + teams.team_id, + teams.name AS team, + accounts.name AS player +FROM + picks + JOIN teams USING (team_id) + JOIN draft_users USING (draft_user_id) + JOIN accounts USING (account_id) +ORDER BY + picks.created_at; + +CREATE OR REPLACE VIEW draft_info AS +WITH latest_draft_pick AS ( + SELECT + draft_id, + MAX(picked_at) AS picked_at + FROM + picks + GROUP BY + draft_id +) +SELECT + draft_id, + current_picker_account_id(draft_id) as current_player_id, + current_picker_name(draft_id) as current_player_name, + draft_status_id, + CASE WHEN draft_status_id IN (1, 3) THEN + 0 + 
ELSE + date_part('epoch',(coalesce(latest_draft_pick.picked_at, started_at) + interval '30 seconds') - now())::integer + END AS round_time_remaining, + CASE WHEN draft_status_id = 1 THEN + 'The draft will start in ' || to_char(started_at - now(), 'MI:SS') + WHEN draft_status_id = 2 THEN + 'It''s ' || current_picker_name(draft_id) || '''s turn!' + WHEN draft_status_id = 3 THEN + 'The draft is over, good luck!' + END AS message, + draft_status_id = 2 AS can_pick +FROM + drafts + LEFT JOIN latest_draft_pick USING (draft_id); + +CREATE OR REPLACE FUNCTION accept_invite(_code text) + RETURNS table (session_id text, invite_draft_id bigint) + LANGUAGE plpgsql + SECURITY INVOKER + AS $$ +DECLARE + new_account_id bigint; + invite_draft_user_id bigint; + invite_draft_id bigint; + new_session_id text; +BEGIN + SELECT draft_user_id FROM draft_user_invites WHERE code = _code INTO invite_draft_user_id; + SELECT draft_id FROM draft_users WHERE draft_user_id = invite_draft_user_id INTO invite_draft_id; + + INSERT INTO accounts DEFAULT VALUES RETURNING account_id INTO new_account_id; + + UPDATE draft_users SET account_id = new_account_id WHERE draft_user_id = invite_draft_user_id; + SELECT create_session(new_account_id) INTO new_session_id; + + RETURN QUERY SELECT new_session_id, invite_draft_id; +END; +$$; + +CREATE OR REPLACE FUNCTION create_draft(league_id int, player_1 text, player_2 text, started_at timestamptz) + RETURNS table (session_id text, new_draft_id bigint) + LANGUAGE plpgsql + SECURITY INVOKER + AS $$ +DECLARE + new_account_id bigint; + new_draft_id bigint; + current_season_id int := current_season_id(league_id); + new_session_id text; + player_2_id bigint; +BEGIN + INSERT INTO accounts DEFAULT VALUES RETURNING account_id INTO new_account_id; + SELECT create_session(new_account_id) INTO new_session_id; + + IF started_at IS NULL THEN + INSERT INTO drafts (season_id, account_id, draft_status_id) VALUES (current_season_id, new_account_id, 4) RETURNING draft_id INTO 
new_draft_id; + ELSE + INSERT INTO drafts (season_id, account_id, started_at) VALUES (current_season_id, new_account_id, started_at) RETURNING draft_id INTO new_draft_id; + END IF; + + INSERT INTO draft_users (draft_id, name, account_id) VALUES (new_draft_id, player_1, new_account_id); + INSERT INTO draft_users (draft_id, name) VALUES (new_draft_id, CASE WHEN player_2 = '' THEN 'Player 2' ELSE player_2 END) RETURNING draft_user_id INTO player_2_id; + INSERT INTO draft_user_invites (draft_user_id, name) VALUES (player_2_id, player_2); + + RETURN QUERY SELECT new_session_id, new_draft_id; +END; +$$; + +CREATE OR REPLACE FUNCTION current_draft_picks(_draft_id int) + RETURNS TABLE (team_id int, + rank smallint, + name text, + picked boolean, + pick_user text) + LANGUAGE plpgsql + SECURITY INVOKER + AS $$ +DECLARE + draft_season_id integer; +BEGIN + SELECT season_id FROM drafts WHERE draft_id = _draft_id INTO draft_season_id; +RETURN QUERY + WITH season_rankings AS ( + SELECT + teams.team_id, + rankings.rank, + teams.name + FROM + teams + LEFT JOIN rankings ON rankings.team_id = teams.team_id + AND rankings.season_id = draft_season_id + ORDER BY + rank + ) + SELECT r.*, draft_users.name IS NOT NULL AS picked, draft_users.name AS pick_user + FROM season_rankings r + LEFT JOIN picks ON r.team_id = picks.team_id AND picks.draft_id = _draft_id + LEFT JOIN draft_users ON draft_users.draft_user_id = picks.draft_user_id; +END; +$$; diff --git a/src/db/init.sql b/src/db/init.sql new file mode 100644 index 0000000..8d2645f --- /dev/null +++ b/src/db/init.sql @@ -0,0 +1,12 @@ +\set database_name teamdraft +\set schema :database_name +\set owner :database_name + +\i ./src/db/server.sql +\i ./src/db/database.sql +\i ./src/db/auth.sql +\i ./src/db/leagues.sql +\i ./src/db/teams.sql +\i ./src/db/seasons.sql +\i ./src/db/drafts.sql +\i ./src/db/scores.sql diff --git a/src/db/leagues.sql b/src/db/leagues.sql new file mode 100644 index 0000000..9407de8 --- /dev/null +++ 
b/src/db/leagues.sql @@ -0,0 +1,29 @@ +CREATE TABLE leagues ( + league_id integer NOT NULL GENERATED ALWAYS AS IDENTITY, + name text, + + CONSTRAINT leagues_pk PRIMARY KEY (league_id), + CONSTRAINT leagues_name_ck CHECK (char_length(name) <= 64) +); + +CREATE TABLE league_divisions ( + league_division_id integer NOT NULL GENERATED ALWAYS AS IDENTITY, + league_id bigint NOT NULL, + name text, + + CONSTRAINT league_divisions_pk PRIMARY KEY (league_division_id), + CONSTRAINT league_divisions_leagues_fk FOREIGN KEY (league_id) REFERENCES leagues(league_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION, + CONSTRAINT league_divisions_name_ck CHECK (char_length(name) <= 64) +); + +INSERT INTO leagues (name) VALUES ('NFL') RETURNING league_id nfl_league_id \gset + +INSERT INTO league_divisions (league_id, name) VALUES (:nfl_league_id, 'AFC East'); +INSERT INTO league_divisions (league_id, name) VALUES (:nfl_league_id, 'AFC North'); +INSERT INTO league_divisions (league_id, name) VALUES (:nfl_league_id, 'AFC South'); +INSERT INTO league_divisions (league_id, name) VALUES (:nfl_league_id, 'AFC West'); + +INSERT INTO league_divisions (league_id, name) VALUES (:nfl_league_id, 'NFC East'); +INSERT INTO league_divisions (league_id, name) VALUES (:nfl_league_id, 'NFC North'); +INSERT INTO league_divisions (league_id, name) VALUES (:nfl_league_id, 'NFC South'); +INSERT INTO league_divisions (league_id, name) VALUES (:nfl_league_id, 'NFC West'); diff --git a/src/db/schema.sql b/src/db/schema.sql new file mode 100644 index 0000000..ad01727 --- /dev/null +++ b/src/db/schema.sql @@ -0,0 +1,5 @@ +\i ./src/db/auth.sql +\i ./src/db/leagues.sql +\i ./src/db/teams.sql +\i ./src/db/seasons.sql +\i ./src/db/drafts.sql diff --git a/src/db/scores.sql b/src/db/scores.sql new file mode 100644 index 0000000..8572a66 --- /dev/null +++ b/src/db/scores.sql @@ -0,0 +1,106 @@ +CREATE TABLE score_categories ( + score_category_id smallint NOT NULL, + league_id int NOT NULL, + name text NOT NULL, 
+ value int NOT NULL, + + UNIQUE (league_id, name), + CONSTRAINT score_categories_pk PRIMARY KEY (score_category_id) +); +SELECT league_id AS nfl_league_id FROM leagues where name = 'NFL' LIMIT 1 \gset + +INSERT INTO score_categories (score_category_id, league_id, name, value) VALUES (1, :nfl_league_id, 'Win', 1); +INSERT INTO score_categories (score_category_id, league_id, name, value) VALUES (2, :nfl_league_id, 'Playoffs', 5); +INSERT INTO score_categories (score_category_id, league_id, name, value) VALUES (3, :nfl_league_id, 'Divisional', 5); +INSERT INTO score_categories (score_category_id, league_id, name, value) VALUES (4, :nfl_league_id, 'Conference', 10); +INSERT INTO score_categories (score_category_id, league_id, name, value) VALUES (5, :nfl_league_id, 'Superbowl', 10); +INSERT INTO score_categories (score_category_id, league_id, name, value) VALUES (6, :nfl_league_id, 'Champion', 5); + +CREATE TABLE scores ( + score_id bigint NOT NULL GENERATED ALWAYS AS IDENTITY, + season_id bigint NOT NULL, + team_id integer NOT NULL, + game int NOT NULL, + score_category_id smallint NOT NULL DEFAULT 1, + scored_at timestamptz NOT NULL DEFAULT now(), + created_at timestamptz NOT NULL DEFAULT now(), + modified_at timestamptz NOT NULL DEFAULT now(), + + UNIQUE (season_id, team_id, game, score_category_id), + CONSTRAINT scores_pk PRIMARY KEY (score_id), + CONSTRAINT scores_seasons_fk FOREIGN KEY (season_id) REFERENCES seasons(season_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION, + CONSTRAINT scores_teams_fk FOREIGN KEY (team_id) REFERENCES teams(team_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION, + CONSTRAINT scores_score_categories_fk FOREIGN KEY (score_category_id) REFERENCES score_categories(score_category_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION +); + +CREATE OR REPLACE FUNCTION record_score(team_id int, game int, season_id int, score_category_id int, scored_at timestamptz) + RETURNS int + LANGUAGE plpgsql + SECURITY INVOKER + AS 
$$ +DECLARE + new_score_id integer; +BEGIN + INSERT INTO scores(season_id, team_id, game, score_category_id, scored_at) + VALUES (season_id, team_id, game, score_category_id, scored_at) + ON CONFLICT ON CONSTRAINT scores_season_id_team_id_game_score_category_id_key + DO UPDATE SET modified_at = NOW(), scored_at = EXCLUDED.scored_at + RETURNING + score_id INTO new_score_id; + RETURN new_score_id; +END; +$$; + +CREATE OR REPLACE VIEW season_scores AS +WITH score_totals AS ( + SELECT + scores.season_id, + scores.team_id, + sc.name, + sum(value) AS total + FROM + scores + JOIN score_categories sc USING(score_category_id) + GROUP BY + scores.season_id, + scores.team_id, + sc.name +) +SELECT + st.season_id, + teams.name, + array_agg(st.name || ': ' || st.total ORDER BY st.total DESC) AS scores +FROM + teams + LEFT JOIN score_totals st USING(team_id) +GROUP BY + st.season_id, teams.name; + +-- CREATE OR REPLACE VIEW draft_scores AS +-- WITH season_scores AS ( +-- SELECT +-- season_id, +-- team_id, +-- coalesce(SUM(value), 0)::smallint AS total +-- FROM +-- scores +-- JOIN score_categories ON category_id = score_categories.score_category_id +-- GROUP BY +-- season_id, +-- team_id +-- ) +-- SELECT +-- season_id, +-- draft_id, +-- team, +-- player, +-- coalesce(win, 0) AS win, +-- playoffs, +-- divisional, +-- conference, +-- superbowl, +-- champion, +-- coalesce(total, 0) AS total +-- FROM +-- draft_picks +-- LEFT OUTER JOIN season_scores USING (team_id); diff --git a/src/db/seasons.sql b/src/db/seasons.sql new file mode 100644 index 0000000..ce492f0 --- /dev/null +++ b/src/db/seasons.sql @@ -0,0 +1,69 @@ +CREATE TABLE seasons ( + season_id bigint NOT NULL GENERATED ALWAYS AS IDENTITY, + league_id integer NOT NULL, + started_at timestamptz NOT NULL, + ended_at timestamptz NOT NULL, + created_at timestamptz NOT NULL DEFAULT now(), + modified_at timestamptz NOT NULL DEFAULT now(), + + CONSTRAINT seasons_pk PRIMARY KEY (season_id), + CONSTRAINT seasons_leagues_fk FOREIGN KEY 
(league_id) REFERENCES leagues(league_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION +); + +CREATE TABLE rankings( + ranking_id integer NOT NULL GENERATED ALWAYS AS IDENTITY, + season_id integer NOT NULL, + team_id integer NOT NULL, + rank smallint NOT NULL, + created_at timestamptz NOT NULL DEFAULT now(), + modified_at timestamptz NOT NULL DEFAULT now(), + + UNIQUE(season_id, team_id, rank), + CONSTRAINT rankings_pk PRIMARY KEY (ranking_id), + CONSTRAINT rankings_seasons_fk FOREIGN KEY (season_id) REFERENCES seasons(season_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION +); + +CREATE OR REPLACE VIEW current_rankings AS +SELECT teams.team_id, rankings.season_id, seasons.league_id, rank, name +FROM teams +JOIN rankings ON rankings.team_id = teams.team_id +JOIN seasons ON rankings.season_id = seasons.season_id +ORDER BY rank; + +CREATE OR REPLACE FUNCTION current_season_id(_league_id int) + RETURNS int + AS $$ + SELECT + season_id + FROM + seasons + WHERE + started_at <= now() + AND ended_at > now() + AND league_id = _league_id + ORDER BY started_at DESC + LIMIT 1; +$$ +LANGUAGE SQL; + +CREATE OR REPLACE FUNCTION create_season(_league_id int, started_at timestamptz, ended_at timestamptz) + RETURNS int + LANGUAGE plpgsql + SECURITY INVOKER + AS $$ +DECLARE + new_season_id int; + team record; + rank smallint := 1; +BEGIN + INSERT INTO seasons (league_id, started_at, ended_at) VALUES (_league_id, started_at, ended_at) RETURNING season_id INTO new_season_id; + + FOR team in SELECT team_id FROM teams JOIN league_divisions USING (league_division_id) WHERE league_id = _league_id + LOOP + INSERT INTO rankings (season_id, team_id, rank) VALUES (new_season_id, team.team_id, rank); + rank := rank + 1; + END LOOP; + + RETURN new_season_id; +END; +$$; diff --git a/src/db/seed.sql b/src/db/seed.sql new file mode 100644 index 0000000..1f4f9de --- /dev/null +++ b/src/db/seed.sql @@ -0,0 +1,4 @@ +\c teamdraft +SET ROLE teamdraft_rw; + +select create_season(1, 
now(), now() + interval '1 day'); diff --git a/src/db/server.sql b/src/db/server.sql new file mode 100644 index 0000000..66410a9 --- /dev/null +++ b/src/db/server.sql @@ -0,0 +1,13 @@ +-- this should only be needed per postgresql instance + +-- can modify the database schema +DROP ROLE IF EXISTS ddl; +CREATE ROLE ddl WITH NOLOGIN; + +-- can modify database data +DROP ROLE IF EXISTS dml; +CREATE ROLE dml WITH NOLOGIN; + +-- can only read database data +DROP ROLE IF EXISTS read_only; +CREATE ROLE read_only WITH NOLOGIN; diff --git a/src/db/teams.sql b/src/db/teams.sql new file mode 100644 index 0000000..5f08434 --- /dev/null +++ b/src/db/teams.sql @@ -0,0 +1,52 @@ +CREATE TABLE teams( + team_id int NOT NULL GENERATED ALWAYS AS IDENTITY, + league_division_id int NOT NULL, + name text NOT NULL, + external_id text, + created_at timestamptz NOT NULL DEFAULT now(), + modified_at timestamptz NOT NULL DEFAULT now(), + + CONSTRAINT teams_pk PRIMARY KEY (team_id), + CONSTRAINT teams_league_divisions_fk FOREIGN KEY (league_division_id) REFERENCES league_divisions(league_division_id) MATCH SIMPLE ON DELETE NO ACTION ON UPDATE NO ACTION, + CONSTRAINT teams_name_ck CHECK (char_length(name) <= 255) +); + +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Buffalo', 1, '134918'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Miami', 1, '134919'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('New England', 1, '134920'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('NYJ', 1, '134921'); + +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Baltimore', 2, '134922'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Cincinnati', 2, '134923'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Cleveland', 2, '134924'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Pittsburgh', 2, '134925'); + +INSERT INTO teams(name, league_division_id, 
external_id) VALUES ('Houston', 3, '134926'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Indianapolis', 3, '134927'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Jacksonville', 3, '134928'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Tennessee', 3, '134929'); + +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Denver', 4, '134930'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Kansas City', 4, '134931'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Las Vegas', 4, '134932'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('LAC', 4, '135908'); + +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Dallas', 5, '134934'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('NYG', 5, '134935'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Philadelphia', 5, '134936'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Washington', 5, '134937'); + +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Chicago', 6, '134938'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Detroit', 6, '134939'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Green Bay', 6, '134940'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Minnesota', 6, '134941'); + +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Atlanta', 7, '134942'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Carolina', 7, '134943'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('New Orleans', 7, '134944'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Tampa Bay', 7, '134945'); + +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Arizona', 8, '134946'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('LAR', 8, '135907'); +INSERT 
INTO teams(name, league_division_id, external_id) VALUES ('San Francisco', 8, '134948'); +INSERT INTO teams(name, league_division_id, external_id) VALUES ('Seattle', 8, '134949'); diff --git a/src/main.zig b/src/main.zig new file mode 100644 index 0000000..063c191 --- /dev/null +++ b/src/main.zig @@ -0,0 +1,26 @@ +pub fn main() !void { + var gpa = std.heap.GeneralPurposeAllocator(.{}){}; + const allocator = gpa.allocator(); + + // initialize a logging pool + try logz.setup(allocator, .{ + .level = .Info, + .pool_size = 100, + .buffer_size = 4096, + .large_buffer_count = 8, + .large_buffer_size = 16384, + .output = .stdout, + .encoding = .logfmt, + }); + + defer logz.deinit(); + + var app = try App.init(allocator); + defer app.deinit(); + + try @import("web/web.zig").start(&app); +} + +const logz = @import("logz"); +const std = @import("std"); +const App = @import("App.zig"); diff --git a/src/root.zig b/src/root.zig new file mode 100644 index 0000000..ecfeade --- /dev/null +++ b/src/root.zig @@ -0,0 +1,10 @@ +const std = @import("std"); +const testing = std.testing; + +export fn add(a: i32, b: i32) i32 { + return a + b; +} + +test "basic add functionality" { + try testing.expect(add(3, 7) == 10); +} diff --git a/src/scorecorder.zig b/src/scorecorder.zig new file mode 100644 index 0000000..9d83d1f --- /dev/null +++ b/src/scorecorder.zig @@ -0,0 +1,174 @@ +const Round = struct { + events: []Event, +}; + +const Event = struct { + strEvent: []u8, + idHomeTeam: []u8, + strHomeTeam: []u8, + idAwayTeam: []u8, + strAwayTeam: []u8, + intHomeScore: ?[]u8 = null, + intAwayScore: ?[]u8 = null, + strStatus: []u8, + dateEvent: []u8, + strTimestamp: []u8, +}; + +const Config = struct { + round: i16, + season: i16, + all_rounds: bool = false, +}; + +var app: App = undefined; +var config: Config = undefined; +var client: zul.http.Client = undefined; + +pub fn main() !void { + var gpa = std.heap.GeneralPurposeAllocator(.{}){}; + const allocator = gpa.allocator(); + + // initialize a 
logging pool + try logz.setup(allocator, .{ + .level = .Info, + .pool_size = 100, + .buffer_size = 4096, + .large_buffer_count = 8, + .large_buffer_size = 16384, + .output = .stdout, + .encoding = .logfmt, + }); + defer logz.deinit(); + + app = try App.init(allocator); + defer app.deinit(); + + var args = try zul.CommandLineArgs.parse(allocator); + defer args.deinit(); + + client = zul.http.Client.init(allocator); + defer client.deinit(); + + config = .{ + .round = try weekToRound(if (args.contains("round")) try std.fmt.parseInt(i16, args.get("round").?, 10) else currentRound()), + .season = if (args.contains("season")) try std.fmt.parseInt(i16, args.get("season").?, 10) else currentSeason(), + .all_rounds = args.contains("all"), + }; + logz.info() + .int("round", config.round) + .int("season", config.season) + .boolean("all_rounds", config.all_rounds) + .log(); + + if (config.all_rounds) { + for (1..23) |round| { + try recordRound(allocator, @intCast(round)); + std.posix.nanosleep(1, 0); + } + } else { + try recordRound(allocator, config.round); + } +} + +fn recordRound(allocator: std.mem.Allocator, round: i16) !void { + var req = try client.request("https://www.thesportsdb.com/api/v1/json/3/eventsround.php"); + defer req.deinit(); + // id is the league id, with 4391 == NFL + try req.query("id", "4391"); + // the round - which corresponds to the week or playoff id + try req.query("r", try std.fmt.allocPrint(allocator, "{d}", .{round})); + // the season year + try req.query("s", try std.fmt.allocPrint(allocator, "{d}", .{config.season})); + + const res = try req.getResponse(.{}); + if (res.status != 200) { + // TODO: handle error + return; + } + var managed = try res.json(Round, allocator, .{ .ignore_unknown_fields = true }); + defer managed.deinit(); + const parsed_round = managed.value; + for (parsed_round.events) |event| { + logz.info() + .fmt("event", "{s} {s} @ {s} {s}", .{ event.strAwayTeam, event.intAwayScore orelse "0", event.strHomeTeam, event.intHomeScore 
orelse "0" }) + .string("date", event.strTimestamp) + .string("status", event.strStatus) + .log(); + + if (!std.mem.eql(u8, "", event.strStatus) and !std.mem.eql(u8, "FT", event.strStatus) and !std.mem.eql(u8, "AOT", event.strStatus) and !std.mem.eql(u8, "Match Finished", event.strStatus)) { + logz.info().fmt("event", "{s} in progress or not started.", .{event.strEvent}).log(); + continue; + } + + const home_score = try std.fmt.parseInt(usize, event.intHomeScore orelse "0", 10); + const away_score = try std.fmt.parseInt(usize, event.intAwayScore orelse "0", 10); + + const winner_id: []u8 = if (home_score > away_score) event.idHomeTeam else event.idAwayTeam; + + logz.info().string("winner", winner_id).log(); + try insertScore(winner_id, "win", event.strTimestamp); + + if (config.round == 160 or config.round == 125) { + logz.info().string("playoffs", "Home Team made it to the playoffs").log(); + try insertScore(event.idHomeTeam, "playoffs", event.strTimestamp); + logz.info().string("playoffs", "Away Team made it to the playoffs").log(); + try insertScore(event.idAwayTeam, "playoffs", event.strTimestamp); + } + + const category = switch (config.round) { + 160 => "divisional", + 125 => "conference", + 150 => "superbowl", + 200 => "champion", + else => null, + }; + + if (category) |c| { + logz.info().fmt("category", "Inserting {s} score for winner", .{c}).log(); + try insertScore(winner_id, c, event.strTimestamp); + } + } +} + +pub fn insertScore(external_id: []u8, category: []const u8, timestamp: []const u8) !void { + _ = app.pool.exec("SELECT record_external_score_by_year($1, $2, $3, $4, $5)", .{ external_id, config.season, config.round, @constCast(category), timestamp }) catch @panic("failed to record score"); +} + +fn currentRound() i16 { + var current_week = (app.pool.row("SELECT current_week()", .{}) catch @panic("can't fetch current week")) orelse unreachable; + defer current_week.deinit() catch {}; + return current_week.get(i16, 0); +} + +fn currentSeason() i16 
{ + const season_query = + \\SELECT date_part('year', started_at)::smallint + \\FROM seasons WHERE season_id = current_season_id(); + ; + var current_season = (app.pool.row(season_query, .{}) catch @panic("can't fetch current season")) orelse unreachable; + defer current_season.deinit() catch {}; + return current_season.get(i16, 0); +} + +fn weekToRound(week: i16) !i16 { + return switch (week) { + 1...18 => |w| w, + 19 => 160, + 20 => 125, + 21 => 150, + 23 => 200, + else => return error.UnknownRound, + }; +} + +test "weekToRound" { + try std.testing.expect(try weekToRound(1) == 1); + try std.testing.expectError(error.UnknownRound, weekToRound(55)); +} + +const App = @import("App.zig"); + +const logz = @import("logz"); +const std = @import("std"); +const zul = @import("zul"); diff --git a/src/templates/404.zmpl b/src/templates/404.zmpl new file mode 100644 index 0000000..03332b4 --- /dev/null +++ b/src/templates/404.zmpl @@ -0,0 +1 @@ +<h1>Not Found</h1> diff --git a/src/templates/_nav.zmpl b/src/templates/_nav.zmpl new file mode 100644 index 0000000..37ea090 --- /dev/null +++ b/src/templates/_nav.zmpl @@ -0,0 +1,8 @@ +<nav> +<ul> + <li><a href="/" class="logo">team<span>draft</span></a></li> +</ul> +<ul> + <li><a href="/about">about</a></li> +</ul> +</nav> diff --git a/src/templates/about.zmpl b/src/templates/about.zmpl new file mode 100644 index 0000000..9d13f89 --- /dev/null +++ b/src/templates/about.zmpl @@ -0,0 +1,37 @@ +<p>Team Draft is a 2-person sports team fantasy scoring league, where you draft sports teams and score points throughout the season based on wins and playoff appearances. The idea was taken from Mina Kimes's yearly NFL Team Draft with Mike Golic, Jr. 
on the <a href="https://www.espn.com/radio/play/_/id/32078500">Mina Kimes Show podcast</a>.</p> +<h2><a id="scoring">Scoring</a></h2> +<h3>NFL</h3> +<table class="striped"> + <thead> + <tr> + <th scope="col">When your team:</th> + <th scope="col">You score:</th> + </tr> + </thead> + <tbody> + <tr> + <td>Wins</td> + <td>1</td> + </tr> + <tr> + <td>Makes the playoffs</td> + <td>5</td> + </tr> + <tr> + <td>Makes the divisional round</td> + <td>5</td> + </tr> + <tr> + <td>Makes the conference championship game</td> + <td>10</td> + </tr> + <tr> + <td>Wins the Super Bowl</td> <!-- NOTE(review): score_categories also defines 'Superbowl' (reaching the game) = 10 points; this table omits that row — confirm intended scoring display --> + <td>5</td> + </tr> + </tbody> +</table> +<h2>Who made this?</h2> +<p>This was made by <a href="https://sadbeast.com">me</a>, and it's <a href="https://git.sadbeast.com/teamdraft">open source</a>.</p> +<p>If you have any issues, reach out at <a href="mailto:sadbeast@sadbeast.com">sadbeast@sadbeast.com</a>.</p> + diff --git a/src/templates/body.zmpl b/src/templates/body.zmpl new file mode 100644 index 0000000..e3f8387 --- /dev/null +++ b/src/templates/body.zmpl @@ -0,0 +1,6 @@ +<main class="container"> + @partial nav +<div> + {{zmpl.content}} +</div> +</main> diff --git a/src/templates/draft.zmpl b/src/templates/draft.zmpl new file mode 100644 index 0000000..8957b29 --- /dev/null +++ b/src/templates/draft.zmpl @@ -0,0 +1,60 @@ +@zig { +const can_pick = data.getPresence("can_pick"); + if (!data.getPresence("running")) { + <div id="qrcode"></div> + @html HTML + <script> + const qrcode = new QRCode(document.getElementById('qrcode'), { + text: 'https://teamdraft.net/invite/{{.code}}', + width: 128, + height: 128, + colorDark : '#000', + colorLight : '#fff', + correctLevel : QRCode.CorrectLevel.H + }); + </script> + HTML + <p><pre>https://teamdraft.net/invite/{{.code}}</pre></p> + } +<div hx-select="#draft" id="draft" hx-get="/drafts/{{.draft_id}}" hx-trigger="every 1s"> +<div class="sticky"> + if (data.getPresence("running")) { + <br/>Round Time Remaining: {{.round_time_remaining}} + } + 
<br/>{{.message}} +</div> + if (zmpl.get("teams")) |teams| { + <div> + <table class="striped"> + <thead> + <tr> + <th>Team</th> + <th colspan="2">Rank</th> + </tr> + </thead> + <tbody> + for (teams.items(.array)) |team| { + const id = team.get("id"); + const rank = team.get("rank"); + const name = team.get("name"); + const picked: bool = team.getT(.boolean, "picked") orelse true; + const pick_user = team.get("pick_user"); + <tr> + <td>{{name}}</td> + <td>{{rank}}</td> + <td> + {{pick_user}} + if (can_pick and !picked) { + <form method="post" action="/drafts/{{.draft_id}}/pick?team_id={{id}}"> + <input type="submit" value="Pick" style="" /> + </form> + } + </td> + </tr> + } + </tbody> + </table> + </div> + } +} +</div> diff --git a/src/templates/index.zmpl b/src/templates/index.zmpl new file mode 100644 index 0000000..78127d3 --- /dev/null +++ b/src/templates/index.zmpl @@ -0,0 +1,39 @@ +<div class="grid"> + <div> + <p>Welcome! Team Draft is a 2-person fantasy sports league, where you draft entire sports teams - not players - and score points throughout the season based on wins and playoff appearances. 
The idea was taken from the <a href="https://www.espn.com/radio/play/_/id/32078500">Mina Kimes Show's yearly NFL Team Draft</a>.</p> + <ul> + <li>Create a live draft to pick your teams!</li> + <li>Manage players' teams manually to support trades</li> + <li>Keeps track of <a hx-boost="false" href="/leagues/2?season=2023">game scores</a> automatically</li> + </ul> + <p>Create a league now, and schedule a live draft!</p> + </div> + <div> + <article> + <header>2024 NFL Season</header> + <form method="post" action="/drafts"> + <fieldset class="grid"> + <label> + Your name + <input name="player1" placeholder="Kim" autocomplete="given-name" required /> + </label> + <label> + Opponent's name + <input name="player2" placeholder="Pat" /> + </label> + </fieldset> + <fieldset class="grid"> + @zig { + <label> + <input id="draft" type="checkbox" name="draft" role="switch" hx-on:change="document.getElementById('draft-label').style.visibility = document.getElementById('draft').checked ? 'inherit' : 'hidden'" checked/> Schedule a draft + </label> + <label id="draft-label"> + <input type="datetime-local" name="draft_time" aria-label="Draft start time" value="{{.draft_time}}" /> + </label> + } + </fieldset> + <input type="submit" value="Create league" /> + </form> + </article> + </div> +</div> diff --git a/src/templates/layout.zmpl b/src/templates/layout.zmpl new file mode 100644 index 0000000..75cda7e --- /dev/null +++ b/src/templates/layout.zmpl @@ -0,0 +1,69 @@ +<!doctype html> +<html lang="en"> + <head> + <meta charset="utf-8"> + <meta name="viewport" content="width=device-width, initial-scale=1"> + <meta name="color-scheme" content="light dark" /> + <meta name="htmx-config" content='{"refreshOnHistoryMiss":true}'> + <link rel="preconnect" href="https://fonts.googleapis.com"> + <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin> + <link 
href="https://fonts.googleapis.com/css2?family=Atkinson+Hyperlegible:ital,wght@0,400;0,700;1,400;1,700&family=Teko:wght@300..700&display=swap" rel="stylesheet"> + + <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@picocss/pico@2/css/pico.red.min.css" /> + <script src="https://unpkg.com/htmx.org@2.0.2" integrity="sha384-Y7hw+L/jvKeWIRRkqWYfPcvVxHzVzn5REgzbawhxAuQGwX1XWe70vji+VSeHOThJ" crossorigin="anonymous"></script> + <script src="https://cdn.jsdelivr.net/gh/davidshimjs/qrcodejs/qrcode.min.js"></script> + <title>Team Draft</title> +<style> +:root { + --pico-font-family: "Atkinson Hyperlegible", sans-serif; + } +.logo { +font-family: "Teko", sans-serif; +font-optical-sizing: auto; +font-weight: 300; +font-style: normal; +font-size: 40px; +line-height: 1em; +color: inherit; +} + +.logo:hover { +color: inherit; +text-decoration: none; +} + +.logo span { +color: indianred; +} +body { + --pico-font-family: "Atkinson Hyperlegible", sans-serif; + --pico-font-weight: 400; +font-style: normal; +} +.sticky, th { + position: -webkit-sticky; /* Safari */ + position: sticky; +} +th { + top: 1.5em; +} +.sticky { + top: 0; + background-color: var(--pico-background-color); + color: var(--pico-color); +} +</style> + </head> + <body hx-boost="true"> + <main class="container"> + @partial nav + <div> + {{zmpl.content}} + </div> + </main> + <script> + htmx.config.refreshOnHistoryMiss = true; + </script> + </body> +</html> + diff --git a/src/web/Cookie.zig b/src/web/Cookie.zig new file mode 100644 index 0000000..c44bbd2 --- /dev/null +++ b/src/web/Cookie.zig @@ -0,0 +1,418 @@ +const std = @import("std"); + +allocator: std.mem.Allocator, +cookie_string: []const u8, +cookies: std.StringArrayHashMap(*Cookie), +modified: bool = false, +arena: std.heap.ArenaAllocator, + +const Self = @This(); + +const SameSite = enum { strict, lax, none }; +pub const CookieOptions = struct { + domain: []const u8 = "localhost", + path: []const u8 = "/", + same_site: ?SameSite = null, + secure: bool 
= false, + expires: ?i64 = null, // if used, set to time in seconds to be added to std.time.timestamp() + http_only: bool = false, + max_age: ?i64 = null, + partitioned: bool = false, +}; + +pub const Cookie = struct { + name: []const u8, + value: []const u8, + domain: ?[]const u8 = null, + path: ?[]const u8 = null, + same_site: ?SameSite = null, + secure: ?bool = null, + expires: ?i64 = null, // if used, set to time in seconds to be added to std.time.timestamp() + http_only: ?bool = null, + max_age: ?i64 = null, + partitioned: ?bool = null, + + /// Build a cookie string. + pub fn bufPrint(self: Cookie, buf: *[4096]u8) ![]const u8 { + var options: CookieOptions = .{}; + inline for (std.meta.fields(CookieOptions)) |field| { + @field(options, field.name) = @field(self, field.name) orelse @field(options, field.name); + } + + // secure is required if samesite is set to none + const require_secure = if (options.same_site) |same_site| same_site == .none else false; + + var stream = std.io.fixedBufferStream(buf); + const writer = stream.writer(); + + try writer.print("{s}={s}; path={s}; domain={s};", .{ + self.name, + self.value, + options.path, + options.domain, + }); + + if (options.same_site) |same_site| try writer.print(" SameSite={s};", .{@tagName(same_site)}); + if (options.secure or require_secure) try writer.writeAll(" Secure;"); + if (options.expires) |expires| try writer.print(" Expires={d};", .{std.time.timestamp() + expires}); + if (options.max_age) |max_age| try writer.print(" Max-Age={d};", .{max_age}); + if (options.http_only) try writer.writeAll(" HttpOnly;"); + if (options.partitioned) try writer.writeAll(" Partitioned;"); + + return stream.getWritten(); + } + + pub fn applyFlag(self: *Cookie, allocator: std.mem.Allocator, flag: Flag) !void { + switch (flag) { + .domain => |domain| self.domain = try allocator.dupe(u8, domain), + .path => |path| self.path = try allocator.dupe(u8, path), + .same_site => |same_site| self.same_site = same_site, + .secure => 
|secure| self.secure = secure, + .expires => |expires| self.expires = expires, + .http_only => |http_only| self.http_only = http_only, + .max_age => |max_age| self.max_age = max_age, + .partitioned => |partitioned| self.partitioned = partitioned, + } + } +}; + +pub fn init(allocator: std.mem.Allocator, cookie_string: []const u8) Self { + return .{ + .allocator = allocator, + .cookie_string = cookie_string, + .cookies = std.StringArrayHashMap(*Cookie).init(allocator), + .arena = std.heap.ArenaAllocator.init(allocator), + }; +} + +pub fn deinit(self: *Self) void { + var it = self.cookies.iterator(); + while (it.next()) |item| { + self.allocator.free(item.key_ptr.*); + self.allocator.free(item.value_ptr.*.value); + self.allocator.destroy(item.value_ptr.*); + } + self.cookies.deinit(); + self.arena.deinit(); +} + +pub fn get(self: *Self, key: []const u8) ?*Cookie { + return self.cookies.get(key); +} + +pub fn put(self: *Self, cookie: Cookie) !void { + self.modified = true; + + if (self.cookies.fetchSwapRemove(cookie.name)) |entry| { + self.allocator.free(entry.key); + self.allocator.free(entry.value.value); + self.allocator.destroy(entry.value); + } + const key = try self.allocator.dupe(u8, cookie.name); + const ptr = try self.allocator.create(Cookie); + ptr.* = cookie; + ptr.name = key; + ptr.value = try self.allocator.dupe(u8, cookie.value); + try self.cookies.put(key, ptr); +} + +pub const HeaderIterator = struct { + allocator: std.mem.Allocator, + cookies_iterator: std.StringArrayHashMap(*Cookie).Iterator, + buf: *[4096]u8, + + pub fn init(allocator: std.mem.Allocator, cookies: *Self, buf: *[4096]u8) HeaderIterator { + return .{ .allocator = allocator, .cookies_iterator = cookies.cookies.iterator(), .buf = buf }; + } + + pub fn next(self: *HeaderIterator) !?[]const u8 { + if (self.cookies_iterator.next()) |entry| { + const cookie = entry.value_ptr.*; + return try cookie.bufPrint(self.buf); + } else { + return null; + } + } +}; + +pub fn headerIterator(self: *Self, 
buf: *[4096]u8) HeaderIterator { + return HeaderIterator.init(self.allocator, self, buf); +} + +// https://datatracker.ietf.org/doc/html/rfc6265#section-4.2.1 +// cookie-header = "Cookie:" OWS cookie-string OWS +// cookie-string = cookie-pair *( ";" SP cookie-pair ) +pub fn parse(self: *Self) !void { + var key_buf = std.ArrayList(u8).init(self.allocator); + var value_buf = std.ArrayList(u8).init(self.allocator); + var key_terminated = false; + var value_started = false; + var cookie_buf = std.ArrayList(Cookie).init(self.allocator); + + defer key_buf.deinit(); + defer value_buf.deinit(); + defer cookie_buf.deinit(); + defer self.modified = false; + + for (self.cookie_string, 0..) |char, index| { + if (char == '=') { + key_terminated = true; + continue; + } + + if (char == ';' or index == self.cookie_string.len - 1) { + if (char != ';') try value_buf.append(char); + if (parseFlag(key_buf.items, value_buf.items)) |flag| { + for (cookie_buf.items) |*cookie| try cookie.applyFlag(self.arena.allocator(), flag); + } else { + try cookie_buf.append(.{ + .name = try self.arena.allocator().dupe(u8, key_buf.items), + .value = try self.arena.allocator().dupe(u8, value_buf.items), + }); + } + key_buf.clearAndFree(); + value_buf.clearAndFree(); + value_started = false; + key_terminated = false; + continue; + } + + if (!key_terminated and char == ' ') continue; + + if (!key_terminated) { + try key_buf.append(char); + continue; + } + + if (char == ' ' and !value_started) continue; + if (char != ' ' and !value_started) value_started = true; + + if (key_terminated and value_started) { + try value_buf.append(char); + continue; + } + + return error.JetzigInvalidCookieHeader; + } + + for (cookie_buf.items) |cookie| try self.put(cookie); +} + +const Flag = union(enum) { + domain: []const u8, + path: []const u8, + same_site: SameSite, + secure: bool, + expires: i64, + max_age: i64, + http_only: bool, + partitioned: bool, +}; + +fn parseFlag(key: []const u8, value: []const u8) ?Flag { + if 
(key.len > 64) return null; + if (value.len > 64) return null; + + var key_buf: [64]u8 = undefined; + + const normalized_key = std.ascii.lowerString(&key_buf, strip(key)); + const normalized_value = strip(value); + + if (std.mem.eql(u8, normalized_key, "domain")) { + return .{ .domain = normalized_value }; + } else if (std.mem.eql(u8, normalized_key, "path")) { + return .{ .path = normalized_value }; + } else if (std.mem.eql(u8, normalized_key, "samesite")) { + return if (std.mem.eql(u8, normalized_value, "strict")) + .{ .same_site = .strict } + else if (std.mem.eql(u8, normalized_value, "lax")) + .{ .same_site = .lax } + else + .{ .same_site = .none }; + } else if (std.mem.eql(u8, normalized_key, "secure")) { + return .{ .secure = true }; + } else if (std.mem.eql(u8, normalized_key, "httponly")) { + return .{ .http_only = true }; + } else if (std.mem.eql(u8, normalized_key, "partitioned")) { + return .{ .partitioned = true }; + } else if (std.mem.eql(u8, normalized_key, "expires")) { + return .{ .expires = std.fmt.parseInt(i64, normalized_value, 10) catch return null }; + } else if (std.mem.eql(u8, normalized_key, "max-age")) { + return .{ .max_age = std.fmt.parseInt(i64, normalized_value, 10) catch return null }; + } else { + return null; + } +} + +inline fn strip(input: []const u8) []const u8 { + return std.mem.trim(u8, input, &std.ascii.whitespace); +} + +test "basic cookie string" { + const allocator = std.testing.allocator; + var cookies = Self.init(allocator, "foo=bar; baz=qux;"); + defer cookies.deinit(); + try cookies.parse(); + try std.testing.expectEqualStrings("bar", cookies.get("foo").?.value); + try std.testing.expectEqualStrings("qux", cookies.get("baz").?.value); +} + +test "empty cookie string" { + const allocator = std.testing.allocator; + var cookies = Self.init(allocator, ""); + defer cookies.deinit(); + try cookies.parse(); +} + +test "cookie string with irregular spaces" { + const allocator = std.testing.allocator; + var cookies = 
Self.init(allocator, "foo= bar; baz= qux;"); + defer cookies.deinit(); + try cookies.parse(); + try std.testing.expectEqualStrings("bar", cookies.get("foo").?.value); + try std.testing.expectEqualStrings("qux", cookies.get("baz").?.value); +} + +test "headerIterator" { + const allocator = std.testing.allocator; + var buf = std.ArrayList(u8).init(allocator); + defer buf.deinit(); + + const writer = buf.writer(); + + var cookies = Self.init(allocator, "foo=bar; baz=qux;"); + defer cookies.deinit(); + try cookies.parse(); + + var it_buf: [4096]u8 = undefined; + var it = cookies.headerIterator(&it_buf); + while (try it.next()) |*header| { + try writer.writeAll(header.*); + try writer.writeAll("\n"); + } + + try std.testing.expectEqualStrings( + \\foo=bar; path=/; domain=localhost; + \\baz=qux; path=/; domain=localhost; + \\ + , buf.items); +} + +test "modified" { + const allocator = std.testing.allocator; + var cookies = Self.init(allocator, "foo=bar; baz=qux;"); + defer cookies.deinit(); + + try cookies.parse(); + try std.testing.expect(cookies.modified == false); + + try cookies.put(.{ .name = "quux", .value = "corge" }); + try std.testing.expect(cookies.modified == true); +} + +test "domain=example.com" { + const allocator = std.testing.allocator; + var cookies = Self.init(allocator, "foo=bar; baz=qux; Domain=example.com;"); + defer cookies.deinit(); + + try cookies.parse(); + const cookie = cookies.get("foo").?; + try std.testing.expectEqualStrings(cookie.domain.?, "example.com"); +} + +test "path=/example_path" { + const allocator = std.testing.allocator; + var cookies = Self.init(allocator, "foo=bar; baz=qux; Path=/example_path;"); + defer cookies.deinit(); + + try cookies.parse(); + const cookie = cookies.get("foo").?; + try std.testing.expectEqualStrings(cookie.path.?, "/example_path"); +} + +test "SameSite=lax" { + const allocator = std.testing.allocator; + var cookies = Self.init(allocator, "foo=bar; baz=qux; SameSite=lax;"); + defer cookies.deinit(); + + try 
cookies.parse(); + const cookie = cookies.get("foo").?; + try std.testing.expect(cookie.same_site == .lax); +} + +test "SameSite=none" { + const allocator = std.testing.allocator; + var cookies = Self.init(allocator, "foo=bar; baz=qux; SameSite=none;"); + defer cookies.deinit(); + + try cookies.parse(); + const cookie = cookies.get("foo").?; + try std.testing.expect(cookie.same_site == .none); +} + +test "SameSite=strict" { + const allocator = std.testing.allocator; + var cookies = Self.init(allocator, "foo=bar; baz=qux; SameSite=strict;"); + defer cookies.deinit(); + + try cookies.parse(); + const cookie = cookies.get("foo").?; + try std.testing.expect(cookie.same_site == .strict); +} + +test "Secure" { + const allocator = std.testing.allocator; + var cookies = Self.init(allocator, "foo=bar; baz=qux; Secure;"); + defer cookies.deinit(); + + try cookies.parse(); + const cookie = cookies.get("foo").?; + try std.testing.expect(cookie.secure.?); +} + +test "Partitioned" { + const allocator = std.testing.allocator; + var cookies = Self.init(allocator, "foo=bar; baz=qux; Partitioned;"); + defer cookies.deinit(); + + try cookies.parse(); + const cookie = cookies.get("foo").?; + try std.testing.expect(cookie.partitioned.?); +} + +test "Max-Age" { + const allocator = std.testing.allocator; + var cookies = Self.init(allocator, "foo=bar; baz=qux; Max-Age=123123123;"); + defer cookies.deinit(); + + try cookies.parse(); + const cookie = cookies.get("foo").?; + try std.testing.expect(cookie.max_age.? == 123123123); +} + +test "Expires" { + const allocator = std.testing.allocator; + var cookies = Self.init(allocator, "foo=bar; baz=qux; Expires=123123123;"); + defer cookies.deinit(); + + try cookies.parse(); + const cookie = cookies.get("foo").?; + try std.testing.expect(cookie.expires.? 
== 123123123); +} + +test "default flags" { + const allocator = std.testing.allocator; + var cookies = Self.init(allocator, "foo=bar; baz=qux;"); + defer cookies.deinit(); + + try cookies.parse(); + const cookie = cookies.get("foo").?; + try std.testing.expect(cookie.domain == null); + try std.testing.expect(cookie.path == null); + try std.testing.expect(cookie.same_site == null); + try std.testing.expect(cookie.secure == null); + try std.testing.expect(cookie.expires == null); + try std.testing.expect(cookie.http_only == null); + try std.testing.expect(cookie.max_age == null); + try std.testing.expect(cookie.partitioned == null); +} diff --git a/src/web/handler.zig b/src/web/handler.zig new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/src/web/handler.zig diff --git a/src/web/middleware/Logger.zig b/src/web/middleware/Logger.zig new file mode 100644 index 0000000..f2b1637 --- /dev/null +++ b/src/web/middleware/Logger.zig @@ -0,0 +1,51 @@ +const Logger = @This(); + +query: bool, + +// Must define an `init` method, which will accept your Config +// Alternatively, you can define a init(config: Config, mc: httpz.MiddlewareConfig) +// here mc will give you access to the server's allocator and arena +pub fn init(config: Config) !Logger { + return .{ + .query = config.query, + }; +} + +// optionally you can define an "deinit" method +// pub fn deinit(self: *Logger) void { + +// } + +// Must define an `execute` method. `self` doesn't have to be `const`, but +// you're responsible for making your middleware thread-safe. 
+pub fn execute(self: *const Logger, req: *httpz.Request, res: *httpz.Response, executor: anytype) !void { + const start = std.time.microTimestamp(); + + defer { + const elapsed = std.time.microTimestamp() - start; + var logger = logz.logger().multiuse() + .stringSafe("@l", "REQ") + .stringSafe("method", @tagName(req.method)) + .int("status", res.status) + .string("path", req.url.path); + + if (self.query) { + _ = logger.string("query", req.url.query); + } + + logger.int("us", elapsed).log(); + } + + // If you don't call executor.next(), there will be no further processing of + // the request and we'll go straight to writing the response. + return executor.next(); +} + +// Must defined a pub config structure, even if it's empty +pub const Config = struct { + query: bool, +}; + +const std = @import("std"); +const httpz = @import("httpz"); +const logz = @import("logz"); diff --git a/src/web/web.zig b/src/web/web.zig new file mode 100644 index 0000000..e0882a0 --- /dev/null +++ b/src/web/web.zig @@ -0,0 +1,241 @@ +pub fn start(app: *App) !void { + const allocator = app.allocator; + + var server = try httpz.Server(*App).init(allocator, .{ + .address = "0.0.0.0", + .port = 5882, + .request = .{ + .max_form_count = 4, + }, + }, app); + defer server.deinit(); + + const router = server.router(); + { + // publicly accessible + var routes = router.group("/", .{}); + + routes.get("/", index); + routes.get("/about", about); + routes.get("/invite/:code", acceptInvite); + + routes.post("/drafts", createDraft); + routes.get("/drafts/:id", showDraft); + routes.post("/drafts/:id/pick", createPick); + } + + const http_address = try std.fmt.allocPrint(allocator, "http://{s}:{d}", .{ "0.0.0.0", 5882 }); + logz.info().ctx("http").string("address", http_address).log(); + allocator.free(http_address); + + // blocks + try server.listen(); +} + +fn index(_: *RequestContext, req: *httpz.Request, res: *httpz.Response) !void { + var data = Data.init(res.arena); + defer data.deinit(); + + 
const dt = try zul.DateTime.fromUnix(std.time.timestamp(), .seconds); + const tomorrow = try dt.add(1, .hours); + + var root = try data.object(); + try root.put("draft_time", try std.fmt.allocPrint(req.arena, "{s}T{s}", .{ tomorrow.date(), tomorrow.time() })); + + try renderData("index", &data, req, res); +} + +fn about(_: *RequestContext, req: *httpz.Request, res: *httpz.Response) !void { + try render("about", req, res); +} + +fn acceptInvite(ctx: *RequestContext, req: *httpz.Request, res: *httpz.Response) !void { + var data = Data.init(res.arena); + defer data.deinit(); + + if (req.params.get("code")) |code| { + var accept_invite_result = (try ctx.app.pool.row("SELECT * FROM accept_invite($1)", .{code})) orelse { + res.status = 500; + return; + }; + defer accept_invite_result.deinit() catch {}; + + const session_id = accept_invite_result.get([]const u8, 0); + const draft_id = accept_invite_result.get(i64, 1); + res.header("Set-Cookie", try std.fmt.allocPrint(res.arena, "s={s}; Path=/", .{session_id})); + res.header("Location", try std.fmt.allocPrint(res.arena, "/drafts/{}", .{draft_id})); + + res.status = 302; + } +} + +fn createPick(ctx: *RequestContext, req: *httpz.Request, res: *httpz.Response) !void { + const query = try req.query(); + if (query.get("team_id")) |team_id| { + if (req.params.get("id")) |draft_id| { + _ = try ctx.app.pool.exec("CALL auto_draft($1)", .{draft_id}); + _ = try ctx.app.pool.exec("INSERT INTO picks (draft_user_id, draft_id, team_id) VALUES (current_picker_id($1), $1, $2)", .{ draft_id, team_id }); + + res.header("Location", try std.fmt.allocPrint(res.arena, "/drafts/{s}", .{draft_id})); + res.header("HX-Location", try std.fmt.allocPrint(res.arena, "/drafts/{s}", .{draft_id})); + + res.status = 302; + } + } +} + +fn createDraft(ctx: *RequestContext, req: *httpz.Request, res: *httpz.Response) !void { + const formData = try req.formData(); + + if (formData.get("player1")) |player1| { + logz.info().string("player1", player1).log(); + 
const player2 = formData.get("player2") orelse "Player 2"; + var create_draft_result: pg.QueryRow = undefined; + + if (formData.get("draft")) |_| { + if (formData.get("draft_time")) |draft_time| { + logz.info().string("draft_time", draft_time).log(); + create_draft_result = (try ctx.app.pool.row("SELECT * FROM create_draft(1, $1, $2, $3)", .{ player1, player2, draft_time })) orelse { + res.status = 500; + return; + }; + } + } else { + create_draft_result = (try ctx.app.pool.row("SELECT * FROM create_draft(1, $1, $2, null)", .{ player1, player2 })) orelse { + res.status = 500; + return; + }; + } + defer create_draft_result.deinit() catch {}; + + const session_id = create_draft_result.get([]const u8, 0); + const draft_id = create_draft_result.get(i64, 1); + res.header("Set-Cookie", try std.fmt.allocPrint(res.arena, "s={s}; Path=/", .{session_id})); + res.header("Location", try std.fmt.allocPrint(res.arena, "/drafts/{}", .{draft_id})); + + res.status = 302; + return; + } + + var data = Data.init(res.arena); + defer data.deinit(); + if (formData.get("draft")) |_| { + var root = try data.root(.object); + try root.put("draft", true); + } + try renderData("index", &data, req, res); +} + +pub const Team = struct { + team_id: i32, + rank: ?i16, + name: []const u8, + picked: bool, + pick_user: ?[]const u8, +}; + +pub const DraftInfo = struct { + draft_id: i64, + current_player_id: ?i64, + current_player_name: ?[]const u8, + draft_status_id: i16, + round_time_remaining: ?i32, + message: ?[]const u8, + can_pick: bool, +}; + +fn showDraft(ctx: *RequestContext, req: *httpz.Request, res: *httpz.Response) !void { + if (req.params.get("id")) |draft_id| { + var draft_count = (try ctx.app.pool.row("SELECT count(1) FROM drafts WHERE draft_id = $1", .{draft_id})) orelse @panic("oh no"); + defer draft_count.deinit() catch {}; + if (draft_count.get(i64, 0) == 0) { + return ctx.app.notFound(req, res); + } + + var data = Data.init(res.arena); + var root = try data.root(.object); + + _ = 
try ctx.app.pool.exec("CALL auto_draft($1)", .{draft_id}); + + const picks_query = "SELECT * FROM current_draft_picks($1)"; + var picks_result = try ctx.app.pool.query(picks_query, .{draft_id}); + defer picks_result.deinit(); + + var teams = try data.array(); + while (try picks_result.next()) |row| { + const team = try row.to(Team, .{}); + var team_data = try data.object(); + try team_data.put("name", team.name); + try team_data.put("pick_user", team.pick_user); + try team_data.put("picked", team.picked); + try team_data.put("rank", team.rank); + try team_data.put("id", team.team_id); + try teams.append(team_data); + } + try root.put("teams", teams); + + var draft_info: DraftInfo = undefined; + const current_picker_query = + \\SELECT * FROM draft_info WHERE draft_id = $1 + ; + var row = try ctx.app.pool.row(current_picker_query, .{draft_id}); + if (row) |r| { + // defer r.denit(); + draft_info = try r.to(DraftInfo, .{}); + var can_pick = false; + if (draft_info.current_player_id) |current_player_id| { + if (ctx.user) |user| { + can_pick = draft_info.can_pick and current_player_id == user.id; + } + } + try root.put("can_pick", can_pick); + try root.put("running", draft_info.draft_status_id == 2); + try root.put("draft_id", draft_info.draft_id); + try root.put("message", draft_info.message); + try root.put("round_time_remaining", draft_info.round_time_remaining); + try root.put("current_picker", draft_info.current_player_name); + } + + const code_query = + \\SELECT code FROM draft_user_invites + \\ JOIN draft_users USING(draft_user_id) + \\ WHERE draft_id = $1 + ; + var code_result = (try ctx.app.pool.row(code_query, .{draft_id})) orelse @panic("oh no"); + const code = code_result.get([]const u8, 0); + defer code_result.deinit() catch {}; + try root.put("code", code); + + defer data.deinit(); + + try renderData("draft", &data, req, res); + + try row.?.deinit(); + } +} + +pub fn render(template_name: []const u8, req: *httpz.Request, res: *httpz.Response) !void { + var 
data = Data.init(res.arena); + defer data.deinit(); + try renderData(template_name, &data, req, res); +} + +pub fn renderData(template_name: []const u8, data: *Data, req: *httpz.Request, res: *httpz.Response) !void { + if (zmpl.find(template_name)) |template| { + res.body = try template.renderWithOptions(data, .{ .layout = if (req.header("hx-request")) |_| zmpl.find("body") else zmpl.find("layout") }); + } +} + +const App = @import("../App.zig"); +const RequestContext = @import("../RequestContext.zig"); + +const std = @import("std"); +const Allocator = std.mem.Allocator; +const config = @import("config"); + +const httpz = @import("httpz"); +const logz = @import("logz"); +const pg = @import("pg"); +const zmpl = @import("zmpl"); +const zul = @import("zul"); +const Data = zmpl.Data; diff --git a/teamdraft.service b/teamdraft.service new file mode 100644 index 0000000..4137698 --- /dev/null +++ b/teamdraft.service @@ -0,0 +1,17 @@ +[Unit] +Description=Team Draft Website +After=multi-user.target + +[Service] +ExecStart=sops exec-env /home/teamdraft/production.secrets.yaml /home/teamdraft/teamdraft +WorkingDirectory=/home/teamdraft +EnvironmentFile=/home/teamdraft/production.env +User=teamdraft +Group=teamdraft +Type=simple +StandardOutput=journal +StandardError=journal +SyslogIdentifier=teamdraft + +[Install] +WantedBy=multi-user.target diff --git a/test/db/00-setup.sql b/test/db/00-setup.sql new file mode 100644 index 0000000..93ceb03 --- /dev/null +++ b/test/db/00-setup.sql @@ -0,0 +1,10 @@ +SET client_min_messages TO WARNING; +CREATE EXTENSION IF NOT EXISTS pgtap; + +BEGIN; +SELECT plan(1); + +SELECT has_schema('teamdraft'); + +SELECT * FROM finish(); +ROLLBACK; diff --git a/test/db/auth.sql b/test/db/auth.sql new file mode 100644 index 0000000..f19aff1 --- /dev/null +++ b/test/db/auth.sql @@ -0,0 +1,18 @@ +BEGIN; +SELECT plan(8); + +SELECT has_table('accounts'); +SELECT columns_are('accounts', ARRAY[ 'account_id', 'status_id', 'email', 'name', 'modified_at', 
'created_at']); +SELECT has_sequence('accounts_account_id_seq'); +SELECT has_index('teamdraft', 'accounts', 'accounts_pk', 'account_id'); + +SELECT create_account('sadbEASt@sadbeast.com', 'hunter2') new_user_id \gset + +SELECT is(0, (select count(1) FROM accounts WHERE email = 'sadbEASt@sadbeast.com')::int); +SELECT is(1, (select count(1) FROM accounts WHERE email = 'sadbeast@sadbeast.com')::int); +SELECT is(:'new_user_id', (select account_id FROM accounts WHERE email = 'sadbeast@sadbeast.com')::int); + +SELECT has_extension('pgsodium', 'pgsodium', 'pgsodium extension installed'); + +SELECT * FROM finish(); +ROLLBACK; diff --git a/test/db/drafts.sql b/test/db/drafts.sql new file mode 100644 index 0000000..1e63362 --- /dev/null +++ b/test/db/drafts.sql @@ -0,0 +1,15 @@ +BEGIN; +SELECT plan(5); + +SELECT has_table('drafts'); +SELECT columns_are('drafts', ARRAY[ 'draft_id', 'season_id', 'draft_status_id', 'name', 'account_id', 'started_at', 'modified_at', 'created_at']); +SELECT has_sequence('drafts_draft_id_seq'); +SELECT has_index('teamdraft', 'drafts', 'drafts_pk', 'draft_id'); + +INSERT INTO seasons (league_id, started_at, ended_at) VALUES (1, now() - interval '1 day', now() + interval '3 months') RETURNING season_id new_season_id \gset +SELECT create_draft(1, 'sadbeast', 'freckles', now() + interval '3 days') new_session_id \gset +SELECT is(1, (SELECT count(1)::int FROM account_active_session_keys WHERE session_id = :'new_session_id')); + +SELECT * FROM finish(); +ROLLBACK; + diff --git a/test/db/scores.sql b/test/db/scores.sql new file mode 100644 index 0000000..2e1fee3 --- /dev/null +++ b/test/db/scores.sql @@ -0,0 +1,19 @@ +BEGIN; +SELECT plan(2); + +SELECT has_table('scores'); +SELECT has_table('score_categories'); + +SELECT league_id AS nfl_league_id FROM leagues WHERE name = 'NFL' LIMIT 1 \gset +INSERT INTO seasons (league_id, started_at, ended_at) VALUES (:nfl_league_id, now() - interval '1 day', now() + interval '3 months') RETURNING season_id 
new_season_id \gset + +SELECT league_id AS nfl_league_id FROM leagues WHERE name = 'NFL' LIMIT 1 \gset +SELECT team_id AS seattle FROM teams JOIN league_divisions USING (league_division_id) WHERE teams.name = 'Seattle' AND league_id = :nfl_league_id \gset +SELECT team_id AS buffalo FROM teams JOIN league_divisions USING (league_division_id) WHERE teams.name = 'Buffalo' AND league_id = :nfl_league_id \gset + +SELECT record_score(:seattle, 1, :new_season_id, 1, now()); +SELECT record_score(:seattle, 2, :new_season_id, 1, now()); +SELECT record_score(:buffalo, 2, :new_season_id, 1, now()); + +SELECT * FROM finish(); +ROLLBACK; diff --git a/test/db/seasons.sql b/test/db/seasons.sql new file mode 100644 index 0000000..605f842 --- /dev/null +++ b/test/db/seasons.sql @@ -0,0 +1,28 @@ +BEGIN; +SELECT plan(12); + +SELECT has_table('seasons'); +SELECT columns_are('seasons', ARRAY[ 'season_id', 'league_id', 'started_at', 'ended_at', 'created_at', 'modified_at']); +SELECT has_sequence('seasons_season_id_seq'); +SELECT has_index('teamdraft', 'seasons', 'seasons_pk', 'season_id'); + +SELECT league_id AS nfl_league_id FROM leagues WHERE name = 'NFL' LIMIT 1 \gset +SELECT create_season(:nfl_league_id, now() - interval '1 day', now() + interval '3 months') new_season_id \gset + +SELECT is(1, (SELECT count(1)::int FROM seasons WHERE season_id = :new_season_id)); +SELECT is(:new_season_id, (SELECT current_season_id(:nfl_league_id))); + +-- rankings + +SELECT has_table('rankings'); +SELECT columns_are('rankings', ARRAY[ 'ranking_id', 'season_id', 'team_id', 'rank', 'created_at', 'modified_at']); +SELECT has_sequence('rankings_ranking_id_seq'); +SELECT has_index('teamdraft', 'rankings', 'rankings_pk', 'ranking_id primary key'); + +SELECT is((SELECT count(1) FROM teams JOIN league_divisions USING(league_division_id) WHERE league_id = :nfl_league_id), (SELECT count(1)::bigint FROM current_rankings WHERE season_id = :new_season_id)); +SELECT results_eq('SELECT season_id, league_id, name 
FROM current_rankings WHERE season_id = current_season_id(' || :'nfl_league_id' || ') ORDER BY rank LIMIT 2', + 'VALUES (' || :'new_season_id' || ',' || :'nfl_league_id' || $$, 'Buffalo'), ($$ || :'new_season_id' || ',' || :'nfl_league_id' || $$, 'Miami')$$, + 'current nfl season rankings'); + +SELECT * FROM finish(); +ROLLBACK; |