feat: initial commit

This commit is contained in:
Cory Dransfeldt 2025-03-27 16:46:02 -07:00
commit e214116e40
No known key found for this signature in database
253 changed files with 17406 additions and 0 deletions

9
.editorconfig Normal file
View file

@ -0,0 +1,9 @@
root = true
[*]
indent_style = space
indent_size = 2
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
charset = utf-8

12
.gitignore vendored Normal file
View file

@ -0,0 +1,12 @@
# build output
.cache
node_modules
vendor
generated
dist
# local dependencies
.env
# system files
.DS_Store

7
.markdownlint.json Normal file
View file

@ -0,0 +1,7 @@
{
"default": true,
"MD013": false,
"MD033": false,
"MD041": false,
"MD047": false
}

2
.npmrc Normal file
View file

@ -0,0 +1,2 @@
save-exact=true
cache=~/.npm

1
.nvmrc Normal file
View file

@ -0,0 +1 @@
22

42
README.md Normal file
View file

@ -0,0 +1,42 @@
## Local dev setup
`npm run setup`
This will generate the required `.env` file and `apache` configs, and list the commands and PHP extensions to install and enable on the server (if needed).
## Local dev workflow
1. `npm start`
2. Open `http://localhost:8080`
To debug and develop php components, run `npm run php`. This will start the PHP server on `http://localhost:8000` and inject required environment variables from `.env`. It will also serve the static 11ty files from `dist`, so you can test the full site locally while leaving 11ty running to generate updates to files it watches.
## Commands
- `npm run start`: starts 11ty.
- `npm run start:quick`: starts 11ty a bit quicker (provided it's already been built).
- `npm run build`: builds static site output.
- `npm run debug`: runs 11ty with additional debug output.
- `npm run php`: starts a PHP server for local development.
- `npm run update:deps`: checks for dependency updates and updates 11ty.
- `npm run setup`: populates `.env` from 1Password and installs dependencies using `npm` and `composer`.
- `npm run clean`: removes the `dist` and `.cache` folders.
- `npm run clean:cache`: removes the `.cache` folder.
- `npm run clean:dist`: removes the `dist` folder.
## Required environment variables
```plaintext
POSTGREST_URL # client + server
POSTGREST_API_KEY # client + server
MASTODON_ACCESS_TOKEN # server
MASTODON_SYNDICATION_TOKEN # server
FORWARDEMAIL_API_KEY # server
BOOK_IMPORT_TOKEN # server
WATCHING_IMPORT_TOKEN # server
TMDB_API_KEY # server
NAVIDROME_SCROBBLE_TOKEN # server
NAVIDROME_API_URL # server
NAVIDROME_API_TOKEN # server
ARTIST_IMPORT_TOKEN # server
```

View file

@ -0,0 +1,72 @@
<?php
namespace App\Classes;
use GuzzleHttp\Client;
require __DIR__ . "/../../vendor/autoload.php";

/**
 * Minimal base class for token-protected API endpoints.
 *
 * Loads PostgREST connection settings from the environment and provides
 * helpers for access gating, PostgREST requests, and JSON responses.
 */
abstract class ApiHandler
{
    /** Base URL of the PostgREST instance. */
    protected string $postgrestUrl;

    /** Bearer token used to authenticate against PostgREST. */
    protected string $postgrestApiKey;

    public function __construct()
    {
        $this->loadEnvironment();
    }

    /**
     * Read PostgREST settings from $_ENV with a getenv() fallback,
     * defaulting to "" so the typed properties are always initialized.
     */
    private function loadEnvironment(): void
    {
        $this->postgrestUrl =
            $_ENV["POSTGREST_URL"] ?? getenv("POSTGREST_URL") ?: "";
        $this->postgrestApiKey =
            $_ENV["POSTGREST_API_KEY"] ?? getenv("POSTGREST_API_KEY") ?: "";
    }

    /**
     * Allow only CLI runs or HTTP POST requests; anything else is
     * redirected to the 404 page.
     */
    protected function ensureCliAccess(): void
    {
        // Null-coalesce so a missing REQUEST_METHOD (possible under some
        // SAPIs) does not raise an undefined-index warning before the
        // redirect fires.
        if (
            php_sapi_name() !== "cli" &&
            ($_SERVER["REQUEST_METHOD"] ?? "") !== "POST"
        ) {
            $this->redirectNotFound();
        }
    }

    /** Redirect to /404 and stop execution. */
    protected function redirectNotFound(): void
    {
        header("Location: /404", true, 302);
        exit();
    }

    /**
     * Perform a PostgREST request and decode the JSON response.
     *
     * @param string     $endpoint Table/endpoint name, e.g. "artists".
     * @param string     $query    Raw query string (already URL-encoded).
     * @param string     $method   HTTP method (GET, POST, PATCH, PUT, ...).
     * @param array|null $body     JSON body for write requests.
     * @return array Decoded response, or [] for empty/invalid JSON.
     */
    protected function fetchFromPostgREST(
        string $endpoint,
        string $query = "",
        string $method = "GET",
        ?array $body = null
    ): array {
        $url = "{$this->postgrestUrl}/{$endpoint}?{$query}";
        $options = [
            "headers" => [
                "Content-Type" => "application/json",
                "Authorization" => "Bearer {$this->postgrestApiKey}",
            ],
        ];
        // Attach the body for every write method. The previous POST-only
        // check silently dropped PATCH/PUT payloads (e.g. the Mastodon
        // handler's PATCH to /posts).
        if ($body && in_array($method, ["POST", "PATCH", "PUT"], true)) {
            $options["json"] = $body;
        }
        $response = (new Client())->request($method, $url, $options);
        return json_decode($response->getBody(), true) ?? [];
    }

    /**
     * Emit a JSON {"message": ...} payload with the given status code and
     * terminate the request.
     */
    protected function sendResponse(string $message, int $statusCode): void
    {
        http_response_code($statusCode);
        header("Content-Type: application/json");
        echo json_encode(["message" => $message]);
        exit();
    }

    /** Error variant of sendResponse(); identical payload shape. */
    protected function sendErrorResponse(string $message, int $statusCode): void
    {
        $this->sendResponse($message, $statusCode);
    }
}

129
api/Classes/BaseHandler.php Normal file
View file

@ -0,0 +1,129 @@
<?php
namespace App\Classes;
require __DIR__ . "/../../vendor/autoload.php";
use GuzzleHttp\Client;
use GuzzleHttp\Exception\RequestException;

/**
 * Base handler for public-facing endpoints.
 *
 * Wraps authenticated PostgREST access, JSON responses, and an optional
 * best-effort Redis cache.
 */
abstract class BaseHandler
{
    /** Base URL of the PostgREST instance. */
    protected string $postgrestUrl;

    /** Bearer token used to authenticate against PostgREST. */
    protected string $postgrestApiKey;

    /** Optional Redis connection; null when unavailable. */
    protected ?\Redis $cache = null;

    public function __construct()
    {
        $this->loadEnvironment();
    }

    /**
     * Read PostgREST settings from $_ENV with a getenv() fallback,
     * defaulting to "" so the typed properties are always initialized.
     */
    private function loadEnvironment(): void
    {
        $this->postgrestUrl =
            $_ENV["POSTGREST_URL"] ?? getenv("POSTGREST_URL") ?: "";
        $this->postgrestApiKey =
            $_ENV["POSTGREST_API_KEY"] ?? getenv("POSTGREST_API_KEY") ?: "";
    }

    /**
     * Issue an authenticated request to PostgREST and decode the JSON body.
     *
     * @param string $method   HTTP method.
     * @param string $endpoint Endpoint path relative to the PostgREST URL.
     * @param array  $options  Extra Guzzle options (headers are overridden).
     * @return array Decoded response; [] for an empty body.
     * @throws \Exception On transport errors or invalid JSON.
     */
    protected function makeRequest(
        string $method,
        string $endpoint,
        array $options = []
    ): array {
        $client = new Client();
        $url = rtrim($this->postgrestUrl, "/") . "/" . ltrim($endpoint, "/");
        try {
            $response = $client->request(
                $method,
                $url,
                array_merge($options, [
                    "headers" => [
                        "Authorization" => "Bearer {$this->postgrestApiKey}",
                        "Content-Type" => "application/json",
                    ],
                ])
            );
            $responseBody = $response->getBody()->getContents();
            if (empty($responseBody)) {
                return [];
            }
            $responseData = json_decode($responseBody, true);
            if (json_last_error() !== JSON_ERROR_NONE) {
                throw new \Exception("Invalid JSON response: {$responseBody}");
            }
            return $responseData;
        } catch (RequestException $e) {
            $response = $e->getResponse();
            $statusCode = $response ? $response->getStatusCode() : "N/A";
            $responseBody = $response
                ? $response->getBody()->getContents()
                : "No response body";
            throw new \Exception(
                "Request to {$url} failed with status {$statusCode}. Response: {$responseBody}"
            );
        } catch (\Exception $e) {
            throw new \Exception("Request to {$url} failed: " . $e->getMessage());
        }
    }

    /** Send a JSON payload with the given status code and terminate. */
    protected function sendResponse(array $data, int $statusCode = 200): void
    {
        http_response_code($statusCode);
        header("Content-Type: application/json");
        echo json_encode($data);
        exit();
    }

    /** Send a JSON {"error": ...} payload and terminate. */
    protected function sendErrorResponse(
        string $message,
        int $statusCode = 500
    ): void {
        $this->sendResponse(["error" => $message], $statusCode);
    }

    /**
     * GET an endpoint with a raw query string and decode the JSON body.
     *
     * Exceptions are fully qualified (\Exception): inside this namespace a
     * bare "Exception" resolved to the nonexistent App\Classes\Exception,
     * so the original throw fataled instead of raising a catchable error.
     *
     * @throws \Exception When the request fails or returns a non-200 status.
     */
    protected function fetchFromApi(string $endpoint, string $query): array
    {
        $client = new Client();
        $url =
            rtrim($this->postgrestUrl, "/") .
            "/" .
            ltrim($endpoint, "/") .
            "?" .
            $query;
        try {
            $response = $client->request("GET", $url, [
                "headers" => [
                    "Content-Type" => "application/json",
                    "Authorization" => "Bearer {$this->postgrestApiKey}",
                ],
            ]);
            if ($response->getStatusCode() !== 200) {
                throw new \Exception(
                    "API call to {$url} failed with status code " .
                        $response->getStatusCode()
                );
            }
            return json_decode($response->getBody(), true);
        } catch (RequestException $e) {
            throw new \Exception("Error fetching from API: " . $e->getMessage());
        }
    }

    /**
     * Best-effort Redis initialization; leaves $cache null when the
     * extension is missing or the connection fails.
     */
    protected function initializeCache(): void
    {
        if (class_exists("Redis")) {
            $redis = new \Redis();
            try {
                $redis->connect("127.0.0.1", 6379);
                $this->cache = $redis;
            } catch (\Throwable $e) {
                // \Throwable instead of a bare "Exception": the latter
                // resolved to the nonexistent App\Classes\Exception and
                // never caught RedisException.
                error_log("Redis connection failed: " . $e->getMessage());
                $this->cache = null;
            }
        } else {
            $this->cache = null;
        }
    }
}

3
api/Utils/init.php Normal file
View file

@ -0,0 +1,3 @@
<?php
// Bootstrap for api/Utils: pull in the shared media helper functions.
// __DIR__ makes the include independent of the caller's working directory,
// and the closing "?>" tag is omitted to avoid accidental trailing output
// before headers are sent.
require_once __DIR__ . "/media.php";

14
api/Utils/media.php Normal file
View file

@ -0,0 +1,14 @@
<?php
/**
 * Normalize a media name (artist/album) into a URL-safe slug.
 *
 * Transliterates to ASCII, strips everything except letters, digits,
 * whitespace and hyphens, collapses whitespace/hyphen runs into single
 * hyphens, trims edge hyphens, and lowercases: "AC/DC" => "acdc".
 *
 * @param string $str Raw UTF-8 media name.
 * @return string Lowercased, hyphenated slug (may be "" for empty input).
 */
function sanitizeMediaString(string $str): string
{
    // iconv() returns false for untransliterable input; fall back to the
    // raw string rather than passing false into preg_replace().
    $ascii = iconv("UTF-8", "ASCII//TRANSLIT", $str);
    if ($ascii === false) {
        $ascii = $str;
    }
    $sanitizedString = preg_replace("/[^a-zA-Z0-9\s-]/", "", $ascii);
    return strtolower(
        trim(preg_replace("/[\s-]+/", "-", $sanitizedString), "-")
    );
}

207
api/artist-import.php Normal file
View file

@ -0,0 +1,207 @@
<?php
require __DIR__ . "/Classes/ApiHandler.php";
require __DIR__ . "/Utils/init.php";

use App\Classes\ApiHandler;
use GuzzleHttp\Client;

/**
 * Imports an artist and their albums from Navidrome into PostgREST.
 *
 * Invoked via POST with a JSON body {"token": ..., "artistId": ...}. Newly
 * created records are flagged "tentative" and assigned a placeholder image
 * so they can be curated later.
 */
class ArtistImportHandler extends ApiHandler
{
    /** Shared secret required in the request body. */
    private string $artistImportToken;

    /** Art UUID applied to records that have no artwork yet. */
    private string $placeholderImageId = "4cef75db-831f-4f5d-9333-79eaa5bb55ee";

    private string $navidromeApiUrl;
    private string $navidromeAuthToken;

    public function __construct()
    {
        parent::__construct();
        $this->ensureCliAccess();
        $this->loadEnvironment();
    }

    /**
     * Load PostgREST/Navidrome settings. The "?: ''" fallback guards
     * against getenv() returning false for an unset variable, which would
     * be a TypeError when assigned to these string-typed properties.
     */
    private function loadEnvironment(): void
    {
        $this->postgrestUrl = getenv("POSTGREST_URL") ?: "";
        $this->postgrestApiKey = getenv("POSTGREST_API_KEY") ?: "";
        $this->artistImportToken = getenv("ARTIST_IMPORT_TOKEN") ?: "";
        $this->navidromeApiUrl = getenv("NAVIDROME_API_URL") ?: "";
        $this->navidromeAuthToken = getenv("NAVIDROME_API_TOKEN") ?: "";
    }

    /**
     * Entry point: validate the token and artist id, then sync the artist
     * and their albums. Always terminates with a JSON response.
     */
    public function handleRequest(): void
    {
        $input = json_decode(file_get_contents("php://input"), true);
        if (!$input) {
            $this->sendJsonResponse("error", "Invalid or missing JSON body", 400);
        }
        $providedToken = $input["token"] ?? null;
        $artistId = $input["artistId"] ?? null;
        if (!$providedToken || $providedToken !== $this->artistImportToken) {
            $this->sendJsonResponse("error", "Unauthorized access", 401);
        }
        if (!$artistId) {
            $this->sendJsonResponse("error", "Artist ID is required", 400);
        }
        try {
            $artistData = $this->fetchNavidromeArtist($artistId);
            $artistExists = $this->processArtist($artistData);
            if ($artistExists) {
                $this->processAlbums($artistId, $artistData->name);
            }
            $this->sendJsonResponse("message", "Artist and albums synced successfully", 200);
        } catch (Exception $e) {
            $this->sendJsonResponse("error", "Error: " . $e->getMessage(), 500);
        }
    }

    /** Emit a single-key JSON payload with the given status and exit. */
    private function sendJsonResponse(string $key, string $message, int $statusCode): void
    {
        http_response_code($statusCode);
        header("Content-Type: application/json");
        echo json_encode([$key => $message]);
        exit();
    }

    /** Fetch one artist from Navidrome; returned as a decoded object. */
    private function fetchNavidromeArtist(string $artistId)
    {
        $client = new Client();
        $response = $client->get("{$this->navidromeApiUrl}/api/artist/{$artistId}", [
            "headers" => [
                "x-nd-authorization" => "Bearer {$this->navidromeAuthToken}",
                "Accept" => "application/json"
            ]
        ]);
        return json_decode($response->getBody(), false);
    }

    /** Fetch all albums for an artist from Navidrome as an array. */
    private function fetchNavidromeAlbums(string $artistId): array
    {
        $client = new Client();
        $response = $client->get("{$this->navidromeApiUrl}/api/album", [
            "query" => [
                "_end" => 0,
                "_order" => "ASC",
                "_sort" => "max_year",
                "_start" => 0,
                "artist_id" => $artistId
            ],
            "headers" => [
                "x-nd-authorization" => "Bearer {$this->navidromeAuthToken}",
                "Accept" => "application/json"
            ]
        ]);
        return json_decode($response->getBody(), true);
    }

    /**
     * Ensure the artist exists in the database, creating a tentative record
     * when missing. Returns true once the artist is present.
     *
     * @throws Exception When Navidrome data lacks an artist name.
     */
    private function processArtist(object $artistData): bool
    {
        $artistName = $artistData->name ?? "";
        if (!$artistName) {
            throw new Exception("Artist name is missing from Navidrome data.");
        }
        $existingArtist = $this->getArtistByName($artistName);
        if ($existingArtist) {
            return true;
        }
        $artistKey = sanitizeMediaString($artistName);
        $slug = "/music/artists/{$artistKey}";
        // Coalesce *before* strip_tags(): the original
        // "strip_tags(...) ?? ''" could never fire (strip_tags() does not
        // return null) and reading a missing ->biography raised a warning.
        $description = strip_tags($artistData->biography ?? "");
        $genre = $this->resolveGenreId($artistData->genres[0]->name ?? "");
        $starred = $artistData->starred ?? false;
        $artistPayload = [
            "name_string" => $artistName,
            "slug" => $slug,
            "description" => $description,
            "tentative" => true,
            "art" => $this->placeholderImageId,
            "favorite" => $starred,
            "genres" => $genre,
        ];
        $this->saveArtist($artistPayload);
        return true;
    }

    /**
     * Insert any Navidrome albums that are not already stored for the
     * artist; individual album failures are logged and skipped.
     *
     * @throws Exception When the artist row cannot be found.
     */
    private function processAlbums(string $artistId, string $artistName): void
    {
        $artist = $this->getArtistByName($artistName);
        if (!$artist) {
            throw new Exception("Artist not found in the database.");
        }
        $existingAlbums = $this->getExistingAlbums($artist["id"]);
        $existingAlbumKeys = array_column($existingAlbums, "key");
        $navidromeAlbums = $this->fetchNavidromeAlbums($artistId);
        foreach ($navidromeAlbums as $album) {
            $albumName = $album["name"];
            $releaseYear = $album["date"];
            $artistKey = sanitizeMediaString($artistName);
            // Album keys are "<artist>-<album>" slugs; used for dedupe.
            $albumKey = $artistKey . "-" . sanitizeMediaString($albumName);
            if (in_array($albumKey, $existingAlbumKeys)) {
                error_log("Skipping existing album: " . $albumName);
                continue;
            }
            try {
                $albumPayload = [
                    "name" => $albumName,
                    "key" => $albumKey,
                    "release_year" => $releaseYear,
                    "artist" => $artist["id"],
                    "artist_name" => $artistName,
                    "art" => $this->placeholderImageId,
                    "tentative" => true,
                ];
                $this->saveAlbum($albumPayload);
            } catch (Exception $e) {
                error_log("Error adding album '{$albumName}': " . $e->getMessage());
            }
        }
    }

    /** Look up an artist row by exact name; null when absent. */
    private function getArtistByName(string $nameString): ?array
    {
        $query = "name_string=eq." . urlencode($nameString);
        $response = $this->fetchFromPostgREST("artists", $query, "GET");
        return $response[0] ?? null;
    }

    /** Insert the artist row via PostgREST. */
    private function saveArtist(array $artistPayload): void
    {
        $this->fetchFromPostgREST("artists", "", "POST", $artistPayload);
    }

    /** Insert the album row via PostgREST. */
    private function saveAlbum(array $albumPayload): void
    {
        $this->fetchFromPostgREST("albums", "", "POST", $albumPayload);
    }

    /** Resolve a genre name to its id; null when unknown. */
    private function resolveGenreId(string $genreName): ?string
    {
        $genres = $this->fetchFromPostgREST("genres", "name=eq." . urlencode(strtolower($genreName)), "GET");
        if (!empty($genres)) {
            return $genres[0]["id"];
        }
        return null;
    }

    /** All album rows already stored for the given artist id. */
    private function getExistingAlbums(string $artistId): array
    {
        return $this->fetchFromPostgREST("albums", "artist=eq." . urlencode($artistId), "GET");
    }
}

$handler = new ArtistImportHandler();
$handler->handleRequest();

116
api/book-import.php Normal file
View file

@ -0,0 +1,116 @@
<?php
require __DIR__ . "/Classes/ApiHandler.php";

use App\Classes\ApiHandler;
use GuzzleHttp\Client;

/**
 * Imports a book by ISBN from the Open Library API into PostgREST.
 *
 * Invoked via POST with a JSON body {"token": ..., "isbn": ...}; new books
 * default to the "want to read" status.
 */
class BookImportHandler extends ApiHandler
{
    /** Shared secret required in the request body. */
    private string $bookImportToken;

    public function __construct()
    {
        parent::__construct();
        $this->ensureCliAccess();
        $this->loadEnvironment();
    }

    /**
     * Load settings from $_ENV/getenv(). The trailing "?: ''" prevents a
     * TypeError when getenv() returns false for an unset variable.
     */
    private function loadEnvironment(): void
    {
        $this->postgrestUrl =
            $_ENV["POSTGREST_URL"] ?? getenv("POSTGREST_URL") ?: "";
        $this->postgrestApiKey =
            $_ENV["POSTGREST_API_KEY"] ?? getenv("POSTGREST_API_KEY") ?: "";
        $this->bookImportToken =
            $_ENV["BOOK_IMPORT_TOKEN"] ?? getenv("BOOK_IMPORT_TOKEN") ?: "";
    }

    /** Entry point: validate token + isbn, then fetch and store the book. */
    public function handleRequest(): void
    {
        $input = json_decode(file_get_contents("php://input"), true);
        if (!$input) {
            $this->sendErrorResponse("Invalid or missing JSON body", 400);
        }
        $providedToken = $input["token"] ?? null;
        $isbn = $input["isbn"] ?? null;
        if (!$providedToken || $providedToken !== $this->bookImportToken) {
            $this->sendErrorResponse("Unauthorized access", 401);
        }
        if (!$isbn) {
            $this->sendErrorResponse("isbn parameter is required", 400);
        }
        try {
            $bookData = $this->fetchBookData($isbn);
            $this->processBook($bookData);
            $this->sendResponse("Book imported successfully", 200);
        } catch (Exception $e) {
            $this->sendErrorResponse("Error: " . $e->getMessage(), 500);
        }
    }

    /**
     * Fetch book metadata from Open Library.
     *
     * @throws Exception When no data exists for the ISBN.
     */
    private function fetchBookData(string $isbn): array
    {
        $client = new Client();
        $response = $client->get("https://openlibrary.org/api/books", [
            "query" => [
                "bibkeys" => "ISBN:{$isbn}",
                "format" => "json",
                "jscmd" => "data",
            ],
            "headers" => ["Accept" => "application/json"],
        ]);
        $data = json_decode($response->getBody(), true);
        $bookKey = "ISBN:{$isbn}";
        if (empty($data[$bookKey])) {
            throw new Exception("Book data not found for ISBN: {$isbn}");
        }
        return $data[$bookKey];
    }

    /**
     * Validate essential fields and insert the book unless it already
     * exists.
     *
     * @throws Exception On missing data or duplicate ISBN.
     */
    private function processBook(array $bookData): void
    {
        // Prefer ISBN-13, falling back to ISBN-10.
        $isbn =
            $bookData["identifiers"]["isbn_13"][0] ??
            ($bookData["identifiers"]["isbn_10"][0] ?? null);
        $title = $bookData["title"] ?? null;
        $author = $bookData["authors"][0]["name"] ?? null;
        $description = $bookData["description"] ?? ($bookData["notes"] ?? "");
        if (!$isbn || !$title || !$author) {
            throw new Exception("Missing essential book data (title, author, or ISBN).");
        }
        $existingBook = $this->getBookByISBN($isbn);
        if ($existingBook) {
            throw new Exception("Book with ISBN {$isbn} already exists.");
        }
        $bookPayload = [
            "isbn" => $isbn,
            "title" => $title,
            "author" => $author,
            "description" => $description,
            "read_status" => "want to read",
            "slug" => "/books/" . $isbn,
        ];
        $this->saveBook($bookPayload);
    }

    /** Insert the book row via PostgREST. */
    private function saveBook(array $bookPayload): void
    {
        $this->fetchFromPostgREST("books", "", "POST", $bookPayload);
    }

    /** Look up a book by ISBN; null when absent. */
    private function getBookByISBN(string $isbn): ?array
    {
        $query = "isbn=eq." . urlencode($isbn);
        $response = $this->fetchFromPostgREST("books", $query, "GET");
        return $response[0] ?? null;
    }
}

$handler = new BookImportHandler();
$handler->handleRequest();

228
api/contact.php Normal file
View file

@ -0,0 +1,228 @@
<?php
require __DIR__ . "/Classes/BaseHandler.php";

use App\Classes\BaseHandler;
use GuzzleHttp\Client;

/**
 * Handles contact-form submissions: validates and sanitizes input,
 * rate-limits by IP, stores the message via PostgREST, and sends a
 * notification email through Forward Email.
 */
class ContactHandler extends BaseHandler
{
    private string $forwardEmailApiKey;
    private Client $httpClient;

    // Note: intentionally does not call parent::__construct(); this class
    // loads its own (superset of the) environment below.
    public function __construct(?Client $httpClient = null)
    {
        $this->httpClient = $httpClient ?? new Client();
        $this->loadEnvironment();
    }

    /**
     * Load settings from $_ENV/getenv(). The trailing "?: ''" prevents a
     * TypeError when getenv() returns false for an unset variable.
     */
    private function loadEnvironment(): void
    {
        $this->postgrestUrl =
            $_ENV["POSTGREST_URL"] ?? getenv("POSTGREST_URL") ?: "";
        $this->postgrestApiKey =
            $_ENV["POSTGREST_API_KEY"] ?? getenv("POSTGREST_API_KEY") ?: "";
        $this->forwardEmailApiKey =
            $_ENV["FORWARDEMAIL_API_KEY"] ?? getenv("FORWARDEMAIL_API_KEY") ?: "";
    }

    /**
     * Entry point. Accepts application/json ({"data": {...}}) or
     * form-encoded bodies, validates the fields, and redirects to the
     * success page; any failure becomes a JSON error response.
     */
    public function handleRequest(): void
    {
        try {
            $this->validateReferer();
            $this->checkRateLimit();
            $this->enforceHttps();
            $contentType = $_SERVER["CONTENT_TYPE"] ?? "";
            $formData = null;
            if (strpos($contentType, "application/json") !== false) {
                $rawBody = file_get_contents("php://input");
                $formData = json_decode($rawBody, true);
                if (!$formData || !isset($formData["data"])) {
                    throw new Exception("Invalid JSON payload.");
                }
                $formData = $formData["data"];
            } elseif (
                strpos($contentType, "application/x-www-form-urlencoded") !== false
            ) {
                $formData = $_POST;
            } else {
                $this->sendErrorResponse(
                    "Unsupported Content-Type. Use application/json or application/x-www-form-urlencoded.",
                    400
                );
            }
            // Honeypot field: bots that fill hp_name are rejected.
            if (!empty($formData["hp_name"])) {
                $this->sendErrorResponse("Invalid submission.", 400);
            }
            $name = htmlspecialchars(
                trim($formData["name"] ?? ""),
                ENT_QUOTES,
                "UTF-8"
            );
            $email = filter_var($formData["email"] ?? "", FILTER_VALIDATE_EMAIL);
            $message = htmlspecialchars(
                trim($formData["message"] ?? ""),
                ENT_QUOTES,
                "UTF-8"
            );
            if (empty($name)) {
                $this->sendErrorResponse("Name is required.", 400);
            }
            if (!$email) {
                $this->sendErrorResponse("Valid email is required.", 400);
            }
            if (empty($message)) {
                $this->sendErrorResponse("Message is required.", 400);
            }
            if (strlen($name) > 100) {
                $this->sendErrorResponse(
                    "Name is too long. Max 100 characters allowed.",
                    400
                );
            }
            if (strlen($message) > 1000) {
                $this->sendErrorResponse(
                    "Message is too long. Max 1000 characters allowed.",
                    400
                );
            }
            if ($this->isBlockedDomain($email)) {
                $this->sendErrorResponse("Submission from blocked domain.", 400);
            }
            $contactData = [
                "name" => $name,
                "email" => $email,
                "message" => $message,
                "replied" => false,
            ];
            $this->saveToDatabase($contactData);
            $this->sendNotificationEmail($contactData);
            $this->sendRedirect("/contact/success");
        } catch (Exception $e) {
            error_log("Error handling contact form submission: " . $e->getMessage());
            $this->sendErrorResponse($e->getMessage(), 400);
        }
    }

    /**
     * Reject submissions whose Referer doesn't mention the site's domain.
     *
     * @throws Exception On an unexpected origin.
     */
    private function validateReferer(): void
    {
        $referer = $_SERVER["HTTP_REFERER"] ?? "";
        $allowedDomain = "coryd.dev";
        if (!str_contains($referer, $allowedDomain)) {
            throw new Exception("Invalid submission origin.");
        }
    }

    /**
     * Allow at most $maxRequests submissions per IP per window, tracked in
     * a temp file. Responds with a /429 redirect when exceeded.
     */
    private function checkRateLimit(): void
    {
        $ipAddress = $_SERVER["REMOTE_ADDR"] ?? "unknown";
        $cacheFile = sys_get_temp_dir() . "/rate_limit_" . md5($ipAddress);
        $rateLimitDuration = 60;
        $maxRequests = 5;
        if (file_exists($cacheFile)) {
            $data = json_decode(file_get_contents($cacheFile), true);
            if (!is_array($data) || $data["timestamp"] + $rateLimitDuration <= time()) {
                // Window expired (or cache corrupt): start a fresh window.
                // The original never reset the timestamp, so after the first
                // window lapsed the limiter could never trigger again.
                $data = ["count" => 1, "timestamp" => time()];
            } elseif ($data["count"] >= $maxRequests) {
                header("Location: /429", true, 302);
                exit();
            } else {
                $data["count"]++;
            }
        } else {
            $data = ["count" => 1, "timestamp" => time()];
        }
        file_put_contents($cacheFile, json_encode($data));
    }

    /**
     * Require the request to arrive over HTTPS.
     *
     * @throws Exception On a plain-HTTP request.
     */
    private function enforceHttps(): void
    {
        if (empty($_SERVER["HTTPS"]) || $_SERVER["HTTPS"] !== "on") {
            throw new Exception("Secure connection required. Use HTTPS.");
        }
    }

    /** True when the email's domain appears in the blocked_domains table. */
    private function isBlockedDomain(string $email): bool
    {
        $domain = substr(strrchr($email, "@"), 1);
        if (!$domain) {
            return false;
        }
        $response = $this->httpClient->get(
            "{$this->postgrestUrl}/blocked_domains",
            [
                "headers" => [
                    "Content-Type" => "application/json",
                    "Authorization" => "Bearer {$this->postgrestApiKey}",
                ],
                "query" => [
                    "domain_name" => "eq.{$domain}",
                    "limit" => 1,
                ],
            ]
        );
        $blockedDomains = json_decode($response->getBody(), true);
        return !empty($blockedDomains);
    }

    /**
     * Persist the submission via PostgREST.
     *
     * @throws Exception On a 4xx/5xx response.
     */
    private function saveToDatabase(array $contactData): void
    {
        $response = $this->httpClient->post("{$this->postgrestUrl}/contacts", [
            "headers" => [
                "Content-Type" => "application/json",
                "Authorization" => "Bearer {$this->postgrestApiKey}",
            ],
            "json" => $contactData,
        ]);
        if ($response->getStatusCode() >= 400) {
            $errorResponse = json_decode($response->getBody(), true);
            throw new Exception(
                "PostgREST error: " . ($errorResponse["message"] ?? "Unknown error")
            );
        }
    }

    /**
     * Email the submission via the Forward Email API, with reply-to set to
     * the submitter.
     *
     * @throws Exception When the API reports a failure status.
     */
    private function sendNotificationEmail(array $contactData): void
    {
        $authHeader = "Basic " . base64_encode("{$this->forwardEmailApiKey}:");
        $emailSubject = "Contact form submission";
        $emailText = sprintf(
            "Name: %s\nEmail: %s\nMessage: %s\n",
            $contactData["name"],
            $contactData["email"],
            $contactData["message"]
        );
        $response = $this->httpClient->post(
            "https://api.forwardemail.net/v1/emails",
            [
                "headers" => [
                    "Content-Type" => "application/x-www-form-urlencoded",
                    "Authorization" => $authHeader,
                ],
                "form_params" => [
                    "from" => "coryd.dev <hi@admin.coryd.dev>",
                    "to" => "hi@coryd.dev",
                    "subject" => $emailSubject,
                    "text" => $emailText,
                    "replyTo" => $contactData["email"],
                ],
            ]
        );
        if ($response->getStatusCode() >= 400) {
            throw new Exception("Failed to send email notification.");
        }
    }

    /** 302-redirect to a site-relative path on the current host and exit. */
    private function sendRedirect(string $path): void
    {
        $protocol = (!empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] !== 'off') ? "https" : "http";
        $host = $_SERVER['HTTP_HOST'];
        $redirectUrl = "{$protocol}://{$host}{$path}";
        header("Location: $redirectUrl", true, 302);
        exit();
    }
}

try {
    $handler = new ContactHandler();
    $handler->handleRequest();
} catch (Exception $e) {
    error_log("Contact form error: " . $e->getMessage());
    echo json_encode(["error" => $e->getMessage()]);
    http_response_code(500);
}

279
api/mastodon.php Normal file
View file

@ -0,0 +1,279 @@
<?php
require __DIR__ . "/Classes/ApiHandler.php";

use App\Classes\ApiHandler;
use GuzzleHttp\Client;

/**
 * Syndicates items from the site's RSS feed to Mastodon.
 *
 * For each new feed item: records the link in mastodon_posts, posts a
 * status (optionally with an uploaded image), and — for blog posts —
 * writes the resulting Mastodon URL back onto the post row.
 */
class MastodonPostHandler extends ApiHandler
{
    // NOTE(review): these redeclare properties already defined on
    // ApiHandler; harmless, but redundant.
    protected string $postgrestUrl;
    protected string $postgrestApiKey;

    private string $mastodonAccessToken;
    private string $rssFeedUrl;
    private string $baseUrl;

    private const MASTODON_API_STATUS = "https://follow.coryd.dev/api/v1/statuses";

    private Client $httpClient;

    public function __construct(?Client $httpClient = null)
    {
        parent::__construct();
        $this->ensureCliAccess();
        $this->loadEnvironment();
        // Authorization is checked (and may exit) before the HTTP client
        // is assigned.
        $this->validateAuthorization();
        $this->httpClient = $httpClient ?: new Client();
    }

    // Load credentials plus the fixed feed/site URLs.
    // NOTE(review): getenv() can return false for unset variables, which
    // would be a TypeError on these string-typed properties — confirm the
    // env is always populated where this runs.
    private function loadEnvironment(): void
    {
        $this->postgrestUrl =
            getenv("POSTGREST_URL") ?: $_ENV["POSTGREST_URL"] ?? "";
        $this->postgrestApiKey =
            getenv("POSTGREST_API_KEY") ?: $_ENV["POSTGREST_API_KEY"] ?? "";
        $this->mastodonAccessToken =
            getenv("MASTODON_ACCESS_TOKEN") ?: $_ENV["MASTODON_ACCESS_TOKEN"] ?? "";
        $this->rssFeedUrl = "https://www.coryd.dev/feeds/syndication.xml";
        $this->baseUrl = "https://www.coryd.dev";
    }

    // Require "Bearer <MASTODON_SYNDICATION_TOKEN>" in the Authorization
    // header; responds 401 and exits otherwise.
    private function validateAuthorization(): void
    {
        $authHeader = $_SERVER["HTTP_AUTHORIZATION"] ?? "";
        $expectedToken = "Bearer " . getenv("MASTODON_SYNDICATION_TOKEN");
        if ($authHeader !== $expectedToken) {
            http_response_code(401);
            echo json_encode(["error" => "Unauthorized."]);
            exit();
        }
    }

    // Main loop: oldest-first over the feed; each unseen link is stored
    // first (so a crash cannot double-post), then posted to Mastodon.
    public function handlePost(): void
    {
        if (!$this->isDatabaseAvailable()) {
            echo "Database is unavailable. Exiting.\n";
            return;
        }
        $latestItems = $this->fetchRSSFeed($this->rssFeedUrl);
        foreach (array_reverse($latestItems) as $item) {
            $existingPost = $this->fetchFromPostgREST("mastodon_posts", "link=eq." . urlencode($item["link"]));
            if (!empty($existingPost)) continue;
            $content = $this->truncateContent(
                $item["title"],
                str_replace(array("\n", "\r"), '', strip_tags($item["description"])),
                $item["link"],
                500
            );
            $timestamp = date("Y-m-d H:i:s");
            // Store before posting; skip the post entirely if the write
            // fails so the same item cannot be posted twice later.
            if (!$this->storeInDatabase($item["link"], $timestamp)) {
                echo "Skipping post: database write failed for {$item["link"]}\n";
                continue;
            }
            $mastodonPostUrl = $this->postToMastodon($content, $item["image"] ?? null);
            if ($mastodonPostUrl) {
                // Only blog posts (under /posts) get the URL written back.
                if (strpos($item["link"], $this->baseUrl . "/posts") !== false) {
                    $slug = str_replace($this->baseUrl, "", $item["link"]);
                    $this->updatePostWithMastodonUrl($slug, $mastodonPostUrl);
                    echo "Posted and stored URL: {$item["link"]}\n";
                }
            } else {
                echo "Failed to post to Mastodon. Skipping database update.\n";
            }
        }
        echo "RSS processed successfully.\n";
    }

    // Download and parse the RSS feed into [title, link, description,
    // image] rows; image comes from the optional <enclosure> element.
    private function fetchRSSFeed(string $rssFeedUrl): array
    {
        $rssText = file_get_contents($rssFeedUrl);
        if (!$rssText) throw new Exception("Failed to fetch RSS feed.");
        $rss = new \SimpleXMLElement($rssText);
        $items = [];
        foreach ($rss->channel->item as $item) {
            $imageUrl = null;
            if ($item->enclosure && isset($item->enclosure['url'])) $imageUrl = (string) $item->enclosure['url'];
            $items[] = [
                "title" => (string) $item->title,
                "link" => (string) $item->link,
                "description" => (string) $item->description,
                "image" => $imageUrl,
            ];
        }
        return $items;
    }

    // Download an image and upload it to Mastodon's media endpoint,
    // returning the media id (or null when the response has none).
    // NOTE(review): file_get_contents($imageUrl) is unchecked — a failed
    // download uploads an empty file; confirm feed images are reliable.
    private function uploadImageToMastodon(string $imageUrl): ?string
    {
        $headers = [
            "Authorization" => "Bearer {$this->mastodonAccessToken}"
        ];
        $tempFile = tempnam(sys_get_temp_dir(), "mastodon_img");
        file_put_contents($tempFile, file_get_contents($imageUrl));
        $response = $this->httpClient->request("POST", "https://follow.coryd.dev/api/v2/media", [
            "headers" => $headers,
            "multipart" => [
                [
                    "name" => "file",
                    "contents" => fopen($tempFile, "r"),
                    "filename" => basename($imageUrl)
                ]
            ]
        ]);
        unlink($tempFile);
        $statusCode = $response->getStatusCode();
        if ($statusCode !== 200) throw new Exception("Image upload failed with status $statusCode.");
        $responseBody = json_decode($response->getBody()->getContents(), true);
        return $responseBody["id"] ?? null;
    }

    // Post a status (with optional attached media) and return the public
    // status URL, or null when the response lacks one. A failed image
    // upload is logged and the status is posted without media.
    private function postToMastodon(string $content, ?string $imageUrl = null): ?string
    {
        $headers = [
            "Authorization" => "Bearer {$this->mastodonAccessToken}",
            "Content-Type" => "application/json",
        ];
        $mediaIds = [];
        if ($imageUrl) {
            try {
                $mediaId = $this->uploadImageToMastodon($imageUrl);
                if ($mediaId) $mediaIds[] = $mediaId;
            } catch (Exception $e) {
                echo "Image upload failed: " . $e->getMessage() . "\n";
            }
        }
        $postData = ["status" => $content];
        if (!empty($mediaIds)) $postData["media_ids"] = $mediaIds;
        $response = $this->httpRequest(
            self::MASTODON_API_STATUS,
            "POST",
            $headers,
            $postData
        );
        return $response["url"] ?? null;
    }

    // Record a syndicated link; returns false (and logs) on failure so the
    // caller can skip posting.
    private function storeInDatabase(string $link, string $timestamp): bool
    {
        $data = [
            "link" => $link,
            "created_at" => $timestamp,
        ];
        try {
            $this->fetchFromPostgREST("mastodon_posts", "", "POST", $data);
            return true;
        } catch (Exception $e) {
            echo "Error storing post in database: " . $e->getMessage() . "\n";
            return false;
        }
    }

    // Cheap availability probe: a 1-row select against mastodon_posts.
    private function isDatabaseAvailable(): bool
    {
        try {
            $response = $this->fetchFromPostgREST("mastodon_posts", "limit=1");
            return is_array($response);
        } catch (Exception $e) {
            echo "Database check failed: " . $e->getMessage() . "\n";
            return false;
        }
    }

    // Write the Mastodon URL onto the post row, only where mastodon_url is
    // still null.
    // NOTE(review): verify ApiHandler::fetchFromPostgREST attaches the JSON
    // body for PATCH requests — historically it only sent bodies for POST,
    // which would make this PATCH a no-op.
    private function updatePostWithMastodonUrl(
        string $slug,
        string $mastodonPostUrl
    ): void {
        $data = ["mastodon_url" => $mastodonPostUrl];
        $this->fetchFromPostgREST("posts", "slug=eq.{$slug}&mastodon_url=is.null", "PATCH", $data);
    }

    // Fit "title\n\ndescription\n\nlink" into $maxLength, trimming the
    // description at a word boundary and appending "...".
    // NOTE(review): strlen/substr count bytes, while Mastodon's limit is in
    // characters — multi-byte text may truncate mid-character; consider
    // mb_* equivalents (left as-is here).
    private function truncateContent(
        string $title,
        string $description,
        string $link,
        int $maxLength
    ): string {
        $baseLength = strlen("$title\n\n$link");
        $availableSpace = $maxLength - $baseLength - 4;
        if (strlen($description) > $availableSpace) {
            $description = substr($description, 0, $availableSpace);
            $description = preg_replace('/\s+\S*$/', "", $description) . "...";
        }
        return "$title\n\n$description\n\n$link";
    }

    // Thin Guzzle wrapper: optional JSON body, throws on >=400, returns
    // the decoded array ([] for empty or non-array JSON).
    private function httpRequest(
        string $url,
        string $method = "GET",
        array $headers = [],
        ?array $data = null
    ): array {
        $options = ["headers" => $headers];
        if ($data) $options["json"] = $data;
        $response = $this->httpClient->request($method, $url, $options);
        $statusCode = $response->getStatusCode();
        if ($statusCode >= 400) throw new Exception("HTTP error $statusCode: " . $response->getBody());
        $responseBody = $response->getBody()->getContents();
        if (empty($responseBody)) return [];
        $decodedResponse = json_decode($responseBody, true);
        if (!is_array($decodedResponse)) return [];
        return $decodedResponse;
    }

    // NOTE(review): currently unused within this class.
    private function getPostgRESTHeaders(): array
    {
        return [
            "Authorization" => "Bearer {$this->postgrestApiKey}",
            "Content-Type" => "application/json",
        ];
    }
}

try {
    $handler = new MastodonPostHandler();
    $handler->handlePost();
} catch (Exception $e) {
    http_response_code(500);
    echo json_encode(["error" => $e->getMessage()]);
}

82
api/playing.php Normal file
View file

@ -0,0 +1,82 @@
<?php
namespace App\Handlers;

require __DIR__ . "/Classes/BaseHandler.php";

use App\Classes\BaseHandler;

/**
 * Serves the most recently played track as a small JSON payload, cached
 * in Redis for a short TTL when a cache connection is available.
 */
class LatestListenHandler extends BaseHandler
{
    /** Cache lifetime for the latest-listen payload, in seconds. */
    protected int $cacheTTL = 60;

    public function __construct()
    {
        parent::__construct();
        $this->initializeCache();
    }

    /** Serve the latest listen from cache, or PostgREST on a cache miss. */
    public function handleRequest(): void
    {
        try {
            $cachedData = $this->cache ? $this->cache->get("latest_listen") : null;
            if ($cachedData) {
                $this->sendResponse(json_decode($cachedData, true));
                return;
            }
            $data = $this->makeRequest("GET", "optimized_latest_listen?select=*");
            if (!is_array($data) || empty($data[0])) {
                $this->sendResponse(["message" => "No recent tracks found"], 404);
                return;
            }
            $latestListen = $this->formatLatestListen($data[0]);
            if ($this->cache) {
                $this->cache->set(
                    "latest_listen",
                    json_encode($latestListen),
                    $this->cacheTTL
                );
            }
            $this->sendResponse($latestListen);
        } catch (\Exception $e) {
            // Fully qualified: inside App\Handlers a bare "Exception"
            // resolved to the nonexistent App\Handlers\Exception, so this
            // catch could never match and errors escaped unlogged.
            error_log("LatestListenHandler Error: " . $e->getMessage());
            $this->sendErrorResponse(
                "Internal Server Error: " . $e->getMessage(),
                500
            );
        }
    }

    /**
     * Build the HTML snippet shown on the site for the latest listen.
     * All user-sourced fields are escaped before interpolation.
     */
    private function formatLatestListen(array $latestListen): array
    {
        // Prefer the artist's emoji, then the genre's, then a default.
        $emoji =
            $latestListen["artist_emoji"] ?? ($latestListen["genre_emoji"] ?? "🎧");
        $trackName = htmlspecialchars(
            $latestListen["track_name"] ?? "Unknown Track",
            ENT_QUOTES,
            "UTF-8"
        );
        $artistName = htmlspecialchars(
            $latestListen["artist_name"] ?? "Unknown Artist",
            ENT_QUOTES,
            "UTF-8"
        );
        $url = htmlspecialchars($latestListen["url"] ?? "/", ENT_QUOTES, "UTF-8");
        return [
            "content" => sprintf(
                '%s %s by <a href="https://www.coryd.dev%s">%s</a>',
                $emoji,
                $trackName,
                $url,
                $artistName
            ),
        ];
    }
}

$handler = new LatestListenHandler();
$handler->handleRequest();

279
api/scrobble.php Normal file
View file

@ -0,0 +1,279 @@
<?php
namespace App\Handlers;
require __DIR__ . "/Classes/ApiHandler.php";
require __DIR__ . "/Utils/init.php";
use App\Classes\ApiHandler;
use GuzzleHttp\Client;
header("Content-Type: application/json");
$authHeader = $_SERVER["HTTP_AUTHORIZATION"] ?? "";
$expectedToken = "Bearer " . getenv("NAVIDROME_SCROBBLE_TOKEN");
class NavidromeScrobbleHandler extends ApiHandler
{
private string $postgrestApiUrl;
private string $postgrestApiToken;
private string $navidromeApiUrl;
private string $navidromeAuthToken;
private string $forwardEmailApiKey;
private array $artistCache = [];
private array $albumCache = [];
public function __construct()
{
parent::__construct();
$this->ensureCliAccess();
$this->loadEnvironment();
$this->validateAuthorization();
}
// Load PostgREST, Navidrome, and Forward Email credentials.
// NOTE(review): getenv() returns false when a variable is unset, which
// would be a TypeError on these string-typed properties — confirm all
// five variables are always defined where this runs.
private function loadEnvironment(): void
{
    $this->postgrestApiUrl = getenv("POSTGREST_URL");
    $this->postgrestApiToken = getenv("POSTGREST_API_KEY");
    $this->navidromeApiUrl = getenv("NAVIDROME_API_URL");
    $this->navidromeAuthToken = getenv("NAVIDROME_API_TOKEN");
    $this->forwardEmailApiKey = getenv("FORWARDEMAIL_API_KEY");
}
/**
 * Require a "Bearer <NAVIDROME_SCROBBLE_TOKEN>" Authorization header;
 * responds 401 and exits otherwise.
 */
private function validateAuthorization(): void
{
    $authHeader = $_SERVER["HTTP_AUTHORIZATION"] ?? "";
    $expectedToken = "Bearer " . getenv("NAVIDROME_SCROBBLE_TOKEN");
    // hash_equals() compares in constant time, avoiding a timing side
    // channel when checking the secret token.
    if (!hash_equals($expectedToken, $authHeader)) {
        http_response_code(401);
        echo json_encode(["error" => "Unauthorized."]);
        exit();
    }
}
// Poll Navidrome for recently played tracks and scrobble any that have
// not already been recorded in the listens table.
public function runScrobbleCheck(): void
{
    $recentTracks = $this->fetchRecentlyPlayed();
    if (empty($recentTracks)) return;
    foreach ($recentTracks as $track) {
        // Dedupe on the play timestamp before writing anything.
        if ($this->isTrackAlreadyScrobbled($track)) continue;
        $this->handleTrackScrobble($track);
    }
}
private function fetchRecentlyPlayed(): array
{
$client = new Client();
try {
$response = $client->request("GET", "{$this->navidromeApiUrl}/api/song", [
"query" => [
"_end" => 20,
"_order" => "DESC",
"_sort" => "play_date",
"_start" => 0,
"recently_played" => "true"
],
"headers" => [
"x-nd-authorization" => "Bearer {$this->navidromeAuthToken}",
"Accept" => "application/json"
]
]);
$data = json_decode($response->getBody()->getContents(), true);
return $data ?? [];
} catch (\Exception $e) {
error_log("Error fetching tracks: " . $e->getMessage());
return [];
}
}
private function isTrackAlreadyScrobbled(array $track): bool
{
$playDate = strtotime($track["playDate"]);
$existingListen = $this->fetchFromPostgREST("listens", "listened_at=eq.{$playDate}&limit=1");
return !empty($existingListen);
}
private function handleTrackScrobble(array $track): void
{
$artistData = $this->getOrCreateArtist($track["artist"]);
if (empty($artistData)) {
error_log("Failed to retrieve or create artist: " . $track["artist"]);
return;
}
$albumData = $this->getOrCreateAlbum($track["album"], $artistData);
if (empty($albumData)) {
error_log("Failed to retrieve or create album: " . $track["album"]);
return;
}
$this->insertListen($track, $albumData["key"]);
}
private function getOrCreateArtist(string $artistName): array
{
if (!$this->isDatabaseAvailable()) {
error_log("Skipping artist insert: database is unavailable.");
return [];
}
if (isset($this->artistCache[$artistName])) return $this->artistCache[$artistName];
$encodedArtist = rawurlencode($artistName);
$existingArtist = $this->fetchFromPostgREST("artists", "name_string=eq.{$encodedArtist}&limit=1");
if (!empty($existingArtist)) {
$this->artistCache[$artistName] = $existingArtist[0];
return $existingArtist[0];
}
$this->fetchFromPostgREST("artists", "", "POST", [
"mbid" => null,
"art" => "4cef75db-831f-4f5d-9333-79eaa5bb55ee",
"name" => $artistName,
"slug" => "/music",
"tentative" => true,
"total_plays" => 0
]);
$this->sendFailureEmail("New tentative artist record", "A new tentative artist record was inserted:\n\nArtist: $artistName");
$artistData = $this->fetchFromPostgREST("artists", "name_string=eq.{$encodedArtist}&limit=1");
$this->artistCache[$artistName] = $artistData[0] ?? [];
return $this->artistCache[$artistName];
}
private function getOrCreateAlbum(string $albumName, array $artistData): array
{
if (!$this->isDatabaseAvailable()) {
error_log("Skipping album insert: database is unavailable.");
return [];
}
$albumKey = $this->generateAlbumKey($artistData["name_string"], $albumName);
if (isset($this->albumCache[$albumKey])) return $this->albumCache[$albumKey];
$encodedAlbumKey = rawurlencode($albumKey);
$existingAlbum = $this->fetchFromPostgREST("albums", "key=eq.{$encodedAlbumKey}&limit=1");
if (!empty($existingAlbum)) {
$this->albumCache[$albumKey] = $existingAlbum[0];
return $existingAlbum[0];
}
$artistId = $artistData["id"] ?? null;
if (!$artistId) {
error_log("Artist ID missing for album creation: " . $albumName);
return [];
}
$this->fetchFromPostgREST("albums", "", "POST", [
"mbid" => null,
"art" => "4cef75db-831f-4f5d-9333-79eaa5bb55ee",
"key" => $albumKey,
"name" => $albumName,
"tentative" => true,
"total_plays" => 0,
"artist" => $artistId
]);
$this->sendFailureEmail("New tentative album record", "A new tentative album record was inserted:\n\nAlbum: $albumName\nKey: $albumKey");
$albumData = $this->fetchFromPostgREST("albums", "key=eq.{$encodedAlbumKey}&limit=1");
$this->albumCache[$albumKey] = $albumData[0] ?? [];
return $this->albumCache[$albumKey];
}
private function insertListen(array $track, string $albumKey): void
{
$playDate = strtotime($track["playDate"]);
$this->fetchFromPostgREST("listens", "", "POST", [
"artist_name" => $track["artist"],
"album_name" => $track["album"],
"track_name" => $track["title"],
"listened_at" => $playDate,
"album_key" => $albumKey
]);
}
private function generateAlbumKey(string $artistName, string $albumName): string
{
$artistKey = sanitizeMediaString($artistName);
$albumKey = sanitizeMediaString($albumName);
return "{$artistKey}-{$albumKey}";
}
private function sendFailureEmail(string $subject, string $message): void
{
if (!$this->isDatabaseAvailable()) {
error_log("Skipping email: database is unavailable.");
return;
}
$authHeader = "Basic " . base64_encode($this->forwardEmailApiKey . ":");
$client = new Client([
"base_uri" => "https://api.forwardemail.net/",
]);
try {
$response = $client->post("v1/emails", [
"headers" => [
"Authorization" => $authHeader,
"Content-Type" => "application/x-www-form-urlencoded",
],
"form_params" => [
"from" => "coryd.dev <hi@admin.coryd.dev>",
"to" => "hi@coryd.dev",
"subject" => $subject,
"text" => $message,
],
]);
} catch (\GuzzleHttp\Exception\RequestException $e) {
error_log("Request Exception: " . $e->getMessage());
if ($e->hasResponse()) {
$errorResponse = (string) $e->getResponse()->getBody();
error_log("Error Response: " . $errorResponse);
}
} catch (\Exception $e) {
error_log("General Exception: " . $e->getMessage());
}
}
private function isDatabaseAvailable(): bool
{
try {
$response = $this->fetchFromPostgREST("listens", "limit=1");
return is_array($response);
} catch (Exception $e) {
error_log("Database check failed: " . $e->getMessage());
return false;
}
}
}
// Bootstrap: run the scrobble check, reporting any uncaught failure as JSON.
// Fix: in namespace App\Handlers a bare `Exception` resolves to the
// non-existent App\Handlers\Exception, so this catch could never match —
// use the global \Exception.
try {
    $handler = new NavidromeScrobbleHandler();
    $handler->runScrobbleCheck();
} catch (\Exception $e) {
    http_response_code(500);
    echo json_encode(["error" => $e->getMessage()]);
}

162
api/search.php Normal file
View file

@ -0,0 +1,162 @@
<?php
namespace App\Handlers;
require __DIR__ . "/Classes/BaseHandler.php";
use App\Classes\BaseHandler;
/**
 * Paginated site-search endpoint backed by the PostgREST RPC
 * search_optimized_index, with optional per-type filtering and a short-lived
 * cache (Redis when available, otherwise an in-process array).
 *
 * Fix: this file lives in namespace App\Handlers, so bare `Exception`
 * resolved to the non-existent App\Handlers\Exception — every throw would
 * fatal ("class not found") and the catch could never match. All references
 * now use the global \Exception.
 */
class SearchHandler extends BaseHandler
{
    /** Cache lifetime in seconds (applies to the Redis backend). */
    protected int $cacheTTL = 300;

    public function __construct()
    {
        parent::__construct();
        $this->initializeCache();
    }

    /**
     * Validates query parameters, serves cached results when present, and
     * responds with {results, total, page, pageSize} as JSON. Validation
     * failures are reported as a generic 400.
     */
    public function handleRequest(): void
    {
        try {
            $query = $this->validateAndSanitizeQuery($_GET["q"] ?? null);
            $types = $this->validateAndSanitizeTypes($_GET["type"] ?? "");
            $page = isset($_GET["page"]) ? intval($_GET["page"]) : 1;
            $pageSize = isset($_GET["pageSize"]) ? intval($_GET["pageSize"]) : 10;
            $offset = ($page - 1) * $pageSize;
            $cacheKey = $this->generateCacheKey($query, $types, $page, $pageSize);
            $results =
                $this->getCachedResults($cacheKey) ??
                $this->fetchSearchResults($query, $types, $pageSize, $offset);
            if (empty($results) || empty($results["data"])) {
                $this->sendResponse(["results" => [], "total" => 0, "page" => $page, "pageSize" => $pageSize], 200);
                return;
            }
            // NOTE: results are re-cached even on cache hits, which refreshes
            // the TTL for frequently requested queries.
            $this->cacheResults($cacheKey, $results);
            $this->sendResponse(
                [
                    "results" => $results["data"],
                    "total" => $results["total"],
                    "page" => $page,
                    "pageSize" => $pageSize,
                ],
                200
            );
        } catch (\Exception $e) {
            error_log("Search API Error: " . $e->getMessage());
            $this->sendErrorResponse("Invalid request. Please check your query and try again.", 400);
        }
    }

    /**
     * Validates the q parameter: non-empty, <= 255 chars, restricted to a
     * conservative character whitelist; collapses repeated whitespace.
     *
     * @throws \Exception on any validation failure.
     */
    private function validateAndSanitizeQuery(?string $query): string
    {
        if (empty($query) || !is_string($query)) throw new \Exception("Invalid 'q' parameter. Must be a non-empty string.");
        $query = trim($query);
        if (strlen($query) > 255) throw new \Exception(
            "Invalid 'q' parameter. Exceeds maximum length of 255 characters."
        );
        if (!preg_match('/^[a-zA-Z0-9\s\-_\'"]+$/', $query)) throw new \Exception(
            "Invalid 'q' parameter. Contains unsupported characters."
        );
        $query = preg_replace("/\s+/", " ", $query);
        return $query;
    }

    /**
     * Parses the comma-separated type filter into a lowercase list, or null
     * when no filter was given.
     *
     * @throws \Exception when any requested type is not in the allow-list.
     */
    private function validateAndSanitizeTypes(string $rawTypes): ?array
    {
        $allowedTypes = ["post", "artist", "genre", "book", "movie", "show"];
        if (empty($rawTypes)) return null;
        $types = array_map(
            fn($type) => strtolower(
                trim(htmlspecialchars($type, ENT_QUOTES, "UTF-8"))
            ),
            explode(",", $rawTypes)
        );
        $invalidTypes = array_diff($types, $allowedTypes);
        if (!empty($invalidTypes)) throw new \Exception(
            "Invalid 'type' parameter. Unsupported types: " .
            implode(", ", $invalidTypes)
        );
        return $types;
    }

    /**
     * Calls the search RPC and splits its response into rows plus a total:
     * every row carries a total_count column, which is read from the first
     * row and stripped from the payload.
     *
     * @return array{data: array, total: int}
     */
    private function fetchSearchResults(
        string $query,
        ?array $types,
        int $pageSize,
        int $offset
    ): array {
        // %7B/%7D are pre-encoded braces for a PostgREST array literal.
        $typesParam =
            $types && count($types) > 0 ? "%7B" . implode(",", $types) . "%7D" : "";
        $endpoint = "rpc/search_optimized_index";
        $queryString =
            "search_query=" .
            urlencode($query) .
            "&page_size={$pageSize}&page_offset={$offset}" .
            ($typesParam ? "&types={$typesParam}" : "");
        $data = $this->makeRequest("GET", "{$endpoint}?{$queryString}");
        $total = count($data) > 0 ? $data[0]["total_count"] : 0;
        $results = array_map(function ($item) {
            unset($item["total_count"]);
            return $item;
        }, $data);
        return ["data" => $results, "total" => $total];
    }

    /** Builds a stable cache key from the query hash, type filter and paging. */
    private function generateCacheKey(
        string $query,
        ?array $types,
        int $page,
        int $pageSize
    ): string {
        $typesKey = $types ? implode(",", $types) : "all";
        return sprintf(
            "search:%s:types:%s:page:%d:pageSize:%d",
            md5($query),
            $typesKey,
            $page,
            $pageSize
        );
    }

    /** Reads a cached result set from Redis or the array cache; null on miss. */
    private function getCachedResults(string $cacheKey): ?array
    {
        if ($this->cache instanceof \Redis) {
            $cachedData = $this->cache->get($cacheKey);
            return $cachedData ? json_decode($cachedData, true) : null;
        } elseif (is_array($this->cache)) {
            return $this->cache[$cacheKey] ?? null;
        }
        return null;
    }

    /** Stores a result set; TTL only applies to the Redis backend. */
    private function cacheResults(string $cacheKey, array $results): void
    {
        if ($this->cache instanceof \Redis) {
            $this->cache->set($cacheKey, json_encode($results));
            $this->cache->expire($cacheKey, $this->cacheTTL);
        } elseif (is_array($this->cache)) {
            $this->cache[$cacheKey] = $results;
        }
    }
}
// Bootstrap: serve the search request immediately when this script runs.
$handler = new SearchHandler();
$handler->handleRequest();

205
api/seasons-import.php Normal file
View file

@ -0,0 +1,205 @@
<?php
require __DIR__ . "/Classes/ApiHandler.php";
use App\Classes\ApiHandler;
use GuzzleHttp\Client;
/**
 * Imports upcoming/aired episode schedules for ongoing TV shows.
 *
 * For every show flagged ongoing in PostgREST, pulls season and episode data
 * from TMDB and inserts any not-yet-scheduled, not-yet-watched episodes into
 * scheduled_episodes. Shows whose TMDB status is no longer active get their
 * ongoing flag cleared.
 */
class SeasonImportHandler extends ApiHandler
{
    protected string $postgrestUrl;     // PostgREST base URL.
    protected string $postgrestApiKey;  // PostgREST auth token.
    private string $tmdbApiKey;         // TMDB v3 API key.
    private string $seasonsImportToken; // Shared secret for this endpoint.

    public function __construct()
    {
        parent::__construct();
        $this->ensureCliAccess();
        $this->loadEnvironment();
        $this->authenticateRequest();
    }

    /** Reads configuration from the environment (getenv first, then $_ENV). */
    private function loadEnvironment(): void
    {
        $this->postgrestUrl = getenv("POSTGREST_URL") ?: $_ENV["POSTGREST_URL"];
        $this->postgrestApiKey = getenv("POSTGREST_API_KEY") ?: $_ENV["POSTGREST_API_KEY"];
        $this->tmdbApiKey = getenv("TMDB_API_KEY") ?: $_ENV["TMDB_API_KEY"];
        $this->seasonsImportToken = getenv("SEASONS_IMPORT_TOKEN") ?: $_ENV["SEASONS_IMPORT_TOKEN"];
    }

    /**
     * Enforces POST plus a valid Bearer token, exiting with the appropriate
     * status code otherwise. hash_equals() makes the secret comparison
     * timing-safe.
     */
    private function authenticateRequest(): void
    {
        if ($_SERVER["REQUEST_METHOD"] !== "POST") {
            http_response_code(405);
            echo json_encode(["error" => "Method Not Allowed"]);
            exit();
        }
        $authHeader = $_SERVER["HTTP_AUTHORIZATION"] ?? "";
        if (!preg_match('/Bearer\s+(.+)/', $authHeader, $matches)) {
            http_response_code(401);
            echo json_encode(["error" => "Unauthorized"]);
            exit();
        }
        $providedToken = trim($matches[1]);
        if (!hash_equals($this->seasonsImportToken, $providedToken)) {
            http_response_code(403);
            echo json_encode(["error" => "Forbidden"]);
            exit();
        }
    }

    /** Entry point: refreshes the episode schedule for every ongoing show. */
    public function importSeasons(): void
    {
        $ongoingShows = $this->fetchOngoingShows();
        if (empty($ongoingShows)) {
            http_response_code(200);
            echo json_encode(["message" => "No ongoing shows to update"]);
            return;
        }
        foreach ($ongoingShows as $show) {
            $this->processShowSeasons($show);
        }
        http_response_code(200);
        echo json_encode(["message" => "Season import completed"]);
    }

    /** @return array Rows of shows currently flagged ongoing. */
    private function fetchOngoingShows(): array
    {
        return $this->fetchFromPostgREST("optimized_shows", "ongoing=eq.true", "GET");
    }

    /**
     * Refreshes the schedule for one show, or clears its ongoing flag when
     * TMDB returns no seasons and reports an inactive status.
     * disableOngoingStatus() is provided elsewhere (not visible in this file).
     */
    private function processShowSeasons(array $show): void
    {
        $tmdbId = $show["tmdb_id"] ?? null;
        $showId = $show["id"] ?? null;
        if (!$tmdbId || !$showId) return;
        $tmdbShowData = $this->fetchShowDetails($tmdbId);
        $seasons = $tmdbShowData["seasons"] ?? [];
        $status = $tmdbShowData["status"] ?? "Unknown";
        if (empty($seasons) && !$this->shouldKeepOngoing($status)) {
            $this->disableOngoingStatus($showId);
            return;
        }
        foreach ($seasons as $season) {
            $this->processSeasonEpisodes($showId, $tmdbId, $season);
        }
    }

    /** TMDB statuses that still count as an ongoing show. */
    private function shouldKeepOngoing(string $status): bool
    {
        $validStatuses = ["Returning Series", "In Production"];
        return in_array($status, $validStatuses, true);
    }

    /**
     * Fetches show details (including seasons) from TMDB.
     * Best-effort: logs and returns [] on failure (previously the exception
     * was silently swallowed, unlike the sibling handlers).
     */
    private function fetchShowDetails(string $tmdbId): array
    {
        $client = new Client();
        $url = "https://api.themoviedb.org/3/tv/{$tmdbId}?api_key={$this->tmdbApiKey}&append_to_response=seasons";
        try {
            $response = $client->get($url, ["headers" => ["Accept" => "application/json"]]);
            return json_decode($response->getBody(), true) ?? [];
        } catch (\Exception $e) {
            error_log("Error fetching TMDB show details for {$tmdbId}: " . $e->getMessage());
            return [];
        }
    }

    /**
     * Returns the viewer's last-watched position for a show as
     * ["season_number" => int, "episode_number" => int], or [] when the show
     * has never been watched.
     */
    private function fetchWatchedEpisodes(int $showId): array
    {
        $watchedEpisodes = $this->fetchFromPostgREST(
            "optimized_last_watched_episodes",
            "show_id=eq.{$showId}&order=last_watched_at.desc&limit=1",
            "GET"
        );
        if (empty($watchedEpisodes)) return [];
        $lastWatched = $watchedEpisodes[0] ?? null;
        if ($lastWatched) return [
            "season_number" => (int) $lastWatched["season_number"],
            "episode_number" => (int) $lastWatched["episode_number"]
        ];
        return [];
    }

    /**
     * Schedules any unseen, unscheduled episodes for one season. Skips season
     * 0 (specials) and everything at or before the last-watched position.
     */
    private function processSeasonEpisodes(int $showId, string $tmdbId, array $season): void
    {
        $seasonNumber = $season["season_number"] ?? null;
        if ($seasonNumber === null || $seasonNumber == 0) return;
        $episodes = $this->fetchSeasonEpisodes($tmdbId, $seasonNumber);
        if (empty($episodes)) return;
        $watchedEpisodes = $this->fetchWatchedEpisodes($showId);
        $lastWatchedSeason = $watchedEpisodes["season_number"] ?? null;
        $lastWatchedEpisode = $watchedEpisodes["episode_number"] ?? null;
        $scheduledEpisodes = $this->fetchFromPostgREST(
            "optimized_scheduled_episodes",
            "show_id=eq.{$showId}&season_number=eq.{$seasonNumber}",
            "GET"
        );
        $scheduledEpisodeNumbers = array_column($scheduledEpisodes, "episode_number");
        foreach ($episodes as $episode) {
            $episodeNumber = $episode["episode_number"] ?? null;
            if ($episodeNumber === null) continue;
            // Loose in_array on purpose: DB values may come back as strings.
            if (in_array($episodeNumber, $scheduledEpisodeNumbers)) continue;
            // The whole season has already been watched — nothing to schedule.
            if ($lastWatchedSeason !== null && $seasonNumber < $lastWatchedSeason) return;
            // Episode already watched within the current season.
            if ($seasonNumber == $lastWatchedSeason && $episodeNumber <= $lastWatchedEpisode) continue;
            $this->addEpisodeToSchedule($showId, $seasonNumber, $episode);
        }
    }

    /**
     * Fetches one season's episode list from TMDB.
     * Best-effort: logs and returns [] on failure (previously silent).
     */
    private function fetchSeasonEpisodes(string $tmdbId, int $seasonNumber): array
    {
        $client = new Client();
        $url = "https://api.themoviedb.org/3/tv/{$tmdbId}/season/{$seasonNumber}?api_key={$this->tmdbApiKey}";
        try {
            $response = $client->get($url, ["headers" => ["Accept" => "application/json"]]);
            return json_decode($response->getBody(), true)["episodes"] ?? [];
        } catch (\Exception $e) {
            error_log("Error fetching TMDB season {$seasonNumber} for {$tmdbId}: " . $e->getMessage());
            return [];
        }
    }

    /**
     * Inserts one episode into scheduled_episodes, marked "aired" when its
     * air date is strictly before today (lexicographic compare is safe for
     * Y-m-d strings). Episodes without an air date are skipped.
     */
    private function addEpisodeToSchedule(int $showId, int $seasonNumber, array $episode): void
    {
        $airDate = $episode["air_date"] ?? null;
        if (!$airDate) return;
        $currentDate = date("Y-m-d");
        // $airDate is guaranteed truthy here, so only the date comparison matters.
        $status = ($airDate < $currentDate) ? "aired" : "upcoming";
        $payload = [
            "show_id" => $showId,
            "season_number" => $seasonNumber,
            "episode_number" => $episode["episode_number"],
            "air_date" => $airDate,
            "status" => $status,
        ];
        $this->fetchFromPostgREST("scheduled_episodes", "", "POST", $payload);
    }
}
// Bootstrap: run the season import immediately when this script runs.
$handler = new SeasonImportHandler();
$handler->importSeasons();

176
api/watching-import.php Normal file
View file

@ -0,0 +1,176 @@
<?php
require __DIR__ . "/Classes/ApiHandler.php";
use App\Classes\ApiHandler;
use GuzzleHttp\Client;
/**
 * Imports a movie or TV show from TMDB into the local database (movies/shows
 * tables), creating and linking genre tags as needed.
 */
class WatchingImportHandler extends ApiHandler
{
    protected string $postgrestUrl;    // PostgREST base URL.
    protected string $postgrestApiKey; // PostgREST auth token.
    private string $tmdbApiKey;        // TMDB v3 API key.
    private string $tmdbImportToken;   // Shared secret for this endpoint.

    public function __construct()
    {
        parent::__construct();
        $this->ensureCliAccess();
        $this->loadEnvironment();
    }

    /** Reads configuration from $_ENV with a getenv() fallback. */
    private function loadEnvironment(): void
    {
        $this->postgrestUrl = $_ENV["POSTGREST_URL"] ?? getenv("POSTGREST_URL");
        $this->postgrestApiKey =
            $_ENV["POSTGREST_API_KEY"] ?? getenv("POSTGREST_API_KEY");
        $this->tmdbApiKey = $_ENV["TMDB_API_KEY"] ?? getenv("TMDB_API_KEY");
        $this->tmdbImportToken =
            $_ENV["WATCHING_IMPORT_TOKEN"] ?? getenv("WATCHING_IMPORT_TOKEN");
    }

    /**
     * Validates the JSON body ({token, tmdb_id, media_type}) and runs the
     * import. hash_equals() makes the token check timing-safe.
     * NOTE(review): control flow assumes sendErrorResponse() terminates the
     * request — confirm on the ApiHandler base class.
     */
    public function handleRequest(): void
    {
        $input = json_decode(file_get_contents("php://input"), true);
        if (!$input) $this->sendErrorResponse("Invalid or missing JSON body", 400);
        $providedToken = $input["token"] ?? null;
        if (!$providedToken || !hash_equals($this->tmdbImportToken, (string) $providedToken)) $this->sendErrorResponse("Unauthorized access", 401);
        $tmdbId = $input["tmdb_id"] ?? null;
        $mediaType = $input["media_type"] ?? null;
        if (!$tmdbId || !$mediaType) $this->sendErrorResponse("tmdb_id and media_type are required", 400);
        try {
            $mediaData = $this->fetchTMDBData($tmdbId, $mediaType);
            $this->processMedia($mediaData, $mediaType);
            $this->sendResponse("Media imported successfully", 200);
        } catch (Exception $e) {
            $this->sendErrorResponse("Error: " . $e->getMessage(), 500);
        }
    }

    /**
     * Fetches the movie/show detail payload from TMDB.
     *
     * @throws Exception when TMDB returns an empty payload.
     */
    private function fetchTMDBData(string $tmdbId, string $mediaType): array
    {
        $client = new Client();
        $url = "https://api.themoviedb.org/3/{$mediaType}/{$tmdbId}";
        $response = $client->get($url, [
            "query" => ["api_key" => $this->tmdbApiKey],
            "headers" => ["Accept" => "application/json"],
        ]);
        $data = json_decode($response->getBody(), true);
        if (empty($data)) throw new Exception("No data found for TMDB ID: {$tmdbId}");
        return $data;
    }

    /**
     * Upserts the media row (falling back to a lookup by tmdb_id when the
     * POST response carries no id, e.g. the row already existed), then links
     * its genre tags.
     */
    private function processMedia(array $mediaData, string $mediaType): void
    {
        $id = $mediaData["id"];
        $title = $mediaType === "movie" ? $mediaData["title"] : $mediaData["name"];
        // Movies expose release_date; shows expose first_air_date.
        $year =
            $mediaData["release_date"] ?? ($mediaData["first_air_date"] ?? null);
        $year = $year ? substr($year, 0, 4) : null;
        $description = $mediaData["overview"] ?? "";
        $tags = array_map(
            fn($genre) => strtolower(trim($genre["name"])),
            $mediaData["genres"]
        );
        $slug =
            $mediaType === "movie"
                ? "/watching/movies/{$id}"
                : "/watching/shows/{$id}";
        $payload = [
            "title" => $title,
            "year" => $year,
            "description" => $description,
            "tmdb_id" => $id,
            "slug" => $slug,
        ];
        $response = $this->fetchFromPostgREST(
            $mediaType === "movie" ? "movies" : "shows",
            "",
            "POST",
            $payload
        );
        if (empty($response["id"])) {
            $queryResponse = $this->fetchFromPostgREST(
                $mediaType === "movie" ? "movies" : "shows",
                "tmdb_id=eq.{$id}",
                "GET"
            );
            $response = $queryResponse[0] ?? [];
        }
        if (!empty($response["id"])) {
            $mediaId = $response["id"];
            $existingTagMap = $this->getTagIds($tags);
            $updatedTagMap = $this->insertMissingTags($tags, $existingTagMap);
            $this->associateTagsWithMedia(
                $mediaType,
                $mediaId,
                array_values($updatedTagMap)
            );
        }
    }

    /**
     * Resolves existing tag ids by case-insensitive name match.
     *
     * @return array<string, mixed> lowercase tag name => tag id
     */
    private function getTagIds(array $tags): array
    {
        $existingTagMap = [];
        foreach ($tags as $tag) {
            $query = "name=ilike." . urlencode($tag);
            $existingTags = $this->fetchFromPostgREST("tags", $query, "GET");
            if (!empty($existingTags[0]["id"])) $existingTagMap[strtolower($tag)] = $existingTags[0]["id"];
        }
        return $existingTagMap;
    }

    /**
     * Inserts tags missing from the map and returns the updated name => id
     * map; on an insert failure (e.g. unique violation) it falls back to
     * re-querying the tag.
     *
     * Fix: the fallback lookup now URL-encodes the tag name, matching
     * getTagIds() — a tag containing spaces or reserved characters previously
     * produced a malformed PostgREST query string.
     */
    private function insertMissingTags(array $tags, array $existingTagMap): array
    {
        $newTags = array_diff($tags, array_keys($existingTagMap));
        foreach ($newTags as $newTag) {
            try {
                $response = $this->fetchFromPostgREST("tags", "", "POST", [
                    "name" => $newTag,
                ]);
                if (!empty($response["id"])) $existingTagMap[$newTag] = $response["id"];
            } catch (Exception $e) {
                $queryResponse = $this->fetchFromPostgREST(
                    "tags",
                    "name=eq." . urlencode($newTag),
                    "GET"
                );
                if (!empty($queryResponse[0]["id"])) $existingTagMap[$newTag] = $queryResponse[0]["id"];
            }
        }
        return $existingTagMap;
    }

    /**
     * Links each tag id to the media row via the movies_tags/shows_tags
     * junction table.
     */
    private function associateTagsWithMedia(
        string $mediaType,
        int $mediaId,
        array $tagIds
    ): void {
        $junctionTable = $mediaType === "movie" ? "movies_tags" : "shows_tags";
        $mediaColumn = $mediaType === "movie" ? "movies_id" : "shows_id";
        foreach ($tagIds as $tagId) {
            $this->fetchFromPostgREST($junctionTable, "", "POST", [
                $mediaColumn => $mediaId,
                "tags_id" => $tagId,
            ]);
        }
    }
}
// Bootstrap: handle the TMDB import request immediately when this script runs.
$handler = new WatchingImportHandler();
$handler->handleRequest();

28
composer.json Normal file
View file

@ -0,0 +1,28 @@
{
"name": "coryd/coryd-dev",
"description": "PHP APIs and server-rendered pages for my personal site.",
"type": "project",
"require": {
"php": "^8.1",
"guzzlehttp/guzzle": "^7.9",
"kaoken/markdown-it-php": "^14.1",
"sokil/php-isocodes": "^4.2",
"sokil/php-isocodes-db-only": "^4.0"
},
"scripts": {
"start": [
"@php -S localhost:8000 -t dist"
]
},
"autoload": {
"psr-4": {
"App\\": "src/"
}
},
"config": {
"optimize-autoloader": true,
"sort-packages": true
},
"minimum-stability": "stable",
"license": "MIT"
}

768
composer.lock generated Normal file
View file

@ -0,0 +1,768 @@
{
"_readme": [
"This file locks the dependencies of your project to a known state",
"Read more about it at https://getcomposer.org/doc/01-basic-usage.md#installing-dependencies",
"This file is @generated automatically"
],
"content-hash": "6f62ebb63bb51c04310e829e19beeab5",
"packages": [
{
"name": "guzzlehttp/guzzle",
"version": "7.9.3",
"source": {
"type": "git",
"url": "https://github.com/guzzle/guzzle.git",
"reference": "7b2f29fe81dc4da0ca0ea7d42107a0845946ea77"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/guzzle/guzzle/zipball/7b2f29fe81dc4da0ca0ea7d42107a0845946ea77",
"reference": "7b2f29fe81dc4da0ca0ea7d42107a0845946ea77",
"shasum": ""
},
"require": {
"ext-json": "*",
"guzzlehttp/promises": "^1.5.3 || ^2.0.3",
"guzzlehttp/psr7": "^2.7.0",
"php": "^7.2.5 || ^8.0",
"psr/http-client": "^1.0",
"symfony/deprecation-contracts": "^2.2 || ^3.0"
},
"provide": {
"psr/http-client-implementation": "1.0"
},
"require-dev": {
"bamarni/composer-bin-plugin": "^1.8.2",
"ext-curl": "*",
"guzzle/client-integration-tests": "3.0.2",
"php-http/message-factory": "^1.1",
"phpunit/phpunit": "^8.5.39 || ^9.6.20",
"psr/log": "^1.1 || ^2.0 || ^3.0"
},
"suggest": {
"ext-curl": "Required for CURL handler support",
"ext-intl": "Required for Internationalized Domain Name (IDN) support",
"psr/log": "Required for using the Log middleware"
},
"type": "library",
"extra": {
"bamarni-bin": {
"bin-links": true,
"forward-command": false
}
},
"autoload": {
"files": [
"src/functions_include.php"
],
"psr-4": {
"GuzzleHttp\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Graham Campbell",
"email": "hello@gjcampbell.co.uk",
"homepage": "https://github.com/GrahamCampbell"
},
{
"name": "Michael Dowling",
"email": "mtdowling@gmail.com",
"homepage": "https://github.com/mtdowling"
},
{
"name": "Jeremy Lindblom",
"email": "jeremeamia@gmail.com",
"homepage": "https://github.com/jeremeamia"
},
{
"name": "George Mponos",
"email": "gmponos@gmail.com",
"homepage": "https://github.com/gmponos"
},
{
"name": "Tobias Nyholm",
"email": "tobias.nyholm@gmail.com",
"homepage": "https://github.com/Nyholm"
},
{
"name": "Márk Sági-Kazár",
"email": "mark.sagikazar@gmail.com",
"homepage": "https://github.com/sagikazarmark"
},
{
"name": "Tobias Schultze",
"email": "webmaster@tubo-world.de",
"homepage": "https://github.com/Tobion"
}
],
"description": "Guzzle is a PHP HTTP client library",
"keywords": [
"client",
"curl",
"framework",
"http",
"http client",
"psr-18",
"psr-7",
"rest",
"web service"
],
"support": {
"issues": "https://github.com/guzzle/guzzle/issues",
"source": "https://github.com/guzzle/guzzle/tree/7.9.3"
},
"funding": [
{
"url": "https://github.com/GrahamCampbell",
"type": "github"
},
{
"url": "https://github.com/Nyholm",
"type": "github"
},
{
"url": "https://tidelift.com/funding/github/packagist/guzzlehttp/guzzle",
"type": "tidelift"
}
],
"time": "2025-03-27T13:37:11+00:00"
},
{
"name": "guzzlehttp/promises",
"version": "2.2.0",
"source": {
"type": "git",
"url": "https://github.com/guzzle/promises.git",
"reference": "7c69f28996b0a6920945dd20b3857e499d9ca96c"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/guzzle/promises/zipball/7c69f28996b0a6920945dd20b3857e499d9ca96c",
"reference": "7c69f28996b0a6920945dd20b3857e499d9ca96c",
"shasum": ""
},
"require": {
"php": "^7.2.5 || ^8.0"
},
"require-dev": {
"bamarni/composer-bin-plugin": "^1.8.2",
"phpunit/phpunit": "^8.5.39 || ^9.6.20"
},
"type": "library",
"extra": {
"bamarni-bin": {
"bin-links": true,
"forward-command": false
}
},
"autoload": {
"psr-4": {
"GuzzleHttp\\Promise\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Graham Campbell",
"email": "hello@gjcampbell.co.uk",
"homepage": "https://github.com/GrahamCampbell"
},
{
"name": "Michael Dowling",
"email": "mtdowling@gmail.com",
"homepage": "https://github.com/mtdowling"
},
{
"name": "Tobias Nyholm",
"email": "tobias.nyholm@gmail.com",
"homepage": "https://github.com/Nyholm"
},
{
"name": "Tobias Schultze",
"email": "webmaster@tubo-world.de",
"homepage": "https://github.com/Tobion"
}
],
"description": "Guzzle promises library",
"keywords": [
"promise"
],
"support": {
"issues": "https://github.com/guzzle/promises/issues",
"source": "https://github.com/guzzle/promises/tree/2.2.0"
},
"funding": [
{
"url": "https://github.com/GrahamCampbell",
"type": "github"
},
{
"url": "https://github.com/Nyholm",
"type": "github"
},
{
"url": "https://tidelift.com/funding/github/packagist/guzzlehttp/promises",
"type": "tidelift"
}
],
"time": "2025-03-27T13:27:01+00:00"
},
{
"name": "guzzlehttp/psr7",
"version": "2.7.1",
"source": {
"type": "git",
"url": "https://github.com/guzzle/psr7.git",
"reference": "c2270caaabe631b3b44c85f99e5a04bbb8060d16"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/guzzle/psr7/zipball/c2270caaabe631b3b44c85f99e5a04bbb8060d16",
"reference": "c2270caaabe631b3b44c85f99e5a04bbb8060d16",
"shasum": ""
},
"require": {
"php": "^7.2.5 || ^8.0",
"psr/http-factory": "^1.0",
"psr/http-message": "^1.1 || ^2.0",
"ralouphie/getallheaders": "^3.0"
},
"provide": {
"psr/http-factory-implementation": "1.0",
"psr/http-message-implementation": "1.0"
},
"require-dev": {
"bamarni/composer-bin-plugin": "^1.8.2",
"http-interop/http-factory-tests": "0.9.0",
"phpunit/phpunit": "^8.5.39 || ^9.6.20"
},
"suggest": {
"laminas/laminas-httphandlerrunner": "Emit PSR-7 responses"
},
"type": "library",
"extra": {
"bamarni-bin": {
"bin-links": true,
"forward-command": false
}
},
"autoload": {
"psr-4": {
"GuzzleHttp\\Psr7\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Graham Campbell",
"email": "hello@gjcampbell.co.uk",
"homepage": "https://github.com/GrahamCampbell"
},
{
"name": "Michael Dowling",
"email": "mtdowling@gmail.com",
"homepage": "https://github.com/mtdowling"
},
{
"name": "George Mponos",
"email": "gmponos@gmail.com",
"homepage": "https://github.com/gmponos"
},
{
"name": "Tobias Nyholm",
"email": "tobias.nyholm@gmail.com",
"homepage": "https://github.com/Nyholm"
},
{
"name": "Márk Sági-Kazár",
"email": "mark.sagikazar@gmail.com",
"homepage": "https://github.com/sagikazarmark"
},
{
"name": "Tobias Schultze",
"email": "webmaster@tubo-world.de",
"homepage": "https://github.com/Tobion"
},
{
"name": "Márk Sági-Kazár",
"email": "mark.sagikazar@gmail.com",
"homepage": "https://sagikazarmark.hu"
}
],
"description": "PSR-7 message implementation that also provides common utility methods",
"keywords": [
"http",
"message",
"psr-7",
"request",
"response",
"stream",
"uri",
"url"
],
"support": {
"issues": "https://github.com/guzzle/psr7/issues",
"source": "https://github.com/guzzle/psr7/tree/2.7.1"
},
"funding": [
{
"url": "https://github.com/GrahamCampbell",
"type": "github"
},
{
"url": "https://github.com/Nyholm",
"type": "github"
},
{
"url": "https://tidelift.com/funding/github/packagist/guzzlehttp/psr7",
"type": "tidelift"
}
],
"time": "2025-03-27T12:30:47+00:00"
},
{
"name": "kaoken/markdown-it-php",
"version": "14.1.0.0",
"source": {
"type": "git",
"url": "https://github.com/kaoken/markdown-it-php.git",
"reference": "938f2b6cf71e490f9cd77dce58e79a91df13e33b"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/kaoken/markdown-it-php/zipball/938f2b6cf71e490f9cd77dce58e79a91df13e33b",
"reference": "938f2b6cf71e490f9cd77dce58e79a91df13e33b",
"shasum": ""
},
"require": {
"ext-json": "*",
"ext-mbstring": "*",
"php": ">=7.4.0"
},
"require-dev": {
"ext-pthreads": "*",
"symfony/yaml": "5.0.*"
},
"type": "library",
"autoload": {
"psr-4": {
"Kaoken\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "kaoken",
"homepage": "https://github.com/kaoken/markdown-it-php"
}
],
"description": "PHP version makdown-it",
"keywords": [
"markdown",
"markdown-it"
],
"support": {
"issues": "https://github.com/kaoken/markdown-it-php/issues",
"source": "https://github.com/kaoken/markdown-it-php/tree/14.1.0.0"
},
"time": "2024-03-23T05:33:58+00:00"
},
{
"name": "psr/http-client",
"version": "1.0.3",
"source": {
"type": "git",
"url": "https://github.com/php-fig/http-client.git",
"reference": "bb5906edc1c324c9a05aa0873d40117941e5fa90"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/php-fig/http-client/zipball/bb5906edc1c324c9a05aa0873d40117941e5fa90",
"reference": "bb5906edc1c324c9a05aa0873d40117941e5fa90",
"shasum": ""
},
"require": {
"php": "^7.0 || ^8.0",
"psr/http-message": "^1.0 || ^2.0"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.0.x-dev"
}
},
"autoload": {
"psr-4": {
"Psr\\Http\\Client\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "PHP-FIG",
"homepage": "https://www.php-fig.org/"
}
],
"description": "Common interface for HTTP clients",
"homepage": "https://github.com/php-fig/http-client",
"keywords": [
"http",
"http-client",
"psr",
"psr-18"
],
"support": {
"source": "https://github.com/php-fig/http-client"
},
"time": "2023-09-23T14:17:50+00:00"
},
{
"name": "psr/http-factory",
"version": "1.1.0",
"source": {
"type": "git",
"url": "https://github.com/php-fig/http-factory.git",
"reference": "2b4765fddfe3b508ac62f829e852b1501d3f6e8a"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/php-fig/http-factory/zipball/2b4765fddfe3b508ac62f829e852b1501d3f6e8a",
"reference": "2b4765fddfe3b508ac62f829e852b1501d3f6e8a",
"shasum": ""
},
"require": {
"php": ">=7.1",
"psr/http-message": "^1.0 || ^2.0"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.0.x-dev"
}
},
"autoload": {
"psr-4": {
"Psr\\Http\\Message\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "PHP-FIG",
"homepage": "https://www.php-fig.org/"
}
],
"description": "PSR-17: Common interfaces for PSR-7 HTTP message factories",
"keywords": [
"factory",
"http",
"message",
"psr",
"psr-17",
"psr-7",
"request",
"response"
],
"support": {
"source": "https://github.com/php-fig/http-factory"
},
"time": "2024-04-15T12:06:14+00:00"
},
{
"name": "psr/http-message",
"version": "2.0",
"source": {
"type": "git",
"url": "https://github.com/php-fig/http-message.git",
"reference": "402d35bcb92c70c026d1a6a9883f06b2ead23d71"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/php-fig/http-message/zipball/402d35bcb92c70c026d1a6a9883f06b2ead23d71",
"reference": "402d35bcb92c70c026d1a6a9883f06b2ead23d71",
"shasum": ""
},
"require": {
"php": "^7.2 || ^8.0"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "2.0.x-dev"
}
},
"autoload": {
"psr-4": {
"Psr\\Http\\Message\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "PHP-FIG",
"homepage": "https://www.php-fig.org/"
}
],
"description": "Common interface for HTTP messages",
"homepage": "https://github.com/php-fig/http-message",
"keywords": [
"http",
"http-message",
"psr",
"psr-7",
"request",
"response"
],
"support": {
"source": "https://github.com/php-fig/http-message/tree/2.0"
},
"time": "2023-04-04T09:54:51+00:00"
},
{
"name": "ralouphie/getallheaders",
"version": "3.0.3",
"source": {
"type": "git",
"url": "https://github.com/ralouphie/getallheaders.git",
"reference": "120b605dfeb996808c31b6477290a714d356e822"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/ralouphie/getallheaders/zipball/120b605dfeb996808c31b6477290a714d356e822",
"reference": "120b605dfeb996808c31b6477290a714d356e822",
"shasum": ""
},
"require": {
"php": ">=5.6"
},
"require-dev": {
"php-coveralls/php-coveralls": "^2.1",
"phpunit/phpunit": "^5 || ^6.5"
},
"type": "library",
"autoload": {
"files": [
"src/getallheaders.php"
]
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Ralph Khattar",
"email": "ralph.khattar@gmail.com"
}
],
"description": "A polyfill for getallheaders.",
"support": {
"issues": "https://github.com/ralouphie/getallheaders/issues",
"source": "https://github.com/ralouphie/getallheaders/tree/develop"
},
"time": "2019-03-08T08:55:37+00:00"
},
{
"name": "sokil/php-isocodes",
"version": "4.2.1",
"source": {
"type": "git",
"url": "https://github.com/sokil/php-isocodes.git",
"reference": "6f2b7fb168840983c74804e7f5cb59cfc427bbbd"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/sokil/php-isocodes/zipball/6f2b7fb168840983c74804e7f5cb59cfc427bbbd",
"reference": "6f2b7fb168840983c74804e7f5cb59cfc427bbbd",
"shasum": ""
},
"require": {
"ext-json": "*",
"php": ">=7.1"
},
"require-dev": {
"ext-gettext": "*",
"infection/infection": ">=0.11.5",
"php-coveralls/php-coveralls": "^2.1",
"phpmd/phpmd": "@stable",
"phpunit/phpunit": ">=7.5.20",
"sokil/php-isocodes-db-i18n": "^4.0.0",
"squizlabs/php_codesniffer": "^3.4",
"symfony/translation": "^4.4.17|^5.2",
"vimeo/psalm": "^4.3"
},
"suggest": {
"ext-gettext": "Required for gettext translation driver",
"phpbench/phpbench": "Required to run benchmarks",
"sokil/php-isocodes-db-i18n": "If frequent database updates is not necessary, and database with localization is required.",
"sokil/php-isocodes-db-only": "If frequent database updates is not necessary, and only database without localization is required.",
"symfony/translation": "Translation driver by Symfont project"
},
"type": "library",
"autoload": {
"psr-4": {
"Sokil\\IsoCodes\\": [
"src/"
]
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Dmytro Sokil",
"email": "dmytro.sokil@gmail.com"
}
],
"description": "ISO country, subdivision, language, currency and script definitions and their translations. Based on pythons pycountry and Debian's iso-codes.",
"support": {
"issues": "https://github.com/sokil/php-isocodes/issues",
"source": "https://github.com/sokil/php-isocodes/tree/4.2.1"
},
"time": "2024-12-11T09:35:28+00:00"
},
{
"name": "sokil/php-isocodes-db-only",
"version": "4.0.7",
"source": {
"type": "git",
"url": "https://github.com/sokil/php-isocodes-db-only.git",
"reference": "4b8978dea994de6b03fe892108248a914e42b3a5"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/sokil/php-isocodes-db-only/zipball/4b8978dea994de6b03fe892108248a914e42b3a5",
"reference": "4b8978dea994de6b03fe892108248a914e42b3a5",
"shasum": ""
},
"require": {
"ext-json": "*",
"php": ">=7.1"
},
"require-dev": {
"phpunit/phpunit": "^9.5",
"sokil/php-isocodes": "^4.1.1"
},
"type": "library",
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Dmytro Sokil",
"email": "dmytro.sokil@gmail.com"
}
],
"description": "Database for ISO country, subdivision, language, currency and script definitions and their translations. Based on pythons pycountry and Debian's iso-codes.",
"support": {
"issues": "https://github.com/sokil/php-isocodes-db-only/issues",
"source": "https://github.com/sokil/php-isocodes-db-only/tree/4.0.7"
},
"time": "2024-02-02T08:24:43+00:00"
},
{
"name": "symfony/deprecation-contracts",
"version": "v3.5.1",
"source": {
"type": "git",
"url": "https://github.com/symfony/deprecation-contracts.git",
"reference": "74c71c939a79f7d5bf3c1ce9f5ea37ba0114c6f6"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/symfony/deprecation-contracts/zipball/74c71c939a79f7d5bf3c1ce9f5ea37ba0114c6f6",
"reference": "74c71c939a79f7d5bf3c1ce9f5ea37ba0114c6f6",
"shasum": ""
},
"require": {
"php": ">=8.1"
},
"type": "library",
"extra": {
"thanks": {
"url": "https://github.com/symfony/contracts",
"name": "symfony/contracts"
},
"branch-alias": {
"dev-main": "3.5-dev"
}
},
"autoload": {
"files": [
"function.php"
]
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Nicolas Grekas",
"email": "p@tchwork.com"
},
{
"name": "Symfony Community",
"homepage": "https://symfony.com/contributors"
}
],
"description": "A generic function and convention to trigger deprecation notices",
"homepage": "https://symfony.com",
"support": {
"source": "https://github.com/symfony/deprecation-contracts/tree/v3.5.1"
},
"funding": [
{
"url": "https://symfony.com/sponsor",
"type": "custom"
},
{
"url": "https://github.com/fabpot",
"type": "github"
},
{
"url": "https://tidelift.com/funding/github/packagist/symfony/symfony",
"type": "tidelift"
}
],
"time": "2024-09-25T14:20:29+00:00"
}
],
"packages-dev": [],
"aliases": [],
"minimum-stability": "stable",
"stability-flags": {},
"prefer-stable": false,
"prefer-lowest": false,
"platform": {
"php": "^8.1"
},
"platform-dev": {},
"plugin-api-version": "2.6.0"
}

View file

@ -0,0 +1,41 @@
import ics from "ics";
/**
 * Builds an iCalendar (ICS) document of album releases from the global
 * `albumReleases.all` data exposed on the first collection item.
 * Returns the ICS text, or "" when there are no releases or event
 * generation fails.
 */
export const albumReleasesCalendar = (collection) => {
  const collectionData = collection.getAll()[0];
  const { data } = collectionData;
  const {
    albumReleases: { all },
    globals: { url },
  } = data;

  if (!all || all.length === 0) return "";

  const events = all
    .map((album) => {
      const date = new Date(album.release_date);
      if (isNaN(date.getTime())) return null; // skip unparsable release dates

      const albumUrl = album.url?.includes("http") ? album.url : `${url}${album.url}`;
      // BUG FIX: this previously read `album.artust.url` (typo), which threw a
      // TypeError for any artist whose URL was already absolute.
      const artistUrl = album.artist.url?.includes("http")
        ? album.artist.url
        : `${url}${album.artist.url}`;

      return {
        start: [date.getFullYear(), date.getMonth() + 1, date.getDate()],
        startInputType: "local",
        startOutputType: "local",
        title: `Release: ${album.artist.name} - ${album.title}`,
        description: `Check out this new album release: ${albumUrl}. Read more about ${album.artist.name} at ${artistUrl}`,
        url: albumUrl,
        uid: `${album.release_timestamp}-${album.artist.name}-${album.title}`,
      };
    })
    .filter((event) => event !== null);

  const { error, value } = ics.createEvents(events, {
    calName: "Album releases calendar • coryd.dev",
  });

  if (error) {
    console.error("Error creating events: ", error);
    return "";
  }

  return value;
};

View file

@ -0,0 +1,31 @@
import fs from "fs";
import path from "path";
import { minify } from "terser";
/**
 * Eleventy `afterBuild` hook: recursively minifies every .js file under
 * dist/assets/scripts in place with terser.
 */
export const minifyJsComponents = async () => {
  const scriptsDir = "dist/assets/scripts";

  // Recursively minify every .js file under `dir`, rewriting each in place.
  const minifyJsFilesInDir = async (dir) => {
    const files = fs.readdirSync(dir);
    for (const fileName of files) {
      const filePath = path.join(dir, fileName);
      const stat = fs.statSync(filePath);
      if (stat.isDirectory()) {
        await minifyJsFilesInDir(filePath);
      } else if (fileName.endsWith(".js")) {
        const fileContent = fs.readFileSync(filePath, "utf8");
        // BUG FIX: terser v5's minify() rejects its promise on error instead of
        // resolving to `{ error }` (that was the v4 API), so the previous
        // `minified.error` check could never fire and a parse error would have
        // crashed the whole afterBuild hook. Log per-file and continue instead.
        try {
          const minified = await minify(fileContent);
          fs.writeFileSync(filePath, minified.code);
        } catch (error) {
          console.error(`Error minifying ${filePath}:`, error);
        }
      }
      // Non-.js files are skipped silently; the previous per-file
      // "No .js files to minify in <file>" log was misleading (it named a
      // file, not a directory) and noisy.
    }
  };

  await minifyJsFilesInDir(scriptsDir);
};

21
config/filters/dates.js Normal file
View file

@ -0,0 +1,21 @@
/** Date-formatting Liquid filters. */
const dateFilters = {
  /**
   * Formats a date string as an RFC 822-style date (weekday, day, month, year,
   * time, zone abbreviation) pinned to America/Los_Angeles — used for feeds.
   * Returns "" for unparsable input.
   */
  stringToRFC822Date: (dateString) => {
    const parsed = new Date(dateString);
    if (Number.isNaN(parsed.getTime())) return "";
    return new Intl.DateTimeFormat("en-US", {
      timeZone: "America/Los_Angeles",
      weekday: "short",
      day: "2-digit",
      month: "short",
      year: "numeric",
      hour: "2-digit",
      minute: "2-digit",
      second: "2-digit",
      timeZoneName: "short",
    }).format(parsed);
  },
};

export default dateFilters;

28
config/filters/feeds.js Normal file
View file

@ -0,0 +1,28 @@
import { JSDOM } from "jsdom";
/** Feed-related Liquid filters. */
export default {
  /**
   * Rewrites relative anchor hrefs in feed HTML to absolute URLs on `domain`.
   * In-page fragment links (`#...`) are removed entirely, since they are
   * meaningless outside the original page. Returns the input unchanged when
   * either argument is missing.
   */
  convertRelativeLinks: (htmlContent, domain) => {
    if (!htmlContent || !domain) return htmlContent;
    const { document } = new JSDOM(htmlContent).window;
    for (const anchor of document.querySelectorAll("a[href]")) {
      const href = anchor.getAttribute("href");
      if (href.startsWith("#")) {
        anchor.remove();
        continue;
      }
      const isAbsolute = href.startsWith("http://") || href.startsWith("https://");
      if (!isAbsolute) {
        anchor.setAttribute(
          "href",
          `${domain.replace(/\/$/, "")}/${href.replace(/^\/+/, "")}`
        );
      }
    }
    return document.body.innerHTML;
  },
  /** Prefixes a relative URL with `baseUrl`; absolute URLs pass through unchanged. */
  generatePermalink: (url, baseUrl) => {
    if (url?.includes("http") || !baseUrl) return url;
    return `${baseUrl}${url}`;
  },
};

24
config/filters/general.js Normal file
View file

@ -0,0 +1,24 @@
import truncateHtml from "truncate-html";
import { shuffleArray } from "../utilities/index.js";
/** General-purpose Liquid filters. */
export default {
  /**
   * Escapes bare ampersands as `&amp;` while leaving existing entities
   * (named, decimal, hex) intact. Returns the input for empty/missing values.
   */
  encodeAmp: (string) => {
    if (!string) return;
    const pattern = /&(?!(?:[a-zA-Z]+|#[0-9]+|#x[0-9a-fA-F]+);)/g;
    const replacement = "&amp;";
    return string.replace(pattern, replacement);
  },
  // Escapes double quotes for safe embedding in HTML attributes.
  // BUG FIX: previously threw a TypeError on null/undefined input; now guards
  // empty input the same way `encodeAmp` does (falsy in, same value out).
  replaceQuotes: (string) => {
    if (!string) return string;
    return string.replace(/"/g, "&quot;");
  },
  // Word-boundary HTML truncation with an ellipsis (default: 50 words).
  htmlTruncate: (content, limit = 50) =>
    truncateHtml(content, limit, {
      byWords: true,
      ellipsis: "...",
    }),
  shuffleArray,
  /**
   * Naive pluralizer: returns `string` unchanged when count is 1 (commas in
   * the count are ignored), otherwise appends "s" plus an optional trailing
   * string (e.g. a separator).
   */
  pluralize: (count, string, trailing) => {
    const countStr = String(count).replace(/,/g, "");
    if (parseInt(countStr, 10) === 1) return string;
    return `${string}s${trailing ? `${trailing}` : ''}`;
  },
  // JSON-stringify a value (quotes + escapes strings for inline script use).
  jsonEscape: (string) => JSON.stringify(string),
};

13
config/filters/index.js Normal file
View file

@ -0,0 +1,13 @@
import dates from "./dates.js";
import feeds from "./feeds.js"
import general from "./general.js";
import media from "./media.js";
import navigation from "./navigation.js";
// Flat map of every Liquid filter; eleventy.config.js registers each key as a
// filter. Later spreads win on name collisions
// (dates < feeds < general < media < navigation).
export default {
...dates,
...feeds,
...general,
...media,
...navigation,
};

43
config/filters/media.js Normal file
View file

@ -0,0 +1,43 @@
/** Media (books / music) Liquid filters. */
const mediaFilters = {
  // Books whose `status` matches exactly.
  filterBooksByStatus: (books, status) =>
    books.filter((book) => book.status === status),
  // Books explicitly flagged as favorites.
  findFavoriteBooks: (books) =>
    books.filter((book) => book.favorite === true),
  /**
   * Renders year links for the books archive, newest first, separated by " • ".
   * Note: sorts the provided array in place, matching the original behavior.
   */
  bookYearLinks: (years) => {
    const sorted = years.sort((a, b) => b.value - a.value);
    const lastIndex = sorted.length - 1;
    return sorted
      .map((year, index) => {
        const separator = index < lastIndex ? " • " : "";
        return `<a href="/books/years/${year.value}">${year.value}</a>${separator}`;
      })
      .join("");
  },
  /**
   * Renders up to `count` items of `type` ("genre" | "artist" | "book") as an
   * English list of links ("a, b and c"). Returns "" when data/type are
   * missing and null when the slice is empty.
   */
  mediaLinks: (data, type, count = 10) => {
    if (!data || !type) return "";
    const dataSlice = data.slice(0, count);
    if (dataSlice.length === 0) return null;

    const buildLink = (item) => {
      switch (type) {
        case "genre":
          return `<a href="${item.genre_url}">${item.genre_name}</a>`;
        case "artist":
          return `<a href="${item.url}">${item.name}</a>`;
        case "book":
          return `<a href="${item.url}">${item.title}</a>`;
        default:
          return "";
      }
    };

    if (dataSlice.length === 1) return buildLink(dataSlice[0]);

    const links = dataSlice.map(buildLink);
    const head = links.slice(0, -1).join(", ");
    const tail = links[links.length - 1];
    return `${head} and ${tail}`;
  },
};

export default mediaFilters;

View file

@ -0,0 +1,5 @@
/** Navigation-related Liquid filters. */
const isLinkActive = (category, page) => {
  // A nav link is "active" when the current page path mentions the category
  // and the page sits at most one segment deep (e.g. "/books/" is active for
  // "books", but "/books/years/2024/" is not).
  const depth = page.split("/").filter((segment) => segment !== "").length;
  return page.includes(category) && depth <= 1;
};

export default { isLinkActive };

View file

@ -0,0 +1,33 @@
import fs from "node:fs/promises";
import path from "node:path";
import postcss from "postcss";
import postcssImport from "postcss-import";
import postcssImportExtGlob from "postcss-import-ext-glob";
import autoprefixer from "autoprefixer";
import cssnano from "cssnano";
/**
 * Registers a css template format with Eleventy that compiles the root
 * stylesheet through postcss (glob imports → imports → autoprefixer →
 * cssnano) and writes the bundled result to dist/assets/css/index.css.
 */
export const cssConfig = (eleventyConfig) => {
  eleventyConfig.addTemplateFormats("css");
  eleventyConfig.addExtension("css", {
    outputFileExtension: "css",
    compile: async (inputContent, inputPath) => {
      // Only the root index.css is compiled; every other css file is pulled
      // in through its imports and produces no standalone output.
      if (!inputPath.endsWith("index.css")) return;
      return async () => {
        const outputPath = "dist/assets/css/index.css";
        const processed = await postcss([
          postcssImportExtGlob,
          postcssImport,
          autoprefixer,
          cssnano,
        ]).process(inputContent, { from: inputPath });
        await fs.mkdir(path.dirname(outputPath), { recursive: true });
        await fs.writeFile(outputPath, processed.css);
        return processed.css;
      };
    },
  });
};

4
config/plugins/index.js Normal file
View file

@ -0,0 +1,4 @@
import { cssConfig } from "./css-config.js";
import { markdownLib } from "./markdown.js";
// Aggregated plugin/config helpers consumed by eleventy.config.js.
export default { cssConfig, markdownLib };

View file

@ -0,0 +1,25 @@
import markdownIt from "markdown-it";
import markdownItAnchor from "markdown-it-anchor";
import markdownItFootnote from "markdown-it-footnote";
import markdownItLinkAttributes from "markdown-it-link-attributes";
import markdownItPrism from "markdown-it-prism";
// Shared markdown-it instance: used both as Eleventy's markdown library and by
// the `markdown` Liquid filter registered in eleventy.config.js.
export const markdownLib = markdownIt({ html: true, linkify: true })
// Heading anchors on h1/h2 only, rendered as Safari-Reader-friendly header links.
.use(markdownItAnchor, {
level: [1, 2],
permalink: markdownItAnchor.permalink.headerLink({
safariReaderFix: true,
}),
})
// Add rel="noopener" to absolute (http/https) links only.
.use(markdownItLinkAttributes, [
{
matcher(href) {
return href.match(/^https?:\/\//);
},
attrs: {
rel: "noopener",
},
},
])
.use(markdownItFootnote)
.use(markdownItPrism);

10
config/utilities/index.js Normal file
View file

@ -0,0 +1,10 @@
/**
 * Returns a uniformly shuffled copy of `array` (Fisher–Yates); the input
 * array is left untouched.
 */
export const shuffleArray = (array) => {
  const result = [...array];
  for (let i = result.length - 1; i > 0; i--) {
    const j = Math.floor(Math.random() * (i + 1));
    [result[i], result[j]] = [result[j], result[i]];
  }
  return result;
};

59
eleventy.config.js Normal file
View file

@ -0,0 +1,59 @@
import { createRequire } from "module";
import "dotenv/config";
import filters from "./config/filters/index.js";
import tablerIcons from "@cdransf/eleventy-plugin-tabler-icons";
import { minifyJsComponents } from "./config/events/minify-js.js";
import { albumReleasesCalendar } from "./config/collections/index.js";
import plugins from "./config/plugins/index.js";
const require = createRequire(import.meta.url);
const appVersion = require("./package.json").version;
// Eleventy configuration entry point.
export default async function (eleventyConfig) {
// Plugins: tabler icon shortcodes + the custom postcss pipeline.
eleventyConfig.addPlugin(tablerIcons);
eleventyConfig.addPlugin(plugins.cssConfig);
eleventyConfig.setQuietMode(true);
eleventyConfig.configureErrorReporting({ allowMissingExtensions: true });
// Make truthy JS values truthy in Liquid templates.
eleventyConfig.setLiquidOptions({ jsTruthy: true });
eleventyConfig.watchIgnores.add("queries/**");
// Directories copied to the output as-is.
eleventyConfig.addPassthroughCopy("src/assets");
eleventyConfig.addPassthroughCopy("api");
eleventyConfig.addPassthroughCopy("vendor");
eleventyConfig.addPassthroughCopy("server");
// Browser scripts vendored straight out of node_modules.
eleventyConfig.addPassthroughCopy({
"node_modules/minisearch/dist/umd/index.js":
"assets/scripts/components/minisearch.js",
"node_modules/youtube-video-element/dist/youtube-video-element.js":
"assets/scripts/components/youtube-video-element.js",
});
eleventyConfig.addCollection("albumReleasesCalendar", albumReleasesCalendar);
eleventyConfig.setLibrary("md", plugins.markdownLib);
// `markdown` filter: render a string through the shared markdown-it instance.
eleventyConfig.addLiquidFilter("markdown", (content) => {
if (!content) return;
return plugins.markdownLib.render(content);
});
// Register every filter exported from config/filters as a Liquid filter.
Object.keys(filters).forEach((filterName) => {
eleventyConfig.addLiquidFilter(filterName, filters[filterName]);
});
eleventyConfig.addShortcode("appVersion", () => appVersion);
// Minify emitted JS after each build.
eleventyConfig.on("afterBuild", minifyJsComponents);
return {
dir: {
input: "src",
includes: "includes",
layouts: "layouts",
data: "data",
output: "dist",
}
};
}

15
nixpacks.toml Normal file
View file

@ -0,0 +1,15 @@
[phases.setup]
aptPkgs = [
"curl",
"wget",
"zip",
"unzip",
"php-cli",
"php-mbstring",
"openssh-client",
"rsync",
"jq"
]
cmds = [
"curl -sS https://getcomposer.org/installer | php && mv composer.phar /usr/bin/composer && chmod +x /usr/bin/composer",
]

5216
package-lock.json generated Normal file

File diff suppressed because it is too large Load diff

57
package.json Normal file
View file

@ -0,0 +1,57 @@
{
"name": "coryd.dev",
"version": "1.0.0",
"description": "The source for my personal site. Built using 11ty (and other tools).",
"type": "module",
"engines": {
"node": "22.x"
},
"scripts": {
"start": "eleventy --serve",
"start:quick": "eleventy --serve --incremental --ignore-initial",
"build": "eleventy",
"debug": "DEBUG=Eleventy* npx @11ty/eleventy --serve",
"php": "export $(grep -v '^#' .env | xargs) && php -d error_reporting=E_ALL^E_DEPRECATED -S localhost:8000 -t dist",
"update:deps": "composer update && npm upgrade && ncu",
"setup": "sh ./scripts/setup.sh",
"clean": "rimraf dist .cache",
"clean:cache": "rimraf .cache",
"clean:dist": "rimraf dist"
},
"keywords": [
"11ty",
"Eleventy",
"Web components",
"Blog",
"Directus",
"PHP",
"API"
],
"author": "Cory Dransfeldt",
"license": "MIT",
"dependencies": {
"minisearch": "^7.1.2",
"youtube-video-element": "^1.5.1"
},
"devDependencies": {
"@11ty/eleventy": "v3.0.0",
"@11ty/eleventy-fetch": "5.0.2",
"@cdransf/eleventy-plugin-tabler-icons": "^2.11.0",
"autoprefixer": "^10.4.21",
"cssnano": "^7.0.6",
"dotenv": "16.4.7",
"ics": "^3.8.1",
"jsdom": "26.0.0",
"markdown-it": "^14.1.0",
"markdown-it-anchor": "^9.2.0",
"markdown-it-footnote": "^4.0.0",
"markdown-it-link-attributes": "4.0.1",
"markdown-it-prism": "^2.3.1",
"postcss": "^8.5.3",
"postcss-import": "^16.1.0",
"postcss-import-ext-glob": "^2.1.1",
"rimraf": "^6.0.1",
"terser": "^5.39.0",
"truncate-html": "^1.2.1"
}
}

View file

@ -0,0 +1,27 @@
-- Returns the prebuilt `feed` JSON objects for a named feed, newest first,
-- or NULL for an unknown feed key.
CREATE OR REPLACE FUNCTION get_feed_data(feed_key TEXT)
RETURNS JSON AS $$
DECLARE
  result JSON;
  source_view TEXT;
BEGIN
  -- Map the requested feed key to its backing view.
  source_view := CASE feed_key
    WHEN 'movies' THEN 'optimized_movies'
    WHEN 'books' THEN 'optimized_books'
    WHEN 'posts' THEN 'optimized_posts'
    WHEN 'links' THEN 'optimized_links'
    WHEN 'allActivity' THEN 'optimized_all_activity'
    WHEN 'syndication' THEN 'optimized_syndication'
    ELSE NULL
  END;

  IF source_view IS NULL THEN
    RETURN NULL;
  END IF;

  -- Aggregate the view's `feed` JSON column, newest first; %I safely quotes
  -- the identifier for the dynamic query.
  EXECUTE format(
    'SELECT json_agg(feed ORDER BY (feed->>''date'')::timestamp DESC) FROM %I WHERE feed IS NOT NULL',
    source_view
  ) INTO result;

  RETURN result;
END;
$$ LANGUAGE plpgsql;

View file

@ -0,0 +1,14 @@
-- Function body fragment (the CREATE FUNCTION wrapper declaring `countryField`
-- lives elsewhere): intended to normalize a delimited country string into a
-- single space-separated value.
-- NOTE(review): the loop splits `result` — which is still '' at that point —
-- rather than `countries`, so the '/', '&' and 'and' delimiters are never
-- actually applied; only the initial comma split takes effect. Confirm intent
-- before relying on multi-delimiter input.
DECLARE
delimiters TEXT[] := ARRAY[',', '/', '&', 'and'];
countries TEXT[];
result TEXT := '';
BEGIN
countries := string_to_array(countryField, ',');
FOR i IN 1..array_length(delimiters, 1) LOOP
countries := array_cat(countries, string_to_array(result, delimiters[i]));
END LOOP;
result := array_to_string(countries, ' ');
RETURN trim(result);
END

View file

@ -0,0 +1,42 @@
-- Paginated full-text search over the denormalized optimized_search_index
-- view; posts rank first, then newest content, then text-match rank.
CREATE OR REPLACE FUNCTION public.search_optimized_index(search_query text, page_size integer, page_offset integer, types text[])
RETURNS TABLE(
result_id integer,
url text,
title text,
description text,
tags text,
genre_name text,
genre_url text,
type text,
total_plays text,
rank real,
total_count bigint
)
AS $$
BEGIN
-- BUG FIX: the searchable text previously concatenated
-- title || description || tags with plain `||`, so a NULL description or NULL
-- tags (e.g. genre rows, which carry NULL tags) made the whole tsvector NULL
-- and silently excluded those rows from every search. Fields are now
-- COALESCEd, and a space is added before the tags so the last description
-- word no longer fuses with the first tag.
RETURN QUERY
SELECT
s.id::integer AS result_id,
s.url,
s.title,
s.description,
array_to_string(s.tags, ', ') AS tags,
s.genre_name,
s.genre_url,
s.type,
s.total_plays,
ts_rank_cd(to_tsvector('english', s.title || ' ' || COALESCE(s.description, '') || ' ' || COALESCE(array_to_string(s.tags, ' '), '')), plainto_tsquery('english', search_query)) AS rank,
COUNT(*) OVER() AS total_count
FROM
optimized_search_index s
WHERE(types IS NULL
OR s.type = ANY(types))
AND plainto_tsquery('english', search_query) @@ to_tsvector('english', s.title || ' ' || COALESCE(s.description, '') || ' ' || COALESCE(array_to_string(s.tags, ' '), ''))
ORDER BY
s.type = 'post' DESC,
s.content_date DESC NULLS LAST,
rank DESC
LIMIT page_size OFFSET page_offset;
END;
$$
LANGUAGE plpgsql;

View file

@ -0,0 +1,4 @@
-- Slugify helper ($1 = input text): strip disallowed characters, collapse
-- whitespace runs to hyphens, lowercase, and drop accents via unaccent().
-- NOTE(review): the doubled backslashes mean that, under
-- standard_conforming_strings, these patterns match a literal backslash plus
-- 'w'/'s' rather than the \w/\s character classes — confirm how this snippet
-- is escaped when installed (e.g. through a migration/admin tool) before
-- changing it.
SELECT lower(regexp_replace(
unaccent(regexp_replace($1, '[^\\w\\s-]', '', 'g')),
'\\s+', '-', 'g'
));

View file

@ -0,0 +1,5 @@
-- Function body fragment (parameters `old_album_key` / `new_album_key` are
-- declared in the CREATE FUNCTION wrapper elsewhere): repoints listens rows
-- from an old album key to a new one, e.g. after an album is re-keyed.
BEGIN
UPDATE listens
SET album_key = new_album_key
WHERE album_key = old_album_key;
END;

View file

@ -0,0 +1,26 @@
-- Trigger function: maintains the single-row reading streak counter, keyed to
-- the America/Los_Angeles calendar day (see trigger_update_days_read, which
-- fires on book progress updates).
CREATE OR REPLACE FUNCTION update_days_read()
RETURNS TRIGGER AS $$
DECLARE
pacific_today DATE;
last_read DATE;
BEGIN
-- Removed the previously declared-but-unused `pacific_now TIMESTAMPTZ`.
SELECT (NOW() AT TIME ZONE 'America/Los_Angeles')::DATE INTO pacific_today;
-- Treat a missing/NULL last_read_date as "read yesterday" so the first ever
-- update still increments the counter below.
SELECT COALESCE(last_read_date::DATE, pacific_today - INTERVAL '1 day') INTO last_read FROM reading_streak LIMIT 1;
-- A gap of more than one day breaks the streak.
IF last_read < pacific_today - INTERVAL '1 day' THEN
UPDATE reading_streak
SET days_read = 0, last_read_date = NOW() AT TIME ZONE 'America/Los_Angeles'
WHERE id = 1;
END IF;
-- First progress update of a new day extends the streak by one.
IF last_read IS DISTINCT FROM pacific_today THEN
UPDATE reading_streak
SET days_read = days_read + 1, last_read_date = NOW() AT TIME ZONE 'America/Los_Angeles'
WHERE id = 1;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View file

@ -0,0 +1,36 @@
-- Function body fragment (CREATE FUNCTION wrapper lives elsewhere): fully
-- recomputes the denormalized total_plays counters on artists, albums and
-- genres from the raw `listens` table.
BEGIN
-- Artists: count listens per artist name.
WITH artist_plays AS (
SELECT artist_name, COUNT(*)::integer as total_plays
FROM listens
GROUP BY artist_name
)
UPDATE artists
SET total_plays = COALESCE(ap.total_plays, 0)
FROM artist_plays ap
WHERE artists.name_string = ap.artist_name;
-- Albums: keyed by (album_key, artist_name), matching the per-listen triggers.
WITH album_plays AS (
SELECT album_key, artist_name, COUNT(*)::integer as total_plays
FROM listens
GROUP BY album_key, artist_name
)
UPDATE albums
SET total_plays = COALESCE(ap.total_plays, 0)
FROM album_plays ap
WHERE albums.key = ap.album_key
AND albums.artist_name = ap.artist_name;
-- Genres: joined through each listen's artist.
-- NOTE(review): `a.genres::text = g.id::text` suggests artists.genres stores a
-- single genre id (possibly in a non-integer column) — confirm the column type.
WITH genre_plays AS (
SELECT g.id, COUNT(*)::integer as total_plays
FROM listens l
JOIN artists a ON l.artist_name = a.name_string
JOIN genres g ON a.genres::text = g.id::text
GROUP BY g.id
)
UPDATE genres
SET total_plays = COALESCE(gp.total_plays, 0)
FROM genre_plays gp
WHERE genres.id = gp.id;
RAISE NOTICE 'All listen totals are up to date';
END;

View file

@ -0,0 +1,11 @@
CREATE OR REPLACE FUNCTION update_scheduled_episode_status()
RETURNS TRIGGER AS $$
-- Trigger function: sweeps the whole scheduled_episodes table, marking every
-- still-'upcoming' episode whose air date has passed as 'aired'.
-- NOTE(review): this variant ignores NEW/OLD entirely (a table-wide sweep);
-- a row-scoped variant of the same function also exists in this codebase.
BEGIN
UPDATE scheduled_episodes
SET status = 'aired'
WHERE air_date < CURRENT_DATE
AND status = 'upcoming';
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View file

@ -0,0 +1,12 @@
CREATE OR REPLACE FUNCTION update_scheduled_on_watch()
RETURNS TRIGGER AS $$
-- Trigger function (fired after inserts into `episodes`): marks the matching
-- scheduled episode as watched once a watch of that show/season/episode is
-- recorded.
BEGIN
UPDATE scheduled_episodes
SET status = 'watched'
WHERE show_id = NEW.show
AND season_number = NEW.season_number
AND episode_number = NEW.episode_number;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View file

@ -0,0 +1,6 @@
-- Nightly pg_cron job (midnight): flips scheduled episodes that have already
-- aired from 'upcoming' to 'aired'.
SELECT cron.schedule(
'0 0 * * *',
$$ UPDATE scheduled_episodes
SET status = 'aired'
WHERE air_date < CURRENT_DATE
AND status = 'upcoming' $$);

View file

@ -0,0 +1,16 @@
-- Top 10 most-listened albums for the current calendar year; listen counts
-- are formatted as comma-grouped strings (e.g. "1,234").
SELECT
l.artist_name,
l.album_name,
TO_CHAR(COUNT(l.id), 'FM999,999,999') AS total_listens
FROM
optimized_listens l
WHERE
EXTRACT(YEAR FROM TO_TIMESTAMP(l.listened_at)) = EXTRACT(YEAR FROM CURRENT_DATE)
AND l.artist_name IS NOT NULL
AND l.album_name IS NOT NULL
GROUP BY
l.artist_name,
l.album_name
ORDER BY
COUNT(l.id) DESC
LIMIT 10;

View file

@ -0,0 +1,13 @@
-- Top 10 most-listened artists for the current calendar year; listen counts
-- are formatted as comma-grouped strings.
SELECT
l.artist_name,
TO_CHAR(COUNT(l.id), 'FM999,999,999') AS total_listens
FROM
optimized_listens l
WHERE
EXTRACT(YEAR FROM TO_TIMESTAMP(l.listened_at)) = EXTRACT(YEAR FROM CURRENT_DATE)
AND l.artist_name IS NOT NULL
GROUP BY
l.artist_name
ORDER BY
COUNT(l.id) DESC
LIMIT 10;

View file

@ -0,0 +1,20 @@
-- Function body fragment (trigger body for deletes from `listens`; the CREATE
-- FUNCTION wrapper lives elsewhere): reverses the play-count increments
-- applied when the listen was inserted.
BEGIN
UPDATE artists
SET total_plays = total_plays - 1
WHERE name_string = OLD.artist_name;
-- CONSISTENCY FIX: the album decrement previously matched on
-- `albums.name = OLD.album_name`, while the matching increment trigger keys
-- on `albums.key = NEW.album_key` (as does the bulk recount routine). Album
-- titles are not the dedupe key, so the two triggers could touch different
-- rows; match on the key for symmetry.
UPDATE albums
SET total_plays = total_plays - 1
WHERE key = OLD.album_key
AND artist_name = OLD.artist_name;
-- NOTE(review): assumes `artists.genres` resolves to a single genre id per
-- artist; a multi-row subquery result would raise an error here.
UPDATE genres
SET total_plays = total_plays - 1
WHERE id = (
SELECT genres
FROM artists
WHERE name_string = OLD.artist_name
);
RETURN OLD;
END;

View file

@ -0,0 +1,4 @@
-- After each watched episode is recorded, sync the corresponding
-- scheduled_episodes row via update_scheduled_on_watch().
CREATE TRIGGER mark_scheduled_as_watched
AFTER INSERT ON episodes
FOR EACH ROW
EXECUTE FUNCTION update_scheduled_on_watch();

View file

@ -0,0 +1,5 @@
-- Bump the reading streak whenever a book's progress actually changes while
-- the book is being (or has just been) read.
CREATE TRIGGER trigger_update_days_read
AFTER UPDATE OF progress ON books
FOR EACH ROW
WHEN (OLD.progress IS DISTINCT FROM NEW.progress AND (NEW.read_status = 'started' OR NEW.read_status = 'finished'))
EXECUTE FUNCTION update_days_read();

View file

@ -0,0 +1,10 @@
CREATE OR REPLACE FUNCTION update_scheduled_episode_status()
RETURNS TRIGGER AS $$
-- Row-scoped variant: flips just the triggering row to 'aired' when its air
-- date has passed. Mutating NEW only persists from a BEFORE trigger — confirm
-- how this is attached.
BEGIN
IF NEW.air_date < CURRENT_DATE AND NEW.status = 'upcoming' THEN
NEW.status := 'aired';
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View file

@ -0,0 +1,20 @@
-- Function body fragment (trigger body for inserts into `listens`; the CREATE
-- FUNCTION wrapper lives elsewhere): bumps the denormalized play counters for
-- the listen's artist, album and genre.
BEGIN
UPDATE artists
SET total_plays = total_plays + 1
WHERE name_string = NEW.artist_name;
UPDATE albums
SET total_plays = total_plays + 1
WHERE key = NEW.album_key
AND artist_name = NEW.artist_name;
-- NOTE(review): assumes `artists.genres` resolves to a single genre id per
-- artist; a multi-row subquery result would raise an error here.
UPDATE genres
SET total_plays = total_plays + 1
WHERE id = (
SELECT genres
FROM artists
WHERE name_string = NEW.artist_name
);
RETURN NEW;
END;

View file

@ -0,0 +1,11 @@
-- Authors flagged for the public blogroll, sorted case- and accent-
-- insensitively by name (requires the unaccent extension).
CREATE OR REPLACE VIEW optimized_blogroll AS
SELECT
name,
url,
rss_feed,
json_feed,
newsletter,
mastodon
FROM authors
WHERE blogroll = true
ORDER BY LOWER(unaccent(name)) ASC;

View file

@ -0,0 +1,30 @@
-- Shared links joined with their author, newest first, with an aggregated tag
-- array and a prebuilt `feed` JSON object for the links feed.
CREATE OR REPLACE VIEW optimized_links AS
SELECT
l.id,
l.title,
l.date,
l.description,
l.link,
a.mastodon,
a.name,
json_build_object('name', a.name, 'url', a.url, 'mastodon', a.mastodon) AS author,
'link' AS type,
-- Tags are gathered in a correlated subquery (NULL when the link has none).
(
SELECT array_agg(t.name)
FROM links_tags lt
LEFT JOIN tags t ON lt.tags_id = t.id
WHERE lt.links_id = l.id
) AS tags,
json_build_object(
'title', CONCAT(l.title, ' via ', a.name),
'url', l.link,
'description', l.description,
'date', l.date
) AS feed
FROM
links l
JOIN authors a ON l.author = a.id
-- NOTE(review): no aggregates appear at this level, so the GROUP BY looks
-- redundant — confirm before removing.
GROUP BY
l.id, l.title, l.date, l.description, l.link, a.mastodon, a.name, a.url
ORDER BY
l.date DESC;

View file

@ -0,0 +1,126 @@
-- Denormalized posts view: each post plus its hero image path, an "old post"
-- flag (>3 years), its ordered content blocks (polymorphic posts_blocks
-- resolved per collection), related tags/genres/artists/books/movies/shows,
-- and a prebuilt `feed` JSON object for feeds.
CREATE OR REPLACE VIEW optimized_posts AS
SELECT
p.id,
p.date,
p.title,
p.description,
p.content,
p.featured,
p.slug AS url,
p.mastodon_url,
-- Image path: '/<filename>' or NULL when the file reference is empty/bogus.
CASE WHEN df.filename_disk IS NOT NULL
AND df.filename_disk != ''
AND df.filename_disk != '/' THEN
CONCAT('/', df.filename_disk)
ELSE
NULL
END AS image,
p.image_alt,
CASE WHEN EXTRACT(YEAR FROM AGE(CURRENT_DATE, p.date)) > 3 THEN
TRUE
ELSE
FALSE
END AS old_post,
-- Content blocks: posts_blocks is polymorphic on `collection`; each branch
-- joins the matching table and shapes a type-tagged JSON object.
(
SELECT
json_agg(
CASE WHEN pb.collection = 'youtube_player' THEN
json_build_object('type', pb.collection, 'url', yp.url)
WHEN pb.collection = 'github_banner' THEN
json_build_object('type', pb.collection, 'url', gb.url)
WHEN pb.collection = 'npm_banner' THEN
json_build_object('type', pb.collection, 'url', nb.url, 'command', nb.command)
WHEN pb.collection = 'rss_banner' THEN
json_build_object('type', pb.collection, 'url', rb.url, 'text', rb.text)
WHEN pb.collection = 'calendar_banner' THEN
json_build_object('type', pb.collection, 'url', cb.url, 'text', cb.text)
WHEN pb.collection = 'hero' THEN
json_build_object('type', pb.collection, 'image', CONCAT('/', df_hero.filename_disk), 'alt_text', h.alt_text)
WHEN pb.collection = 'markdown' THEN
json_build_object('type', pb.collection, 'text', md.text)
ELSE
json_build_object('type', pb.collection)
END)
FROM
posts_blocks pb
LEFT JOIN youtube_player yp ON pb.collection = 'youtube_player'
AND yp.id = pb.item::integer
LEFT JOIN github_banner gb ON pb.collection = 'github_banner'
AND gb.id = pb.item::integer
LEFT JOIN npm_banner nb ON pb.collection = 'npm_banner'
AND nb.id = pb.item::integer
LEFT JOIN rss_banner rb ON pb.collection = 'rss_banner'
AND rb.id = pb.item::integer
LEFT JOIN calendar_banner cb ON pb.collection = 'calendar_banner'
AND cb.id = pb.item::integer
LEFT JOIN hero h ON pb.collection = 'hero'
AND h.id = pb.item::integer
LEFT JOIN directus_files df_hero ON h.image = df_hero.id
LEFT JOIN markdown md ON pb.collection = 'markdown'
AND md.id = pb.item::integer
WHERE
pb.posts_id = p.id) AS blocks,
-- Related content, each gathered via its join table.
(
SELECT
array_agg(t.name)
FROM
posts_tags pt
LEFT JOIN tags t ON pt.tags_id = t.id
WHERE
pt.posts_id = p.id) AS tags,
(
SELECT
json_agg(json_build_object('name', g.name, 'url', g.slug) ORDER BY g.name ASC)
FROM
posts_genres gp
LEFT JOIN genres g ON gp.genres_id = g.id
WHERE
gp.posts_id = p.id) AS genres,
(
SELECT
json_agg(json_build_object('name', a.name_string, 'url', a.slug, 'country', a.country, 'total_plays', a.total_plays) ORDER BY a.name_string ASC)
FROM
posts_artists pa
LEFT JOIN artists a ON pa.artists_id = a.id
WHERE
pa.posts_id = p.id) AS artists,
(
SELECT
json_agg(json_build_object('title', b.title, 'author', b.author, 'url', b.slug)
ORDER BY b.title ASC)
FROM
posts_books pbk
LEFT JOIN books b ON pbk.books_id = b.id
WHERE
pbk.posts_id = p.id) AS books,
(
SELECT
json_agg(json_build_object('title', m.title, 'year', m.year, 'url', m.slug)
ORDER BY m.year DESC)
FROM
posts_movies pm
LEFT JOIN movies m ON pm.movies_id = m.id
WHERE
pm.posts_id = p.id) AS movies,
(
SELECT
json_agg(json_build_object('title', s.title, 'year', s.year, 'url', s.slug) ORDER BY s.year DESC)
FROM
posts_shows ps
LEFT JOIN shows s ON ps.shows_id = s.id
WHERE
ps.posts_id = p.id) AS shows,
-- Prebuilt feed entry consumed by get_feed_data()/feed views.
json_build_object('title', p.title, 'url', p.slug, 'description', p.description, 'content', p.content, 'date', p.date, 'image', CASE WHEN df.filename_disk IS NOT NULL
AND df.filename_disk != ''
AND df.filename_disk != '/' THEN
CONCAT('/', df.filename_disk)
ELSE
NULL
END) AS feed
FROM
posts p
LEFT JOIN directus_files df ON p.image = df.id
GROUP BY
p.id,
df.filename_disk;

View file

@ -0,0 +1,71 @@
-- Unified "everything" activity feed: posts, links, finished books and
-- watched movies as type-tagged JSON `feed` rows, newest 20 only.
CREATE OR REPLACE VIEW optimized_all_activity AS
WITH feed_data AS (
SELECT json_build_object(
'title', p.title,
'url', p.url,
'description', p.content,
'date', p.date,
'type', 'article',
'label', 'Post',
'content', p.content
) AS feed
FROM optimized_posts p
UNION ALL
SELECT json_build_object(
'title', CONCAT(l.title, ' via ', l.author->>'name'),
'url', l.link,
'description', l.description,
'date', l.date,
'type', 'link',
'label', 'Link',
'author', l.author
) AS feed
FROM optimized_links l
UNION ALL
-- Books only appear once finished; unfinished books yield NULL rows that are
-- filtered out below.
SELECT CASE
WHEN LOWER(b.status) = 'finished' THEN
json_build_object(
'title', CONCAT(b.title, ' by ', b.author,
CASE WHEN b.rating IS NOT NULL THEN CONCAT(' (', b.rating, ')') ELSE '' END
),
'url', b.url,
'description', COALESCE(b.review, b.description),
'date', b.date_finished,
'type', 'books',
'label', 'Book',
'image', b.image,
'rating', b.rating
)
ELSE NULL
END AS feed
FROM optimized_books b
UNION ALL
-- Movies only appear once watched.
SELECT CASE
WHEN m.last_watched IS NOT NULL THEN
json_build_object(
'title', CONCAT(m.title,
CASE WHEN m.rating IS NOT NULL THEN CONCAT(' (', m.rating, ')') ELSE '' END
),
'url', m.url,
'description', COALESCE(m.review, m.description),
'date', m.last_watched,
'type', 'movies',
'label', 'Movie',
'image', m.image,
'rating', m.rating
)
ELSE NULL
END AS feed
FROM optimized_movies m
)
SELECT feed
FROM feed_data
WHERE feed IS NOT NULL
ORDER BY (feed->>'date')::timestamp DESC
LIMIT 20;

View file

@ -0,0 +1,12 @@
-- Maps each configured path to the JSON array of HTTP headers to serve for it
-- (paths <-> header_rules through the paths_header_rules join table).
CREATE OR REPLACE VIEW optimized_headers AS
SELECT
p.path AS resource_path,
json_agg(json_build_object('header_name', hr.name, 'header_value', hr.value)) AS headers
FROM
paths p
JOIN
paths_header_rules phr ON p.id = phr.paths_id
JOIN
header_rules hr ON phr.header_rules_id = hr.id
GROUP BY
p.path;

View file

@ -0,0 +1,109 @@
-- Flattened recent-activity feed (posts, links, finished books, watched
-- movies, concerts) normalized onto one column set, newest 20 aggregated into
-- a single JSON array. Columns unused by a given source are NULL-padded.
CREATE OR REPLACE VIEW optimized_recent_activity AS
WITH activity_data AS (
SELECT
p.date AS content_date,
p.title,
p.content AS description,
p.url AS url,
NULL AS author,
NULL AS image,
NULL AS rating,
NULL AS artist_url,
NULL AS venue_lat,
NULL AS venue_lon,
NULL AS venue_name,
NULL AS notes,
'article' AS type,
'Post' AS label
FROM optimized_posts p
UNION ALL
SELECT
l.date AS content_date,
l.title,
l.description,
l.link AS url,
l.author,
NULL AS image,
NULL AS rating,
NULL AS artist_url,
NULL AS venue_lat,
NULL AS venue_lon,
NULL AS venue_name,
NULL AS notes,
'link' AS type,
'Link' AS label
FROM optimized_links l
UNION ALL
SELECT
b.date_finished AS content_date,
CONCAT(b.title,
CASE WHEN b.rating IS NOT NULL THEN CONCAT(' (', b.rating, ')') ELSE '' END
) AS title,
b.description,
b.url AS url,
NULL AS author,
b.image,
b.rating,
NULL AS artist_url,
NULL AS venue_lat,
NULL AS venue_lon,
NULL AS venue_name,
NULL AS notes,
'books' AS type,
'Book' AS label
FROM optimized_books b
WHERE LOWER(b.status) = 'finished'
UNION ALL
SELECT
m.last_watched AS content_date,
CONCAT(m.title,
CASE WHEN m.rating IS NOT NULL THEN CONCAT(' (', m.rating, ')') ELSE '' END
) AS title,
m.description,
m.url AS url,
NULL AS author,
m.image,
m.rating,
NULL AS artist_url,
NULL AS venue_lat,
NULL AS venue_lon,
NULL AS venue_name,
NULL AS notes,
'movies' AS type,
'Movie' AS label
FROM optimized_movies m
WHERE m.last_watched IS NOT NULL
UNION ALL
-- Concerts carry artist/venue details pulled out of their JSON columns.
SELECT
c.date AS content_date,
CONCAT(c.artist->>'name', ' at ', c.venue->>'name_short') AS title,
c.concert_notes AS description,
NULL AS url,
NULL AS author,
NULL AS image,
NULL AS rating,
c.artist->>'url' AS artist_url,
c.venue->>'latitude' AS venue_lat,
c.venue->>'longitude' AS venue_lon,
c.venue->>'name_short' AS venue_name,
c.notes AS notes,
'concerts' AS type,
'Concert' AS label
FROM optimized_concerts c
)
-- Aggregate the 20 most recent dated entries into one JSON array column.
SELECT json_agg(recent_activity_data ORDER BY recent_activity_data.content_date DESC) AS feed
FROM (
SELECT *
FROM activity_data
WHERE content_date IS NOT NULL
ORDER BY content_date DESC
LIMIT 20
) AS recent_activity_data;

View file

@ -0,0 +1,7 @@
-- Flat redirect list: exposes the reserved-word `from`/`to` columns under
-- friendlier names alongside the HTTP status code.
CREATE OR REPLACE VIEW optimized_redirects AS
SELECT
r.from AS source_url,
r.to AS destination_url,
r.status_code
FROM
redirects r;

View file

@ -0,0 +1,12 @@
-- One row per robots.txt path with the alphabetically sorted array of user
-- agents that the rule applies to (via the robots_user_agents join table).
CREATE OR REPLACE VIEW optimized_robots AS
SELECT
  rb.path,
  array_agg(agents.user_agent ORDER BY agents.user_agent) AS user_agents
FROM robots rb
JOIN robots_user_agents link ON link.robots_id = rb.id
JOIN user_agents agents ON agents.id = link.user_agents_id
GROUP BY rb.path;

View file

@ -0,0 +1,111 @@
-- Unified search index: one row per searchable item across posts, links,
-- books, artists, genres, shows and movies. Every branch produces the same
-- column set (type, emoji-prefixed title, url, description, tags,
-- genre_name/genre_url, total_plays, content_date); columns that do not apply
-- to a given content type are NULL. An id is assigned at the end by row
-- number ordered by url.
CREATE OR REPLACE VIEW optimized_search_index AS
WITH search_data AS (
-- Blog posts.
SELECT
'post' AS type,
CONCAT('📝 ', p.title) AS title,
p.url::TEXT AS url,
p.description AS description,
p.tags,
NULL AS genre_name,
NULL AS genre_url,
NULL::TEXT AS total_plays,
p.date AS content_date
FROM
optimized_posts p
UNION ALL
-- Shared links; title credits the source site.
SELECT
'link' AS type,
CONCAT('🔗 ', l.title, ' via ', l.name) AS title,
l.link::TEXT AS url,
l.description AS description,
l.tags,
NULL AS genre_name,
NULL AS genre_url,
NULL::TEXT AS total_plays,
l.date AS content_date
FROM
optimized_links l
UNION ALL
-- Finished books only; rating appended to the title when present.
SELECT
'book' AS type,
CASE WHEN b.rating IS NOT NULL THEN
CONCAT('📖 ', b.title, ' (', b.rating, ')')
ELSE
CONCAT('📖 ', b.title)
END AS title,
b.url::TEXT AS url,
b.description AS description,
b.tags,
NULL AS genre_name,
NULL AS genre_url,
NULL::TEXT AS total_plays,
b.date_finished AS content_date
FROM
optimized_books b
WHERE
LOWER(b.status) = 'finished'
UNION ALL
-- Artists: emoji falls back artist emoji -> genre emoji -> headphones;
-- the artist's single genre doubles as the tags array.
SELECT
'artist' AS type,
CONCAT(COALESCE(ar.emoji, ar.genre_emoji, '🎧'), ' ', ar.name) AS title,
ar.url::TEXT AS url,
ar.description AS description,
ARRAY[ar.genre_name] AS tags,
ar.genre_name,
ar.genre_slug AS genre_url,
TO_CHAR(ar.total_plays::NUMERIC, 'FM999,999,999,999') AS total_plays,
NULL AS content_date
FROM
optimized_artists ar
UNION ALL
-- Genres.
SELECT
'genre' AS type,
CONCAT(COALESCE(g.emoji, '🎵'), ' ', g.name) AS title,
g.url::TEXT AS url,
g.description AS description,
NULL AS tags,
g.name AS genre_name,
g.url AS genre_url,
NULL::TEXT AS total_plays,
NULL AS content_date
FROM
optimized_genres g
UNION ALL
-- Watched shows only.
SELECT
'show' AS type,
CONCAT('📺 ', s.title, ' (', s.year, ')') AS title,
s.url::TEXT AS url,
s.description AS description,
s.tags,
NULL AS genre_name,
NULL AS genre_url,
NULL::TEXT AS total_plays,
s.last_watched_at AS content_date
FROM
optimized_shows s
WHERE
s.last_watched_at IS NOT NULL
UNION ALL
-- Watched movies only; title shows rating when rated, year otherwise.
SELECT
'movie' AS type,
CASE
WHEN m.rating IS NOT NULL THEN CONCAT('🎬 ', m.title, ' (', m.rating, ')')
ELSE CONCAT('🎬 ', m.title, ' (', m.year, ')')
END AS title,
m.url::TEXT AS url,
m.description AS description,
m.tags,
NULL AS genre_name,
NULL AS genre_url,
NULL::TEXT AS total_plays,
m.last_watched AS content_date
FROM
optimized_movies m
WHERE
m.last_watched IS NOT NULL
)
-- Stable-ish id keyed off the url ordering (ids shift if urls change).
SELECT
ROW_NUMBER() OVER (ORDER BY url) AS id,
*
FROM
search_data;

View file

@ -0,0 +1,46 @@
-- Every routable URL on the site — posts, books, movies, artists, genres,
-- shows, CMS pages, and hard-coded static slugs — as a single `url` column.
-- UNION ALL keeps duplicates (none are expected across sources).
CREATE OR REPLACE VIEW optimized_sitemap AS
SELECT posts.url::TEXT AS url FROM optimized_posts posts
UNION ALL
SELECT books.url::TEXT FROM optimized_books books
UNION ALL
SELECT movies.url::TEXT FROM optimized_movies movies
UNION ALL
SELECT artists.url::TEXT FROM optimized_artists artists
UNION ALL
SELECT genres.url::TEXT FROM optimized_genres genres
UNION ALL
SELECT shows.url::TEXT FROM optimized_shows shows
UNION ALL
SELECT pages.permalink::TEXT FROM optimized_pages pages
UNION ALL
SELECT statics.slug FROM static_slugs statics;

View file

@ -0,0 +1,190 @@
-- Site-wide statistics: all-time formatted counts (music, concerts, venues,
-- shows, episodes, posts, links, books, movies, genres) plus a JSON
-- per-year breakdown covering 2023 onward. Counts are pre-formatted with
-- TO_CHAR('FM999,999,999') for direct display.
--
-- Fix: JSON_AGG previously had no ORDER BY, so the order of the
-- yearly_breakdown array was unspecified — an ORDER BY inside a CTE does not
-- guarantee aggregation order in the consuming query. The aggregate now
-- orders explicitly by year descending.
CREATE OR REPLACE VIEW optimized_stats AS
WITH artist_stats AS (
SELECT
TO_CHAR(COUNT(DISTINCT artist_name), 'FM999,999,999') AS artist_count
FROM optimized_listens
WHERE artist_name IS NOT NULL
),
track_stats AS (
SELECT
TO_CHAR(COUNT(*), 'FM999,999,999') AS listen_count
FROM optimized_listens
),
concert_stats AS (
SELECT
TO_CHAR(COUNT(*), 'FM999,999,999') AS concert_count
FROM concerts
),
venue_stats AS (
SELECT
TO_CHAR(COUNT(DISTINCT venue), 'FM999,999,999') AS venue_count
FROM concerts
),
-- Each branch contributes one metric family per year; the other columns are
-- zero so the branches can be summed per year in aggregated_yearly_stats.
yearly_data AS (
SELECT
EXTRACT(YEAR FROM e.last_watched_at) AS year,
0 AS artist_count,
0 AS listen_count,
0 AS genre_count,
COUNT(DISTINCT e.show) AS show_count,
COUNT(*) AS episode_count,
0 AS post_count,
0 AS link_count,
0 AS book_count,
0 AS movie_count,
0 AS concert_count,
0 AS venue_count
FROM episodes e
GROUP BY EXTRACT(YEAR FROM e.last_watched_at)
HAVING EXTRACT(YEAR FROM e.last_watched_at) >= 2023
UNION ALL
SELECT
EXTRACT(YEAR FROM p.date) AS year,
0 AS artist_count,
0 AS listen_count,
0 AS genre_count,
0 AS show_count,
0 AS episode_count,
COUNT(*) AS post_count,
0 AS link_count,
0 AS book_count,
0 AS movie_count,
0 AS concert_count,
0 AS venue_count
FROM optimized_posts p
GROUP BY EXTRACT(YEAR FROM p.date)
HAVING EXTRACT(YEAR FROM p.date) >= 2023
UNION ALL
SELECT
EXTRACT(YEAR FROM o.date) AS year,
0 AS artist_count,
0 AS listen_count,
0 AS genre_count,
0 AS show_count,
0 AS episode_count,
0 AS post_count,
COUNT(*) AS link_count,
0 AS book_count,
0 AS movie_count,
0 AS concert_count,
0 AS venue_count
FROM optimized_links o
GROUP BY EXTRACT(YEAR FROM o.date)
HAVING EXTRACT(YEAR FROM o.date) >= 2023
UNION ALL
SELECT
EXTRACT(YEAR FROM b.date_finished) AS year,
0 AS artist_count,
0 AS listen_count,
0 AS genre_count,
0 AS show_count,
0 AS episode_count,
0 AS post_count,
0 AS link_count,
COUNT(*) AS book_count,
0 AS movie_count,
0 AS concert_count,
0 AS venue_count
FROM optimized_books b
WHERE LOWER(b.status) = 'finished'
GROUP BY EXTRACT(YEAR FROM b.date_finished)
HAVING EXTRACT(YEAR FROM b.date_finished) >= 2023
UNION ALL
SELECT
EXTRACT(YEAR FROM m.last_watched) AS year,
0 AS artist_count,
0 AS listen_count,
0 AS genre_count,
0 AS show_count,
0 AS episode_count,
0 AS post_count,
0 AS link_count,
0 AS book_count,
COUNT(*) AS movie_count,
0 AS concert_count,
0 AS venue_count
FROM optimized_movies m
GROUP BY EXTRACT(YEAR FROM m.last_watched)
HAVING EXTRACT(YEAR FROM m.last_watched) >= 2023
UNION ALL
-- listened_at is a Unix epoch; convert before extracting the year.
SELECT
EXTRACT(YEAR FROM TO_TIMESTAMP(l.listened_at)) AS year,
COUNT(DISTINCT l.artist_name) AS artist_count,
COUNT(l.id) AS listen_count,
COUNT(DISTINCT l.genre_name) AS genre_count,
0 AS show_count,
0 AS episode_count,
0 AS post_count,
0 AS link_count,
0 AS book_count,
0 AS movie_count,
0 AS concert_count,
0 AS venue_count
FROM optimized_listens l
GROUP BY EXTRACT(YEAR FROM TO_TIMESTAMP(l.listened_at))
HAVING EXTRACT(YEAR FROM TO_TIMESTAMP(l.listened_at)) >= 2023
UNION ALL
SELECT
EXTRACT(YEAR FROM c.date) AS year,
0 AS artist_count,
0 AS listen_count,
0 AS genre_count,
0 AS show_count,
0 AS episode_count,
0 AS post_count,
0 AS link_count,
0 AS book_count,
0 AS movie_count,
COUNT(*) AS concert_count,
COUNT(DISTINCT c.venue) AS venue_count
FROM concerts c
GROUP BY EXTRACT(YEAR FROM c.date)
HAVING EXTRACT(YEAR FROM c.date) >= 2023
),
aggregated_yearly_stats AS (
SELECT
year,
SUM(artist_count) AS artist_count,
SUM(listen_count) AS listen_count,
SUM(genre_count) AS genre_count,
SUM(show_count) AS show_count,
SUM(episode_count) AS episode_count,
SUM(post_count) AS post_count,
SUM(link_count) AS link_count,
SUM(book_count) AS book_count,
SUM(movie_count) AS movie_count,
SUM(concert_count) AS concert_count,
SUM(venue_count) AS venue_count
FROM yearly_data
GROUP BY year
)
SELECT
(SELECT artist_count FROM artist_stats) AS artist_count,
(SELECT listen_count FROM track_stats) AS listen_count,
(SELECT concert_count FROM concert_stats) AS concert_count,
(SELECT venue_count FROM venue_stats) AS venue_count,
(SELECT TO_CHAR(COUNT(DISTINCT e.show), 'FM999,999,999') FROM episodes e) AS show_count,
(SELECT TO_CHAR(COUNT(*), 'FM999,999,999') FROM episodes e) AS episode_count,
(SELECT TO_CHAR(COUNT(*), 'FM999,999,999') FROM optimized_posts) AS post_count,
(SELECT TO_CHAR(COUNT(*), 'FM999,999,999') FROM optimized_links) AS link_count,
(SELECT TO_CHAR(COUNT(*), 'FM999,999,999') FROM optimized_books WHERE LOWER(status) = 'finished') AS book_count,
(SELECT TO_CHAR(COUNT(*), 'FM999,999,999') FROM optimized_movies WHERE last_watched IS NOT NULL) AS movie_count,
(SELECT TO_CHAR(COUNT(DISTINCT genre_name), 'FM999,999,999') FROM optimized_listens WHERE genre_name IS NOT NULL) AS genre_count,
JSON_AGG(
JSON_BUILD_OBJECT(
'year', ys.year,
'artist_count', CASE WHEN ys.artist_count > 0 THEN TO_CHAR(ys.artist_count, 'FM999,999,999') ELSE NULL END,
'listen_count', CASE WHEN ys.listen_count > 0 THEN TO_CHAR(ys.listen_count, 'FM999,999,999') ELSE NULL END,
'genre_count', CASE WHEN ys.genre_count > 0 THEN TO_CHAR(ys.genre_count, 'FM999,999,999') ELSE NULL END,
'show_count', CASE WHEN ys.show_count > 0 THEN TO_CHAR(ys.show_count, 'FM999,999,999') ELSE NULL END,
'episode_count', CASE WHEN ys.episode_count > 0 THEN TO_CHAR(ys.episode_count, 'FM999,999,999') ELSE NULL END,
'post_count', CASE WHEN ys.post_count > 0 THEN TO_CHAR(ys.post_count, 'FM999,999,999') ELSE NULL END,
'link_count', CASE WHEN ys.link_count > 0 THEN TO_CHAR(ys.link_count, 'FM999,999,999') ELSE NULL END,
'book_count', CASE WHEN ys.book_count > 0 THEN TO_CHAR(ys.book_count, 'FM999,999,999') ELSE NULL END,
'movie_count', CASE WHEN ys.movie_count > 0 THEN TO_CHAR(ys.movie_count, 'FM999,999,999') ELSE NULL END,
'concert_count', CASE WHEN ys.concert_count > 0 THEN TO_CHAR(ys.concert_count, 'FM999,999,999') ELSE NULL END,
'venue_count', CASE WHEN ys.venue_count > 0 THEN TO_CHAR(ys.venue_count, 'FM999,999,999') ELSE NULL END
)
ORDER BY ys.year DESC
) AS yearly_breakdown
FROM aggregated_yearly_stats ys;

View file

@ -0,0 +1,7 @@
-- Feed definitions passed through one-to-one from the feeds table.
CREATE OR REPLACE VIEW optimized_feeds AS
SELECT
  feeds.title,
  feeds.data,
  feeds.permalink
FROM feeds;

View file

@ -0,0 +1,99 @@
-- The three most recent items (posts, links, finished books, watched movies)
-- shaped as JSON objects for syndication (e.g. posting to social platforms).
-- Titles get an emoji prefix and a trailing run of CamelCase hashtags built
-- from each item's tags; tags containing '&' are split into separate
-- hashtags first (regexp_split_to_table on '\s*&\s*').
CREATE OR REPLACE VIEW optimized_syndication AS
WITH syndication_data AS (
-- Blog posts.
SELECT
p.date AS content_date,
json_build_object(
'title', CONCAT('📝 ', p.title, ' ', (
SELECT array_to_string(
array_agg('#' || initcap(replace(trim(tag_part), ' ', ''))),
' '
)
FROM unnest(p.tags) AS t(name),
regexp_split_to_table(t.name, '\s*&\s*') AS tag_part
)),
'description', p.description,
'url', p.url,
'image', p.image,
'date', p.date
) AS feed
FROM optimized_posts p
UNION ALL
-- Links. When a mastodon handle is present the attribution is rebuilt as
-- @user@host by slicing the profile URL apart with split_part; otherwise
-- the plain site name is used. NOTE(review): assumes l.mastodon is a
-- profile URL of the form https://host/@user — confirm against the data.
SELECT
l.date AS content_date,
json_build_object(
'title', CONCAT('🔗 ', l.title, CASE
WHEN l.mastodon IS NOT NULL THEN ' via @' || split_part(l.mastodon, '@', 2) || '@' || split_part(split_part(l.mastodon, 'https://', 2), '/', 1)
ELSE CONCAT(' via ', l.name)
END, ' ', (
SELECT array_to_string(
array_agg('#' || initcap(replace(trim(tag_part), ' ', ''))),
' '
)
FROM unnest(l.tags) AS t(name),
regexp_split_to_table(t.name, '\s*&\s*') AS tag_part
)),
'description', l.description,
'url', l.link,
'date', l.date
) AS feed
FROM optimized_links l
UNION ALL
-- Books: only finished ones produce a feed object (others yield NULL and
-- are dropped by the outer WHERE). Rating is appended when present.
SELECT
b.date_finished AS content_date,
CASE
WHEN LOWER(b.status) = 'finished' THEN
json_build_object(
'title', CONCAT('📖 ', b.title, CASE
WHEN b.rating IS NOT NULL THEN ' (' || b.rating || ')' ELSE '' END, ' ', (
SELECT array_to_string(
array_agg('#' || initcap(replace(trim(tag_part), ' ', ''))),
' '
)
FROM unnest(b.tags) AS t(name),
regexp_split_to_table(t.name, '\s*&\s*') AS tag_part
)
),
'description', b.description,
'url', b.url,
'image', b.image,
'date', b.date_finished
)
ELSE NULL
END AS feed
FROM optimized_books b
UNION ALL
-- Movies: only watched ones produce a feed object.
SELECT
m.last_watched AS content_date,
CASE
WHEN m.last_watched IS NOT NULL THEN
json_build_object(
'title', CONCAT('🎥 ', m.title, CASE
WHEN m.rating IS NOT NULL THEN ' (' || m.rating || ')' ELSE '' END, ' ', (
SELECT array_to_string(
array_agg('#' || initcap(replace(trim(tag_part), ' ', ''))),
' '
)
FROM unnest(m.tags) AS t(name),
regexp_split_to_table(t.name, '\s*&\s*') AS tag_part
)
),
'description', m.description,
'url', m.url,
'image', m.image,
'date', m.last_watched
)
ELSE NULL
END AS feed
FROM optimized_movies m
)
-- Drop the NULL placeholders, newest first, keep only the latest three.
SELECT feed
FROM syndication_data
WHERE feed IS NOT NULL
ORDER BY content_date DESC
LIMIT 3;

View file

@ -0,0 +1,23 @@
-- Site-wide settings from the singleton globals table, with the two avatar
-- file references resolved to public '/'-prefixed paths via directus_files.
-- Fix: the statement previously had no terminating semicolon — unlike every
-- sibling view file — which breaks execution when these files are
-- concatenated or sourced together.
CREATE OR REPLACE VIEW optimized_globals AS
SELECT
  g.site_name,
  g.site_description,
  g.intro,
  g.author,
  g.email,
  g.mastodon,
  g.url,
  g.cdn_url,
  g.sitemap_uri,
  g.theme_color,
  g.site_type,
  g.locale,
  g.lang,
  g.webfinger_username,
  g.webfinger_hostname,
  -- Resolved public paths for the avatar assets.
  CONCAT('/', df.filename_disk) AS avatar,
  CONCAT('/', df2.filename_disk) AS avatar_transparent
FROM
  globals g
  LEFT JOIN directus_files df ON g.avatar = df.id
  LEFT JOIN directus_files df2 ON g.avatar_transparent = df2.id;

View file

@ -0,0 +1,14 @@
-- Navigation entries joined to their optional target page. When an entry
-- points at a CMS page, page_title/page_permalink carry the page's values;
-- otherwise the entry's own permalink/title apply and those columns are NULL.
CREATE OR REPLACE VIEW optimized_navigation AS
SELECT
  nav.id,
  nav.menu_location,
  nav.permalink,
  nav.icon,
  nav.title,
  nav.sort,
  pg.title AS page_title,
  pg.permalink AS page_permalink
FROM navigation nav
LEFT JOIN pages pg ON pg.id = nav.pages;

View file

@ -0,0 +1,54 @@
-- CMS pages with their content blocks assembled into a JSON array. Each
-- pages_blocks row is polymorphic: pb.collection names the block type and
-- pb.item holds the id of a row in that type's table, so every block table is
-- LEFT JOINed guarded by a matching collection check, and the CASE picks the
-- shape for the matched type. Blocks are ordered by pb.sort.
CREATE OR REPLACE VIEW optimized_pages AS
SELECT
p.id,
p.title,
p.permalink,
p.description,
CONCAT('/', df.filename_disk) AS open_graph_image,
p.updated,
(
SELECT
json_agg(
CASE WHEN pb.collection = 'youtube_player' THEN
json_build_object('type', pb.collection, 'url', yp.url)
WHEN pb.collection = 'github_banner' THEN
json_build_object('type', pb.collection, 'url', gb.url)
WHEN pb.collection = 'npm_banner' THEN
json_build_object('type', pb.collection, 'url', nb.url, 'command', nb.command)
WHEN pb.collection = 'rss_banner' THEN
json_build_object('type', pb.collection, 'url', rb.url, 'text', rb.text)
WHEN pb.collection = 'calendar_banner' THEN
json_build_object('type', pb.collection, 'url', cb.url, 'text', cb.text)
WHEN pb.collection = 'hero' THEN
json_build_object('type', pb.collection, 'image', CONCAT('/', df_hero.filename_disk), 'alt', h.alt_text)
WHEN pb.collection = 'markdown' THEN
json_build_object('type', pb.collection, 'text', md.text)
ELSE
-- Unknown block types still surface with their collection name.
json_build_object('type', pb.collection)
END ORDER BY pb.sort)
FROM
pages_blocks pb
LEFT JOIN youtube_player yp ON pb.collection = 'youtube_player'
AND yp.id = pb.item::integer
LEFT JOIN github_banner gb ON pb.collection = 'github_banner'
AND gb.id = pb.item::integer
LEFT JOIN npm_banner nb ON pb.collection = 'npm_banner'
AND nb.id = pb.item::integer
LEFT JOIN rss_banner rb ON pb.collection = 'rss_banner'
AND rb.id = pb.item::integer
LEFT JOIN calendar_banner cb ON pb.collection = 'calendar_banner'
AND cb.id = pb.item::integer
LEFT JOIN hero h ON pb.collection = 'hero'
AND h.id = pb.item::integer
LEFT JOIN directus_files df_hero ON h.image = df_hero.id
LEFT JOIN markdown md ON pb.collection = 'markdown'
AND md.id = pb.item::integer
WHERE
pb.pages_id = p.id) AS blocks
FROM
pages p
LEFT JOIN directus_files df ON p.open_graph_image = df.id
-- GROUP BY with no outer aggregates; keeps one row per page.
GROUP BY
p.id,
df.filename_disk;

View file

@ -0,0 +1,107 @@
-- Books with cover path, tags, and related content (artists, movies, genres,
-- shows, posts, related books) gathered via correlated subqueries into
-- arrays/JSON. Also pre-builds a `grid` tile object, a `feed` object (only
-- for finished + rated books), and the formatted all-time reading streak.
CREATE OR REPLACE VIEW optimized_books AS
SELECT
b.date_finished,
EXTRACT(YEAR FROM b.date_finished) AS year,
b.author,
b.description,
b.title,
b.progress,
b.read_status AS status,
b.star_rating AS rating,
b.review,
b.slug AS url,
CONCAT('/', df.filename_disk) AS image,
b.favorite,
b.tattoo,
-- Plain tag names.
(
SELECT
array_agg(t.name)
FROM
books_tags bt
LEFT JOIN tags t ON bt.tags_id = t.id
WHERE
bt.books_id = b.id) AS tags,
-- Associated musical artists, alphabetical.
(
SELECT
json_agg(json_build_object('name', a.name_string, 'url', a.slug, 'country', a.country, 'total_plays', a.total_plays)
ORDER BY a.name_string ASC)
FROM
books_artists ba
LEFT JOIN artists a ON ba.artists_id = a.id
WHERE
ba.books_id = b.id) AS artists,
-- Related movies, newest first.
(
SELECT
json_agg(json_build_object('title', m.title, 'year', m.year, 'url', m.slug)
ORDER BY m.year DESC)
FROM
movies_books mb
LEFT JOIN movies m ON mb.movies_id = m.id
WHERE
mb.books_id = b.id) AS movies,
-- Genres, alphabetical.
(
SELECT
json_agg(json_build_object('name', g.name, 'url', g.slug)
ORDER BY g.name ASC)
FROM
genres_books gb
LEFT JOIN genres g ON gb.genres_id = g.id
WHERE
gb.books_id = b.id) AS genres,
-- Related shows, newest first.
(
SELECT
json_agg(json_build_object('title', s.title, 'year', s.year, 'url', s.slug)
ORDER BY s.year DESC)
FROM
shows_books sb
LEFT JOIN shows s ON sb.shows_id = s.id
WHERE
sb.books_id = b.id) AS shows,
-- Posts that mention this book, newest first.
(
SELECT
json_agg(json_build_object('title', p.title, 'date', p.date, 'url', p.slug)
ORDER BY p.date DESC)
FROM
posts_books pb
LEFT JOIN posts p ON pb.posts_id = p.id
WHERE
pb.books_id = b.id) AS posts,
-- Other related books, alphabetical by title.
(
SELECT
json_agg(json_build_object('title', rb.title, 'author', rb.author, 'url', rb.slug)
ORDER BY rb.title ASC)
FROM
related_books rbk
LEFT JOIN books rb ON rbk.related_books_id = rb.id
WHERE
rbk.books_id = b.id) AS related_books,
-- Tile for grid layouts; subtext shows the rating when present.
json_build_object(
'title', NULL,
'image', CONCAT('/', df.filename_disk),
'url', b.slug,
'alt', CONCAT('Book cover from ', b.title, ' by ', b.author),
'subtext', CASE
WHEN b.star_rating IS NOT NULL THEN b.star_rating::text
ELSE NULL
END
) AS grid,
-- Feed entry only for finished books that were rated.
CASE
WHEN LOWER(b.read_status) = 'finished' AND b.star_rating IS NOT NULL THEN
json_build_object(
'title', CONCAT(b.title, ' by ', b.author, ' (', b.star_rating, ')'),
'url', b.slug,
'date', b.date_finished,
'description', COALESCE(b.review, b.description),
'image', CONCAT('/', df.filename_disk),
'rating', b.star_rating
)
ELSE
NULL
END AS feed,
-- Same formatted streak value on every row.
(SELECT TO_CHAR(days_read, 'FM999G999G999') FROM reading_streak LIMIT 1) AS days_read
FROM
books b
LEFT JOIN directus_files df ON b.art = df.id
GROUP BY
b.id,
df.filename_disk;

View file

@ -0,0 +1,119 @@
-- Movies with poster/backdrop paths and related content (tags, genres,
-- artists, books, shows, posts, related movies) gathered via correlated
-- subqueries. Pre-builds a `grid` tile (subtext switches from rating to year
-- 90 days after the last watch) and a `feed` object for rated + watched
-- movies. Ordered most recently watched first.
CREATE OR REPLACE VIEW optimized_movies AS
SELECT
m.id,
m.tmdb_id,
m.last_watched,
m.title,
m.year,
m.plays,
m.favorite,
m.tattoo,
m.star_rating AS rating,
m.description,
m.review,
m.slug AS url,
CONCAT('/', df.filename_disk) AS image,
CONCAT('/', df2.filename_disk) AS backdrop,
json_build_object(
'title', NULL,
'url', m.slug,
'image', CONCAT('/', df.filename_disk),
'backdrop', CONCAT('/', df2.filename_disk),
'alt', CONCAT('Poster from ', m.title),
-- Recently watched movies show their rating; older ones show the year.
'subtext', CASE
WHEN m.last_watched >= NOW() - INTERVAL '90 days' THEN
m.star_rating::text
ELSE
m.year::text
END
) AS grid,
(
SELECT
array_agg(t.name)
FROM
movies_tags mt
LEFT JOIN tags t ON mt.tags_id = t.id
WHERE
mt.movies_id = m.id) AS tags,
(
SELECT
json_agg(json_build_object('name', g.name, 'url', g.slug)
ORDER BY g.name ASC)
FROM
genres_movies gm
LEFT JOIN genres g ON gm.genres_id = g.id
WHERE
gm.movies_id = m.id) AS genres,
(
SELECT
json_agg(json_build_object('name', a.name_string, 'url', a.slug, 'country', a.country, 'total_plays', a.total_plays)
ORDER BY a.name_string ASC)
FROM
movies_artists ma
LEFT JOIN artists a ON ma.artists_id = a.id
WHERE
ma.movies_id = m.id) AS artists,
(
SELECT
json_agg(json_build_object('title', b.title, 'author', b.author, 'url', b.slug)
ORDER BY b.title ASC)
FROM
movies_books mb
LEFT JOIN books b ON mb.books_id = b.id
WHERE
mb.movies_id = m.id) AS books,
(
SELECT
json_agg(json_build_object('title', s.title, 'year', s.year, 'url', s.slug)
ORDER BY s.year DESC)
FROM
shows_movies sm
LEFT JOIN shows s ON sm.shows_id = s.id
WHERE
sm.movies_id = m.id) AS shows,
(
SELECT
json_agg(json_build_object('title', p.title, 'date', p.date, 'url', p.slug)
ORDER BY p.date DESC)
FROM
posts_movies pm
LEFT JOIN posts p ON pm.posts_id = p.id
WHERE
pm.movies_id = m.id) AS posts,
(
SELECT
json_agg(json_build_object('title', rm.title, 'year', rm.year, 'url', rm.slug)
ORDER BY rm.year DESC)
FROM
related_movies r
LEFT JOIN movies rm ON r.related_movies_id = rm.id
WHERE
r.movies_id = m.id) AS related_movies,
-- Feed entry only when the movie is both rated and watched. (The inner
-- rating CASE is redundant given the outer guard, but harmless.)
CASE
WHEN m.star_rating IS NOT NULL AND m.last_watched IS NOT NULL THEN
json_build_object(
'title', CONCAT(m.title, CASE
WHEN m.star_rating IS NOT NULL THEN CONCAT(' (', m.star_rating, ')')
ELSE ''
END),
'url', m.slug,
'date', m.last_watched,
'description', COALESCE(m.review, m.description),
'image', CONCAT('/', df.filename_disk),
'rating', m.star_rating
)
ELSE
NULL
END AS feed
FROM
movies m
LEFT JOIN directus_files df ON m.art = df.id
LEFT JOIN directus_files df2 ON m.backdrop = df2.id
GROUP BY
m.id,
df.filename_disk,
df2.filename_disk
ORDER BY
m.last_watched DESC,
m.favorite DESC,
m.title ASC;

View file

@ -0,0 +1,22 @@
-- Albums with a known release date, joined to artist and cover art, plus an
-- epoch timestamp and a prebuilt grid tile. The URL prefers the external
-- release link and falls back to the artist page.
CREATE OR REPLACE VIEW optimized_album_releases AS
SELECT
  alb.name AS title,
  alb.release_date,
  COALESCE(alb.release_link, art.slug) AS url,
  alb.total_plays,
  CONCAT('/', cover.filename_disk) AS image,
  json_build_object('name', art.name_string, 'url', art.slug, 'description', art.description) AS artist,
  EXTRACT(EPOCH FROM alb.release_date) AS release_timestamp,
  json_build_object(
    'title', alb.name,
    'image', CONCAT('/', cover.filename_disk),
    'url', COALESCE(alb.release_link, art.slug),
    'alt', CONCAT(alb.name, ' by ', art.name_string),
    'subtext', CONCAT(art.name_string, ' / ', TO_CHAR(alb.release_date, 'Mon FMDD, YYYY'))
  ) AS grid
FROM albums alb
LEFT JOIN directus_files cover ON alb.art = cover.id
LEFT JOIN artists art ON alb.artist = art.id
WHERE alb.release_date IS NOT NULL;

View file

@ -0,0 +1,29 @@
-- Albums with artist info, cover path, and formatted play counts, plus
-- prebuilt `grid` and `table` JSON objects. URLs point at the artist page.
CREATE OR REPLACE VIEW optimized_albums AS
SELECT
  rec.name AS name,
  rec.release_year,
  to_char(rec.total_plays, 'FM999,999,999,999') AS total_plays,
  rec.total_plays AS total_plays_raw,
  who.name_string AS artist_name,
  who.slug AS artist_url,
  CONCAT('/', cover.filename_disk) AS image,
  json_build_object(
    'title', rec.name,
    'image', CONCAT('/', cover.filename_disk),
    'url', who.slug,
    'alt', CONCAT('Cover for ', rec.name, ' by ', who.name_string),
    'subtext', CONCAT(to_char(rec.total_plays, 'FM999,999,999,999'), ' plays')
  ) AS grid,
  json_build_object(
    'title', rec.name,
    'artist', who.name_string,
    'plays', to_char(rec.total_plays, 'FM999,999,999,999'),
    'image', CONCAT('/', cover.filename_disk),
    'url', who.slug,
    'year', rec.release_year,
    'alt', CONCAT('Cover for ', rec.name, ' by ', who.name_string)
  ) AS "table"
FROM albums rec
LEFT JOIN artists who ON rec.artist = who.id
LEFT JOIN directus_files cover ON rec.art = cover.id
GROUP BY rec.id, who.name_string, who.slug, cover.filename_disk;

View file

@ -0,0 +1,109 @@
-- Artists with genre info, image path, formatted play counts, prebuilt
-- `grid`/`table` tiles, and related content (albums, concerts, books,
-- movies, shows, posts, related artists) gathered via correlated
-- subqueries into JSON arrays.
CREATE OR REPLACE VIEW optimized_artists AS
SELECT
ar.name_string AS name,
ar.slug AS url,
ar.tentative,
to_char(ar.total_plays, 'FM999,999,999,999') AS total_plays,
ar.total_plays AS total_plays_raw,
ar.country,
ar.description,
ar.favorite,
g.name AS genre_name,
g.slug AS genre_slug,
g.emoji AS genre_emoji,
json_build_object('name', g.name, 'url', g.slug, 'emoji', g.emoji) AS genre,
ar.emoji,
ar.tattoo,
CONCAT('/', df.filename_disk) AS image,
json_build_object(
'title', ar.name_string,
'image',
CONCAT('/', df.filename_disk),
'url', ar.slug,
'alt', CONCAT(to_char(ar.total_plays, 'FM999,999,999,999'), ' plays of ', ar.name_string),
'subtext', CONCAT(to_char(ar.total_plays, 'FM999,999,999,999'), ' plays')
) AS grid,
json_build_object(
'title', ar.name_string,
'genre', g.name,
'genre_url', g.slug,
-- Artist emoji wins over genre emoji when both exist.
'emoji', CASE WHEN ar.emoji IS NOT NULL THEN ar.emoji ELSE g.emoji END,
'plays', to_char(ar.total_plays, 'FM999,999,999,999'),
'image', CONCAT('/', df.filename_disk),
'url', ar.slug,
'alt', CONCAT(to_char(ar.total_plays, 'FM999,999,999,999'), ' plays of ', ar.name_string)
) AS table,
-- Discography, oldest release first.
(
SELECT
json_agg(json_build_object('name', a.name, 'release_year', a.release_year, 'total_plays', to_char(a.total_plays, 'FM999,999,999,999'),
'art', df_album.filename_disk)
ORDER BY a.release_year)
FROM
albums a
LEFT JOIN directus_files df_album ON a.art = df_album.id
WHERE
a.artist = ar.id) AS albums,
-- Concerts for this artist, newest first; venue name is shortened to the
-- text before the first comma.
(
SELECT
json_agg(json_build_object('id', c.id, 'date', c.date, 'venue_name', v.name, 'venue_name_short', trim(split_part(v.name, ',', 1)), 'venue_latitude', v.latitude, 'venue_longitude', v.longitude, 'notes', c.notes)
ORDER BY c.date DESC)
FROM
concerts c
LEFT JOIN venues v ON c.venue = v.id
WHERE
c.artist = ar.id) AS concerts,
-- Related books, alphabetical.
(
SELECT
json_agg(json_build_object('title', b.title, 'author', b.author, 'url', b.slug)
ORDER BY b.title ASC)
FROM
books_artists ba
LEFT JOIN books b ON ba.books_id = b.id
WHERE
ba.artists_id = ar.id) AS books,
-- Related movies, newest first.
(
SELECT
json_agg(json_build_object('title', m.title, 'year', m.year, 'url', m.slug)
ORDER BY m.year DESC)
FROM
movies_artists ma
LEFT JOIN movies m ON ma.movies_id = m.id
WHERE
ma.artists_id = ar.id) AS movies,
-- Related shows, newest first.
(
SELECT
json_agg(json_build_object('title', s.title, 'year', s.year, 'url', s.slug)
ORDER BY s.year DESC)
FROM
shows_artists sa
LEFT JOIN shows s ON sa.shows_id = s.id
WHERE
sa.artists_id = ar.id) AS shows,
-- Posts that mention this artist, newest first.
(
SELECT
json_agg(json_build_object('title', p.title, 'date', p.date, 'url', p.slug)
ORDER BY p.date DESC)
FROM
posts_artists pa
LEFT JOIN posts p ON pa.posts_id = p.id
WHERE
pa.artists_id = ar.id) AS posts,
-- Similar artists, alphabetical.
(
SELECT
json_agg(json_build_object('name', related_ar.name_string, 'url', related_ar.slug, 'country', related_ar.country, 'total_plays', to_char(related_ar.total_plays, 'FM999,999,999,999'))
ORDER BY related_ar.name_string)
FROM
related_artists ra
LEFT JOIN artists related_ar ON ra.related_artists_id = related_ar.id
WHERE
ra.artists_id = ar.id) AS related_artists
FROM
artists ar
LEFT JOIN directus_files df ON ar.art = df.id
LEFT JOIN genres g ON ar.genres = g.id
GROUP BY
ar.id,
df.filename_disk,
g.name,
g.slug,
g.emoji;

View file

@ -0,0 +1,19 @@
-- Concerts newest-first. The artist object uses the linked artists row when
-- the foreign key is set, otherwise falls back to the free-text artist name
-- with a NULL url. The venue object includes a short name (text before the
-- first comma) and coordinates.
CREATE OR REPLACE VIEW optimized_concerts AS
SELECT
  con.id,
  con.date,
  con.notes,
  CASE
    WHEN con.artist IS NULL THEN json_build_object('name', con.artist_name_string, 'url', NULL)
    ELSE json_build_object('name', act.name_string, 'url', act.slug)
  END AS artist,
  json_build_object('name', ven.name, 'name_short', trim(split_part(ven.name, ',', 1)), 'latitude', ven.latitude, 'longitude', ven.longitude, 'notes', ven.notes) AS venue,
  con.notes AS concert_notes
FROM concerts con
LEFT JOIN artists act ON con.artist = act.id
LEFT JOIN venues ven ON con.venue = ven.id
ORDER BY con.date DESC;

View file

@ -0,0 +1,50 @@
-- Genres with formatted play counts and related content (artists, books,
-- movies, posts) gathered via correlated subqueries into JSON arrays.
CREATE OR REPLACE VIEW optimized_genres AS
SELECT
g.id,
g.name,
g.description,
g.emoji,
to_char(g.total_plays, 'FM999,999,999,999') AS total_plays,
g.wiki_link,
g.slug AS url,
-- Artists in this genre, most played first.
(
SELECT
json_agg(json_build_object('name', a.name_string, 'url', a.slug, 'image', CONCAT('/', df_artist.filename_disk), 'total_plays', to_char(a.total_plays, 'FM999,999,999,999'))
ORDER BY a.total_plays DESC)
FROM
artists a
LEFT JOIN directus_files df_artist ON a.art = df_artist.id
WHERE
a.genres = g.id) AS artists,
-- Books tagged with this genre, alphabetical.
(
SELECT
json_agg(json_build_object('title', b.title, 'author', b.author, 'url', b.slug)
ORDER BY b.title ASC)
FROM
books b
JOIN genres_books gb ON gb.books_id = b.id
WHERE
gb.genres_id = g.id) AS books,
-- Movies tagged with this genre, newest first.
(
SELECT
json_agg(json_build_object('title', m.title, 'year', m.year, 'url', m.slug)
ORDER BY m.year DESC)
FROM
movies m
JOIN genres_movies gm ON gm.movies_id = m.id
WHERE
gm.genres_id = g.id) AS movies,
-- Posts tagged with this genre, newest first.
(
SELECT
json_agg(json_build_object('title', p.title, 'date', p.date, 'url', p.slug)
ORDER BY p.date DESC)
FROM
posts_genres pg
LEFT JOIN posts p ON pg.posts_id = p.id
WHERE
pg.genres_id = g.id) AS posts
FROM
genres g
ORDER BY
g.id ASC;

View file

@ -0,0 +1,19 @@
-- The single most recent listen, with artist/genre emoji and the artist page
-- URL. The inner JOIN on artists means a listen whose artist_name has no
-- matching artists row is skipped. total_duration/progress_ticks are NULL
-- placeholders kept for the consuming API shape.
CREATE OR REPLACE VIEW optimized_latest_listen AS
SELECT
  latest.track_name::TEXT AS track_name,
  latest.artist_name::TEXT AS artist_name,
  act.emoji::TEXT AS artist_emoji,
  gen.emoji::TEXT AS genre_emoji,
  act.slug::TEXT AS url,
  NULL::FLOAT AS total_duration,
  NULL::FLOAT AS progress_ticks
FROM listens latest
JOIN artists act ON act.name_string = latest.artist_name
LEFT JOIN genres gen ON gen.id = act.genres
ORDER BY latest.listened_at DESC
LIMIT 1;

View file

@ -0,0 +1,28 @@
-- Listens enriched with artist, album, and genre metadata. DISTINCT ON over
-- the full listen identity (id, listened_at, track, artist, album) collapses
-- duplicate rows produced by the joins — e.g. if more than one artists row
-- shares a name_string, only the first joined row per listen survives.
-- NOTE(review): which duplicate survives is determined only by the ORDER BY
-- columns below, which match the DISTINCT ON keys, so the pick among join
-- duplicates is arbitrary — confirm that is acceptable.
CREATE OR REPLACE VIEW optimized_listens AS SELECT DISTINCT ON (l.id, l.listened_at, l.track_name, l.artist_name, l.album_name)
l.id,
l.listened_at,
l.track_name,
l.artist_name,
l.album_name,
l.album_key,
CONCAT('/', df_art.filename_disk) AS artist_art,
a.genres AS artist_genres,
g.name AS genre_name,
g.slug AS genre_url,
a.country AS artist_country,
a.slug AS artist_url,
CONCAT('/', df_album.filename_disk) AS album_art
FROM
listens l
LEFT JOIN artists a ON (l.artist_name = a.name_string)
LEFT JOIN albums al ON (l.album_key = al.key)
LEFT JOIN directus_files df_art ON (a.art = df_art.id)
LEFT JOIN directus_files df_album ON (al.art = df_album.id)
LEFT JOIN genres g ON (a.genres = g.id)
ORDER BY
l.id,
l.listened_at,
l.track_name,
l.artist_name,
l.album_name;

View file

@ -0,0 +1,20 @@
-- Albums played in the last 30 days, most-played first, with a prebuilt
-- grid tile per album.
CREATE OR REPLACE VIEW month_albums AS
SELECT
  listens.album_name,
  listens.artist_name,
  COUNT(*) AS plays,
  listens.album_art,
  listens.artist_url,
  json_build_object('title', listens.album_name, 'image', listens.album_art, 'url', listens.artist_url, 'alt', CONCAT(listens.album_name, ' by ', listens.artist_name), 'subtext', listens.artist_name) AS grid
FROM optimized_listens listens
WHERE TO_TIMESTAMP(listens.listened_at) >= NOW() - INTERVAL '30 days'
GROUP BY
  listens.album_name,
  listens.artist_name,
  listens.album_art,
  listens.artist_url
ORDER BY plays DESC;

View file

@ -0,0 +1,19 @@
-- Artists played in the last 30 days, most-played first, with their distinct
-- genres and a prebuilt grid tile.
CREATE OR REPLACE VIEW month_artists AS
SELECT
  listens.artist_name,
  COUNT(*) AS plays,
  listens.artist_art,
  listens.artist_url,
  ARRAY_AGG(DISTINCT listens.genre_name) AS genres,
  json_build_object('title', listens.artist_name, 'image', listens.artist_art, 'url', listens.artist_url, 'alt', CONCAT(COUNT(*), ' plays of ', listens.artist_name), 'subtext', CONCAT(COUNT(*), ' plays')) AS grid
FROM optimized_listens listens
WHERE TO_TIMESTAMP(listens.listened_at) >= NOW() - INTERVAL '30 days'
GROUP BY
  listens.artist_name,
  listens.artist_art,
  listens.artist_url
ORDER BY plays DESC;

View file

@ -0,0 +1,16 @@
-- Genres played in the last 30 days, most-played first, with a minimal grid
-- object (alt text + play-count subtext).
CREATE OR REPLACE VIEW month_genres AS
SELECT
  listens.genre_name,
  listens.genre_url,
  COUNT(*) AS plays,
  json_build_object('alt', listens.genre_name, 'subtext', CONCAT(COUNT(*), ' plays')) AS grid
FROM optimized_listens listens
WHERE TO_TIMESTAMP(listens.listened_at) >= NOW() - INTERVAL '30 days'
GROUP BY
  listens.genre_name,
  listens.genre_url
ORDER BY plays DESC;

View file

@ -0,0 +1,37 @@
-- Tracks played in the last 30 days, most-played first. The chart object
-- includes a percentage of the top track's play count, computed via a
-- window MAX over the grouped counts (MAX(COUNT(*)) OVER ()).
CREATE OR REPLACE VIEW month_tracks AS
WITH track_stats AS (
SELECT
ol.track_name,
ol.artist_name,
ol.album_name,
COUNT(*) AS plays,
MAX(ol.listened_at) AS last_listened,
ol.album_art,
ol.artist_url,
-- Highest play count across all tracks in the window; denominator for
-- the percentage below.
MAX(COUNT(*)) OVER () AS most_played
FROM
optimized_listens ol
WHERE
TO_TIMESTAMP(ol.listened_at) >= NOW() - INTERVAL '30 days'
GROUP BY
ol.track_name,
ol.artist_name,
ol.album_name,
ol.album_art,
ol.artist_url
)
SELECT
track_name,
artist_name,
album_name,
plays,
last_listened,
album_art,
artist_url,
json_build_object('title', track_name, 'artist', artist_name, 'url', artist_url, 'plays', plays, 'alt', CONCAT(track_name, ' by ', artist_name), 'subtext', CONCAT(album_name, ' (', plays, ' plays)'), 'percentage', ROUND((plays::decimal / most_played) * 100, 2)) AS chart
FROM
track_stats
ORDER BY
plays DESC,
last_listened DESC;

View file

@ -0,0 +1,23 @@
-- Individual listens from the last 7 days, newest first, with a prebuilt
-- chart object per listen.
CREATE OR REPLACE VIEW recent_tracks AS
SELECT
  listens.id,
  listens.listened_at,
  listens.track_name,
  listens.artist_name,
  listens.album_name,
  listens.album_key,
  listens.artist_art,
  listens.artist_genres,
  listens.genre_name,
  listens.artist_country,
  listens.album_art,
  listens.artist_url,
  listens.genre_url,
  json_build_object('title', listens.track_name, 'subtext', listens.artist_name, 'alt', CONCAT(listens.track_name, ' by ', listens.artist_name), 'url', listens.artist_url, 'image', listens.album_art, 'played_at', listens.listened_at) AS chart
FROM optimized_listens listens
WHERE TO_TIMESTAMP(listens.listened_at) >= NOW() - INTERVAL '7 days'
ORDER BY TO_TIMESTAMP(listens.listened_at) DESC;

View file

@ -0,0 +1,20 @@
-- Albums played in the last 7 days, most-played first, with a prebuilt grid
-- tile per album.
CREATE OR REPLACE VIEW week_albums AS
SELECT
  listens.album_name,
  listens.artist_name,
  COUNT(*) AS plays,
  listens.album_art,
  listens.artist_url,
  json_build_object('title', listens.album_name, 'image', listens.album_art, 'url', listens.artist_url, 'alt', CONCAT(listens.album_name, ' by ', listens.artist_name), 'subtext', listens.artist_name) AS grid
FROM optimized_listens listens
WHERE TO_TIMESTAMP(listens.listened_at) >= NOW() - INTERVAL '7 days'
GROUP BY
  listens.album_name,
  listens.artist_name,
  listens.album_art,
  listens.artist_url
ORDER BY plays DESC;

View file

@ -0,0 +1,18 @@
-- Artists played in the last 7 days, most-played first, with their distinct
-- genres and a prebuilt grid tile.
CREATE OR REPLACE VIEW week_artists AS
SELECT
  listens.artist_name,
  COUNT(*) AS plays,
  listens.artist_art,
  listens.artist_url,
  ARRAY_AGG(DISTINCT listens.genre_name) AS genres,
  json_build_object('title', listens.artist_name, 'image', listens.artist_art, 'url', listens.artist_url, 'alt', CONCAT(COUNT(*), ' plays of ', listens.artist_name), 'subtext', CONCAT(COUNT(*), ' plays')) AS grid
FROM optimized_listens listens
WHERE TO_TIMESTAMP(listens.listened_at) >= NOW() - INTERVAL '7 days'
GROUP BY
  listens.artist_name,
  listens.artist_art,
  listens.artist_url
ORDER BY plays DESC;

View file

@ -0,0 +1,16 @@
-- Genres played in the last 7 days, most-played first, with a minimal grid
-- object (alt text + play-count subtext).
CREATE OR REPLACE VIEW week_genres AS
SELECT
  listens.genre_name,
  listens.genre_url,
  COUNT(*) AS plays,
  json_build_object('alt', listens.genre_name, 'subtext', CONCAT(COUNT(*), ' plays')) AS grid
FROM optimized_listens listens
WHERE TO_TIMESTAMP(listens.listened_at) >= NOW() - INTERVAL '7 days'
GROUP BY
  listens.genre_name,
  listens.genre_url
ORDER BY plays DESC;

View file

@ -0,0 +1,46 @@
-- Tracks played in the last 7 days, most-played first. Like month_tracks but
-- the chart object also carries a RANK() position (ties share a rank) and a
-- percentage of the top track's play count via a window MAX over the
-- grouped counts.
CREATE OR REPLACE VIEW week_tracks AS
WITH track_stats AS (
SELECT
ol.track_name,
ol.artist_name,
ol.album_name,
COUNT(*) AS plays,
MAX(ol.listened_at) AS last_listened,
ol.album_art,
ol.artist_url,
-- Highest play count in the window; denominator for the percentage.
MAX(COUNT(*)) OVER () AS most_played,
-- Position by plays, recency as tiebreaker.
RANK() OVER (ORDER BY COUNT(*) DESC, MAX(ol.listened_at) DESC) AS rank
FROM
optimized_listens ol
WHERE
TO_TIMESTAMP(ol.listened_at) >= NOW() - INTERVAL '7 days'
GROUP BY
ol.track_name,
ol.artist_name,
ol.album_name,
ol.album_art,
ol.artist_url
)
SELECT
track_name,
artist_name,
album_name,
plays,
last_listened,
album_art,
artist_url,
json_build_object(
'title', track_name,
'artist', artist_name,
'url', artist_url,
'plays', plays,
'alt', CONCAT(track_name, ' by ', artist_name),
'subtext', CONCAT(album_name, ' (', plays, ' plays)'),
'percentage', ROUND((plays::decimal / most_played) * 100, 2),
'rank', rank
) AS chart
FROM
track_stats
ORDER BY
plays DESC,
last_listened DESC;

View file

@ -0,0 +1,248 @@
-- Single-row view aggregating recent activity into one JSON payload:
--   recentMusic:       top artist/album of the week (ranks 1-2) and the
--                      runner-up pair (ranks 3-4)
--   recentWatchedRead: latest movie/show/book (ranks 1-3) and the
--                      next-latest of each (ranks 4-6)
-- Fix: PostgreSQL does not guarantee that a CTE's internal ORDER BY
-- survives into an outer query, so the ordering is re-stated at every
-- LIMIT/OFFSET site below; previously those subselects could pick
-- arbitrary rows from ordered_artists / ordered_albums.
CREATE OR REPLACE VIEW optimized_recent_media AS
WITH ordered_artists AS (
  SELECT
    wa.artist_name,
    wa.artist_art,
    wa.artist_url,
    wa.plays,
    json_build_object(
      'title', wa.artist_name,
      'image', wa.artist_art,
      'url', wa.artist_url,
      'alt', CONCAT(wa.plays, ' plays of ', wa.artist_name),
      'subtext', CONCAT(wa.plays, ' plays')
    ) AS grid
  FROM week_artists wa
  ORDER BY wa.plays DESC, wa.artist_name ASC
),
ordered_albums AS (
  SELECT
    wa.album_name,
    wa.album_art,
    wa.artist_name,
    wa.artist_url,
    wa.plays,
    json_build_object(
      'title', wa.album_name,
      'image', wa.album_art,
      'url', wa.artist_url,
      'alt', CONCAT(wa.album_name, ' by ', wa.artist_name, ' (', wa.plays, ' plays)'),
      'subtext', wa.artist_name
    ) AS grid
  FROM week_albums wa
  ORDER BY wa.plays DESC, wa.album_name ASC
),
recent_music AS (
  SELECT * FROM (
    (
      SELECT
        artist_name AS title,
        artist_art AS image,
        artist_url AS url,
        'music' AS type,
        1 AS rank,
        grid
      FROM ordered_artists
      ORDER BY plays DESC, artist_name ASC
      LIMIT 1
    )
    UNION ALL
    (
      SELECT
        album_name AS title,
        album_art AS image,
        artist_url AS url,
        'music' AS type,
        2 AS rank,
        grid
      FROM ordered_albums
      ORDER BY plays DESC, album_name ASC
      LIMIT 1
    )
    UNION ALL
    (
      SELECT
        artist_name AS title,
        artist_art AS image,
        artist_url AS url,
        'music' AS type,
        3 AS rank,
        grid
      FROM ordered_artists
      ORDER BY plays DESC, artist_name ASC
      OFFSET 1 LIMIT 1
    )
    UNION ALL
    (
      SELECT
        album_name AS title,
        album_art AS image,
        artist_url AS url,
        'music' AS type,
        4 AS rank,
        grid
      FROM ordered_albums
      ORDER BY plays DESC, album_name ASC
      OFFSET 1 LIMIT 1
    )
  ) AS recent_music_subquery
),
recent_watched_read AS (
  -- Each subselect here carries its own ORDER BY ... LIMIT, so ordering
  -- is already guaranteed within the same query level.
  SELECT * FROM (
    (
      SELECT
        om.title,
        om.image,
        om.url,
        'tv' AS type,
        1 AS rank,
        json_build_object(
          'title', null,
          'url', om.url,
          'image', om.image,
          'backdrop', om.backdrop,
          'alt', CONCAT('Poster from ', om.title, ' (', om.year, ')'),
          'subtext', CASE WHEN om.rating IS NOT NULL THEN
            om.rating::text
          ELSE
            om.year::text
          END
        ) AS grid
      FROM optimized_movies om
      WHERE om.last_watched IS NOT NULL
      ORDER BY om.last_watched DESC, om.title ASC
      LIMIT 1
    )
    UNION ALL
    (
      SELECT
        os.title,
        os.image,
        os.url,
        'tv' AS type,
        2 AS rank,
        json_build_object(
          'title', null,
          'image', os.image,
          'url', os.url,
          'alt', CONCAT('Poster from ', os.title),
          'subtext', (
            SELECT CONCAT('S', e.season_number, 'E', e.episode_number)
            FROM episodes e
            WHERE e.show = os.id
            ORDER BY e.last_watched_at DESC, e.season_number DESC, e.episode_number DESC
            LIMIT 1
          )
        ) AS grid
      FROM optimized_shows os
      WHERE os.last_watched_at IS NOT NULL
      ORDER BY os.last_watched_at DESC, os.title ASC
      LIMIT 1
    )
    UNION ALL
    (
      SELECT
        ob.title,
        ob.image,
        ob.url,
        'books' AS type,
        3 AS rank,
        json_build_object(
          'title', null,
          'image', ob.image,
          'url', ob.url,
          'alt', CONCAT('Book cover from ', ob.title, ' by ', ob.author),
          'subtext', CASE WHEN ob.rating IS NOT NULL THEN
            ob.rating
          ELSE
            NULL
          END
        ) AS grid
      FROM optimized_books ob
      WHERE ob.status = 'finished'
      ORDER BY ob.date_finished DESC, ob.title ASC
      LIMIT 1
    )
    UNION ALL
    (
      SELECT
        om.title,
        om.image,
        om.url,
        'tv' AS type,
        4 AS rank,
        json_build_object(
          'title', null,
          'url', om.url,
          'image', om.image,
          'backdrop', om.backdrop,
          'alt', CONCAT('Poster from ', om.title, ' (', om.year, ')'),
          'subtext', CASE WHEN om.rating IS NOT NULL THEN
            om.rating::text
          ELSE
            om.year::text
          END
        ) AS grid
      FROM optimized_movies om
      WHERE om.last_watched IS NOT NULL
      ORDER BY om.last_watched DESC, om.title ASC
      OFFSET 1 LIMIT 1
    )
    UNION ALL
    (
      SELECT
        os.title,
        os.image,
        os.url,
        'tv' AS type,
        5 AS rank,
        json_build_object(
          'title', null,
          'image', os.image,
          'url', os.url,
          'alt', CONCAT('Poster from ', os.title),
          'subtext', (
            SELECT CONCAT('S', e.season_number, 'E', e.episode_number)
            FROM episodes e
            WHERE e.show = os.id
            ORDER BY e.last_watched_at DESC, e.season_number DESC, e.episode_number DESC
            LIMIT 1
          )
        ) AS grid
      FROM optimized_shows os
      WHERE os.last_watched_at IS NOT NULL
      ORDER BY os.last_watched_at DESC, os.title ASC
      OFFSET 1 LIMIT 1
    )
    UNION ALL
    (
      SELECT
        ob.title,
        ob.image,
        ob.url,
        'books' AS type,
        6 AS rank,
        json_build_object(
          'title', null,
          'image', ob.image,
          'url', ob.url,
          'alt', CONCAT('Book cover from ', ob.title, ' by ', ob.author),
          'subtext', CASE WHEN ob.rating IS NOT NULL THEN
            ob.rating
          ELSE
            NULL
          END
        ) AS grid
      FROM optimized_books ob
      WHERE ob.status = 'finished'
      ORDER BY ob.date_finished DESC, ob.title ASC
      OFFSET 1 LIMIT 1
    )
  ) AS recent_watched_read_subquery
)
SELECT json_build_object(
  'recentMusic', (
    SELECT json_agg(m.* ORDER BY m.rank)
    FROM recent_music m
  ),
  'recentWatchedRead', (
    SELECT json_agg(w.* ORDER BY w.rank)
    FROM recent_watched_read w
  )
) AS recent_activity;

View file

@ -0,0 +1,10 @@
-- Most recently watched episode per show, as both numeric parts and a
-- formatted 'S<season>E<episode>' code.
CREATE OR REPLACE VIEW optimized_last_watched_episodes AS
SELECT
  ranked.show_id,
  ranked.season_number,
  ranked.episode_number,
  ranked.last_watched_at,
  ranked.last_watched_episode
FROM (
  SELECT
    e.show AS show_id,
    e.season_number,
    e.episode_number,
    e.last_watched_at,
    CONCAT('S', e.season_number, 'E', e.episode_number) AS last_watched_episode,
    -- Keep only the newest watch per show (ties broken arbitrarily,
    -- matching DISTINCT ON semantics).
    ROW_NUMBER() OVER (PARTITION BY e.show ORDER BY e.last_watched_at DESC) AS rn
  FROM episodes e
  WHERE e.last_watched_at IS NOT NULL
) ranked
WHERE ranked.rn = 1
ORDER BY ranked.show_id;

View file

@ -0,0 +1,26 @@
-- For every scheduled episode, annotate the show's next scheduled episode
-- (code + air date). The two correlated subqueries resolve to the same
-- value for every row of a given show.
-- NOTE(review): status 'aired' is included in next_*, so the "next"
-- episode can point at one that already aired — confirm intended.
CREATE OR REPLACE VIEW optimized_scheduled_episodes AS
SELECT
  se.show_id,
  se.season_number,
  se.episode_number,
  se.status,
  se.air_date,
  (
    SELECT CONCAT('S', se2.season_number, 'E', se2.episode_number)
    FROM scheduled_episodes se2
    WHERE se2.show_id = se.show_id
      AND se2.status IN ('upcoming', 'aired')
    ORDER BY se2.air_date ASC
    LIMIT 1
  ) AS next_scheduled_episode,
  (
    SELECT se2.air_date
    FROM scheduled_episodes se2
    WHERE se2.show_id = se.show_id
      AND se2.status IN ('upcoming', 'aired')
    ORDER BY se2.air_date ASC
    LIMIT 1
  ) AS next_air_date
FROM scheduled_episodes se
WHERE se.status IN ('upcoming', 'aired')
-- No aggregates are used, so this GROUP BY over every selected column
-- acts purely as a DISTINCT/de-duplication step.
GROUP BY se.show_id, se.season_number, se.episode_number, se.status, se.air_date;

View file

@ -0,0 +1,145 @@
-- Single-row view with two JSON buckets of ongoing shows:
--   watching:  ongoing shows with at least one watched episode, sorted by
--              the next unwatched episode's air date when it is in the
--              future, otherwise by the date the show was first watched
--   unstarted: ongoing shows that have scheduled episodes but no watched
--              episodes yet, sorted by title
-- Fix: added the terminating semicolon (every sibling view definition in
-- this migration set ends its statement with one).
CREATE OR REPLACE VIEW optimized_scheduled_shows AS
SELECT json_build_object(
  'watching', (
    SELECT json_agg(watching) FROM (
      SELECT
        s.id,
        s.tmdb_id,
        s.title,
        s.year,
        s.ongoing,
        s.slug AS url,
        CONCAT('/', df_art.filename_disk) AS image,
        CONCAT('/', df_backdrop.filename_disk) AS backdrop,
        json_build_object(
          'title', s.title,
          'image', CONCAT('/', df_art.filename_disk),
          'backdrop', CONCAT('/', df_backdrop.filename_disk),
          'url', s.slug,
          'alt', CONCAT('Poster from ', s.title),
          -- Subtext preference: next unwatched scheduled episode, then the
          -- most recently watched episode, then the show's year.
          'subtext', COALESCE(
            (SELECT CONCAT(
              'S', se.season_number, 'E', se.episode_number, ' • ',
              CASE
                WHEN EXTRACT(YEAR FROM se.air_date) < EXTRACT(YEAR FROM CURRENT_DATE)
                  THEN TO_CHAR(se.air_date, 'FMMM/FMDD/YY')
                ELSE TO_CHAR(se.air_date, 'FMMM/FMDD')
              END
            )
            FROM scheduled_episodes se
            WHERE se.show_id = s.id
              AND se.status IN ('upcoming', 'aired')
              AND NOT EXISTS (
                SELECT 1 FROM episodes e
                WHERE e.show = s.id
                  AND e.season_number = se.season_number
                  AND e.episode_number = se.episode_number
              )
            ORDER BY se.season_number ASC, se.episode_number ASC
            LIMIT 1),
            (SELECT CONCAT(
              'S', e.season_number, 'E', e.episode_number, ' • ',
              CASE
                WHEN EXTRACT(YEAR FROM e.last_watched_at) < EXTRACT(YEAR FROM CURRENT_DATE)
                  THEN TO_CHAR(e.last_watched_at, 'FMMM/FMDD/YY')
                ELSE TO_CHAR(e.last_watched_at, 'FMMM/FMDD')
              END
            )
            FROM episodes e
            WHERE e.show = s.id
            ORDER BY e.last_watched_at DESC, e.season_number DESC, e.episode_number DESC
            LIMIT 1),
            s.year::text
          )
        ) AS grid,
        -- Sort key: next unwatched episode's air date when it is still in
        -- the future, otherwise the earliest watch date for the show.
        CASE
          WHEN (
            SELECT se.air_date
            FROM scheduled_episodes se
            WHERE se.show_id = s.id
              AND se.status IN ('upcoming', 'aired')
              AND NOT EXISTS (
                SELECT 1 FROM episodes e
                WHERE e.show = s.id
                  AND e.season_number = se.season_number
                  AND e.episode_number = se.episode_number
              )
            ORDER BY se.season_number ASC, se.episode_number ASC
            LIMIT 1
          ) >= NOW()
          THEN (
            SELECT se.air_date::timestamp
            FROM scheduled_episodes se
            WHERE se.show_id = s.id
              AND se.status IN ('upcoming', 'aired')
              AND NOT EXISTS (
                SELECT 1 FROM episodes e
                WHERE e.show = s.id
                  AND e.season_number = se.season_number
                  AND e.episode_number = se.episode_number
              )
            ORDER BY se.season_number ASC, se.episode_number ASC
            LIMIT 1
          )
          ELSE (
            SELECT MIN(e.last_watched_at)::timestamp
            FROM episodes e
            WHERE e.show = s.id
          )
        END AS sort_date
      FROM shows s
      LEFT JOIN directus_files df_art ON s.art = df_art.id
      LEFT JOIN directus_files df_backdrop ON s.backdrop = df_backdrop.id
      WHERE s.ongoing = true
        AND EXISTS (
          SELECT 1
          FROM scheduled_episodes se
          WHERE se.show_id = s.id
            AND se.status IN ('upcoming', 'aired')
        )
        AND EXISTS (
          SELECT 1
          FROM episodes e
          WHERE e.show = s.id
        )
      ORDER BY sort_date ASC NULLS LAST, s.title ASC NULLS LAST
    ) watching
  ),
  'unstarted', (
    SELECT json_agg(unstarted) FROM (
      SELECT
        s.id,
        s.tmdb_id,
        s.title,
        s.year,
        s.ongoing,
        s.slug AS url,
        CONCAT('/', df_art.filename_disk) AS image,
        CONCAT('/', df_backdrop.filename_disk) AS backdrop,
        json_build_object(
          'title', s.title,
          'image', CONCAT('/', df_art.filename_disk),
          'backdrop', CONCAT('/', df_backdrop.filename_disk),
          'url', s.slug,
          'alt', CONCAT('Poster from ', s.title),
          'subtext', s.year::text
        ) AS grid
      FROM shows s
      LEFT JOIN directus_files df_art ON s.art = df_art.id
      LEFT JOIN directus_files df_backdrop ON s.backdrop = df_backdrop.id
      WHERE s.ongoing = true
        AND EXISTS (
          SELECT 1
          FROM scheduled_episodes se
          WHERE se.show_id = s.id
            AND se.status IN ('upcoming', 'aired')
        )
        AND NOT EXISTS (
          SELECT 1
          FROM episodes e
          WHERE e.show = s.id
        )
      ORDER BY s.title ASC
    ) unstarted
  )
) AS scheduled_shows;

View file

@ -0,0 +1,125 @@
-- One row per show with denormalized media URLs, a JSON "grid" payload,
-- latest-episode metadata, and JSON arrays of related entities (movies,
-- books, posts, tags, related shows, artists).
CREATE OR REPLACE VIEW optimized_shows AS
SELECT
  s.id,
  s.tmdb_id,
  s.title,
  s.year,
  s.favorite,
  s.tattoo,
  s.description,
  s.review,
  s.ongoing,
  s.slug AS url,
  CONCAT('/', df_art.filename_disk) AS image,
  CONCAT('/', df_backdrop.filename_disk) AS backdrop,
  json_build_object(
    'title', NULL,
    'image', CONCAT('/', df_art.filename_disk),
    'backdrop', CONCAT('/', df_backdrop.filename_disk),
    'url', s.slug,
    'alt', CONCAT('Poster from ', s.title),
    -- Grid subtext: the latest watched episode when the show was watched
    -- within the last 90 days, otherwise just the show's year.
    'subtext', CASE
      WHEN (
        SELECT MAX(e1.last_watched_at)
        FROM episodes e1
        WHERE e1.show = s.id
      ) >= NOW() - INTERVAL '90 days' THEN
        (SELECT CONCAT('S', e2.season_number, 'E', e2.episode_number)
         FROM episodes e2
         WHERE e2.show = s.id
         ORDER BY e2.last_watched_at DESC, e2.season_number DESC, e2.episode_number DESC
         LIMIT 1)
      ELSE
        s.year::text
    END
  ) AS grid,
  -- Latest-watched-episode details for the show page.
  json_build_object(
    'title', s.title,
    'year', s.year,
    'url', s.slug,
    'image', CONCAT('/', df_art.filename_disk),
    'backdrop', CONCAT('/', df_backdrop.filename_disk),
    'formatted_episode', COALESCE((
      SELECT CONCAT('S', e2.season_number, 'E', e2.episode_number)
      FROM episodes e2
      WHERE e2.show = s.id
      ORDER BY e2.last_watched_at DESC, e2.season_number DESC, e2.episode_number DESC
      LIMIT 1
    ), NULL),
    'last_watched_at', (
      SELECT MAX(e3.last_watched_at)
      FROM episodes e3
      WHERE e3.show = s.id
    )
  ) AS episode,
  (
    SELECT
      json_agg(json_build_object('title', m.title, 'year', m.year, 'url', m.slug)
      ORDER BY m.year DESC)
    FROM
      shows_movies sm
      LEFT JOIN movies m ON sm.movies_id = m.id
    WHERE
      sm.shows_id = s.id
  ) AS movies,
  (
    SELECT
      json_agg(json_build_object('title', b.title, 'author', b.author, 'url', b.slug)
      ORDER BY b.title ASC)
    FROM
      shows_books sb
      LEFT JOIN books b ON sb.books_id = b.id
    WHERE
      sb.shows_id = s.id
  ) AS books,
  (
    SELECT
      json_agg(json_build_object('title', p.title, 'date', p.date, 'url', p.slug)
      ORDER BY p.date DESC)
    FROM
      posts_shows ps
      LEFT JOIN posts p ON ps.posts_id = p.id
    WHERE
      ps.shows_id = s.id
  ) AS posts,
  (
    SELECT
      array_agg(t.name)
    FROM
      shows_tags st
      LEFT JOIN tags t ON st.tags_id = t.id
    WHERE
      st.shows_id = s.id
  ) AS tags,
  (
    SELECT
      json_agg(json_build_object('title', rs.title, 'year', rs.year, 'url', rs.slug)
      ORDER BY rs.year DESC)
    FROM
      related_shows sr
      LEFT JOIN shows rs ON sr.related_shows_id = rs.id
    WHERE
      sr.shows_id = s.id
  ) AS related_shows,
  (
    SELECT
      json_agg(json_build_object('name', a.name_string, 'url', a.slug, 'country', a.country, 'total_plays', a.total_plays)
      ORDER BY a.name_string ASC)
    FROM
      shows_artists sa
      LEFT JOIN artists a ON sa.artists_id = a.id
    WHERE
      sa.shows_id = s.id
  ) AS artists,
  MAX(e.last_watched_at) AS last_watched_at
FROM
  shows s
  LEFT JOIN episodes e ON s.id = e.show
  LEFT JOIN directus_files df_art ON s.art = df_art.id
  LEFT JOIN directus_files df_backdrop ON s.backdrop = df_backdrop.id
GROUP BY
  s.id,
  df_art.filename_disk,
  df_backdrop.filename_disk
-- NOTE(review): DESC ordering puts NULL last_watched_at first in Postgres,
-- so never-watched shows sort ahead of recently watched ones — confirm
-- this is intended (NULLS LAST would invert it).
ORDER BY
  MAX(e.last_watched_at) DESC;

View file

@ -0,0 +1,14 @@
slotmem_shm
http2
proxy_fcgi
deflate
expires
brotli
socache_shmcb
headers
remoteip
mpm_worker
rewrite
ssl
proxy
proxy_http

View file

@ -0,0 +1,9 @@
php8.3-cli
php8.3-fpm
php8.3-igbinary
php8.3-mbstring
php8.3-mysql
php8.3-opcache
php8.3-readline
php8.3-redis
php8.3-xml

146
scripts/setup.sh Executable file
View file

@ -0,0 +1,146 @@
#!/bin/bash
# Local environment bootstrap: pulls secrets from 1Password, writes .env,
# renders config templates into ./generated, and prints the manual server
# steps (apache modules, php extensions, crontabs) that remain.

# ANSI-C quoting ($'...') so the escape sequences are real ESC bytes;
# "\033" inside plain double quotes is printed literally by bash's `echo`.
COLOR_BLUE=$'\033[38;2;51;100;255m'
COLOR_RESET=$'\033[0m'

echo "${COLOR_BLUE}"
echo "=========================================="
echo "     setting up coryd.dev locally         "
echo "=========================================="
echo "${COLOR_RESET}"

# step 1: retrieve and build .env file from 1password
echo "${COLOR_BLUE}signing in to 1password...${COLOR_RESET}"
eval "$(op signin)"

echo "${COLOR_BLUE}fetching secrets from 1password...${COLOR_RESET}"
# ARTIST_IMPORT_TOKEN was previously substituted into templates but never
# fetched here, so it always rendered empty — it is now included.
SECRETS_JSON='{
  "POSTGREST_URL": "{{ op://Private/coryd.dev secrets/POSTGREST_URL }}",
  "POSTGREST_API_KEY": "{{ op://Private/coryd.dev secrets/POSTGREST_API_KEY }}",
  "MASTODON_ACCESS_TOKEN": "{{ op://Private/coryd.dev secrets/MASTODON_ACCESS_TOKEN }}",
  "MASTODON_SYNDICATION_TOKEN": "{{ op://Private/coryd.dev secrets/MASTODON_SYNDICATION_TOKEN }}",
  "FORWARDEMAIL_API_KEY": "{{ op://Private/coryd.dev secrets/FORWARDEMAIL_API_KEY }}",
  "BOOK_IMPORT_TOKEN": "{{ op://Private/coryd.dev secrets/BOOK_IMPORT_TOKEN }}",
  "WATCHING_IMPORT_TOKEN": "{{ op://Private/coryd.dev secrets/WATCHING_IMPORT_TOKEN }}",
  "TMDB_API_KEY": "{{ op://Private/coryd.dev secrets/TMDB_API_KEY }}",
  "SEASONS_IMPORT_TOKEN": "{{ op://Private/coryd.dev secrets/SEASONS_IMPORT_TOKEN }}",
  "NAVIDROME_SCROBBLE_TOKEN": "{{ op://Private/coryd.dev secrets/NAVIDROME_SCROBBLE_TOKEN }}",
  "NAVIDROME_API_URL": "{{ op://Private/coryd.dev secrets/NAVIDROME_API_URL }}",
  "NAVIDROME_API_TOKEN": "{{ op://Private/coryd.dev secrets/NAVIDROME_API_TOKEN }}",
  "ARTIST_IMPORT_TOKEN": "{{ op://Private/coryd.dev secrets/ARTIST_IMPORT_TOKEN }}",
  "COOLIFY_REBUILD_TOKEN": "{{ op://Private/coryd.dev secrets/COOLIFY_REBUILD_TOKEN }}"
}'

SECRETS=$(echo "$SECRETS_JSON" | op inject)
if [ -z "$SECRETS" ]; then
  echo "error: failed to retrieve secrets from 1password."
  exit 1
fi

echo "${COLOR_BLUE}writing .env file...${COLOR_RESET}"
echo "$SECRETS" | jq -r 'to_entries | .[] | "\(.key)=\(.value)"' > .env

# load environment variables from .env
# `set -a` exports every variable assigned while sourcing; this is robust
# against values the old `export $(grep ... | xargs)` approach mangled.
set -a
# shellcheck disable=SC1091
. ./.env
set +a

# step 2: generate final config files from templates
echo "${COLOR_BLUE}generating configuration files from templates...${COLOR_RESET}"
mkdir -p generated

for file in scripts/templates/*.template; do
  [ -e "$file" ] || continue
  new_file="generated/$(basename "${file%.template}")"
  cp "$file" "$new_file"

  # use awk to replace placeholders safely (values may contain characters
  # that would be special to sed)
  awk -v POSTGREST_URL="$POSTGREST_URL" \
      -v POSTGREST_API_KEY="$POSTGREST_API_KEY" \
      -v FORWARDEMAIL_API_KEY="$FORWARDEMAIL_API_KEY" \
      -v MASTODON_ACCESS_TOKEN="$MASTODON_ACCESS_TOKEN" \
      -v MASTODON_SYNDICATION_TOKEN="$MASTODON_SYNDICATION_TOKEN" \
      -v BOOK_IMPORT_TOKEN="$BOOK_IMPORT_TOKEN" \
      -v WATCHING_IMPORT_TOKEN="$WATCHING_IMPORT_TOKEN" \
      -v TMDB_API_KEY="$TMDB_API_KEY" \
      -v NAVIDROME_SCROBBLE_TOKEN="$NAVIDROME_SCROBBLE_TOKEN" \
      -v SEASONS_IMPORT_TOKEN="$SEASONS_IMPORT_TOKEN" \
      -v NAVIDROME_API_URL="$NAVIDROME_API_URL" \
      -v NAVIDROME_API_TOKEN="$NAVIDROME_API_TOKEN" \
      -v ARTIST_IMPORT_TOKEN="$ARTIST_IMPORT_TOKEN" \
      -v COOLIFY_REBUILD_TOKEN="$COOLIFY_REBUILD_TOKEN" \
      '{gsub(/{{POSTGREST_URL}}/, POSTGREST_URL);
        gsub(/{{POSTGREST_API_KEY}}/, POSTGREST_API_KEY);
        gsub(/{{FORWARDEMAIL_API_KEY}}/, FORWARDEMAIL_API_KEY);
        gsub(/{{MASTODON_ACCESS_TOKEN}}/, MASTODON_ACCESS_TOKEN);
        gsub(/{{MASTODON_SYNDICATION_TOKEN}}/, MASTODON_SYNDICATION_TOKEN);
        gsub(/{{BOOK_IMPORT_TOKEN}}/, BOOK_IMPORT_TOKEN);
        gsub(/{{WATCHING_IMPORT_TOKEN}}/, WATCHING_IMPORT_TOKEN);
        gsub(/{{TMDB_API_KEY}}/, TMDB_API_KEY);
        gsub(/{{NAVIDROME_SCROBBLE_TOKEN}}/, NAVIDROME_SCROBBLE_TOKEN);
        gsub(/{{SEASONS_IMPORT_TOKEN}}/, SEASONS_IMPORT_TOKEN);
        gsub(/{{NAVIDROME_API_URL}}/, NAVIDROME_API_URL);
        gsub(/{{NAVIDROME_API_TOKEN}}/, NAVIDROME_API_TOKEN);
        gsub(/{{ARTIST_IMPORT_TOKEN}}/, ARTIST_IMPORT_TOKEN);
        gsub(/{{COOLIFY_REBUILD_TOKEN}}/, COOLIFY_REBUILD_TOKEN);
        print}' "$new_file" > tmpfile && mv tmpfile "$new_file"
done

echo "${COLOR_BLUE}all configurations generated in the 'generated' folder.${COLOR_RESET}"

# step 3: ensure apache_modules.list exists
MODULES_LIST="scripts/lists/apache_modules.list"
if [ ! -f "$MODULES_LIST" ]; then
  echo "apache_modules.list not found, generating it..."
  a2query -m | awk '{print $1}' > "$MODULES_LIST"
fi

# step 4: ensure php_extensions.list exists
PHP_EXTENSIONS_LIST="scripts/lists/php_extensions.list"
if [ ! -f "$PHP_EXTENSIONS_LIST" ]; then
  echo "php_extensions.list not found, generating it..."
  dpkg --get-selections | awk '/php8.3/ {print $1}' > "$PHP_EXTENSIONS_LIST"
fi

# step 5: display manual installation instructions
echo "${COLOR_BLUE}"
echo "=========================================="
echo "            setup complete!               "
echo "     your local environment is ready! 🚀  "
echo "=========================================="
echo "${COLOR_RESET}"

echo "${COLOR_BLUE}next steps:${COLOR_RESET}"
echo "1⃣ move the coryd.dev.conf apache configuration to the correct location:"
echo "   sudo a2ensite coryd.dev.conf"
echo "   sudo systemctl reload apache2"
echo ""
echo "2⃣ enable the required apache modules:"
if [ -f "$MODULES_LIST" ]; then
  REQUIRED_MODULES=$(tr '\n' ' ' < "$MODULES_LIST" | sed 's/ *$//')
  if [ -n "$REQUIRED_MODULES" ]; then
    echo "   sudo a2enmod $REQUIRED_MODULES && sudo systemctl restart apache2"
  else
    echo "   no required modules found."
  fi
else
  echo "   error: apache_modules.list not found."
fi
echo ""
echo "3⃣ install the required php extensions:"
if [ -f "$PHP_EXTENSIONS_LIST" ]; then
  REQUIRED_PHP_EXTENSIONS=$(tr '\n' ' ' < "$PHP_EXTENSIONS_LIST" | sed 's/ *$//')
  if [ -n "$REQUIRED_PHP_EXTENSIONS" ]; then
    echo "   sudo apt install -y $REQUIRED_PHP_EXTENSIONS && sudo systemctl restart php8.3-fpm"
  else
    echo "   no required php extensions found."
  fi
else
  echo "   error: php_extensions.list not found."
fi
echo ""
echo "4⃣ apply crontabs manually:"
echo "   root: crontab -e"
echo "   www-data: sudo crontab -u www-data -e"

echo "${COLOR_BLUE}all done! 🎉${COLOR_RESET}"

View file

@ -0,0 +1,53 @@
# Bare-domain HTTP: redirect everything to the canonical https://www host.
<VirtualHost *:80>
  ServerAdmin hi@coryd.dev
  ServerName coryd.dev
  Redirect permanent / https://www.coryd.dev/
</VirtualHost>

# Bare-domain HTTPS: same redirect, with TLS so the redirect itself is secure.
<VirtualHost *:443>
  ServerAdmin hi@coryd.dev
  ServerName coryd.dev
  Redirect permanent / https://www.coryd.dev/

  SSLEngine on
  SSLCertificateFile /etc/letsencrypt/live/coryd.dev/fullchain.pem
  SSLCertificateKeyFile /etc/letsencrypt/live/coryd.dev/privkey.pem
</VirtualHost>

# Canonical site. {{PLACEHOLDER}} values are substituted by scripts/setup.sh.
<VirtualHost *:443>
  ServerAdmin hi@coryd.dev
  ServerName www.coryd.dev
  DocumentRoot /var/www/coryd.dev

  SSLEngine on
  SSLCertificateFile /etc/letsencrypt/live/coryd.dev/fullchain.pem
  SSLCertificateKeyFile /etc/letsencrypt/live/coryd.dev/privkey.pem

  # Secrets exposed to PHP via the environment (read by the api/ scripts).
  SetEnv POSTGREST_URL "{{POSTGREST_URL}}"
  SetEnv POSTGREST_API_KEY "{{POSTGREST_API_KEY}}"
  SetEnv BASE_URL "https://www.coryd.dev"
  SetEnv FORWARDEMAIL_API_KEY "{{FORWARDEMAIL_API_KEY}}"
  SetEnv MASTODON_ACCESS_TOKEN "{{MASTODON_ACCESS_TOKEN}}"
  SetEnv MASTODON_SYNDICATION_TOKEN "{{MASTODON_SYNDICATION_TOKEN}}"
  SetEnv BOOK_IMPORT_TOKEN "{{BOOK_IMPORT_TOKEN}}"
  SetEnv WATCHING_IMPORT_TOKEN "{{WATCHING_IMPORT_TOKEN}}"
  SetEnv TMDB_API_KEY "{{TMDB_API_KEY}}"
  SetEnv SEASONS_IMPORT_TOKEN "{{SEASONS_IMPORT_TOKEN}}"
  SetEnv NAVIDROME_SCROBBLE_TOKEN "{{NAVIDROME_SCROBBLE_TOKEN}}"
  SetEnv NAVIDROME_API_URL "{{NAVIDROME_API_URL}}"
  SetEnv NAVIDROME_API_TOKEN "{{NAVIDROME_API_TOKEN}}"
  SetEnv ARTIST_IMPORT_TOKEN "{{ARTIST_IMPORT_TOKEN}}"

  # Apache strips the Authorization header from CGI/FastCGI by default;
  # capture and re-set it so PHP sees bearer tokens on API requests.
  SetEnvIf Authorization "(.*)" HTTP_AUTHORIZATION=$1
  RequestHeader set Authorization "%{HTTP_AUTHORIZATION}e" env=HTTP_AUTHORIZATION

  <Directory /var/www/coryd.dev/>
    # NOTE(review): `Indexes` enables directory listings for any path
    # without an index file — confirm this is intended on a public host.
    Options Indexes FollowSymLinks MultiViews
    AllowOverride All
    Require all granted
  </Directory>

  # Route .php requests to PHP-FPM over its unix socket.
  <FilesMatch \.php$>
    SetHandler "proxy:unix:/var/run/php/php8.3-fpm.sock|fcgi://localhost/"
  </FilesMatch>
</VirtualHost>

View file

@ -0,0 +1 @@
# Renew Let's Encrypt certificates nightly at 02:00; reload Apache so new certs take effect.
0 2 * * * certbot renew --quiet && systemctl reload apache2

View file

@ -0,0 +1,4 @@
# Every 15 min: syndicate new posts to Mastodon. {{TOKENS}} are filled in by scripts/setup.sh.
*/15 * * * * curl -X POST -H "Authorization: Bearer {{MASTODON_ACCESS_TOKEN}}" -H "Content-Type: application/json" https://www.coryd.dev/api/mastodon.php
# Hourly: trigger a Coolify redeploy of the static site build.
0 * * * * curl -X POST "https://apps.coryd.dev/api/v1/deploy?uuid=q004wcg840s0s88g8cwo8wkg&force=true" -H "Authorization: Bearer {{COOLIFY_REBUILD_TOKEN}}" -H "Content-Type: application/json" >/dev/null 2>&1
# Every 3 min: import recent Navidrome scrobbles.
*/3 * * * * curl -X POST -H "Authorization: Bearer {{NAVIDROME_SCROBBLE_TOKEN}}" https://www.coryd.dev/api/scrobble.php
# Daily at midnight: import upcoming TV season/episode schedules.
0 0 * * * curl -X POST -H "Authorization: Bearer {{SEASONS_IMPORT_TOKEN}}" https://www.coryd.dev/api/seasons-import.php

16
server/utils/icons.php Normal file
View file

@ -0,0 +1,16 @@
<?php
/**
 * Return inline SVG markup for a named Tabler icon.
 *
 * Fix: $class and $size were previously accepted but silently ignored —
 * they are now applied to the returned SVG. Default arguments produce
 * output identical to the old behavior.
 *
 * @param string $iconName Key into the icon map (e.g. 'article', 'movie').
 * @param string $class    Extra CSS class(es) appended to the SVG's class attribute.
 * @param int    $size     Rendered width/height in pixels (icons are authored at 24).
 *
 * @return string SVG markup, or an HTML placeholder span when the icon is unknown.
 */
function getTablerIcon($iconName, $class = '', $size = 24)
{
  $icons = [
    'article' => '<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="icon icon-tabler icons-tabler-outline icon-tabler-article"><path stroke="none" d="M0 0h24v24H0z" fill="none"/><path d="M3 4m0 2a2 2 0 0 1 2 -2h14a2 2 0 0 1 2 2v12a2 2 0 0 1 -2 2h-14a2 2 0 0 1 -2 -2z" /><path d="M7 8h10" /><path d="M7 12h10" /><path d="M7 16h10" /></svg>',
    'books' => '<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="icon icon-tabler icons-tabler-outline icon-tabler-books"><path stroke="none" d="M0 0h24v24H0z" fill="none"/><path d="M5 4m0 1a1 1 0 0 1 1 -1h2a1 1 0 0 1 1 1v14a1 1 0 0 1 -1 1h-2a1 1 0 0 1 -1 -1z" /><path d="M9 4m0 1a1 1 0 0 1 1 -1h2a1 1 0 0 1 1 1v14a1 1 0 0 1 -1 1h-2a1 1 0 0 1 -1 -1z" /><path d="M5 8h4" /><path d="M9 16h4" /><path d="M13.803 4.56l2.184 -.53c.562 -.135 1.133 .19 1.282 .732l3.695 13.418a1.02 1.02 0 0 1 -.634 1.219l-.133 .041l-2.184 .53c-.562 .135 -1.133 -.19 -1.282 -.732l-3.695 -13.418a1.02 1.02 0 0 1 .634 -1.219l.133 -.041z" /><path d="M14 9l4 -1" /><path d="M16 16l3.923 -.98" /></svg>',
    'device-tv-old' => '<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="icon icon-tabler icons-tabler-outline icon-tabler-device-tv-old"><path stroke="none" d="M0 0h24v24H0z" fill="none"/><path d="M3 7m0 2a2 2 0 0 1 2 -2h14a2 2 0 0 1 2 2v9a2 2 0 0 1 -2 2h-14a2 2 0 0 1 -2 -2z" /><path d="M16 3l-4 4l-4 -4" /><path d="M15 7v13" /><path d="M18 15v.01" /><path d="M18 12v.01" /></svg>',
    'headphones' => '<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="icon icon-tabler icons-tabler-outline icon-tabler-headphones"><path stroke="none" d="M0 0h24v24H0z" fill="none"/><path d="M4 13m0 2a2 2 0 0 1 2 -2h1a2 2 0 0 1 2 2v3a2 2 0 0 1 -2 2h-1a2 2 0 0 1 -2 -2z" /><path d="M15 13m0 2a2 2 0 0 1 2 -2h1a2 2 0 0 1 2 2v3a2 2 0 0 1 -2 2h-1a2 2 0 0 1 -2 -2z" /><path d="M4 15v-3a8 8 0 0 1 16 0v3" /></svg>',
    'movie' => '<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="icon icon-tabler icons-tabler-outline icon-tabler-movie"><path stroke="none" d="M0 0h24v24H0z" fill="none"/><path d="M4 4m0 2a2 2 0 0 1 2 -2h12a2 2 0 0 1 2 2v12a2 2 0 0 1 -2 2h-12a2 2 0 0 1 -2 -2z" /><path d="M8 4l0 16" /><path d="M16 4l0 16" /><path d="M4 8l4 0" /><path d="M4 16l4 0" /><path d="M4 12l16 0" /><path d="M16 8l4 0" /><path d="M16 16l4 0" /></svg>'
  ];

  if (!isset($icons[$iconName])) {
    return '<span class="icon-placeholder">[Missing: ' . htmlspecialchars($iconName) . ']</span>';
  }

  $svg = $icons[$iconName];

  // All icons are authored with width="24" height="24"; rewrite the
  // attribute pair only (the viewBox is intentionally left at 0 0 24 24).
  if ((int) $size !== 24) {
    $svg = str_replace(
      'width="24" height="24"',
      'width="' . (int) $size . '" height="' . (int) $size . '"',
      $svg
    );
  }

  // Append caller-supplied classes to the root <svg> class attribute
  // (each icon contains exactly one class attribute).
  if ($class !== '') {
    $svg = preg_replace(
      '/class="([^"]*)"/',
      'class="$1 ' . htmlspecialchars($class, ENT_QUOTES) . '"',
      $svg,
      1
    );
  }

  return $svg;
}
?>

Some files were not shown because too many files have changed in this diff Show more