feat: initial commit
This commit is contained in:
commit
0ff7457679
192 changed files with 24379 additions and 0 deletions
40
workers/analytics/index.js
Normal file
40
workers/analytics/index.js
Normal file
|
@ -0,0 +1,40 @@
|
|||
// Path (on this site) that serves the proxied Plausible analytics script.
const scriptName = "/js/script.js";
// Path that receives analytics events and forwards them to Plausible.
const endpoint = "/api/event";

// Service-worker-syntax Cloudflare Worker entry point.
addEventListener("fetch", (event) => {
  // If the worker throws, pass the request through to the origin
  // instead of surfacing an error to the visitor.
  event.passThroughOnException();
  event.respondWith(handleRequest(event));
});
|
||||
|
||||
/**
 * Route incoming requests: serve the proxied analytics script, forward
 * analytics events, and answer 404 for anything else.
 * @param {FetchEvent} event - The fetch event being handled.
 * @returns {Promise<Response>}
 */
async function handleRequest(event) {
  const { pathname } = new URL(event.request.url);

  if (pathname === scriptName) return getScript(event);
  if (pathname === endpoint) return postData(event);

  return new Response(null, { status: 404 });
}
|
||||
|
||||
/**
 * Serve the Plausible tracking script, caching successful upstream
 * responses in the default Cloudflare cache.
 * @param {FetchEvent} event
 * @returns {Promise<Response>} Cached or freshly fetched script response.
 */
async function getScript(event) {
  const cache = caches.default;

  const cached = await cache.match(event.request);
  if (cached) return cached;

  const upstream = await fetch(
    "https://plausible.io/js/plausible.outbound-links.tagged-events.js"
  );
  // Only cache successful fetches; the put runs after the response is sent.
  if (upstream.ok)
    event.waitUntil(cache.put(event.request, upstream.clone()));

  return upstream;
}
|
||||
|
||||
/**
 * Forward an analytics event to Plausible, stripping the cookie header
 * so no identifying data leaves this proxy.
 * @param {FetchEvent} event
 * @returns {Promise<Response>} The upstream Plausible response.
 */
async function postData(event) {
  const proxied = new Request(event.request);
  proxied.headers.delete("cookie");
  return fetch("https://plausible.io/api/event", proxied);
}
|
13
workers/analytics/wrangler.template.toml
Normal file
13
workers/analytics/wrangler.template.toml
Normal file
|
@ -0,0 +1,13 @@
|
|||
name = "analytics-worker"
|
||||
main = "./index.js"
|
||||
compatibility_date = "2023-01-01"
|
||||
|
||||
account_id = "${CF_ACCOUNT_ID}"
|
||||
workers_dev = true
|
||||
|
||||
[env.production]
|
||||
name = "analytics-worker-production"
|
||||
routes = [
|
||||
{ pattern = "coryd.dev/js/*", zone_id = "${CF_ZONE_ID}" },
|
||||
{ pattern = "coryd.dev/api/event", zone_id = "${CF_ZONE_ID}" }
|
||||
]
|
184
workers/artist-import/index.js
Normal file
184
workers/artist-import/index.js
Normal file
|
@ -0,0 +1,184 @@
|
|||
import slugify from "slugify";
|
||||
import countries from "i18n-iso-countries";
|
||||
|
||||
// Register English locale data so getAlpha2Code can resolve country names.
// NOTE(review): `require` inside an ES module only works if the bundler
// (e.g. wrangler/esbuild) resolves it at build time — confirm this builds.
countries.registerLocale(require("i18n-iso-countries/langs/en.json"));
|
||||
|
||||
/**
 * Normalize a media title into a URL-safe slug fragment: decomposes
 * accented characters, strips combining marks and assorted punctuation,
 * then slugifies to lowercase with "-" separators.
 * @param {string} str - Raw artist/album title.
 * @returns {string} Slug-safe string.
 */
function sanitizeMediaString(str) {
  // NFD splits accents into combining marks so they can be removed,
  // along with hyphens, dots, question marks, brackets, and ellipses.
  const cleaned = str
    .normalize("NFD")
    .replace(/[\u0300-\u036f\u2010\-\.\?\(\)\[\]\{\}]/g, "")
    .replace(/\.{3}/g, "");

  const options = {
    replacement: "-",
    remove: /[#,&,+()$~%.'\":*?<>{}]/g,
    lower: true,
  };
  return slugify(cleaned, options);
}
|
||||
|
||||
// Worker entry point: imports an artist (and their albums) into Directus.
// Metadata is fetched through Directus "flows" that proxy what appears to
// be a Plex-style API (MediaContainer/Metadata shape) — confirm upstream.
export default {
  async fetch(request, env) {
    const directusUrl = env.DIRECTUS_URL;
    const directusToken = env.DIRECTUS_API_TOKEN;
    const artistImportToken = env.ARTIST_IMPORT_TOKEN;
    const artistFlowID = env.ARTIST_FLOW_ID;
    const albumFlowID = env.ALBUM_FLOW_ID;
    // Directus file id of the placeholder artwork applied to new records.
    const placeholderImageId = "4cef75db-831f-4f5d-9333-79eaa5bb55ee";
    const requestUrl = new URL(request["url"]);
    const providedToken = requestUrl.searchParams.get("token");

    // Shared-secret auth: callers must present the import token.
    if (!providedToken || providedToken !== artistImportToken) return new Response("Unauthorized", { status: 401 });

    // POST a payload to a Directus collection; returns the created item.
    // Throws with the first Directus error message on failure.
    async function saveToDirectus(endpoint, payload) {
      const response = await fetch(`${directusUrl}/items/${endpoint}`, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${directusToken}`,
        },
        body: JSON.stringify(payload),
      });
      const data = await response.json();
      if (!response.ok) {
        throw new Error(
          data["errors"]
            ? data["errors"][0]["message"]
            : "Failed to save to Directus"
        );
      }
      return data["data"];
    }

    // Look up an existing genre id by (lowercased) name; returns null when
    // the genre is missing or the lookup fails.
    async function findGenreIdByName(genreName) {
      try {
        const response = await fetch(
          `${directusUrl}/items/genres?filter[name][_eq]=${encodeURIComponent(
            genreName.toLowerCase()
          )}`,
          { headers: { Authorization: `Bearer ${directusToken}` } }
        );
        const data = await response.json();
        return data["data"].length > 0 ? data["data"][0]["id"] : null;
      } catch (error) {
        console.error("Error fetching genre ID:", error["message"]);
        return null;
      }
    }

    const artistId = requestUrl.searchParams.get("artist_id");
    if (!artistId)
      return new Response("artist_id parameter is required", { status: 400 });

    // Fetch the artist's metadata via the Directus artist flow; the artist
    // record is the first entry of MediaContainer.Metadata.
    let artistData;
    try {
      const artistResponse = await fetch(
        `${directusUrl}/flows/trigger/${artistFlowID}?artist_id=${artistId}&import_token=${artistImportToken}`,
        { headers: { Authorization: `Bearer ${directusToken}` } }
      );
      artistData = await artistResponse.json();
      artistData =
        artistData["get_artist_data"]["data"]["MediaContainer"]["Metadata"][0];
    } catch (error) {
      console.error(
        "Error fetching artist data from Directus flow:",
        error["message"]
      );
      return new Response("Error fetching artist data", { status: 500 });
    }

    const artistName = artistData["title"] || "";
    const artistKey = sanitizeMediaString(artistName);
    const countryName = artistData["Country"]
      ? artistData["Country"][0]?.["tag"]
      : "";
    const countryIsoCode = countries.getAlpha2Code(countryName, "en") || "";
    // NOTE(review): the slug embeds the lowercased country *name*, not the
    // ISO code computed above — confirm this matches existing artist slugs.
    const slug = `/music/artists/${artistKey}-${countryName.toLowerCase()}`;
    const description = artistData["summary"] || "";
    const mbid = artistData["Guid"]?.[0]?.["id"]?.replace("mbid://", "") || "";

    // Resolve the first upstream genre name that already exists in Directus.
    const genreNames = artistData["Genre"]
      ? artistData["Genre"].map((g) => g["tag"].toLowerCase())
      : [];
    let genreId = null;
    for (const genreName of genreNames) {
      genreId = await findGenreIdByName(genreName);
      if (genreId) break;
    }

    // New artists are flagged `tentative` with placeholder art so they can
    // be reviewed manually later.
    const artistPayload = {
      name: artistName,
      name_string: artistName,
      slug: slug,
      description: description,
      mbid: mbid,
      tentative: true,
      genres: genreId,
      country: countryIsoCode,
      art: placeholderImageId,
    };

    let insertedArtist;
    try {
      insertedArtist = await saveToDirectus("artists", artistPayload);
    } catch (error) {
      console.error("Error saving artist:", error["message"]);
      return new Response("Error saving artist", { status: 500 });
    }

    // Fetch the artist's albums via the Directus album flow.
    let albumData;
    try {
      const albumResponse = await fetch(
        `${directusUrl}/flows/trigger/${albumFlowID}?artist_id=${artistId}&import_token=${artistImportToken}`,
        { headers: { Authorization: `Bearer ${directusToken}` } }
      );
      albumData = await albumResponse.json();
      albumData =
        albumData["get_album_data"]["data"]["MediaContainer"]["Metadata"];
    } catch (error) {
      console.error(
        "Error fetching album data from Directus flow:",
        error["message"]
      );
      return new Response("Error fetching album data", { status: 500 });
    }

    // Insert each album linked to the newly created artist. Individual
    // album failures are logged but do not abort the import.
    for (const album of albumData) {
      const albumName = album["title"] || "";
      const albumKey = `${artistKey}-${sanitizeMediaString(albumName)}`;
      const albumSlug = `/music/albums/${albumKey}`;
      const albumDescription = album["summary"] || "";
      const albumReleaseDate = album["originallyAvailableAt"] || "";
      const albumReleaseYear = albumReleaseDate
        ? new Date(albumReleaseDate).getFullYear()
        : null;
      const albumGenres = album["Genre"]
        ? album["Genre"].map((g) => g["tag"])
        : [];
      const albumMbid =
        album["Guid"]?.[0]?.["id"]?.replace("mbid://", "") || null;

      const albumPayload = {
        name: albumName,
        key: albumKey,
        slug: albumSlug,
        mbid: albumMbid,
        description: albumDescription,
        release_year: albumReleaseYear,
        artist: insertedArtist["id"],
        artist_name: artistName,
        genres: albumGenres,
        art: placeholderImageId,
        tentative: true,
      };

      try {
        await saveToDirectus("albums", albumPayload);
      } catch (error) {
        console.error("Error saving album:", error["message"]);
      }
    }

    return new Response("Artist and albums synced successfully", {
      status: 200,
    });
  },
};
|
15
workers/artist-import/wrangler.template.toml
Normal file
15
workers/artist-import/wrangler.template.toml
Normal file
|
@ -0,0 +1,15 @@
|
|||
name = "import-artist-worker"
|
||||
main = "./index.js"
|
||||
compatibility_date = "2023-01-01"
|
||||
|
||||
account_id = "${CF_ACCOUNT_ID}"
|
||||
workers_dev = true
|
||||
|
||||
[observability]
|
||||
enabled = true
|
||||
|
||||
[env.production]
|
||||
name = "import-artist-worker-production"
|
||||
routes = [
|
||||
{ pattern = "coryd.dev/api/import-artist*", zone_id = "${CF_ZONE_ID}" }
|
||||
]
|
101
workers/contact/index.js
Normal file
101
workers/contact/index.js
Normal file
|
@ -0,0 +1,101 @@
|
|||
import { createClient } from "@supabase/supabase-js";
|
||||
|
||||
// Max submissions allowed per IP within TIME_FRAME.
const RATE_LIMIT = 5;
// Rate-limit window: 60 * 120 * 1000 ms = 2 hours.
const TIME_FRAME = 60 * 120 * 1000;
// Submission timestamps keyed by client IP.
// NOTE(review): this Map lives in isolate memory and resets whenever the
// Worker is evicted, so the rate limit is best-effort only.
const ipSubmissions = new Map();
|
||||
|
||||
// Contact-form Worker: rate limits by IP, filters spam (honeypot field and
// blocked email domains), stores the message in Supabase, then forwards it
// via the Forward Email API.
export default {
  async fetch(request, env) {
    if (request.method === "POST") {
      // Best available client IP; CF-Connecting-IP is set by Cloudflare.
      const ip =
        request.headers.get("CF-Connecting-IP") ||
        request.headers.get("X-Forwarded-For") ||
        request.headers.get("Remote-Addr");
      const currentTime = Date.now();

      if (!ipSubmissions.has(ip)) ipSubmissions.set(ip, []);

      // Keep only submissions still inside the rate-limit window.
      const submissions = ipSubmissions
        .get(ip)
        .filter((time) => currentTime - time < TIME_FRAME);

      if (submissions.length >= RATE_LIMIT)
        return Response.redirect("https://coryd.dev/rate-limit", 301);

      submissions.push(currentTime);
      ipSubmissions.set(ip, submissions);

      try {
        const formData = await request.formData();
        const name = formData.get("name");
        const email = formData.get("email");
        const message = formData.get("message");
        // Honeypot field: real users never fill it in, bots do.
        const hpName = formData.get("hp_name");
        if (hpName) return new Response("Spam detected", { status: 400 });
        if (!name || !email || !message)
          return new Response("Invalid input", { status: 400 });

        // NOTE(review): assumes `email` contains "@"; otherwise this throws
        // and the catch below redirects to /broken.
        const emailDomain = email.split("@")[1].toLowerCase();
        const supabaseUrl = env.SUPABASE_URL;
        const supabaseKey = env.SUPABASE_KEY;
        const supabase = createClient(supabaseUrl, supabaseKey);
        const { data: blockedDomains, error: domainError } = await supabase
          .from("blocked_domains")
          .select("domain_name");

        if (domainError)
          throw new Error(
            `Failed to fetch blocked domains: ${domainError.message}`
          );

        const domainList = blockedDomains.map((item) =>
          item["domain_name"].toLowerCase()
        );

        if (domainList.includes(emailDomain))
          return new Response("Email domain is blocked.", { status: 400 });

        // Persist the submission before attempting delivery.
        const { error } = await supabase
          .from("contacts")
          .insert([{ name, email, message, replied: false }]);

        if (error) throw error;

        // Forward Email uses basic auth with the API key as the username.
        const forwardEmailApiKey = env.FORWARDEMAIL_API_KEY;
        const authHeader = "Basic " + btoa(`${forwardEmailApiKey}:`);
        const emailData = new URLSearchParams({
          from: `${name} <hi@admin.coryd.dev>`,
          to: "hi@coryd.dev",
          subject: `${message}`,
          text: `Name: ${name}\nEmail: ${email}\nMessage: ${message}`,
          replyTo: email,
        }).toString();
        const response = await fetch("https://api.forwardemail.net/v1/emails", {
          method: "POST",
          headers: {
            "Content-Type": "application/x-www-form-urlencoded",
            Authorization: authHeader,
          },
          body: emailData,
        });

        if (!response.ok) {
          const errorText = await response.text();
          console.error(
            "Email API response error:",
            response.status,
            errorText
          );
          throw new Error(`Failed to send email: ${errorText}`);
        }

        return Response.redirect("https://coryd.dev/contact-success", 301);
      } catch (error) {
        // Any failure above lands the user on a generic error page.
        console.error("Error:", error.message);
        return Response.redirect("https://coryd.dev/broken", 301);
      }
    } else {
      return Response.redirect("https://coryd.dev/not-allowed", 301);
    }
  },
};
|
12
workers/contact/wrangler.template.toml
Normal file
12
workers/contact/wrangler.template.toml
Normal file
|
@ -0,0 +1,12 @@
|
|||
name = "contact-form-worker"
|
||||
main = "./index.js"
|
||||
compatibility_date = "2023-01-01"
|
||||
|
||||
account_id = "${CF_ACCOUNT_ID}"
|
||||
workers_dev = true
|
||||
|
||||
[env.production]
|
||||
name = "contact-form-worker-production"
|
||||
routes = [
|
||||
{ pattern = "coryd.dev/api/contact", zone_id = "${CF_ZONE_ID}" }
|
||||
]
|
149
workers/mastodon/index.js
Normal file
149
workers/mastodon/index.js
Normal file
|
@ -0,0 +1,149 @@
|
|||
import { XMLParser } from "fast-xml-parser";
|
||||
import { convert } from "html-to-text";
|
||||
import { createClient } from "@supabase/supabase-js";
|
||||
|
||||
// Canonical site origin; used to derive post slugs from feed item links.
const BASE_URL = "https://coryd.dev";

// Cron-triggered Worker: syndicates new RSS items to Mastodon.
export default {
  async scheduled(event, env) {
    await handleMastodonPost(env);
  },
};
|
||||
|
||||
// Read the site's syndication feed, post any item not yet recorded in the
// RSS_TO_MASTODON_NAMESPACE KV store to Mastodon, then remember its link.
// Blog posts additionally get their Mastodon URL written back to Supabase.
async function handleMastodonPost(env) {
  const mastodonApiUrl = "https://follow.coryd.dev/api/v1/statuses";
  const accessToken = env.MASTODON_ACCESS_TOKEN;
  const rssFeedUrl = "https://coryd.dev/feeds/syndication.xml";
  const supabaseUrl = env.SUPABASE_URL;
  const supabaseKey = env.SUPABASE_KEY;
  const supabase = createClient(supabaseUrl, supabaseKey);

  try {
    const latestItems = await fetchRSSFeed(rssFeedUrl);

    // Iterate oldest-to-newest so posts appear in chronological order.
    for (let i = latestItems.length - 1; i >= 0; i--) {
      const item = latestItems[i];
      // KV entry keyed by link marks an item as already syndicated.
      const existingPost = await env.RSS_TO_MASTODON_NAMESPACE.get(item.link);

      if (existingPost) continue;

      const title = item.title;
      const link = item.link;
      // Mastodon's default status length limit.
      const maxLength = 500;
      // Flatten the HTML description to plain text, dropping link hrefs
      // and heading uppercasing.
      const plainTextDescription = convert(item.description, {
        wordwrap: false,
        selectors: [
          { selector: "a", options: { ignoreHref: true } },
          { selector: "h1", options: { uppercase: false } },
          { selector: "h2", options: { uppercase: false } },
          { selector: "h3", options: { uppercase: false } },
          { selector: "*", format: "block" },
        ],
      });

      const cleanedDescription = plainTextDescription
        .replace(/\s+/g, " ")
        .trim();
      const content = truncateContent(
        title,
        cleanedDescription,
        link,
        maxLength
      );

      const mastodonPostUrl = await postToMastodon(
        mastodonApiUrl,
        accessToken,
        content
      );
      const timestamp = new Date().toISOString();

      // Record the link only after a successful post, so failures retry
      // on the next cron run.
      await env.RSS_TO_MASTODON_NAMESPACE.put(link, timestamp);

      // Blog posts get the syndication URL saved back onto the post row.
      if (link.includes("coryd.dev/posts")) {
        const slug = link.replace(BASE_URL, "");
        await addMastodonUrlToPost(supabase, slug, mastodonPostUrl);
      }

      console.log(`Posted stored URL: ${link}`);
    }

    console.log("RSS processed successfully");
  } catch (error) {
    // Swallow errors: a failed run is retried by the next cron trigger.
    console.error("Error in scheduled event:", error);
  }
}
|
||||
|
||||
/**
 * Store the syndicated Mastodon URL on the matching post row.
 * @param {object} supabase - Supabase client instance.
 * @param {string} slug - Post slug (path portion of the canonical URL).
 * @param {string} mastodonPostUrl - URL of the created Mastodon status.
 */
async function addMastodonUrlToPost(supabase, slug, mastodonPostUrl) {
  const result = await supabase
    .from("posts")
    .update({ mastodon_url: mastodonPostUrl })
    .eq("slug", slug);

  if (result.error) {
    console.error("Error updating post:", result.error);
    return;
  }

  console.log(`Updated post with Mastodon URL: ${mastodonPostUrl}`);
}
|
||||
|
||||
/**
 * Build a status of at most `maxLength` characters: title, description
 * (word-boundary truncated with "..." when needed), and link, separated
 * by blank lines.
 * @param {string} title
 * @param {string} description - Plain-text description.
 * @param {string} link
 * @param {number} maxLength - Maximum total status length.
 * @returns {string} Assembled status text.
 */
function truncateContent(title, description, link, maxLength) {
  // Room left for the description after title, link, and the four
  // separator newline characters.
  const available = maxLength - `${title}\n\n${link}`.length - 4;

  let body = description;
  if (body.length > available) {
    // Cut at the limit, then drop the (possibly partial) last word.
    const words = body.substring(0, available).split(" ");
    words.pop();
    body = words.join(" ") + "...";
  }

  // Tidy any whitespace left dangling before punctuation.
  body = body.replace(/\s+([.,!?;:])/g, "$1");

  return `${title}\n\n${body}\n\n${link}`;
}
|
||||
|
||||
/**
 * Fetch and parse the RSS feed.
 * @param {string} rssFeedUrl - URL of the RSS XML feed.
 * @returns {Promise<Array<{title: string, link: string, description: string}>>}
 */
async function fetchRSSFeed(rssFeedUrl) {
  const response = await fetch(rssFeedUrl);
  const rssText = await response.text();
  const parser = new XMLParser();
  const rssData = parser.parse(rssText);

  // BUG FIX: fast-xml-parser yields a single object (not an array) when
  // the channel contains exactly one <item>; normalize so iteration
  // always works. Also tolerate a feed with no items at all.
  const rawItems = rssData?.rss?.channel?.item ?? [];
  const items = Array.isArray(rawItems) ? rawItems : [rawItems];

  return items.map((item) => {
    const title = item.title;
    const link = item.link;
    const description = item.description;
    return { title, link, description };
  });
}
|
||||
|
||||
/**
 * Publish a status to Mastodon and return the URL of the created post.
 * @param {string} apiUrl - Statuses endpoint of the Mastodon instance.
 * @param {string} accessToken - OAuth bearer token.
 * @param {string} content - Status text to publish.
 * @returns {Promise<string>} URL of the new status.
 * @throws {Error} When the API responds with a non-2xx status.
 */
async function postToMastodon(apiUrl, accessToken, content) {
  const requestInit = {
    method: "POST",
    headers: {
      Authorization: `Bearer ${accessToken}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({ status: content }),
  };

  const response = await fetch(apiUrl, requestInit);

  if (!response.ok) {
    const errorText = await response.text();
    throw new Error(
      `Error posting to Mastodon: ${response.statusText} - ${errorText}`
    );
  }

  const responseData = await response.json();

  console.log("Posted to Mastodon successfully.");

  return responseData.url;
}
|
21
workers/mastodon/wrangler.template.toml
Normal file
21
workers/mastodon/wrangler.template.toml
Normal file
|
@ -0,0 +1,21 @@
|
|||
name = "rss-to-mastodon-worker"
|
||||
main = "./index.js"
|
||||
compatibility_date = "2023-01-01"
|
||||
|
||||
account_id = "${CF_ACCOUNT_ID}"
|
||||
workers_dev = true
|
||||
|
||||
[observability]
|
||||
enabled = true
|
||||
|
||||
[[kv_namespaces]]
|
||||
binding = "RSS_TO_MASTODON_NAMESPACE"
|
||||
id = "${RSS_TO_MASTODON_KV_NAMESPACE_ID}"
|
||||
|
||||
[env.production]
|
||||
name = "rss-to-mastodon-worker-production"
|
||||
triggers = { crons = ["*/15 * * * *"] }
|
||||
|
||||
[[env.production.kv_namespaces]]
|
||||
binding = "RSS_TO_MASTODON_NAMESPACE"
|
||||
id = "${RSS_TO_MASTODON_KV_NAMESPACE_ID}"
|
20
workers/rebuild/index.js
Normal file
20
workers/rebuild/index.js
Normal file
|
@ -0,0 +1,20 @@
|
|||
// Cron-triggered Worker: POSTs to the configured deploy hook to rebuild
// the site on a schedule.
export default {
  async scheduled(event, env, ctx) {
    const deployHookUrl = env.DEPLOY_HOOK_URL;

    const response = await fetch(deployHookUrl, {
      method: "POST",
    });

    if (!response.ok) {
      // Log and bail; the next cron run will retry.
      const errorText = await response.text();
      console.error(
        `Error triggering deploy: ${response.statusText}`,
        errorText
      );
      return;
    }

    console.log("Deploy triggered successfully");
  },
};
|
10
workers/rebuild/wrangler.template.toml
Normal file
10
workers/rebuild/wrangler.template.toml
Normal file
|
@ -0,0 +1,10 @@
|
|||
name = "scheduled-rebuild-worker"
|
||||
main = "./index.js"
|
||||
compatibility_date = "2023-01-01"
|
||||
|
||||
account_id = "${CF_ACCOUNT_ID}"
|
||||
workers_dev = true
|
||||
|
||||
[env.production]
|
||||
name = "scheduled-rebuild-worker-production"
|
||||
triggers = {crons = ["0 * * * *"]}
|
249
workers/scrobble/index.js
Normal file
249
workers/scrobble/index.js
Normal file
|
@ -0,0 +1,249 @@
|
|||
import { createClient } from "@supabase/supabase-js";
|
||||
import slugify from "slugify";
|
||||
|
||||
/**
 * Normalize a media title into a slug fragment: decomposes accented
 * characters, strips combining marks and punctuation, then slugifies
 * to lowercase with "-" separators.
 * @param {string} str - Raw artist/album/track title.
 * @returns {string} Slug-safe string.
 */
const sanitizeMediaString = (str) => {
  const stripped = str
    .normalize("NFD")
    .replace(/[\u0300-\u036f\u2010\-\.\?\(\)\[\]\{\}]/g, "")
    .replace(/\.{3}/g, "");

  const slugOptions = {
    replacement: "-",
    remove: /[#,&,+()$~%.'":*?<>{}]/g,
    lower: true,
  };
  return slugify(stripped, slugOptions);
};
|
||||
|
||||
/**
 * Send a notification email via the Forward Email API, retrying on failure.
 * @param {string} subject - Email subject line.
 * @param {string} text - Plain-text email body.
 * @param {string} authHeader - Pre-built Basic auth header value.
 * @param {number} [maxRetries=3] - Maximum number of send attempts.
 * @returns {Promise<boolean>} true once a send succeeds, false if every
 *   attempt fails.
 */
const sendEmail = async (subject, text, authHeader, maxRetries = 3) => {
  const emailData = new URLSearchParams({
    from: "coryd.dev <hi@admin.coryd.dev>",
    to: "hi@coryd.dev",
    subject: subject,
    text: text,
  }).toString();

  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      const response = await fetch("https://api.forwardemail.net/v1/emails", {
        method: "POST",
        headers: {
          "Content-Type": "application/x-www-form-urlencoded",
          Authorization: authHeader,
        },
        body: emailData,
      });

      if (!response.ok) {
        const responseText = await response.text();
        console.error(
          `Attempt ${attempt}: Email API response error:`,
          response.status,
          responseText
        );
        throw new Error(`Failed to send email: ${responseText}`);
      }

      console.log("Email sent successfully on attempt", attempt);
      return true;
    } catch (error) {
      console.error(`Attempt ${attempt}: Error sending email:`, error.message);

      if (attempt < maxRetries) {
        console.log(
          `Retrying email send (attempt ${attempt + 1}/${maxRetries})...`
        );
      } else {
        console.error("All attempts to send email failed.");
      }
    }
  }

  return false;
};
|
||||
|
||||
// Scrobble webhook Worker: receives what appears to be a Plex webhook
// (multipart form with a JSON "payload" field — confirm against sender),
// get-or-creates the artist and album rows in Supabase, then records a
// listen. Newly created rows are flagged `tentative` and trigger a
// notification email for manual review.
export default {
  async fetch(request, env) {
    const supabaseUrl = env.SUPABASE_URL;
    const supabaseKey = env.SUPABASE_KEY;
    const FORWARDEMAIL_API_KEY = env.FORWARDEMAIL_API_KEY;
    const ACCOUNT_ID_PLEX = env.ACCOUNT_ID_PLEX;
    const supabase = createClient(supabaseUrl, supabaseKey);
    // Forward Email basic auth: API key as username, empty password.
    const authHeader = "Basic " + btoa(`${FORWARDEMAIL_API_KEY}:`);
    const url = new URL(request.url);
    const params = url.searchParams;
    const id = params.get("id");

    // NOTE(review): error responses below all return HTTP 200 with a
    // JSON status field — confirm the webhook sender ignores status codes.
    if (!id)
      return new Response(JSON.stringify({ status: "Bad request" }), {
        headers: { "Content-Type": "application/json" },
      });

    // Shared-secret auth: the id query param must match the account id.
    if (id !== ACCOUNT_ID_PLEX)
      return new Response(JSON.stringify({ status: "Forbidden" }), {
        headers: { "Content-Type": "application/json" },
      });

    const contentType = request.headers.get("Content-Type") || "";
    if (!contentType.includes("multipart/form-data"))
      return new Response(
        JSON.stringify({
          status: "Bad request",
          message: "Invalid Content-Type. Expected multipart/form-data.",
        }),
        { headers: { "Content-Type": "application/json" } }
      );

    try {
      const data = await request.formData();
      const payload = JSON.parse(data.get("payload"));

      // Only scrobble events are processed; everything else is ignored
      // and acknowledged with "success" below.
      if (payload?.event === "media.scrobble") {
        const artistName = payload["Metadata"]["grandparentTitle"];
        const albumName = payload["Metadata"]["parentTitle"];
        const trackName = payload["Metadata"]["title"];
        // Unix seconds at time of receipt.
        const listenedAt = Math.floor(Date.now() / 1000);
        const artistKey = sanitizeMediaString(artistName);
        const albumKey = `${artistKey}-${sanitizeMediaString(albumName)}`;

        let { data: artistData, error: artistError } = await supabase
          .from("artists")
          .select("*")
          .ilike("name_string", artistName)
          .single();

        // PGRST116 = PostgREST "no rows" error: artist is missing, so
        // insert a tentative placeholder record and re-fetch it.
        if (artistError && artistError.code === "PGRST116") {
          const { error: insertArtistError } = await supabase
            .from("artists")
            .insert([
              {
                mbid: null,
                art: "4cef75db-831f-4f5d-9333-79eaa5bb55ee",
                name: artistName,
                slug: "/music",
                tentative: true,
                total_plays: 0,
              },
            ]);

          if (insertArtistError) {
            console.error(
              "Error inserting artist: ",
              insertArtistError.message
            );
            return new Response(
              JSON.stringify({
                status: "error",
                message: insertArtistError.message,
              }),
              { headers: { "Content-Type": "application/json" } }
            );
          }

          // Notify for manual cleanup of the tentative record.
          await sendEmail(
            "New tentative artist record",
            `A new tentative artist record was inserted:\n\nArtist: ${artistName}\nKey: ${artistKey}`,
            authHeader
          );
          ({ data: artistData, error: artistError } = await supabase
            .from("artists")
            .select("*")
            .ilike("name_string", artistName)
            .single());
        }

        if (artistError) {
          console.error("Error fetching artist:", artistError.message);
          return new Response(
            JSON.stringify({ status: "error", message: artistError.message }),
            { headers: { "Content-Type": "application/json" } }
          );
        }

        let { data: albumData, error: albumError } = await supabase
          .from("albums")
          .select("*")
          .ilike("key", albumKey)
          .single();

        // Same get-or-create pattern for the album, linked to the artist.
        if (albumError && albumError.code === "PGRST116") {
          const { error: insertAlbumError } = await supabase
            .from("albums")
            .insert([
              {
                mbid: null,
                art: "4cef75db-831f-4f5d-9333-79eaa5bb55ee",
                key: albumKey,
                name: albumName,
                tentative: true,
                total_plays: 0,
                artist: artistData.id,
              },
            ]);

          if (insertAlbumError) {
            console.error("Error inserting album:", insertAlbumError.message);
            return new Response(
              JSON.stringify({
                status: "error",
                message: insertAlbumError.message,
              }),
              { headers: { "Content-Type": "application/json" } }
            );
          }

          await sendEmail(
            "New tentative album record",
            `A new tentative album record was inserted:\n\nAlbum: ${albumName}\nKey: ${albumKey}\nArtist: ${artistName}`,
            authHeader
          );
          ({ data: albumData, error: albumError } = await supabase
            .from("albums")
            .select("*")
            .ilike("key", albumKey)
            .single());
        }

        if (albumError) {
          console.error("Error fetching album:", albumError.message);
          return new Response(
            JSON.stringify({ status: "error", message: albumError.message }),
            { headers: { "Content-Type": "application/json" } }
          );
        }

        // Record the listen, preferring canonical names from the DB rows.
        const { error: listenError } = await supabase.from("listens").insert([
          {
            artist_name: artistData["name_string"] || artistName,
            album_name: albumData["name"] || albumName,
            track_name: trackName,
            listened_at: listenedAt,
            album_key: albumKey,
          },
        ]);

        if (listenError) {
          console.error("Error inserting listen:", listenError.message);
          return new Response(
            JSON.stringify({ status: "error", message: listenError.message }),
            { headers: { "Content-Type": "application/json" } }
          );
        }

        console.log("Listen record inserted successfully");
      }

      return new Response(JSON.stringify({ status: "success" }), {
        headers: { "Content-Type": "application/json" },
      });
    } catch (e) {
      console.error("Error processing request:", e.message);
      return new Response(
        JSON.stringify({ status: "error", message: e.message }),
        { headers: { "Content-Type": "application/json" } }
      );
    }
  },
};
|
15
workers/scrobble/wrangler.template.toml
Normal file
15
workers/scrobble/wrangler.template.toml
Normal file
|
@ -0,0 +1,15 @@
|
|||
name = "scrobble-worker"
|
||||
main = "./index.js"
|
||||
compatibility_date = "2023-01-01"
|
||||
|
||||
account_id = "${CF_ACCOUNT_ID}"
|
||||
workers_dev = true
|
||||
|
||||
[observability]
|
||||
enabled = true
|
||||
|
||||
[env.production]
|
||||
name = "scrobble-worker-production"
|
||||
routes = [
|
||||
{ pattern = "coryd.dev/api/scrobble*", zone_id = "${CF_ZONE_ID}" }
|
||||
]
|
44
workers/search/index.js
Normal file
44
workers/search/index.js
Normal file
|
@ -0,0 +1,44 @@
|
|||
import { createClient } from "@supabase/supabase-js";
|
||||
|
||||
// Search API Worker: proxies the query to the `search_optimized_index`
// Postgres RPC and returns paginated JSON results.
export default {
  /**
   * Query params: q (search string), type (comma-separated type list,
   * optional), page (default 1), pageSize (default 10).
   * @returns {Promise<Response>} JSON { results, total, page, pageSize }.
   */
  async fetch(request, env) {
    const supabaseUrl = env.SUPABASE_URL;
    const supabaseKey = env.SUPABASE_KEY;
    const supabase = createClient(supabaseUrl, supabaseKey);

    const { searchParams } = new URL(request.url);
    const query = searchParams.get("q") || "";
    const rawTypes = searchParams.getAll("type") || [];
    // Only the first `type` param is honored; it may hold a comma list.
    const types = rawTypes.length > 0 ? rawTypes[0].split(",") : null;

    const page = parseInt(searchParams.get("page") || "1", 10);
    const pageSize = parseInt(searchParams.get("pageSize") || "10", 10);
    const offset = (page - 1) * pageSize;

    try {
      const { data, error } = await supabase.rpc("search_optimized_index", {
        search_query: query,
        page_size: pageSize,
        page_offset: offset,
        // BUG FIX: `types` is null when no `type` param is supplied; the
        // previous `types.length` threw a TypeError on every untyped search.
        types: types?.length ? types : null,
      });

      if (error) {
        console.error("Error fetching search data:", error);
        return new Response(JSON.stringify({ results: [], total: 0 }), {
          status: 500,
        });
      }

      // Guard against a null data payload from the RPC.
      const rows = data ?? [];
      // Each row carries the full result count; strip it from the items.
      const total = rows.length > 0 ? rows[0].total_count : 0;
      const results = rows.map(({ total_count, ...item }) => item);

      return new Response(JSON.stringify({ results, total, page, pageSize }), {
        headers: { "Content-Type": "application/json" },
      });
    } catch (error) {
      console.error("Unexpected error:", error);
      return new Response("Internal Server Error", { status: 500 });
    }
  },
};
|
12
workers/search/wrangler.template.toml
Normal file
12
workers/search/wrangler.template.toml
Normal file
|
@ -0,0 +1,12 @@
|
|||
name = "search-worker"
|
||||
main = "./index.js"
|
||||
compatibility_date = "2023-01-01"
|
||||
|
||||
account_id = "${CF_ACCOUNT_ID}"
|
||||
workers_dev = true
|
||||
|
||||
[env.production]
|
||||
name = "search-worker-production"
|
||||
routes = [
|
||||
{ pattern = "coryd.dev/api/search*", zone_id = "${CF_ZONE_ID}" },
|
||||
]
|
Reference in a new issue