chore: pull workers in

commit a0f654a1a2 (parent 0bf47f97ad)
15 changed files with 539 additions and 9 deletions

.env (4 changed lines)
@@ -1,4 +1,6 @@
 API_KEY_PLAUSIBLE=
 ACCOUNT_ID_PLEX=
 SUPABASE_URL=
-SUPABASE_KEY=
+SUPABASE_KEY=
+CF_ACCOUNT_ID=
+CF_ZONE_ID=

.gitignore (vendored; 6 changed lines)
@@ -7,4 +7,8 @@ node_modules
 .env.local

 # system files
-.DS_Store
+.DS_Store
+
+# workers
+wrangler.toml
+.wrangler

package-lock.json (generated; 10 changed lines)
@@ -1,12 +1,12 @@
 {
   "name": "coryd.dev",
-  "version": "20.12.15",
+  "version": "20.13.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "coryd.dev",
-      "version": "20.12.15",
+      "version": "20.13.0",
       "license": "MIT",
       "dependencies": {
         "@cdransf/api-text": "^1.4.0",
@@ -1254,9 +1254,9 @@
       "license": "MIT"
     },
     "node_modules/electron-to-chromium": {
-      "version": "1.5.1",
-      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.1.tgz",
-      "integrity": "sha512-FKbOCOQ5QRB3VlIbl1LZQefWIYwszlBloaXcY2rbfpu9ioJnNh3TK03YtIDKDo3WKBi8u+YV4+Fn2CkEozgf4w==",
+      "version": "1.5.2",
+      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.2.tgz",
+      "integrity": "sha512-kc4r3U3V3WLaaZqThjYz/Y6z8tJe+7K0bbjUVo3i+LWIypVdMx5nXCkwRe6SWbY6ILqLdc1rKcKmr3HoH7wjSQ==",
       "dev": true,
       "license": "ISC"
     },

package.json (9 changed lines)
@@ -1,6 +1,6 @@
 {
   "name": "coryd.dev",
-  "version": "20.12.15",
+  "version": "20.13.0",
   "description": "The source for my personal site. Built using 11ty (and other tools).",
   "type": "module",
   "scripts": {
@@ -8,7 +8,12 @@
     "start:quick": "eleventy --serve --incremental --ignore-initial",
     "build": "ELEVENTY_PRODUCTION=true eleventy",
     "update:deps": "npm upgrade && ncu",
-    "debug": "DEBUG=Eleventy* npx @11ty/eleventy --serve"
+    "debug": "DEBUG=Eleventy* npx @11ty/eleventy --serve",
+    "publish:analytics": "node scripts/worker-build.mjs analytics && wrangler deploy --env production --config workers/analytics/wrangler.toml",
+    "publish:contact": "node scripts/worker-build.mjs contact && wrangler deploy --env production --config workers/contact/wrangler.toml",
+    "publish:playing": "node scripts/worker-build.mjs playing && wrangler deploy --env production --config workers/playing/wrangler.toml",
+    "publish:rebuild": "node scripts/worker-build.mjs rebuild && wrangler deploy --env production --config workers/rebuild/wrangler.toml",
+    "publish:scrobble": "node scripts/worker-build.mjs scrobble && wrangler deploy --env production --config workers/scrobble/wrangler.toml"
   },
   "keywords": [
     "11ty",
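
Each publish:* script regenerates the worker's wrangler.toml from its checked-in template (via scripts/worker-build.mjs, added below) and then hands that file to wrangler deploy, so deploying a worker is just npm run publish:analytics, npm run publish:contact, and so on.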

scripts/worker-build.mjs (new file, 22 lines)
import fs from 'fs/promises';
import dotenv from 'dotenv-flow';

dotenv.config();

const workerName = process.argv[2];

if (!workerName) {
  console.error('Please specify a worker name.');
  process.exit(1);
}

const templatePath = `workers/${workerName}/wrangler.template.toml`;
const outputPath = `workers/${workerName}/wrangler.toml`;
const template = await fs.readFile(templatePath, 'utf8');
const output = template
  .replace(/\${CF_ACCOUNT_ID}/g, process.env.CF_ACCOUNT_ID)
  .replace(/\${CF_ZONE_ID}/g, process.env.CF_ZONE_ID);

await fs.writeFile(outputPath, output);

console.log(`Generated wrangler.toml for ${workerName}`);
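
For reference, running node scripts/worker-build.mjs analytics against the analytics template later in this diff would emit a workers/analytics/wrangler.toml along these lines; the IDs here are placeholders standing in for whatever CF_ACCOUNT_ID and CF_ZONE_ID hold in .env:

name = "analytics-worker"
main = "./index.js"
compatibility_date = "2023-01-01"

account_id = "0123456789abcdef"   # placeholder for CF_ACCOUNT_ID
workers_dev = true

[env.production]
name = "analytics-worker-production"
routes = [
  { pattern = "coryd.dev/js/*", zone_id = "fedcba9876543210" },        # placeholder for CF_ZONE_ID
  { pattern = "coryd.dev/api/event", zone_id = "fedcba9876543210" }
]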

workers/analytics/index.js (new file, 39 lines)
const ScriptName = '/js/script.js';
const Endpoint = '/api/event';

addEventListener('fetch', event => {
  event.passThroughOnException();
  event.respondWith(handleRequest(event));
});

async function handleRequest(event) {
  const url = new URL(event.request.url);
  const pathname = url.pathname;

  if (pathname === ScriptName) {
    return getScript(event);
  } else if (pathname === Endpoint) {
    return postData(event);
  }
  return new Response(null, { status: 404 });
}

async function getScript(event) {
  const cache = caches.default;
  let response = await cache.match(event.request);

  if (!response) {
    const scriptUrl = "https://plausible.io/js/plausible.outbound-links.tagged-events.js";
    response = await fetch(scriptUrl);

    if (response.ok) event.waitUntil(cache.put(event.request, response.clone()));
  }

  return response;
}

async function postData(event) {
  const request = new Request(event.request);
  request.headers.delete('cookie');
  return await fetch("https://plausible.io/api/event", request);
}
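
This worker first-party-proxies Plausible: /js/script.js serves a cached copy of the upstream tracker script and /api/event forwards events with cookies stripped. The page snippet that loads it is not part of this commit, but with Plausible's standard proxied embed it would look something like:

<script defer data-domain="coryd.dev" src="/js/script.js"></script>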

workers/analytics/wrangler.template.toml (new file, 13 lines)
name = "analytics-worker"
main = "./index.js" # Add this line to specify the entry point
compatibility_date = "2023-01-01"

account_id = "${CF_ACCOUNT_ID}"
workers_dev = true

[env.production]
name = "analytics-worker-production"
routes = [
  { pattern = "coryd.dev/js/*", zone_id = "${CF_ZONE_ID}" },
  { pattern = "coryd.dev/api/event", zone_id = "${CF_ZONE_ID}" }
]

workers/contact/index.js (new file, 76 lines)
import { createClient } from '@supabase/supabase-js'

const RATE_LIMIT = 5
const TIME_FRAME = 60 * 60 * 1000
const ipSubmissions = new Map()

export default {
  async fetch(request, env) {
    if (request.method === 'POST') {
      const ip = request.headers.get('CF-Connecting-IP') || request.headers.get('X-Forwarded-For') || request.headers.get('Remote-Addr')
      const currentTime = Date.now()

      if (!ipSubmissions.has(ip)) ipSubmissions.set(ip, [])

      const submissions = ipSubmissions.get(ip).filter(time => currentTime - time < TIME_FRAME)

      if (submissions.length >= RATE_LIMIT) return Response.redirect('https://coryd.dev/rate-limit', 429)

      submissions.push(currentTime)
      ipSubmissions.set(ip, submissions)

      try {
        const formData = await request.formData()
        const name = formData.get('name')
        const email = formData.get('email')
        const message = formData.get('message')
        const hpName = formData.get('hp_name')

        if (hpName) return new Response('Spam detected', { status: 400 })
        if (!name || !email || !message) return new Response('Invalid input', { status: 400 })

        const supabaseUrl = env.SUPABASE_URL
        const supabaseKey = env.SUPABASE_KEY
        const supabase = createClient(supabaseUrl, supabaseKey)
        const { error } = await supabase.from('contacts').insert([
          { name, email, message, replied: false }
        ])

        if (error) throw error

        const forwardEmailApiKey = env.FORWARDEMAIL_API_KEY
        const authHeader = 'Basic ' + btoa(`${forwardEmailApiKey}:`)

        const emailData = new URLSearchParams({
          from: 'hi@admin.coryd.dev',
          to: 'hi@coryd.dev',
          subject: 'New contact form submission',
          text: `Name: ${name}\nEmail: ${email}\nMessage: ${message}`,
          replyTo: email
        }).toString()

        const response = await fetch('https://api.forwardemail.net/v1/emails', {
          method: 'POST',
          headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Authorization': authHeader
          },
          body: emailData
        })

        if (!response.ok) {
          const errorText = await response.text()
          console.error('Email API response error:', response.status, errorText)
          throw new Error(`Failed to send email: ${errorText}`)
        }

        return Response.redirect('https://coryd.dev/contact/success', 301)
      } catch (error) {
        console.error('Error:', error.message)
        return Response.redirect('https://coryd.dev/broken', 301)
      }
    } else {
      return Response.redirect('https://coryd.dev/not-allowed', 301)
    }
  }
}
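
The form markup that posts to this worker is not part of the commit, but a submission matching what the handler reads (and the coryd.dev/api/contact route in the template below) would look roughly like this; the field values are placeholders:

// Hypothetical client-side submission; field names mirror what the worker expects.
const body = new FormData()
body.append('name', 'Jane Doe')
body.append('email', 'jane@example.com')
body.append('message', 'Hello!')
body.append('hp_name', '') // honeypot: any value here gets the request rejected as spam

await fetch('https://coryd.dev/api/contact', { method: 'POST', body })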

workers/contact/wrangler.template.toml (new file, 12 lines)
name = "contact-form-worker"
main = "./index.js"
compatibility_date = "2023-01-01"

account_id = "${CF_ACCOUNT_ID}"
workers_dev = true

[env.production]
name = "contact-form-worker-production"
routes = [
  { pattern = "coryd.dev/api/contact", zone_id = "${CF_ZONE_ID}" }
]

workers/playing/index.js (new file, 85 lines)
import { createClient } from '@supabase/supabase-js'
import slugify from 'slugify'

const sanitizeMediaString = (str) => {
  const sanitizedString = str.normalize('NFD').replace(/[\u0300-\u036f\u2010—\.\?\(\)\[\]\{\}]/g, '').replace(/\.{3}/g, '')

  return slugify(sanitizedString, {
    replacement: '-',
    remove: /[#,&,+()$~%.'":*?<>{}]/g,
    lower: true,
  })
}

const regionNames = new Intl.DisplayNames(['en'], { type: 'region' })
const getCountryName = (countryCode) => regionNames.of(countryCode.trim()) || countryCode.trim()
const parseCountryField = (countryField) => {
  if (!countryField) return null

  const delimiters = [',', '/', '&', 'and']
  let countries = [countryField]

  delimiters.forEach(delimiter => {
    countries = countries.flatMap(country => country.split(delimiter))
  })

  return countries.map(getCountryName).join(', ')
}

const fetchGenreById = async (supabase, genreId) => {
  const { data, error } = await supabase
    .from('genres')
    .select('emoji')
    .eq('id', genreId)
    .single()

  if (error) {
    console.error('Error fetching genre:', error)
    return null
  }

  return data.emoji
}

export default {
  async fetch(request, env) {
    const SUPABASE_URL = env.SUPABASE_URL
    const SUPABASE_KEY = env.SUPABASE_KEY
    const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)

    const { data, error } = await supabase
      .from('listens')
      .select(`
        track_name,
        artist_name,
        listened_at,
        artists (mbid, genres, country, emoji)
      `)
      .order('listened_at', { ascending: false })
      .range(0, 1)

    const headers = {
      "Content-Type": "application/json",
      "Cache-Control": "public, s-maxage=360, stale-while-revalidate=1080",
    }

    if (error) {
      console.error('Error fetching data:', error)
      return new Response(JSON.stringify({ error: "Failed to fetch the latest track" }), { headers })
    }

    if (data.length === 0) {
      return new Response(JSON.stringify({ message: "No recent tracks found" }), { headers })
    }

    const scrobbleData = data[0]
    const genreEmoji = await fetchGenreById(supabase, scrobbleData.artists.genres)
    const emoji = scrobbleData.artists.emoji || genreEmoji

    return new Response(JSON.stringify({
      content: `${emoji || '🎧'} ${scrobbleData.track_name} by <a href="https://coryd.dev/music/artists/${sanitizeMediaString(scrobbleData.artist_name)}-${sanitizeMediaString(parseCountryField(scrobbleData.artists.country))}">${
        scrobbleData.artist_name
      }</a>`,
    }), { headers })
  }
}
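
With placeholder track and artist values, the JSON this returns to the front end looks roughly like:

{
  "content": "🎧 Track Title by <a href=\"https://coryd.dev/music/artists/artist-name-united-states\">Artist Name</a>"
}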

workers/playing/wrangler.template.toml (new file, 12 lines)
name = "now-playing-worker"
main = "./index.js" # Specify the entry point
compatibility_date = "2023-01-01"

account_id = "${CF_ACCOUNT_ID}"
workers_dev = true

[env.production]
name = "now-playing-worker-production"
routes = [
  { pattern = "coryd.dev/api/now-playing", zone_id = "${CF_ZONE_ID}" }
]

workers/rebuild/index.js (new file, 17 lines)
export default {
  async scheduled(event, env, ctx) {
    const deployHookUrl = env.DEPLOY_HOOK_URL

    const response = await fetch(deployHookUrl, {
      method: 'POST',
    })

    if (!response.ok) {
      const errorText = await response.text()
      console.error(`Error triggering deploy: ${response.statusText}`, errorText)
      return
    }

    console.log('Deploy triggered successfully')
  }
}
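
DEPLOY_HOOK_URL is read from the worker environment but never appears in the template below, so it presumably has to be supplied out of band, e.g. as a Wrangler secret with something like wrangler secret put DEPLOY_HOOK_URL --env production --config workers/rebuild/wrangler.toml (exact flags depend on the Wrangler version in use). The Supabase and ForwardEmail keys used by the other workers would be handled the same way.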

workers/rebuild/wrangler.template.toml (new file, 10 lines)
name = "scheduled-rebuild-worker"
main = "./index.js"
compatibility_date = "2023-01-01"

account_id = "${CF_ACCOUNT_ID}"
workers_dev = true

[env.production]
name = "scheduled-rebuild-worker-production"
triggers = {crons = ["0 * * * *"]}

workers/scrobble/index.js (new file, 221 lines)
import { createClient } from '@supabase/supabase-js'
import { DateTime } from 'luxon'
import slugify from 'slugify'

const sanitizeMediaString = (str) => {
  const sanitizedString = str
    .normalize('NFD')
    .replace(/[\u0300-\u036f\u2010\-\.\?\(\)\[\]\{\}]/g, '')
    .replace(/\.{3}/g, '')
  return slugify(sanitizedString, {
    replacement: '-',
    remove: /[#,&,+()$~%.'":*?<>{}]/g,
    lower: true,
  })
}

const sendEmail = async (subject, text, authHeader) => {
  const emailData = new URLSearchParams({
    from: 'hi@admin.coryd.dev',
    to: 'hi@coryd.dev',
    subject: subject,
    text: text,
  }).toString()

  try {
    const response = await fetch('https://api.forwardemail.net/v1/emails', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Authorization': authHeader,
      },
      body: emailData,
    })

    const responseText = await response.text()

    if (!response.ok) {
      console.error('Email API response error:', response.status, responseText)
      throw new Error(`Failed to send email: ${responseText}`)
    } else {
      console.log('Email sent successfully')
    }
  } catch (error) {
    console.error('Error sending email:', error.message)
  }
}

export default {
  async fetch(request, env) {
    const SUPABASE_URL = env.SUPABASE_URL
    const SUPABASE_KEY = env.SUPABASE_KEY
    const FORWARDEMAIL_API_KEY = env.FORWARDEMAIL_API_KEY
    const ACCOUNT_ID_PLEX = env.ACCOUNT_ID_PLEX

    const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
    const authHeader = 'Basic ' + btoa(`${FORWARDEMAIL_API_KEY}:`)
    const url = new URL(request.url)
    const params = url.searchParams
    const id = params.get('id')

    if (!id) return new Response(JSON.stringify({ status: 'Bad request' }), {
      headers: { 'Content-Type': 'application/json' },
    })

    if (id !== ACCOUNT_ID_PLEX) return new Response(JSON.stringify({ status: 'Forbidden' }), {
      headers: { 'Content-Type': 'application/json' },
    })

    const contentType = request.headers.get('Content-Type') || ''
    if (!contentType.includes('multipart/form-data')) return new Response(
      JSON.stringify({
        status: 'Bad request',
        message: 'Invalid Content-Type. Expected multipart/form-data.',
      }),
      { headers: { 'Content-Type': 'application/json' } }
    )

    try {
      const data = await request.formData()
      const payload = JSON.parse(data.get('payload'))

      if (payload?.event === 'media.scrobble') {
        const artist = payload['Metadata']['grandparentTitle']
        const album = payload['Metadata']['parentTitle']
        const track = payload['Metadata']['title']
        const listenedAt = Math.floor(DateTime.now().toSeconds())
        const artistKey = sanitizeMediaString(artist)
        const albumKey = `${artistKey}-${sanitizeMediaString(album)}`

        let { data: artistData, error: artistError } = await supabase
          .from('artists')
          .select('*')
          .ilike('name_string', artist)
          .single()

        if (artistError && artistError.code === 'PGRST116') {
          const { error: insertArtistError } = await supabase
            .from('artists')
            .insert([
              {
                mbid: null,
                art: '4cef75db-831f-4f5d-9333-79eaa5bb55ee',
                name: artist,
                tentative: true,
                total_plays: 0,
              },
            ])

          if (insertArtistError) {
            console.error('Error inserting artist:', insertArtistError.message)
            return new Response(
              JSON.stringify({
                status: 'error',
                message: insertArtistError.message,
              }),
              { headers: { 'Content-Type': 'application/json' } }
            )
          }

          await sendEmail(
            'New tentative artist record',
            `A new tentative artist record was inserted:\n\nArtist: ${artist}\nKey: ${artistKey}`,
            authHeader
          )

          ({ data: artistData, error: artistError } = await supabase
            .from('artists')
            .select('*')
            .ilike('name_string', artist)
            .single())
        } else if (artistError) {
          console.error('Error fetching artist:', artistError.message)
          return new Response(
            JSON.stringify({ status: 'error', message: artistError.message }),
            { headers: { 'Content-Type': 'application/json' } }
          )
        }

        let { data: albumData, error: albumError } = await supabase
          .from('albums')
          .select('*')
          .ilike('key', albumKey)
          .single()

        if (albumError && albumError.code === 'PGRST116') {
          const { error: insertAlbumError } = await supabase
            .from('albums')
            .insert([
              {
                mbid: null,
                art: '4cef75db-831f-4f5d-9333-79eaa5bb55ee',
                key: albumKey,
                name: album,
                tentative: true,
                total_plays: 0,
              },
            ])

          if (insertAlbumError) {
            console.error('Error inserting album:', insertAlbumError.message)
            return new Response(
              JSON.stringify({
                status: 'error',
                message: insertAlbumError.message,
              }),
              { headers: { 'Content-Type': 'application/json' } }
            )
          }

          await sendEmail(
            'New tentative album record',
            `A new tentative album record was inserted:\n\nAlbum: ${album}\nKey: ${albumKey}`,
            authHeader
          )

          ({ data: albumData, error: albumError } = await supabase
            .from('albums')
            .select('*')
            .ilike('key', albumKey)
            .single())
        } else if (albumError) {
          console.error('Error fetching album:', albumError.message)
          return new Response(
            JSON.stringify({ status: 'error', message: albumError.message }),
            { headers: { 'Content-Type': 'application/json' } }
          )
        }

        const { error: listenError } = await supabase.from('listens').insert([
          {
            artist_name: artist,
            album_name: album,
            track_name: track,
            listened_at: listenedAt,
            album_key: albumKey,
          },
        ])

        if (listenError) {
          console.error('Error inserting listen:', listenError.message)
          return new Response(
            JSON.stringify({ status: 'error', message: listenError.message }),
            { headers: { 'Content-Type': 'application/json' } }
          )
        }

        console.log('Listen record inserted successfully')
      }

      return new Response(JSON.stringify({ status: 'success' }), {
        headers: { 'Content-Type': 'application/json' },
      })
    } catch (e) {
      console.error('Error processing request:', e.message)
      return new Response(
        JSON.stringify({ status: 'error', message: e.message }),
        { headers: { 'Content-Type': 'application/json' } }
      )
    }
  },
}
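
For context, Plex delivers webhooks as multipart/form-data with a JSON "payload" part; the fields this worker actually reads reduce to roughly the following shape (values are placeholders, and the real payload carries many more fields):

{
  "event": "media.scrobble",
  "Metadata": {
    "grandparentTitle": "Artist Name",
    "parentTitle": "Album Name",
    "title": "Track Name"
  }
}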

workers/scrobble/wrangler.template.toml (new file, 12 lines)
name = "scrobble-worker"
main = "./index.js" # Add this line to specify the entry point
compatibility_date = "2023-01-01"

account_id = "${CF_ACCOUNT_ID}"
workers_dev = true

[env.production]
name = "scrobble-worker-production"
routes = [
  { pattern = "coryd.dev/api/scrobble*", zone_id = "${CF_ZONE_ID}" }
]