diff --git a/_redirects b/_redirects
index 190bdf8f..c0dc078a 100644
--- a/_redirects
+++ b/_redirects
@@ -50,19 +50,20 @@
/now.html /now 301
# feeds
- /rss https://feedpress.me/coryd 301
- /atom https://feedpress.me/coryd 301
- /rss.xml https://feedpress.me/coryd 301
- /atom.xml https://feedpress.me/coryd 301
- /feeds/all.xml https://feedpress.me/coryd 301
- /feed.xml https://feedpress.me/coryd
- /feed.json https://feedpress.me/coryd.json
- /books.xml https://feedpress.me/coryd-books
- /books.json https://feedpress.me/coryd-books.json
- /links.xml https://feedpress.me/coryd-links
- /links.json https://feedpress.me/coryd-links.json
- /follow.xml https://feedpress.me/coryd-all
- /follow.json https://feedpress.me/coryd-all.json
+/feed https://feedpress.me/coryd 301
+/rss https://feedpress.me/coryd 301
+/atom https://feedpress.me/coryd 301
+/rss.xml https://feedpress.me/coryd 301
+/atom.xml https://feedpress.me/coryd 301
+/feeds/all.xml https://feedpress.me/coryd 301
+/feed.xml https://feedpress.me/coryd
+/feed.json https://feedpress.me/coryd.json
+/books.xml https://feedpress.me/coryd-books
+/books.json https://feedpress.me/coryd-books.json
+/links.xml https://feedpress.me/coryd-links
+/links.json https://feedpress.me/coryd-links.json
+/follow.xml https://feedpress.me/coryd-all
+/follow.json https://feedpress.me/coryd-all.json
# general
/articles/ /posts/ 301
diff --git a/config/filters/index.js b/config/filters/index.js
index 82782df6..b7870efd 100644
--- a/config/filters/index.js
+++ b/config/filters/index.js
@@ -155,12 +155,12 @@ export default {
url: item['url'],
type: item.type
}
- if (item.type === 'artists') {
+ if (item.type === 'artist') {
normalized['title'] = item['title']
normalized['alt'] = `${item['plays']} plays of ${item['title']}`
normalized['subtext'] = `${item['plays']} plays`
}
- if (item.type === 'albums') {
+ if (item.type === 'album') {
normalized['title'] = item['title']
normalized['alt'] = `${item['title']} by ${item['artist']}`
normalized['subtext'] = `${item['artist']}`
diff --git a/package-lock.json b/package-lock.json
index a4fb390b..194dbac4 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "coryd.dev",
- "version": "20.13.4",
+ "version": "21.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "coryd.dev",
- "version": "20.13.4",
+ "version": "21.0.0",
"license": "MIT",
"dependencies": {
"@cdransf/api-text": "^1.4.0",
@@ -16,7 +16,7 @@
"youtube-video-element": "^1.1.6"
},
"devDependencies": {
- "@11ty/eleventy": "3.0.0-alpha.18",
+ "@11ty/eleventy": "v3.0.0-beta.1",
"@11ty/eleventy-fetch": "^4.0.1",
"@11ty/eleventy-plugin-syntaxhighlight": "^5.0.0",
"@11tyrocks/eleventy-plugin-lightningcss": "^1.4.0",
@@ -68,9 +68,9 @@
}
},
"node_modules/@11ty/eleventy": {
- "version": "3.0.0-alpha.18",
- "resolved": "https://registry.npmjs.org/@11ty/eleventy/-/eleventy-3.0.0-alpha.18.tgz",
- "integrity": "sha512-CQu4HOtYJySEexVRT/tFtLtqtI4+winun0NFmFIUp0SvxRpP46+ZxBvBc9ezSFLo1nN0zJkwoG8GTkhMPOThtg==",
+ "version": "3.0.0-beta.1",
+ "resolved": "https://registry.npmjs.org/@11ty/eleventy/-/eleventy-3.0.0-beta.1.tgz",
+ "integrity": "sha512-iJT7vekH11l8PAUPBfUAcb5oWbYK0w4ijgwDTutUsk6tX9rp4ZRL1jdhVWvZq04/rkc55mczNFPPhHB/XO1/qw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -582,9 +582,9 @@
"peer": true
},
"node_modules/@types/node": {
- "version": "22.0.0",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-22.0.0.tgz",
- "integrity": "sha512-VT7KSYudcPOzP5Q0wfbowyNLaVR8QWUdw+088uFWwfvpY6uCWaXpqV6ieLAu9WBcnTa7H4Z5RLK8I5t2FuOcqw==",
+ "version": "22.0.2",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-22.0.2.tgz",
+ "integrity": "sha512-yPL6DyFwY5PiMVEwymNeqUTKsDczQBJ/5T7W/46RwLU/VH+AA8aT5TZkvBviLKLbbm0hlfftEkGrNzfRk/fofQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -896,9 +896,9 @@
}
},
"node_modules/caniuse-lite": {
- "version": "1.0.30001644",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001644.tgz",
- "integrity": "sha512-YGvlOZB4QhZuiis+ETS0VXR+MExbFf4fZYYeMTEE0aTQd/RdIjkTyZjLrbYVKnHzppDvnOhritRVv+i7Go6mHw==",
+ "version": "1.0.30001645",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001645.tgz",
+ "integrity": "sha512-GFtY2+qt91kzyMk6j48dJcwJVq5uTkk71XxE3RtScx7XWRLsO7bU44LOFkOZYR8w9YMS0UhPSYpN/6rAMImmLw==",
"dev": true,
"funding": [
{
@@ -1254,9 +1254,9 @@
"license": "MIT"
},
"node_modules/electron-to-chromium": {
- "version": "1.5.3",
- "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.3.tgz",
- "integrity": "sha512-QNdYSS5i8D9axWp/6XIezRObRHqaav/ur9z1VzCDUCH1XIFOr9WQk5xmgunhsTpjjgDy3oLxO/WMOVZlpUQrlA==",
+ "version": "1.5.4",
+ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.4.tgz",
+ "integrity": "sha512-orzA81VqLyIGUEA77YkVA1D+N+nNfl2isJVjjmOyrlxuooZ19ynb+dOlaDTqd/idKRS9lDCSBmtzM+kyCsMnkA==",
"dev": true,
"license": "ISC"
},
@@ -3111,9 +3111,9 @@
}
},
"node_modules/rimraf": {
- "version": "5.0.9",
- "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.9.tgz",
- "integrity": "sha512-3i7b8OcswU6CpU8Ej89quJD4O98id7TtVM5U4Mybh84zQXdrFmDLouWBEEaD/QfO3gDDfH+AGFCGsR7kngzQnA==",
+ "version": "5.0.10",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz",
+ "integrity": "sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==",
"dev": true,
"license": "ISC",
"dependencies": {
@@ -3122,9 +3122,6 @@
"bin": {
"rimraf": "dist/esm/bin.mjs"
},
- "engines": {
- "node": "14 >=14.20 || 16 >=16.20 || >=18"
- },
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
diff --git a/package.json b/package.json
index f902c7e5..d6151dd0 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "coryd.dev",
- "version": "20.13.4",
+ "version": "21.0.0",
"description": "The source for my personal site. Built using 11ty (and other tools).",
"type": "module",
"scripts": {
@@ -31,7 +31,7 @@
"youtube-video-element": "^1.1.6"
},
"devDependencies": {
- "@11ty/eleventy": "3.0.0-alpha.18",
+ "@11ty/eleventy": "v3.0.0-beta.1",
"@11ty/eleventy-fetch": "^4.0.1",
"@11ty/eleventy-plugin-syntaxhighlight": "^5.0.0",
"@11tyrocks/eleventy-plugin-lightningcss": "^1.4.0",
diff --git a/src/data/albumReleases.js b/src/data/albumReleases.js
index 55ff8115..52563528 100644
--- a/src/data/albumReleases.js
+++ b/src/data/albumReleases.js
@@ -9,37 +9,46 @@ const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const fetchAlbumReleases = async () => {
const today = DateTime.utc().toISO()
const { data, error } = await supabase
- .from('albums')
+ .from('optimized_album_releases')
.select(`
name,
key,
release_date,
release_link,
total_plays,
- art(filename_disk),
- artists(name_string, mbid, country)
+ art,
+ artist_name,
+ artist_mbid,
+ artist_country
`)
.gt('release_date', today)
if (error) {
console.error('Error fetching data:', error)
- return
+ return []
}
- return data.filter(album => !album['total_plays'] || !album['total_plays'] > 0).map(album => ({
- artist: album['artists']['name_string'],
+ return data
+ .filter(album => !album['total_plays'] || album['total_plays'] <= 0)
+ .map(album => ({
+ artist: album['artist_name'],
title: album['name'],
date: DateTime.fromISO(album['release_date']).toLocaleString(DateTime.DATE_FULL),
url: album['release_link'],
- image: `/${album?.['art']?.['filename_disk']}` || '',
- artist_url: `/music/artists/${sanitizeMediaString(album['artists']['name_string'])}-${sanitizeMediaString(parseCountryField(album['artists']['country']))}`,
- mbid: album['artists']['mbid'],
+ image: album['art'] ? `/${album['art']}` : '',
+ artist_url: `/music/artists/${sanitizeMediaString(album['artist_name'])}-${sanitizeMediaString(parseCountryField(album['artist_country']))}`,
+ mbid: album['artist_mbid'],
timestamp: DateTime.fromISO(album['release_date']).toSeconds(),
type: 'album-release'
- }
- )).sort((a, b) => a['timestamp'] - b['timestamp'])
+ }))
+ .sort((a, b) => a['timestamp'] - b['timestamp'])
}
export default async function () {
- return await fetchAlbumReleases()
-}
+ try {
+ return await fetchAlbumReleases()
+ } catch (error) {
+ console.error('Error fetching and processing album releases:', error)
+ return []
+ }
+}
\ No newline at end of file
diff --git a/src/data/artists.js b/src/data/artists.js
index 034d39a8..e4f557f8 100644
--- a/src/data/artists.js
+++ b/src/data/artists.js
@@ -1,85 +1,81 @@
import { createClient } from '@supabase/supabase-js'
import { sanitizeMediaString, parseCountryField } from '../../config/utilities/index.js'
-import { DateTime } from 'luxon'
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
+const PAGE_SIZE = 500
-const PAGE_SIZE = 50
+const fetchAllArtists = async () => {
+ let artists = []
+ let rangeStart = 0
-const fetchPaginatedData = async (table, selectFields) => {
- let data = []
- let page = 0
- let hasMoreRecords = true
-
- while (hasMoreRecords) {
- const { data: pageData, error } = await supabase
- .from(table)
- .select(selectFields)
- .order('id', { ascending: true })
- .range(page * PAGE_SIZE, (page + 1) * PAGE_SIZE - 1)
+ while (true) {
+ const { data, error } = await supabase
+ .from('optimized_artists')
+ .select(`
+ id,
+ mbid,
+ name_string,
+ tentative,
+ total_plays,
+ country,
+ description,
+ favorite,
+ genre,
+ emoji,
+ tattoo,
+ art,
+ albums,
+ concerts
+ `)
+ .range(rangeStart, rangeStart + PAGE_SIZE - 1)
if (error) {
- console.error(`Error fetching ${table}:`, error)
+ console.error('Error fetching artists:', error)
break
}
- data = data.concat(pageData)
-
- if (pageData.length < PAGE_SIZE) {
- hasMoreRecords = false
- } else {
- page++
- }
- }
-
- return data
-}
-
-const fetchGenreMapping = async () => {
- const { data, error } = await supabase
- .from('genres')
- .select('id, name')
-
- if (error) {
- console.error('Error fetching genres:', error)
- return {}
- }
-
- return data.reduce((acc, genre) => {
- acc[genre['id']] = genre['name']
- return acc
- }, {})
-}
-
-export default async function () {
- const genreMapping = await fetchGenreMapping()
- const artists = await fetchPaginatedData('artists', 'id, mbid, name_string, art(filename_disk), total_plays, country, description, favorite, tattoo, genres')
- const allAlbums = await fetchPaginatedData('albums', 'id, mbid, name, release_year, total_plays, artist, release_date')
- const albums = allAlbums.filter(album =>
- !album['release_date'] ||
- DateTime.fromISO(album['release_date']) <= DateTime.now() ||
- (DateTime.fromISO(album['release_date']) > DateTime.now() && album['total_plays'] > 0)
- )
- const albumsByArtist = albums.reduce((acc, album) => {
- if (!acc[album['artist']]) acc[album['artist']] = []
- acc[album['artist']].push({
- id: album['id'],
- name: album['name'],
- release_year: album['release_year'],
- total_plays: album['total_plays'] > 0 ? album['total_plays'] : '-'
- })
- return acc
- }, {})
-
- for (const artist of artists) {
- artist['albums'] = albumsByArtist[artist['id']]?.sort((a, b) => a['release_year'] - b['release_year']) || []
- artist['image'] = `/${artist['art']['filename_disk']}`
- artist['country'] = parseCountryField(artist['country'])
- artist['genres'] = genreMapping[artist['genres']] || ''
- artist['url'] = `/music/artists/${sanitizeMediaString(artist['name_string'])}-${sanitizeMediaString(artist['country'])}`
+ artists = artists.concat(data)
+ if (data.length < PAGE_SIZE) break
+ rangeStart += PAGE_SIZE
}
return artists
+}
+
+const processArtists = (artists) => {
+ return artists.map(artist => ({
+ id: artist['id'],
+ mbid: artist['mbid'],
+ name: artist['name_string'],
+ tentative: artist['tentative'],
+ totalPlays: artist['total_plays'],
+ country: parseCountryField(artist['country']),
+ description: artist['description'],
+ favorite: artist['favorite'],
+ genre: artist['genre'],
+ emoji: artist['emoji'],
+ tattoo: artist['tattoo'],
+ image: artist['art'] ? `/${artist['art']}` : '',
+ url: `/music/artists/${sanitizeMediaString(artist['name_string'])}-${sanitizeMediaString(parseCountryField(artist['country']))}`,
+ albums: (artist['albums'] || []).map(album => ({
+ id: album['id'],
+ name: album['name'],
+ releaseYear: album['release_year'],
+ totalPlays: album['total_plays'],
+ art: album.art ? `/${album['art']}` : ''
+ })).sort((a, b) => a['releaseYear'] - b['releaseYear']),
+ concerts: artist['concerts'] || []
+ }))
+}
+
+export default async function () {
+ try {
+ const artists = await fetchAllArtists()
+ return processArtists(artists)
+ } catch (error) {
+ console.error('Error fetching and processing artists data:', error)
+ return []
+ }
}
\ No newline at end of file
diff --git a/src/data/badges.js b/src/data/badges.js
index c1bf8406..fc17d64f 100644
--- a/src/data/badges.js
+++ b/src/data/badges.js
@@ -14,17 +14,22 @@ const fetchAllBadges = async () => {
if (error) {
console.error('Error fetching badge data:', error)
- return null
+ return []
}
const transformedData = data.map(badge => ({
...badge,
- image: badge['image']['filename_disk'],
+ image: badge.image?.filename_disk || '',
})).sort((a, b) => a.sort - b.sort)
return transformedData
}
export default async function () {
- return await fetchAllBadges()
+ try {
+ return await fetchAllBadges()
+ } catch (error) {
+ console.error('Error fetching and processing badge data:', error)
+ return []
+ }
}
\ No newline at end of file
diff --git a/src/data/blogroll.js b/src/data/blogroll.js
index 2d129c2a..547be3a6 100644
--- a/src/data/blogroll.js
+++ b/src/data/blogroll.js
@@ -12,13 +12,18 @@ const fetchBlogroll = async () => {
.order('name', { ascending: true })
if (error) {
- console.error('Error fetching authors with for the blogroll:', error)
+ console.error('Error fetching authors for the blogroll:', error)
return []
}
- return data.sort((a, b) => a['name'].toLowerCase().localeCompare(b['name'].toLowerCase()))
+ return data.sort((a, b) => a.name.toLowerCase().localeCompare(b.name.toLowerCase()))
}
export default async function () {
- return await fetchBlogroll()
+ try {
+ return await fetchBlogroll()
+ } catch (error) {
+ console.error('Error fetching and processing the blogroll:', error)
+ return []
+ }
}
\ No newline at end of file
diff --git a/src/data/books.js b/src/data/books.js
index 06406c38..d6627a47 100644
--- a/src/data/books.js
+++ b/src/data/books.js
@@ -1,50 +1,49 @@
import { createClient } from '@supabase/supabase-js'
-const { SUPABASE_URL, SUPABASE_KEY } = process.env
+const SUPABASE_URL = process.env.SUPABASE_URL
+const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
+const PAGE_SIZE = 500
-const PAGE_SIZE = 1000
-
-const fetchTagsForBook = async (bookId) => {
- const { data, error } = await supabase
- .from('books_tags')
- .select('tags(id, name)')
- .eq('books_id', bookId)
-
- if (error) {
- console.error(`Error fetching tags for book ${bookId}:`, error)
- return []
- }
-
- return data.map(bt => bt['tags']['name'])
-}
-
-async function fetchAllBooks() {
+const fetchAllBooks = async () => {
let books = []
- let from = 0
+ let rangeStart = 0
while (true) {
const { data, error } = await supabase
- .from('books')
- .select(`*, art(filename_disk)`)
- .range(from, from + PAGE_SIZE - 1)
+ .from('optimized_books')
+ .select(`
+ id,
+ isbn,
+ date_finished,
+ author,
+ description,
+ title,
+ progress,
+ read_status,
+ star_rating,
+ review,
+ art,
+ favorite,
+ tags
+ `)
+ .order('date_finished', { ascending: false })
+ .range(rangeStart, rangeStart + PAGE_SIZE - 1)
if (error) {
console.error('Error fetching data from Supabase:', error)
break
}
- for (const book of data) {
- book['tags'] = await fetchTagsForBook(book['id'])
- }
-
books = books.concat(data)
-
if (data.length < PAGE_SIZE) break
-
- from += PAGE_SIZE
+ rangeStart += PAGE_SIZE
}
+ return books
+}
+
+const processBooks = (books) => {
return books.map(book => {
const dateFinished = new Date(book['date_finished'])
const year = dateFinished.getUTCFullYear()
@@ -55,12 +54,12 @@ async function fetchAllBooks() {
rating: book['star_rating'] !== 'unrated' ? book['star_rating'] : '',
favorite: book['favorite'],
description: book['description'],
- image: `/${book?.['art']?.['filename_disk']}`,
+ image: book['art'] ? `/${book['art']}` : '',
url: `/books/${book['isbn']}`,
date: book['date_finished'],
status: book['read_status'],
progress: book['progress'],
- tags: book['tags'],
+ tags: book['tags'] ? book['tags'].split(',') : [],
isbn: book['isbn'],
type: 'book',
year,
@@ -83,5 +82,6 @@ const sortBooksByYear = (books) => {
export default async function () {
const books = await fetchAllBooks()
- return { all: books, years: sortBooksByYear(books) }
+ const processedBooks = processBooks(books)
+ return { all: processedBooks, years: sortBooksByYear(processedBooks) }
}
\ No newline at end of file
diff --git a/src/data/concerts.js b/src/data/concerts.js
new file mode 100644
index 00000000..145075ba
--- /dev/null
+++ b/src/data/concerts.js
@@ -0,0 +1,80 @@
+import { createClient } from '@supabase/supabase-js'
+import { sanitizeMediaString, parseCountryField } from '../../config/utilities/index.js'
+
+const SUPABASE_URL = process.env.SUPABASE_URL
+const SUPABASE_KEY = process.env.SUPABASE_KEY
+const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
+const PAGE_SIZE = 500
+
+const fetchAllConcerts = async () => {
+ let concerts = []
+ let rangeStart = 0
+
+ while (true) {
+ const { data, error } = await supabase
+ .from('optimized_concerts')
+ .select(`
+ id,
+ date,
+ artist_name_string,
+ venue,
+ concert_notes,
+ artist,
+ venue_name,
+ latitude,
+ longitude,
+ bounding_box,
+ venue_notes,
+ artist_name,
+ artist_mbid,
+ artist_country
+ `)
+ .range(rangeStart, rangeStart + PAGE_SIZE - 1)
+
+ if (error) {
+ console.error('Error fetching concerts:', error)
+ break
+ }
+
+ concerts = concerts.concat(data)
+ if (data.length < PAGE_SIZE) break
+ rangeStart += PAGE_SIZE
+ }
+
+ return concerts
+}
+
+const processConcerts = (concerts) => {
+ return concerts.map(concert => ({
+ id: concert['id'],
+ date: concert['date'],
+ artist_name_string: concert['artist_name_string'],
+ venue: {
+ id: concert['venue'],
+ name: concert['venue_name'],
+ latitude: concert['latitude'],
+ longitude: concert['longitude'],
+ bounding_box: concert['bounding_box'],
+ notes: concert['venue_notes']
+ },
+ notes: concert['concert_notes'],
+ artist: concert['artist'] ? {
+ id: concert['artist'],
+ name: concert['artist_name'],
+ mbid: concert['artist_mbid'],
+ country: parseCountryField(concert['artist_country'])
+ } : null,
+ url: `/concerts/${concert['id']}`,
+ artist_url: concert['artist'] ? `/music/artists/${sanitizeMediaString(concert['artist_name'])}-${sanitizeMediaString(parseCountryField(concert['artist_country']))}` : null
+ }))
+}
+
+export default async function () {
+ try {
+ const concerts = await fetchAllConcerts()
+ return processConcerts(concerts)
+ } catch (error) {
+ console.error('Error fetching and processing concerts data:', error)
+ return []
+ }
+}
\ No newline at end of file
diff --git a/src/data/genres.js b/src/data/genres.js
index 3bd7f40d..ae5f1113 100644
--- a/src/data/genres.js
+++ b/src/data/genres.js
@@ -30,17 +30,21 @@ const fetchGenresWithArtists = async () => {
return []
}
- data.forEach(genre => {
- genre['artists'] = genre['artists'].map(artist => ({
+ return data.map(genre => ({
+ ...genre,
+ artists: genre['artists'].map(artist => ({
...artist,
country: parseCountryField(artist['country'])
- }))
- genre['url'] = `/music/genres/${slugify(genre['name'].replace('/', '-').toLowerCase())}`
- })
-
- return data
+ })),
+ url: `/music/genres/${slugify(genre['name'].replace('/', '-').toLowerCase())}`
+ }))
}
export default async function () {
- return await fetchGenresWithArtists()
+ try {
+ return await fetchGenresWithArtists()
+ } catch (error) {
+ console.error('Error fetching and processing genres:', error)
+ return []
+ }
}
\ No newline at end of file
diff --git a/src/data/globals.js b/src/data/globals.js
index 2a40aa5d..000771c0 100644
--- a/src/data/globals.js
+++ b/src/data/globals.js
@@ -6,43 +6,23 @@ const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const fetchGlobals = async () => {
const { data, error } = await supabase
- .from('globals')
- .select(`
- *,
- favicon_ico(filename_disk),
- favicon_svg(filename_disk),
- opengraph_default(filename_disk),
- feed_image(filename_disk),
- apple_touch_icon(filename_disk),
- about(filename_disk),
- logo_the_claw(filename_disk)
- `)
+ .from('optimized_globals')
+ .select('*')
+ .single()
if (error) {
console.error('Error fetching globals:', error)
return {}
}
- const globalData = data.pop()
- const keysToProcess = [
- 'favicon_ico',
- 'favicon_svg',
- 'opengraph_default',
- 'feed_image',
- 'apple_touch_icon',
- 'about',
- 'logo_the_claw'
- ]
-
- keysToProcess.forEach(key => {
- if (globalData[key] && globalData[key].filename_disk) {
- globalData[key] = globalData[key].filename_disk
- }
- })
-
- return globalData
+ return data
}
export default async function () {
- return await fetchGlobals()
+ try {
+ return await fetchGlobals()
+ } catch (error) {
+ console.error('Error fetching and processing globals:', error)
+ return {}
+ }
}
\ No newline at end of file
diff --git a/src/data/links.js b/src/data/links.js
index cf5f449a..67a704c2 100644
--- a/src/data/links.js
+++ b/src/data/links.js
@@ -6,18 +6,22 @@ const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const PAGE_SIZE = 50
-const fetchTagsForLink = async (linkId) => {
+const fetchAllTags = async () => {
const { data, error } = await supabase
.from('links_tags')
- .select('tags(id, name)')
- .eq('links_id', linkId)
+ .select('links_id, tags(name)')
if (error) {
- console.error(`Error fetching tags for link ${linkId}:`, error)
- return []
+ console.error('Error fetching all tags from Supabase:', error)
+ return {}
}
- return data.map((lt) => lt.tags.name)
+ return data.reduce((acc, { links_id, tags }) => {
+ if (!tags || !tags.name) return acc
+ if (!acc[links_id]) acc[links_id] = []
+ acc[links_id].push(tags['name'])
+ return acc
+ }, {})
}
const fetchAllLinks = async () => {
@@ -39,11 +43,6 @@ const fetchAllLinks = async () => {
if (data.length < PAGE_SIZE) fetchMore = false
- for (const link of data) {
- link['tags'] = await fetchTagsForLink(link.id)
- link['type'] = 'link'
- }
-
links = links.concat(data)
page++
}
@@ -51,6 +50,15 @@ const fetchAllLinks = async () => {
return links
}
+const processLinks = (links, tagsByLinkId) => {
+ return links.map(link => {
+ link['tags'] = tagsByLinkId[link['id']] || []
+ link['type'] = 'link'
+ return link
+ })
+}
+
export default async function () {
- return await fetchAllLinks()
+ const [links, tagsByLinkId] = await Promise.all([fetchAllLinks(), fetchAllTags()])
+ return processLinks(links, tagsByLinkId)
}
\ No newline at end of file
diff --git a/src/data/movies.js b/src/data/movies.js
index 5aa1e9a1..10888826 100644
--- a/src/data/movies.js
+++ b/src/data/movies.js
@@ -6,27 +6,13 @@ const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const PAGE_SIZE = 1000
-const fetchTagsForMovie = async (movieId) => {
- const { data, error } = await supabase
- .from('movies_tags')
- .select('tags(id, name)')
- .eq('movies_id', movieId)
-
- if (error) {
- console.error(`Error fetching tags for movie ${movieId}:`, error)
- return []
- }
-
- return data.map(mt => mt.tags.name)
-}
-
const fetchAllMovies = async () => {
let movies = []
let rangeStart = 0
while (true) {
const { data, error } = await supabase
- .from('movies')
+ .from('optimized_movies')
.select(`
id,
tmdb_id,
@@ -39,8 +25,9 @@ const fetchAllMovies = async () => {
star_rating,
description,
review,
- art(filename_disk),
- backdrop(filename_disk)
+ art,
+ backdrop,
+ tags
`)
.order('last_watched', { ascending: false })
.range(rangeStart, rangeStart + PAGE_SIZE - 1)
@@ -50,10 +37,6 @@ const fetchAllMovies = async () => {
break
}
- for (const movie of data) {
- movie.tags = await fetchTagsForMovie(movie.id)
- }
-
movies = movies.concat(data)
if (data.length < PAGE_SIZE) break
@@ -63,41 +46,61 @@ const fetchAllMovies = async () => {
return movies
}
-export default async function () {
- const year = DateTime.now().year
- const movies = await fetchAllMovies()
- const formatMovieData = (movies, watched = true) => movies.map((item) => {
- const movie = {
+const processMovies = (movies) => {
+ return movies.map(item => {
+ const lastWatched = DateTime.fromISO(item['last_watched'], { zone: 'utc' })
+ const year = DateTime.now().year
+
+ return {
title: item['title'],
lastWatched: item['last_watched'],
dateAdded: item['last_watched'],
year: item['year'],
url: `/watching/movies/${item['tmdb_id']}`,
- description: `${item['title']} (${item['year']})
Watched at: ${DateTime.fromISO(item['last_watched'], { zone: 'utc' }).setZone('America/Los_Angeles').toFormat('MMMM d, yyyy, h:mma')}`,
- image: `/${item?.['art']?.['filename_disk']}`,
- backdrop: `/${item?.['backdrop']?.['filename_disk']}`,
+ description: item['description'],
+ image: item['art'] ? `/${item['art']}` : '',
+ backdrop: item['backdrop'] ? `/${item['backdrop']}` : '',
plays: item['plays'],
collected: item['collected'],
favorite: item['favorite'],
rating: item['star_rating'],
- description: item['description'],
review: item['review'],
id: item['tmdb_id'],
type: 'movie',
- tags: item['tags']
+ tags: item['tags'] ? item['tags'].split(',') : [],
}
+ })
+}
- return movie
- }).filter(movie => watched ? movie['lastWatched'] : !movie['lastWatched'])
- const favoriteMovies = movies.filter(movie => movie['favorite'])
- const collectedMovies = movies.filter(movie => movie['collected'])
- const recentlyWatchedMovies = movies.filter(movie => movie['last_watched'] && year - DateTime.fromISO(movie['last_watched']).year <= 3).sort((a, b) => new Date(b['last_watched']) - new Date(a['last_watched']))
+export default async function () {
+ const year = DateTime.now().year
- return {
- movies: [...formatMovieData(movies), ...formatMovieData(movies, false)],
- watchHistory: formatMovieData(movies),
- recentlyWatched: formatMovieData(recentlyWatchedMovies),
- favorites: formatMovieData(favoriteMovies).sort((a, b) => a['title'].localeCompare(b['title'])),
- collection: formatMovieData(collectedMovies),
+ try {
+ const movies = await fetchAllMovies()
+ const processedMovies = processMovies(movies)
+
+ const filterMovies = (condition) => processedMovies.filter(condition)
+ const formatMovieData = (movies) => movies.map(movie => movie)
+
+ const favoriteMovies = filterMovies(movie => movie['favorite'])
+ const collectedMovies = filterMovies(movie => movie['collected'])
+ const recentlyWatchedMovies = filterMovies(movie => movie['lastWatched'] && year - DateTime.fromISO(movie['lastWatched']).year <= 3).sort((a, b) => new Date(b['lastWatched']) - new Date(a['lastWatched']))
+
+ return {
+ movies: formatMovieData(processedMovies),
+ watchHistory: formatMovieData(filterMovies(movie => movie['lastWatched'])),
+ recentlyWatched: formatMovieData(recentlyWatchedMovies),
+ favorites: formatMovieData(favoriteMovies).sort((a, b) => a['title'].localeCompare(b['title'])),
+ collection: formatMovieData(collectedMovies),
+ }
+ } catch (error) {
+ console.error('Error fetching and processing movies data:', error)
+ return {
+ movies: [],
+ watchHistory: [],
+ recentlyWatched: [],
+ favorites: [],
+ collection: [],
+ }
}
}
\ No newline at end of file
diff --git a/src/data/music.js b/src/data/music.js
index 373c4603..a326f762 100644
--- a/src/data/music.js
+++ b/src/data/music.js
@@ -6,8 +6,9 @@ const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
+const PAGE_SIZE = 1000
+
const fetchDataForPeriod = async (startPeriod, fields, table) => {
- const PAGE_SIZE = 1000
let rows = []
let rangeStart = 0
@@ -20,7 +21,7 @@ const fetchDataForPeriod = async (startPeriod, fields, table) => {
.range(rangeStart, rangeStart + PAGE_SIZE - 1)
if (error) {
- console.error(error)
+ console.error(`Error fetching data from ${table}:`, error)
break
}
@@ -43,70 +44,49 @@ const fetchGenreMapping = async () => {
return {}
}
return data.reduce((acc, genre) => {
- acc[genre.id] = genre.name
+ acc[genre['id']] = genre['name']
return acc
}, {})
}
-const aggregateData = async (data, groupByField, groupByType) => {
+const aggregateData = (data, groupByField, groupByType, genreMapping) => {
const aggregation = {}
- const genreMapping = await fetchGenreMapping()
data.forEach(item => {
const key = item[groupByField]
if (!aggregation[key]) {
- if (groupByType === 'track') {
- aggregation[key] = {
- title: item[groupByField],
- plays: 0,
- mbid: item['albums']['mbid'],
- url: `/music/artists/${sanitizeMediaString(item['artist_name'])}-${sanitizeMediaString(parseCountryField(item['artists']['country']))}`,
- image: `/${item['albums']?.['art']?.['filename_disk']}` || '',
- timestamp: item['listened_at'],
- type: groupByType,
- genre: genreMapping[item['artists']['genres']] || ''
- }
- } else {
- aggregation[key] = {
- title: item[groupByField],
- plays: 0,
- mbid: item[groupByType]?.['mbid'] || '',
- url: `/music/artists/${sanitizeMediaString(item['artist_name'])}-${sanitizeMediaString(parseCountryField(item['artists']['country']))}`,
- image: `/${item[groupByType]?.['art']?.['filename_disk']}` || '',
- type: groupByType,
- genre: genreMapping[item['artists']['genres']] || ''
- }
+ aggregation[key] = {
+ title: item[groupByField],
+ plays: 0,
+ mbid: (groupByType === 'artist_art' ? item['artist_mbid'] : item['album_mbid']) || '',
+ url: `/music/artists/${sanitizeMediaString(item['artist_name'])}-${sanitizeMediaString(parseCountryField(item['artist_country']))}`,
+ image: `/${groupByType === 'track' ? item['album_art'] : item[groupByType]}`,
+ type: groupByType === 'artist_art' ? 'artist' : groupByType === 'album_art' ? 'album' : groupByType,
+ genre: genreMapping[item['artist_genres']] || ''
}
- if (groupByType === 'track' || groupByType === 'albums') aggregation[key]['artist'] = item['artist_name']
+ if (groupByType === 'track' || groupByType === 'album_art') aggregation[key]['artist'] = item['artist_name']
}
aggregation[key].plays++
})
- const aggregatedData = Object.values(aggregation).sort((a, b) => b.plays - a.plays)
-
- aggregatedData.forEach((item, index) => {
- item.rank = index + 1
- })
-
- return aggregatedData.filter(item => item.plays > 0)
+ return Object.values(aggregation).sort((a, b) => b.plays - a.plays).map((item, index) => ({ ...item, rank: index + 1 }))
}
-const buildRecents = async (data) => {
+const buildRecents = (data) => {
return data.map(listen => ({
title: listen['track_name'],
artist: listen['artist_name'],
- url: `/music/artists/${sanitizeMediaString(listen['artist_name'])}-${sanitizeMediaString(parseCountryField(listen['artists']['country']))}`,
+ url: `/music/artists/${sanitizeMediaString(listen['artist_name'])}-${sanitizeMediaString(parseCountryField(listen['artist_country']))}`,
timestamp: listen['listened_at'],
- image: `/${listen['albums']?.['art']?.['filename_disk']}` || ''
- }))
+ image: `/${listen['album_art']}`
+ })).sort((a, b) => b.timestamp - a.timestamp)
}
-const aggregateGenres = async (data) => {
+const aggregateGenres = (data, genreMapping) => {
const genreAggregation = {}
- const genreMapping = await fetchGenreMapping()
data.forEach(item => {
- const genre = genreMapping[item['artists']['genres']] || ''
+ const genre = genreMapping[item['artist_genres']] || ''
if (!genreAggregation[genre]) genreAggregation[genre] = { genre, plays: 0 }
genreAggregation[genre]['plays']++
@@ -114,38 +94,51 @@ const aggregateGenres = async (data) => {
return Object.values(genreAggregation).sort((a, b) => b['plays'] - a['plays'])
}
-export default async function() {
+export default async function () {
const periods = {
- week: DateTime.now().minus({ days: 7 }).startOf('day'), // last week
- month: DateTime.now().minus({ days: 30 }).startOf('day'), // last 30 days
- threeMonth: DateTime.now().minus({ months: 3 }).startOf('day'), // last three months
+ week: DateTime.now().minus({ days: 7 }).startOf('day'),
+ month: DateTime.now().minus({ days: 30 }).startOf('day'),
+ threeMonth: DateTime.now().minus({ months: 3 }).startOf('day')
}
- const results = {}
const selectFields = `
+ id,
+ listened_at,
track_name,
artist_name,
album_name,
album_key,
- listened_at,
- artists (mbid, art(filename_disk), genres, country),
- albums (mbid, art(filename_disk))
+ artist_mbid,
+ artist_art,
+ artist_genres,
+ artist_country,
+ album_mbid,
+ album_art
`
- for (const [period, startPeriod] of Object.entries(periods)) {
- const periodData = await fetchDataForPeriod(startPeriod, selectFields, 'listens')
- results[period] = {
- artists: await aggregateData(periodData, 'artist_name', 'artists'),
- albums: await aggregateData(periodData, 'album_name', 'albums'),
- tracks: await aggregateData(periodData, 'track_name', 'track'),
- genres: await aggregateGenres(periodData),
- totalTracks: periodData?.length?.toLocaleString('en-US')
- }
+ try {
+ const genreMapping = await fetchGenreMapping()
+
+ const results = await Promise.all(Object.entries(periods).map(async ([period, startPeriod]) => {
+ const periodData = await fetchDataForPeriod(startPeriod, selectFields, 'optimized_listens')
+ return {
+ [period]: {
+ artists: aggregateData(periodData, 'artist_name', 'artist_art', genreMapping),
+ albums: aggregateData(periodData, 'album_name', 'album_art', genreMapping),
+ tracks: aggregateData(periodData, 'track_name', 'track', genreMapping),
+ genres: aggregateGenres(periodData, genreMapping),
+ totalTracks: periodData.length.toLocaleString('en-US')
+ }
+ }
+ }))
+
+ const recentData = await fetchDataForPeriod(DateTime.now().minus({ days: 7 }), selectFields, 'optimized_listens')
+
+ results.push({ recent: buildRecents(recentData) })
+
+ return Object.assign({}, ...results)
+ } catch (error) {
+ console.error('Error in fetching and processing music data:', error)
+ return {}
}
-
- const recentData = await fetchDataForPeriod(DateTime.now().minus({ days: 7 }), selectFields, 'listens')
-
- results['recent'] = (await buildRecents(recentData)).sort((a, b) => b.timestamp - a.timestamp)
-
- return results
}
\ No newline at end of file
diff --git a/src/data/nav.js b/src/data/nav.js
index 29f8c865..c52846c5 100644
--- a/src/data/nav.js
+++ b/src/data/nav.js
@@ -6,45 +6,43 @@ const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const fetchAllNavigation = async () => {
const { data, error } = await supabase
- .from('navigation')
- .select(`
- *,
- pages(title, permalink)
- `)
+ .from('optimized_navigation')
+ .select('*')
if (error) {
console.error('Error fetching navigation data:', error)
- return null
+ return {}
}
- const menu = {}
- data.forEach(item => {
- const menuItem = item.pages ? {
- title: item.pages.title,
- permalink: item.pages.permalink,
- icon: item.icon,
- sort: item.sort
- } : {
- title: item.title,
- permalink: item.permalink,
- icon: item.icon,
- sort: item.sort
+ const menu = data.reduce((acc, item) => {
+ const menuItem = {
+ title: item['title'] || item['page_title'],
+      permalink: item['permalink'] || item['page_permalink'],
+ icon: item['icon'],
+ sort: item['sort']
}
- if (!menu[item.menu_location]) {
- menu[item.menu_location] = [menuItem]
+ if (!acc[item['menu_location']]) {
+ acc[item['menu_location']] = [menuItem]
} else {
- menu[item.menu_location].push(menuItem)
+ acc[item['menu_location']].push(menuItem)
}
- })
+
+ return acc
+ }, {})
Object.keys(menu).forEach(location => {
- menu[location].sort((a, b) => a.sort - b.sort)
+ menu[location].sort((a, b) => a['sort'] - b['sort'])
})
return menu
}
export default async function () {
- return await fetchAllNavigation()
+ try {
+ return await fetchAllNavigation()
+ } catch (error) {
+ console.error('Error fetching and processing navigation data:', error)
+ return {}
+ }
}
\ No newline at end of file
diff --git a/src/data/pages.js b/src/data/pages.js
index 39b04728..36bd23b8 100644
--- a/src/data/pages.js
+++ b/src/data/pages.js
@@ -1,7 +1,7 @@
import { createClient } from '@supabase/supabase-js'
-const SUPABASE_URL = process.env.SUPABASE_URL
-const SUPABASE_KEY = process.env.SUPABASE_KEY
+const SUPABASE_URL = process.env['SUPABASE_URL']
+const SUPABASE_KEY = process.env['SUPABASE_KEY']
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const PAGE_SIZE = 50
@@ -21,28 +21,23 @@ const fetchBlockData = async (collection, itemId) => {
return data
}
-const fetchBlocksForPage = async (pageId) => {
+const fetchAllBlocks = async () => {
const { data, error } = await supabase
.from('pages_blocks')
- .select('collection, item, sort')
- .eq('pages_id', pageId)
+ .select('pages_id, collection, item, sort')
if (error) {
- console.error(`Error fetching blocks for page ${pageId}:`, error)
- return []
+ console.error('Error fetching all blocks from Supabase:', error)
+ return {}
}
- const blocks = await Promise.all(data.map(async block => {
- const blockData = await fetchBlockData(block.collection, block.item)
-
- return {
- type: block['collection'],
- sort: block['sort'],
- ...blockData
+ return data.reduce((acc, block) => {
+ if (!acc[block['pages_id']]) {
+ acc[block['pages_id']] = []
}
- }))
-
- return blocks.sort((a, b) => a.sort - b.sort)
+ acc[block['pages_id']].push(block)
+ return acc
+ }, {})
}
const fetchAllPages = async () => {
@@ -52,11 +47,8 @@ const fetchAllPages = async () => {
while (fetchMore) {
const { data, error } = await supabase
- .from('pages')
- .select(`
- *,
- open_graph_image(filename_disk)
- `)
+ .from('optimized_pages')
+ .select('*')
.range(page * PAGE_SIZE, (page + 1) * PAGE_SIZE - 1)
if (error) {
@@ -66,18 +58,50 @@ const fetchAllPages = async () => {
if (data.length < PAGE_SIZE) fetchMore = false
- for (const page of data) {
- page['blocks'] = await fetchBlocksForPage(page['id'])
- if (page['open_graph_image']) page['open_graph_image'] = page['open_graph_image']['filename_disk']
- pages.push(page)
- }
-
+ pages = pages.concat(data)
page++
}
return pages
}
+const processPages = async (pages, blocksByPageId) => {
+ return Promise.all(pages.map(async page => {
+ const blocks = blocksByPageId[page['id']] || []
+
+ page['blocks'] = await Promise.all(blocks.map(async block => {
+ const blockData = await fetchBlockData(block['collection'], block['item'])
+
+ if (!blockData) return {
+ 'type': block['collection'],
+ 'sort': block['sort']
+ }
+
+ return {
+ 'type': block['collection'],
+ 'sort': block['sort'],
+ ...blockData
+ }
+ })).then(blocks => blocks.filter(block => block !== null))
+
+ page['blocks'].sort((a, b) => a['sort'] - b['sort'])
+
+    if (page['open_graph_image']?.['filename_disk']) page['open_graph_image'] = page['open_graph_image']['filename_disk']
+
+ return page
+ }))
+}
+
export default async function () {
- return await fetchAllPages()
+ try {
+ const [pages, blocksByPageId] = await Promise.all([
+ fetchAllPages(),
+ fetchAllBlocks()
+ ])
+
+ return await processPages(pages, blocksByPageId)
+ } catch (error) {
+ console.error('Error fetching and processing pages:', error)
+ return []
+ }
}
\ No newline at end of file
diff --git a/src/data/posts.js b/src/data/posts.js
index c3a5cf06..6110f378 100644
--- a/src/data/posts.js
+++ b/src/data/posts.js
@@ -1,11 +1,47 @@
import { createClient } from '@supabase/supabase-js'
-const SUPABASE_URL = process.env.SUPABASE_URL
-const SUPABASE_KEY = process.env.SUPABASE_KEY
+const SUPABASE_URL = process.env['SUPABASE_URL']
+const SUPABASE_KEY = process.env['SUPABASE_KEY']
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
-
const PAGE_SIZE = 50
+const fetchAllTags = async () => {
+ const { data, error } = await supabase
+ .from('posts_tags')
+ .select('posts_id, tags(name)')
+
+ if (error) {
+ console.error('Error fetching all tags from Supabase:', error)
+ return {}
+ }
+
+ return data.reduce((acc, { posts_id, tags }) => {
+ if (!tags || !tags['name']) return acc
+ if (!acc[posts_id]) acc[posts_id] = []
+ acc[posts_id].push(tags['name'])
+ return acc
+ }, {})
+}
+
+const fetchAllBlocks = async () => {
+ const { data, error } = await supabase
+ .from('posts_blocks')
+ .select('posts_id, collection, item, sort')
+
+ if (error) {
+ console.error('Error fetching all blocks from Supabase:', error)
+ return {}
+ }
+
+ return data.reduce((acc, block) => {
+ if (!acc[block['posts_id']]) {
+ acc[block['posts_id']] = []
+ }
+ acc[block['posts_id']].push(block)
+ return acc
+ }, {})
+}
+
const fetchBlockData = async (collection, itemId) => {
const { data, error } = await supabase
.from(collection)
@@ -21,44 +57,6 @@ const fetchBlockData = async (collection, itemId) => {
return data
}
-const fetchTagsForPost = async (postId) => {
- const { data, error } = await supabase
- .from('posts_tags')
- .select('tags(id, name)')
- .eq('posts_id', postId)
-
- if (error) {
- console.error(`Error fetching tags for post ${postId}:`, error)
- return []
- }
-
- return data.map(pt => pt.tags.name)
-}
-
-const fetchBlocksForPost = async (postId) => {
- const { data, error } = await supabase
- .from('posts_blocks')
- .select('collection, item, sort')
- .eq('posts_id', postId)
-
- if (error) {
- console.error(`Error fetching blocks for post ${postId}:`, error)
- return []
- }
-
- const blocks = await Promise.all(data.map(async block => {
- const blockData = await fetchBlockData(block.collection, block.item)
-
- return {
- type: block['collection'],
- sort: block['sort'],
- ...blockData
- }
- }))
-
- return blocks
-}
-
const fetchAllPosts = async () => {
let posts = []
let page = 0
@@ -67,11 +65,8 @@ const fetchAllPosts = async () => {
while (fetchMore) {
const { data, error } = await supabase
- .from('posts')
- .select(`
- *,
- image(filename_disk)
- `)
+ .from('optimized_posts')
+ .select('*')
.order('date', { ascending: false })
.range(page * PAGE_SIZE, (page + 1) * PAGE_SIZE - 1)
@@ -85,18 +80,42 @@ const fetchAllPosts = async () => {
for (const post of data) {
if (uniqueSlugs.has(post['slug'])) continue
- uniqueSlugs.add(post.slug)
- post['tags'] = await fetchTagsForPost(post['id'])
- post['blocks'] = await fetchBlocksForPost(post['id'])
- if (post?.['image']?.['filename_disk']) post['image'] = post['image']['filename_disk']
+ uniqueSlugs.add(post['slug'])
posts.push(post)
}
page++
}
+
return posts
}
+const processPosts = async (posts, tagsByPostId, blocksByPostId) => {
+ return Promise.all(posts.map(async post => {
+ post['tags'] = tagsByPostId[post['id']] || []
+ const blocks = blocksByPostId[post['id']] || []
+
+ post['blocks'] = await Promise.all(blocks.map(async block => {
+ const blockData = await fetchBlockData(block['collection'], block['item'])
+ if (!blockData) return null
+ return {
+ 'type': block['collection'],
+ 'sort': block['sort'],
+ ...blockData
+ }
+ })).then(blocks => blocks.filter(block => block !== null))
+
+    if (post['image']?.['filename_disk']) post['image'] = post['image']['filename_disk']
+ return post
+ }))
+}
+
export default async function () {
- return await fetchAllPosts()
+ const [posts, tagsByPostId, blocksByPostId] = await Promise.all([
+ fetchAllPosts(),
+ fetchAllTags(),
+ fetchAllBlocks()
+ ])
+
+ return await processPosts(posts, tagsByPostId, blocksByPostId)
}
\ No newline at end of file
diff --git a/src/data/robots.js b/src/data/robots.js
index 78bde461..885538b0 100644
--- a/src/data/robots.js
+++ b/src/data/robots.js
@@ -9,26 +9,31 @@ const PAGE_SIZE = 100
const fetchAllRobots = async () => {
let robots = []
let from = 0
- let to = PAGE_SIZE - 1
while (true) {
const { data, error } = await supabase
.from('robots')
.select('user_agent')
- .range(from, to)
+ .range(from, from + PAGE_SIZE - 1)
if (error) {
console.error('Error fetching robot data:', error)
- return null
+ return []
}
robots = robots.concat(data)
if (data.length < PAGE_SIZE) break
+ from += PAGE_SIZE
}
- return robots.map(robot => robot['user_agent']).sort((a, b) => a.toLowerCase().localeCompare(b.toLowerCase()))
+ return robots.map(robot => robot.user_agent).sort((a, b) => a.toLowerCase().localeCompare(b.toLowerCase()))
}
export default async function () {
- return await fetchAllRobots()
-}
+ try {
+ return await fetchAllRobots()
+ } catch (error) {
+ console.error('Error fetching and processing robot data:', error)
+ return []
+ }
+}
\ No newline at end of file
diff --git a/src/data/tv.js b/src/data/tv.js
index 7b5ccb48..98d1db43 100644
--- a/src/data/tv.js
+++ b/src/data/tv.js
@@ -1,10 +1,9 @@
import { createClient } from '@supabase/supabase-js'
-const SUPABASE_URL = process.env['SUPABASE_URL']
-const SUPABASE_KEY = process.env['SUPABASE_KEY']
+const SUPABASE_URL = process.env.SUPABASE_URL
+const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
-
-const PAGE_SIZE = 1000
+const PAGE_SIZE = 500
const fetchAllShows = async () => {
let shows = []
@@ -12,27 +11,25 @@ const fetchAllShows = async () => {
while (true) {
const { data, error } = await supabase
- .from('shows')
+ .from('optimized_shows')
.select(`
- title,
+ id,
tmdb_id,
+ last_watched_at,
+ title,
+ year,
collected,
favorite,
- year,
description,
review,
- art(filename_disk),
- backdrop(filename_disk),
- episodes (
- episode_number,
- season_number,
- last_watched_at
- )
+ art,
+ backdrop,
+ episodes
`)
.range(rangeStart, rangeStart + PAGE_SIZE - 1)
if (error) {
- console.error(error)
+ console.error('Error fetching shows:', error)
break
}
@@ -44,122 +41,111 @@ const fetchAllShows = async () => {
return shows
}
-const prepareShowData = (show) => {
- return {
- ...show,
- image: show['art']?.['filename_disk'] ? `/${show['art']['filename_disk']}` : '',
- backdrop: show['backdrop']?.['filename_disk'] ? `/${show['backdrop']['filename_disk']}` : '',
- url: `/watching/shows/${show['tmdb_id']}`,
- }
-}
+const prepareShowData = (show) => ({
+ ...show,
+ image: show['art'] ? `/${show['art']}` : '',
+ backdrop: show['backdrop'] ? `/${show['backdrop']}` : '',
+ url: `/watching/shows/${show['tmdb_id']}`,
+ episodes: show['episodes'] || []
+})
-const prepareEpisodeData = (show) => {
- return show['episodes'].map(episode => ({
- ...episode,
- show_title: show['title'],
- show_tmdb_id: show['tmdb_id'],
- collected: show['collected'],
- favorite: show['favorite'],
- image: show['image'],
- backdrop: show['backdrop']
- }))
+const prepareEpisodeData = (show) => show['episodes'].map(episode => ({
+ ...episode,
+ show_title: show['title'],
+ show_tmdb_id: show['tmdb_id'],
+ collected: show['collected'],
+ favorite: show['favorite'],
+ image: show['image'],
+ backdrop: show['backdrop'],
+ episode_number: episode['episode_number'] || 0,
+ season_number: episode['season_number'] || 0,
+ last_watched_at: episode['last_watched_at'] || '1970-01-01T00:00:00Z'
+}))
+
+const formatEpisodeData = (episodes) => {
+ const showEpisodesMap = {}
+
+ episodes.forEach(episode => {
+ const showTmdbId = episode.show_tmdb_id
+
+ if (!showEpisodesMap[showTmdbId]) {
+ showEpisodesMap[showTmdbId] = {
+ title: episode['show_title'],
+ tmdbId: showTmdbId,
+ collected: episode['collected'],
+ favorite: episode['favorite'],
+ dateAdded: episode['last_watched_at'],
+ lastWatchedAt: episode['last_watched_at'],
+ episodes: [],
+ image: episode['image'],
+ backdrop: episode['backdrop'],
+ }
+ }
+
+ showEpisodesMap[showTmdbId].episodes.push({
+ name: episode['show_title'],
+ url: `/watching/shows/${showTmdbId}`,
+ subtext: `S${episode['season_number']}E${episode['episode_number']}`,
+ episode: episode['episode_number'],
+ season: episode['season_number'],
+ tmdbId: showTmdbId,
+ type: 'tv',
+ dateAdded: episode['last_watched_at'],
+ lastWatchedAt: episode['last_watched_at'],
+ image: episode['image'],
+ backdrop: episode['backdrop'],
+ })
+ })
+
+ return Object.values(showEpisodesMap).sort((a, b) => new Date(b['episodes'][0]['lastWatchedAt']) - new Date(a['episodes'][0]['lastWatchedAt'])).flatMap(show => {
+ const startingEpisode = show['episodes'][show['episodes'].length - 1]['episode']
+ const startingSeason = show['episodes'][show['episodes'].length - 1]['season']
+ const endingEpisode = show['episodes'][0]['episode']
+ const endingSeason = show['episodes'][0]['season']
+
+ if (show.episodes.length > 1) {
+ return {
+ name: show['title'],
+ url: `/watching/shows/${show['tmdbId']}`,
+ subtext: `S${startingSeason}E${startingEpisode} - S${endingSeason}E${endingEpisode}`,
+ startingEpisode,
+ startingSeason,
+ episode: endingEpisode,
+ season: endingSeason,
+ tmdbId: show['tmdbId'],
+ collected: show['collected'],
+ favorite: show['favorite'],
+ type: 'tv-range',
+ image: show['image'],
+ backdrop: show['backdrop'],
+ }
+ } else {
+ return show['episodes'][0]
+ }
+ })
}
export default async function () {
- const rawShows = await fetchAllShows()
- const shows = rawShows.map(prepareShowData)
+ try {
+ const rawShows = await fetchAllShows()
+ const shows = rawShows.map(prepareShowData)
+ const episodes = shows.flatMap(prepareEpisodeData).sort((a, b) => new Date(b.last_watched_at) - new Date(a.last_watched_at))
- const episodes = shows.flatMap(prepareEpisodeData)
+ const favoriteShows = shows.filter(show => show.favorite)
- episodes.sort((a, b) => new Date(b.last_watched_at) - new Date(a.last_watched_at))
-
- const formatEpisodeData = (episodes) => {
- const showEpisodesMap = {}
-
- episodes.forEach(episode => {
- const showTitle = episode['show_title']
- const showTmdbId = episode['show_tmdb_id']
- const episodeNumber = episode['episode_number']
- const seasonNumber = episode['season_number']
- const lastWatchedAt = episode['last_watched_at']
- const collected = episode['collected']
- const favorite = episode['favorite']
- const image = episode['image']
- const backdrop = episode['backdrop']
-
- if (!showEpisodesMap[showTmdbId]) {
- showEpisodesMap[showTmdbId] = {
- title: showTitle,
- tmdbId: showTmdbId,
- collected,
- favorite,
- dateAdded: lastWatchedAt,
- lastWatchedAt,
- episodes: [],
- image,
- backdrop
- }
- }
-
- showEpisodesMap[showTmdbId].episodes.push({
- name: showTitle,
- url: `/watching/shows/${showTmdbId}`,
- subtext: `S${seasonNumber}E${episodeNumber}`,
- episode: episodeNumber,
- season: seasonNumber,
- tmdbId: showTmdbId,
- type: 'tv',
- dateAdded: lastWatchedAt,
- lastWatchedAt,
- image,
- backdrop
- })
- })
-
- const sortedShows = Object.values(showEpisodesMap).sort((a, b) => new Date(b.episodes[0]['lastWatchedAt']) - new Date(a.episodes[0]['lastWatchedAt']))
-
- const episodeData = []
- sortedShows.forEach(show => {
- const startingEpisode = show['episodes'][show['episodes'].length - 1]['episode']
- const startingSeason = show['episodes'][show['episodes'].length - 1]['season']
- const endingEpisode = show['episodes'][0]['episode']
- const endingSeason = show['episodes'][0]['season']
-
- if (show['episodes'].length > 1) {
- episodeData.push({
- name: show['title'],
- url: `/watching/shows/${show['tmdbId']}`,
- subtext: `S${startingSeason}E${startingEpisode} - S${endingSeason}E${endingEpisode}`,
- startingEpisode,
- startingSeason,
- episode: endingEpisode,
- season: endingSeason,
- tmdbId: show['tmdbId'],
- collected: show['collected'],
- favorite: show['favorite'],
- type: 'tv-range',
- image: show['image'],
- backdrop: show['backdrop']
- })
- } else {
- const singleEpisode = show['episodes'][0]
- singleEpisode.collected = show['collected']
- singleEpisode.favorite = show['favorite']
- singleEpisode.image = show['image']
- singleEpisode.backdrop = show['backdrop']
- episodeData.push(singleEpisode)
- }
- })
-
- return episodeData
- }
-
- const favoriteShows = shows.filter(show => show['favorite'])
-
- return {
- shows,
- watchHistory: formatEpisodeData(episodes),
- recentlyWatched: formatEpisodeData(episodes.slice(0, 225)),
- favorites: formatEpisodeData(favoriteShows.flatMap(prepareEpisodeData)).sort((a, b) => a['name'].localeCompare(b['name']))
+ return {
+ shows,
+ watchHistory: formatEpisodeData(episodes),
+ recentlyWatched: formatEpisodeData(episodes.slice(0, 225)),
+ favorites: formatEpisodeData(favoriteShows.flatMap(prepareEpisodeData)).sort((a, b) => a.name.localeCompare(b.name)),
+ }
+ } catch (error) {
+ console.error('Error fetching and processing shows data:', error)
+ return {
+ shows: [],
+ watchHistory: [],
+ recentlyWatched: [],
+ favorites: [],
+ }
}
}
\ No newline at end of file
diff --git a/src/pages/dynamic/music/artists/artist.html b/src/pages/dynamic/music/artists/artist.html
index 84677a47..02fefdd0 100644
--- a/src/pages/dynamic/music/artists/artist.html
+++ b/src/pages/dynamic/music/artists/artist.html
@@ -9,7 +9,7 @@ updated: "now"
schema: artist
---
{%- capture alt -%}
- {{ artist.name_string }} / {{ artist.country }}
+ {{ artist.name }} / {{ artist.country }}
{%- endcapture -%}
{% capture js %}
{% render "../../../../assets/scripts/text-toggle.js" %}
@@ -38,23 +38,23 @@ schema: artist
height="480"
/>