chore: search cleanup
This commit is contained in:
parent
19d17f70d2
commit
fca18da3f7
36 changed files with 71 additions and 60 deletions
4
package-lock.json
generated
4
package-lock.json
generated
|
@ -1,12 +1,12 @@
|
|||
{
|
||||
"name": "coryd.dev",
|
||||
"version": "1.5.16",
|
||||
"version": "1.5.17",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "coryd.dev",
|
||||
"version": "1.5.16",
|
||||
"version": "1.5.17",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@cdransf/api-text": "^1.5.0",
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "coryd.dev",
|
||||
"version": "1.5.16",
|
||||
"version": "1.5.17",
|
||||
"description": "The source for my personal site. Built using 11ty (and other tools).",
|
||||
"type": "module",
|
||||
"engines": {
|
||||
|
|
42
queries/functions/search.psql
Normal file
42
queries/functions/search.psql
Normal file
|
@ -0,0 +1,42 @@
|
|||
-- Paginated full-text search over optimized_search_index.
-- search_query : user's search terms (parsed with plainto_tsquery, 'english').
-- page_size / page_offset : LIMIT/OFFSET pagination controls.
-- types        : optional filter; NULL means "all types".
-- Returns matching rows ranked by ts_rank_cd, best match first.
CREATE OR REPLACE FUNCTION public.search_optimized_index(
    search_query text,
    page_size integer,
    page_offset integer,
    types text[]
) RETURNS TABLE(
    result_id integer,
    url text,
    title text,
    description text,
    tags text,
    genre_name text,
    genre_url text,
    type text,
    total_plays integer,
    rank real
) AS $$
BEGIN
  RETURN QUERY
  SELECT
    s.id::integer AS result_id,
    s.url,
    s.title,
    s.description,
    array_to_string(s.tags, ', ') AS tags,
    s.genre_name,
    s.genre_url,
    s.type,
    s.total_plays,
    ts_rank_cd(
      -- FIX: coalesce each column — `||` with a NULL operand yields NULL,
      -- which made any row with a NULL title/description/tags unsearchable.
      -- Also add the previously-missing space between description and tags
      -- so the last description word is not glued to the first tag.
      to_tsvector('english',
        coalesce(s.title, '') || ' ' ||
        coalesce(s.description, '') || ' ' ||
        coalesce(array_to_string(s.tags, ' '), '')
      ),
      plainto_tsquery('english', search_query)
    ) AS rank
  FROM optimized_search_index s
  WHERE
    (types IS NULL OR s.type = ANY(types))
    AND plainto_tsquery('english', search_query) @@
      -- Must build the document exactly as in the ranking expression above.
      to_tsvector('english',
        coalesce(s.title, '') || ' ' ||
        coalesce(s.description, '') || ' ' ||
        coalesce(array_to_string(s.tags, ' '), '')
      )
  ORDER BY rank DESC
  LIMIT page_size OFFSET page_offset;
END;
$$ LANGUAGE plpgsql;
|
|
@ -220,15 +220,20 @@ window.addEventListener("load", () => {
|
|||
|
||||
total = index.total || results.length;
|
||||
|
||||
resultsById = results.reduce((acc, item) => {
|
||||
const formattedResults = results.map((item) => ({
|
||||
...item,
|
||||
id: item.result_id,
|
||||
}));
|
||||
|
||||
resultsById = formattedResults.reduce((acc, item) => {
|
||||
acc[item.id] = item;
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
miniSearch.removeAll();
|
||||
miniSearch.addAll(results);
|
||||
miniSearch.addAll(formattedResults);
|
||||
|
||||
return results;
|
||||
return formattedResults;
|
||||
} catch (error) {
|
||||
console.error("Error fetching search data:", error);
|
||||
return [];
|
||||
|
|
|
@ -2,74 +2,38 @@ import { createClient } from "@supabase/supabase-js";
|
|||
|
||||
/**
 * Cloudflare Worker: search endpoint backed by the Supabase
 * `search_optimized_index` RPC (full-text search with pagination).
 *
 * Query params: q (search terms), type (comma-separated type filter),
 * page (1-based, default 1), pageSize (default 10).
 * Responds 403 unless the request originates from https://coryd.dev.
 */
const worker = {
  async fetch(request, env) {
    const allowedOrigin = "https://coryd.dev";
    const origin = request.headers.get("Origin") || "";
    const referer = request.headers.get("Referer") || "";

    // Reject requests that come from neither the allowed Origin nor Referer.
    if (!origin.startsWith(allowedOrigin) && !referer.startsWith(allowedOrigin))
      return new Response("Forbidden", { status: 403 });

    const supabase = createClient(env.SUPABASE_URL, env.SUPABASE_KEY);
    const { searchParams } = new URL(request.url);
    const query = searchParams.get("q") || "";
    // Only the first `type` param is honored; it may hold a comma list.
    const rawTypes = searchParams.getAll("type");
    const types = rawTypes.length > 0 ? rawTypes[0].split(",") : null;

    const page = parseInt(searchParams.get("page") || "1", 10);
    const pageSize = parseInt(searchParams.get("pageSize") || "10", 10);
    const offset = (page - 1) * pageSize;

    try {
      const { data, error } = await supabase.rpc("search_optimized_index", {
        search_query: query,
        page_size: pageSize,
        page_offset: offset,
        // BUG FIX: `types` is null when no `type` param was supplied, so the
        // original `types.length ? types : null` threw a TypeError on every
        // unfiltered request. Guard for null before reading `.length`.
        types: Array.isArray(types) && types.length > 0 ? types : null,
      });

      if (error) {
        console.error("Error fetching search data:", error);
        return new Response(JSON.stringify({ results: [] }), { status: 500 });
      }

      if (!data || data.length === 0) {
        console.warn("No results found.");
        return new Response(
          JSON.stringify({ results: [], total: 0, page, pageSize }),
          { headers: { "Content-Type": "application/json" } }
        );
      }

      return new Response(JSON.stringify({ results: data }), {
        headers: { "Content-Type": "application/json" },
      });
    } catch (error) {
      console.error("Unexpected error:", error);
      return new Response("Internal Server Error", { status: 500 });
    }
  },
};

export default worker;
|
||||
|
|
Reference in a new issue