initial commit
commit d799808203
126 changed files with 16265 additions and 0 deletions

lib/generate-rss.ts (new file, 33 lines)

import { escape } from '@/lib/utils/htmlEscaper'

import siteMetadata from '@/data/siteMetadata'
import { PostFrontMatter } from 'types/PostFrontMatter'

const generateRssItem = (post: PostFrontMatter) => `
  <item>
    <guid>${siteMetadata.siteUrl}/blog/${post.slug}</guid>
    <title>${escape(post.title)}</title>
    <link>${siteMetadata.siteUrl}/blog/${post.slug}</link>
    ${post.summary && `<description>${escape(post.summary)}</description>`}
    <pubDate>${new Date(post.date).toUTCString()}</pubDate>
    <author>${siteMetadata.email} (${siteMetadata.author})</author>
    ${post.tags && post.tags.map((t) => `<category>${t}</category>`).join('')}
  </item>
`

const generateRss = (posts: PostFrontMatter[], page = 'feed.xml') => `
  <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
    <channel>
      <title>${escape(siteMetadata.title)}</title>
      <link>${siteMetadata.siteUrl}/blog</link>
      <description>${escape(siteMetadata.description)}</description>
      <language>${siteMetadata.language}</language>
      <managingEditor>${siteMetadata.email} (${siteMetadata.author})</managingEditor>
      <webMaster>${siteMetadata.email} (${siteMetadata.author})</webMaster>
      <lastBuildDate>${new Date(posts[0].date).toUTCString()}</lastBuildDate>
      <atom:link href="${siteMetadata.siteUrl}/${page}" rel="self" type="application/rss+xml"/>
      ${posts.map(generateRssItem).join('')}
    </channel>
  </rss>
`

export default generateRss
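
For orientation, a minimal sketch of how this module might be wired into a build step that writes the feed into public/. The script name, its location, and the use of getAllFilesFrontMatter are assumptions for illustration, not part of this file.

// scripts/generate-feed.ts (illustrative sketch only, not part of the commit)
import fs from 'fs'
import generateRss from '../lib/generate-rss'
import { getAllFilesFrontMatter } from '../lib/mdx'

async function main() {
  // Front matter of all published posts, sorted newest first by getAllFilesFrontMatter
  const posts = await getAllFilesFrontMatter('blog')
  if (posts.length > 0) {
    // generateRss returns the RSS 2.0 XML as a string; write it where Next.js serves static files
    fs.writeFileSync('./public/feed.xml', generateRss(posts))
  }
}

main()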

lib/mdx.ts (new file, 139 lines)

import { bundleMDX } from 'mdx-bundler'
import fs from 'fs'
import matter from 'gray-matter'
import path from 'path'
import readingTime from 'reading-time'
import getAllFilesRecursively from './utils/files'
import { PostFrontMatter } from 'types/PostFrontMatter'
import { AuthorFrontMatter } from 'types/AuthorFrontMatter'
import { Toc } from 'types/Toc'
// Remark packages
import remarkGfm from 'remark-gfm'
import remarkFootnotes from 'remark-footnotes'
import remarkMath from 'remark-math'
import remarkExtractFrontmatter from './remark-extract-frontmatter'
import remarkCodeTitles from './remark-code-title'
import remarkTocHeadings from './remark-toc-headings'
import remarkImgToJsx from './remark-img-to-jsx'
// Rehype packages
import rehypeSlug from 'rehype-slug'
import rehypeAutolinkHeadings from 'rehype-autolink-headings'
import rehypeKatex from 'rehype-katex'
import rehypeCitation from 'rehype-citation'
import rehypePrismPlus from 'rehype-prism-plus'
import rehypePresetMinify from 'rehype-preset-minify'

const root = process.cwd()

export function getFiles(type: 'blog' | 'authors') {
  const prefixPaths = path.join(root, 'data', type)
  const files = getAllFilesRecursively(prefixPaths)
  // Only want to return blog/path and ignore root; replace is needed to work on Windows
  return files.map((file) => file.slice(prefixPaths.length + 1).replace(/\\/g, '/'))
}

export function formatSlug(slug: string) {
  return slug.replace(/\.(mdx|md)/, '')
}

export function dateSortDesc(a: string, b: string) {
  if (a > b) return -1
  if (a < b) return 1
  return 0
}

export async function getFileBySlug<T>(type: 'authors' | 'blog', slug: string | string[]) {
  const mdxPath = path.join(root, 'data', type, `${slug}.mdx`)
  const mdPath = path.join(root, 'data', type, `${slug}.md`)
  const source = fs.existsSync(mdxPath)
    ? fs.readFileSync(mdxPath, 'utf8')
    : fs.readFileSync(mdPath, 'utf8')

  // https://github.com/kentcdodds/mdx-bundler#nextjs-esbuild-enoent
  if (process.platform === 'win32') {
    process.env.ESBUILD_BINARY_PATH = path.join(root, 'node_modules', 'esbuild', 'esbuild.exe')
  } else {
    process.env.ESBUILD_BINARY_PATH = path.join(root, 'node_modules', 'esbuild', 'bin', 'esbuild')
  }

  const toc: Toc = []

  const { code, frontmatter } = await bundleMDX({
    source,
    // mdx imports can be automatically sourced from the components directory
    cwd: path.join(root, 'components'),
    xdmOptions(options, frontmatter) {
      // this is the recommended way to add custom remark/rehype plugins:
      // the syntax might look weird, but it protects you in case we add/remove
      // plugins in the future.
      options.remarkPlugins = [
        ...(options.remarkPlugins ?? []),
        remarkExtractFrontmatter,
        [remarkTocHeadings, { exportRef: toc }],
        remarkGfm,
        remarkCodeTitles,
        [remarkFootnotes, { inlineNotes: true }],
        remarkMath,
        remarkImgToJsx,
      ]
      options.rehypePlugins = [
        ...(options.rehypePlugins ?? []),
        rehypeSlug,
        rehypeAutolinkHeadings,
        rehypeKatex,
        [rehypeCitation, { path: path.join(root, 'data') }],
        [rehypePrismPlus, { ignoreMissing: true }],
        rehypePresetMinify,
      ]
      return options
    },
    esbuildOptions: (options) => {
      options.loader = {
        ...options.loader,
        '.js': 'jsx',
      }
      return options
    },
  })

  return {
    mdxSource: code,
    toc,
    frontMatter: {
      readingTime: readingTime(code),
      slug: slug || null,
      fileName: fs.existsSync(mdxPath) ? `${slug}.mdx` : `${slug}.md`,
      ...frontmatter,
      date: frontmatter.date ? new Date(frontmatter.date).toISOString() : null,
    },
  }
}

export async function getAllFilesFrontMatter(folder: 'blog') {
  const prefixPaths = path.join(root, 'data', folder)

  const files = getAllFilesRecursively(prefixPaths)

  const allFrontMatter: PostFrontMatter[] = []

  files.forEach((file: string) => {
    // Replace is needed to work on Windows
    const fileName = file.slice(prefixPaths.length + 1).replace(/\\/g, '/')
    // Skip unexpected files
    if (path.extname(fileName) !== '.md' && path.extname(fileName) !== '.mdx') {
      return
    }
    const source = fs.readFileSync(file, 'utf8')
    const matterFile = matter(source)
    const frontmatter = matterFile.data as AuthorFrontMatter | PostFrontMatter
    if ('draft' in frontmatter && frontmatter.draft !== true) {
      allFrontMatter.push({
        ...frontmatter,
        slug: formatSlug(fileName),
        date: frontmatter.date ? new Date(frontmatter.date).toISOString() : null,
      })
    }
  })

  return allFrontMatter.sort((a, b) => dateSortDesc(a.date, b.date))
}
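
As a hedged sketch of how these helpers are typically consumed, a blog page's getStaticProps might look roughly like the following. The page path, the '@/lib/mdx' alias, and the prop names are assumptions for illustration; the actual page components ship elsewhere in this commit.

// pages/blog/[...slug].tsx (illustrative sketch only)
import { GetStaticProps } from 'next'
import { getAllFilesFrontMatter, getFileBySlug } from '@/lib/mdx'
import { PostFrontMatter } from 'types/PostFrontMatter'

export const getStaticProps: GetStaticProps = async ({ params }) => {
  const slug = (params?.slug as string[]).join('/')
  // Bundles the MDX file at build time and returns { mdxSource, toc, frontMatter }
  const post = await getFileBySlug<PostFrontMatter>('blog', slug)
  // Front matter for every non-draft post, used for listings and prev/next links
  const allPosts = await getAllFilesFrontMatter('blog')
  return { props: { post, allPosts } }
}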

lib/remark-code-title.ts (new file, 33 lines)

import { Parent } from 'unist'
import { visit } from 'unist-util-visit'

export default function remarkCodeTitles() {
  return (tree: Parent & { lang?: string }) =>
    visit(tree, 'code', (node: Parent & { lang?: string }, index, parent: Parent) => {
      const nodeLang = node.lang || ''
      let language = ''
      let title = ''

      if (nodeLang.includes(':')) {
        language = nodeLang.slice(0, nodeLang.search(':'))
        title = nodeLang.slice(nodeLang.search(':') + 1, nodeLang.length)
      }

      if (!title) {
        return
      }

      const className = 'remark-code-title'

      const titleNode = {
        type: 'mdxJsxFlowElement',
        name: 'div',
        attributes: [{ type: 'mdxJsxAttribute', name: 'className', value: className }],
        children: [{ type: 'text', value: title }],
        data: { _xdmExplicitJsx: true },
      }

      parent.children.splice(index, 0, titleNode)
      node.lang = language
    })
}
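
A quick way to see the transform in isolation: parse a fenced block whose info string carries a `:title` suffix and run the plugin over the tree. This harness is illustrative only and assumes unified and remark-parse are installed.

// Illustrative harness, not part of the commit
import { unified } from 'unified'
import remarkParse from 'remark-parse'
import remarkCodeTitles from './remark-code-title'

const doc = ['```js:greet.js', "console.log('hi')", '```'].join('\n')

// Parse the markdown, then let the plugin split the `js:greet.js` info string
const tree = unified().use(remarkParse).parse(doc)
unified().use(remarkCodeTitles).runSync(tree)

// The code node keeps lang 'js' and is now preceded by a div.remark-code-title node holding 'greet.js'
console.log(tree.children.map((n) => n.type)) // ['mdxJsxFlowElement', 'code']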

lib/remark-extract-frontmatter.ts (new file, 13 lines)

import { Parent } from 'unist'
import { VFile } from 'vfile'
import { visit } from 'unist-util-visit'
import { load } from 'js-yaml'

export default function extractFrontmatter() {
  return (tree: Parent, file: VFile) => {
    visit(tree, 'yaml', (node: Parent) => {
      //@ts-ignore
      file.data.frontmatter = load(node.value)
    })
  }
}

lib/remark-img-to-jsx.ts (new file, 44 lines)

import { Parent, Node, Literal } from 'unist'
import { visit } from 'unist-util-visit'
import sizeOf from 'image-size'
import fs from 'fs'

type ImageNode = Parent & {
  url: string
  alt: string
  name: string
  attributes: (Literal & { name: string })[]
}

export default function remarkImgToJsx() {
  return (tree: Node) => {
    visit(
      tree,
      // only visit p tags that contain an img element
      (node: Parent): node is Parent =>
        node.type === 'paragraph' && node.children.some((n) => n.type === 'image'),
      (node: Parent) => {
        const imageNode = node.children.find((n) => n.type === 'image') as ImageNode

        // only local files
        if (fs.existsSync(`${process.cwd()}/public${imageNode.url}`)) {
          const dimensions = sizeOf(`${process.cwd()}/public${imageNode.url}`)

          // Convert original node to next/image
          ;(imageNode.type = 'mdxJsxFlowElement'),
            (imageNode.name = 'Image'),
            (imageNode.attributes = [
              { type: 'mdxJsxAttribute', name: 'alt', value: imageNode.alt },
              { type: 'mdxJsxAttribute', name: 'src', value: imageNode.url },
              { type: 'mdxJsxAttribute', name: 'width', value: dimensions.width },
              { type: 'mdxJsxAttribute', name: 'height', value: dimensions.height },
            ])

          // Change node type from p to div to avoid nesting error
          node.type = 'div'
          node.children = [imageNode]
        }
      }
    )
  }
}

lib/remark-toc-headings.ts (new file, 17 lines)

//@ts-nocheck
import { Parent } from 'unist'
import { visit } from 'unist-util-visit'
import { slug } from 'github-slugger'
import { toString } from 'mdast-util-to-string'

export default function remarkTocHeadings(options) {
  return (tree: Parent) =>
    visit(tree, 'heading', (node) => {
      const textContent = toString(node)
      options.exportRef.push({
        value: textContent,
        url: '#' + slug(textContent),
        depth: node.depth,
      })
    })
}
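
The plugin mutates the array passed as exportRef, which is how lib/mdx.ts collects the table of contents via [remarkTocHeadings, { exportRef: toc }]. A hedged sketch of the resulting shape for a post with two headings (the heading texts are made up):

// Illustrative only: a post containing "## Introduction" and "### Getting started"
// leaves the exportRef array looking like this after bundleMDX runs
const toc = [
  { value: 'Introduction', url: '#introduction', depth: 2 },
  { value: 'Getting started', url: '#getting-started', depth: 3 },
]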

lib/tags.ts (new file, 32 lines)

import { PostFrontMatter } from 'types/PostFrontMatter'
import fs from 'fs'
import matter from 'gray-matter'
import path from 'path'
import { getFiles } from './mdx'
import kebabCase from './utils/kebabCase'

const root = process.cwd()

export async function getAllTags(type: 'blog' | 'authors') {
  const files = getFiles(type)

  const tagCount: Record<string, number> = {}
  // Iterate through each post, putting all found tags into `tagCount`
  files.forEach((file) => {
    const source = fs.readFileSync(path.join(root, 'data', type, file), 'utf8')
    const matterFile = matter(source)
    const data = matterFile.data as PostFrontMatter
    if (data.tags && data.draft !== true) {
      data.tags.forEach((tag) => {
        const formattedTag = kebabCase(tag)
        if (formattedTag in tagCount) {
          tagCount[formattedTag] += 1
        } else {
          tagCount[formattedTag] = 1
        }
      })
    }
  })

  return tagCount
}
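
A hedged usage sketch: from getStaticProps on a tags page, the counts might be consumed like this. The '@/lib/tags' alias and the example counts are illustrative, not taken from the commit.

// Illustrative only
import { getAllTags } from '@/lib/tags'

export async function getStaticProps() {
  // e.g. { 'next-js': 4, 'tailwind': 2, 'guide': 1 } for a data/blog folder tagged that way
  const tags = await getAllTags('blog')
  return { props: { tags } }
}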

lib/utils/files.ts (new file, 23 lines)

import fs from 'fs'
import path from 'path'

const pipe =
  (...fns) =>
  (x) =>
    fns.reduce((v, f) => f(v), x)

const flattenArray = (input) =>
  input.reduce((acc, item) => [...acc, ...(Array.isArray(item) ? item : [item])], [])

const map = (fn) => (input) => input.map(fn)

const walkDir = (fullPath: string) => {
  return fs.statSync(fullPath).isFile() ? fullPath : getAllFilesRecursively(fullPath)
}

const pathJoinPrefix = (prefix: string) => (extraPath: string) => path.join(prefix, extraPath)

const getAllFilesRecursively = (folder: string): string[] =>
  pipe(fs.readdirSync, map(pipe(pathJoinPrefix(folder), walkDir)), flattenArray)(folder)

export default getAllFilesRecursively
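
Reading the point-free pipeline left to right: list the folder's entries, join each onto the folder path, let walkDir recurse into directories, then flattenArray splices the nested results back into one flat list. A hedged sketch of the result for a small tree (the paths are made up):

// Illustrative only: given
//   data/blog/intro.md
//   data/blog/nested/deep-dive.mdx
// the call returns one joined path per file
import getAllFilesRecursively from './utils/files'

const files = getAllFilesRecursively('data/blog')
// -> ['data/blog/intro.md', 'data/blog/nested/deep-dive.mdx']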

lib/utils/formatDate.ts (new file, 14 lines)

import siteMetadata from '@/data/siteMetadata'

const formatDate = (date: string) => {
  const options: Intl.DateTimeFormatOptions = {
    year: 'numeric',
    month: 'long',
    day: 'numeric',
  }
  const now = new Date(date).toLocaleDateString(siteMetadata.locale, options)

  return now
}

export default formatDate
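
Output depends on siteMetadata.locale and the machine's timezone; assuming an 'en-US' locale, a quick illustrative example:

// Illustrative only; assumes siteMetadata.locale is 'en-US'
import formatDate from './utils/formatDate'

console.log(formatDate('2021-08-07T12:00:00Z')) // 'August 7, 2021'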

lib/utils/htmlEscaper.ts (new file, 22 lines)

const { replace } = ''

// escape
const ca = /[&<>'"]/g

const esca = {
  '&': '&amp;',
  '<': '&lt;',
  '>': '&gt;',
  "'": '&#39;',
  '"': '&quot;',
}
const pe = (m: keyof typeof esca) => esca[m]

/**
 * Safely escape HTML entities such as `&`, `<`, `>`, `"`, and `'`.
 * @param {string} es the input to safely escape
 * @returns {string} the escaped input, and it **throws** an error if
 * the input type is unexpected, except for boolean and numbers,
 * converted as string.
 */
export const escape = (es: string): string => replace.call(es, ca, pe)
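
Each character matched by `ca` is swapped for its entity string via the `esca` lookup, which keeps the titles and descriptions interpolated in lib/generate-rss.ts well-formed XML. A small illustrative example:

// Illustrative only
import { escape } from './utils/htmlEscaper'

// Ampersands, angle brackets and quotes become entities; everything else passes through
console.log(escape('Tips & tricks for <MDX> "posts"'))
// -> 'Tips &amp; tricks for &lt;MDX&gt; &quot;posts&quot;'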

lib/utils/kebabCase.ts (new file, 5 lines)

import { slug } from 'github-slugger'

const kebabCase = (str: string) => slug(str)

export default kebabCase
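
Despite the name, this is a thin wrapper over github-slugger's slug function, so tag URLs are normalised the same way heading anchors are. A brief illustrative example:

// Illustrative only
import kebabCase from './utils/kebabCase'

console.log(kebabCase('Next JS')) // 'next-js'
console.log(kebabCase('Tailwind CSS')) // 'tailwind-css'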