chore: deduplicate permalinks; fix sitemap; organize

This commit is contained in:
Cory Dransfeldt 2024-07-15 22:52:57 -07:00
parent b40986256a
commit ae1cb3dc5e
No known key found for this signature in database
64 changed files with 94 additions and 31 deletions

45
src/data/albumReleases.js Normal file
View file

@ -0,0 +1,45 @@
import { createClient } from '@supabase/supabase-js'
import { DateTime } from 'luxon'
import { sanitizeMediaString, parseCountryField } from '../../config/utilities/index.js'
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
// Fetches upcoming album releases (release_date in the future) from Supabase
// and maps them into calendar-ready entries, sorted by release date ascending.
const fetchAlbumReleases = async () => {
  const today = DateTime.utc().toISO()
  const { data, error } = await supabase
    .from('albums')
    .select(`
      name,
      key,
      release_date,
      release_link,
      total_plays,
      art(filename_disk),
      artists(name_string, mbid, country)
    `)
    .gt('release_date', today)

  if (error) {
    console.error('Error fetching data:', error)
    return
  }

  // Keep only albums that haven't been played yet. The original condition,
  // `!album['total_plays'] || !album['total_plays'] > 0`, relied on `!x`
  // binding tighter than `>`; `!(x > 0)` states the intent directly and
  // behaves identically.
  return data
    .filter(album => !(album['total_plays'] > 0))
    .map(album => ({
      artist: album['artists']['name_string'],
      title: album['name'],
      date: DateTime.fromISO(album['release_date']).toLocaleString(DateTime.DATE_FULL),
      url: album['release_link'],
      // A template literal is always truthy, so the original `` `...` || '' ``
      // fallback was dead code and a missing art row rendered as "/undefined".
      image: album?.['art']?.['filename_disk'] ? `/${album['art']['filename_disk']}` : '',
      artist_url: `/music/artists/${sanitizeMediaString(album['artists']['name_string'])}-${sanitizeMediaString(parseCountryField(album['artists']['country']))}`,
      mbid: album['artists']['mbid'],
      timestamp: DateTime.fromISO(album['release_date']).toSeconds(),
      type: 'album-release'
    }))
    .sort((a, b) => a['timestamp'] - b['timestamp'])
}

// Eleventy data entry point: resolves to the upcoming-release list.
export default async function () {
  return await fetchAlbumReleases()
}

18
src/data/analytics.js Normal file
View file

@ -0,0 +1,18 @@
import EleventyFetch from '@11ty/eleventy-fetch'
// Eleventy data entry point: Plausible page-breakdown stats for the last six
// months, filtered down to post pages. Returns [] on fetch failure.
export default async function () {
  const API_KEY_PLAUSIBLE = process.env.API_KEY_PLAUSIBLE
  const url =
    'https://plausible.io/api/v1/stats/breakdown?site_id=coryd.dev&period=6mo&property=event:page&limit=30'
  try {
    // The original chained a no-argument `.catch()`, which handles nothing;
    // await inside try/catch so failures are actually logged.
    const pages = await EleventyFetch(url, {
      duration: '1h',
      type: 'json',
      fetchOptions: {
        headers: {
          Authorization: `Bearer ${API_KEY_PLAUSIBLE}`,
        },
      },
    })
    return pages['results'].filter((p) => p['page'].includes('posts'))
  } catch (error) {
    console.error('Error fetching analytics from Plausible:', error)
    return []
  }
}

79
src/data/artists.js Normal file
View file

@ -0,0 +1,79 @@
import { createClient } from '@supabase/supabase-js'
import { sanitizeMediaString, parseCountryField } from '../../config/utilities/index.js'
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const PAGE_SIZE = 50
// Pull every row of `table` (columns per `selectFields`) in PAGE_SIZE chunks,
// ordered by id so pagination is stable across requests.
const fetchPaginatedData = async (table, selectFields) => {
  const rows = []
  for (let page = 0; ; page++) {
    const { data: pageData, error } = await supabase
      .from(table)
      .select(selectFields)
      .order('id', { ascending: true })
      .range(page * PAGE_SIZE, (page + 1) * PAGE_SIZE - 1)

    if (error) {
      console.error(`Error fetching ${table}:`, error)
      break
    }

    rows.push(...pageData)
    // A short page means we've reached the end of the table.
    if (pageData.length < PAGE_SIZE) break
  }
  return rows
}
// Build an { id: name } lookup table for all genres; {} on error.
const fetchGenreMapping = async () => {
  const { data, error } = await supabase
    .from('genres')
    .select('id, name')
  if (error) {
    console.error('Error fetching genres:', error)
    return {}
  }
  const mapping = {}
  for (const genre of data) {
    mapping[genre['id']] = genre['name']
  }
  return mapping
}
// Eleventy data entry point: all artists enriched with their albums, genre
// name, parsed country, artwork path, and canonical URL.
export default async function () {
  const genreMapping = await fetchGenreMapping()
  const artists = await fetchPaginatedData('artists', 'id, mbid, name_string, art(filename_disk), total_plays, country, description, favorite, tattoo, genres')
  // `id` was missing from the original select, which left every grouped
  // album's `id` field undefined below.
  const albums = await fetchPaginatedData('albums', 'id, mbid, name, release_year, total_plays, artist')

  // Group albums by their artist foreign key for O(1) lookup per artist.
  const albumsByArtist = albums.reduce((acc, album) => {
    if (!acc[album['artist']]) acc[album['artist']] = []
    acc[album['artist']].push({
      id: album['id'],
      name: album['name'],
      release_year: album['release_year'],
      total_plays: album['total_plays'] > 0 ? album['total_plays'] : '-'
    })
    return acc
  }, {})

  for (const artist of artists) {
    artist['albums'] = albumsByArtist[artist['id']]?.sort((a, b) => a['release_year'] - b['release_year']) || []
    // Guard against a missing art relation instead of crashing on null.
    artist['image'] = artist['art']?.['filename_disk'] ? `/${artist['art']['filename_disk']}` : ''
    artist['country'] = parseCountryField(artist['country'])
    artist['genres'] = genreMapping[artist['genres']] || ''
    artist['url'] = `/music/artists/${sanitizeMediaString(artist['name_string'])}-${sanitizeMediaString(artist['country'])}`
  }
  return artists
}

30
src/data/badges.js Normal file
View file

@ -0,0 +1,30 @@
import { createClient } from '@supabase/supabase-js'
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
// Fetch every badge row along with its joined image filename.
const fetchAllBadges = async () => {
  const { data, error } = await supabase
    .from('badges')
    .select(`
      *,
      image(filename_disk)
    `)
  if (error) {
    console.error('Error fetching badge data:', error)
    return null
  }
  // Flatten the joined image relation down to its filename.
  return data.map(badge => ({
    ...badge,
    image: badge['image']['filename_disk'],
  }))
}

// Eleventy data entry point.
export default async function () {
  return await fetchAllBadges()
}

24
src/data/blogroll.js Normal file
View file

@ -0,0 +1,24 @@
import { createClient } from '@supabase/supabase-js'
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
// Fetch all authors flagged for the blogroll, sorted case-insensitively by
// name; [] on error.
const fetchBlogroll = async () => {
  const { data, error } = await supabase
    .from('authors')
    .select('*')
    .eq('blogroll', true)
    .order('name', { ascending: true })
  if (error) {
    // Fixed the garbled original message ("authors with for the blogroll").
    console.error('Error fetching authors for the blogroll:', error)
    return []
  }
  // Re-sort in JS: the database ordering above is case-sensitive, this one
  // is not.
  return data.sort((a, b) => a['name'].toLowerCase().localeCompare(b['name'].toLowerCase()))
}

// Eleventy data entry point.
export default async function () {
  return await fetchBlogroll()
}

87
src/data/books.js Normal file
View file

@ -0,0 +1,87 @@
import { createClient } from '@supabase/supabase-js'
const { SUPABASE_URL, SUPABASE_KEY } = process.env
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const PAGE_SIZE = 1000
// Return the tag names attached to one book via the books_tags join table.
const fetchTagsForBook = async (bookId) => {
  const { data, error } = await supabase
    .from('books_tags')
    .select('tags(id, name)')
    .eq('books_id', bookId)
  if (error) {
    console.error(`Error fetching tags for book ${bookId}:`, error)
    return []
  }
  return data.map(({ tags }) => tags['name'])
}
// Fetch every book (paged), attach tags, and map rows into the shape the
// templates consume.
async function fetchAllBooks() {
  let books = []
  let from = 0
  while (true) {
    const { data, error } = await supabase
      .from('books')
      .select(`*, art(filename_disk)`)
      .range(from, from + PAGE_SIZE - 1)
    if (error) {
      console.error('Error fetching data from Supabase:', error)
      break
    }
    // Sequential N+1 tag fetches; keeps request volume low per page.
    for (const book of data) {
      book['tags'] = await fetchTagsForBook(book['id'])
    }
    books = books.concat(data)
    if (data.length < PAGE_SIZE) break
    from += PAGE_SIZE
  }
  return books.map(book => {
    const dateFinished = new Date(book['date_finished'])
    const year = dateFinished.getUTCFullYear()
    return {
      title: book['title'],
      author: book['author'] || '',
      review: book['review'],
      rating: book['star_rating'] !== 'unrated' ? book['star_rating'] : '',
      favorite: book['favorite'],
      description: book['description'],
      // The original interpolated unconditionally, yielding the string
      // "/undefined" when the art relation was missing.
      image: book?.['art']?.['filename_disk'] ? `/${book['art']['filename_disk']}` : '',
      url: `/books/${book['isbn']}`,
      date: book['date_finished'],
      status: book['read_status'],
      progress: book['progress'],
      tags: book['tags'],
      isbn: book['isbn'],
      type: 'book',
      year,
    }
  })
}
// Group books into { value: year, data: [...] } buckets, keeping only years
// after 2019.
const sortBooksByYear = (books) => {
  const byYear = {}
  for (const book of books) {
    const year = book['year']
    if (byYear[year]) {
      byYear[year]['data'].push(book)
    } else {
      byYear[year] = { value: year, data: [book] }
    }
  }
  return Object.values(byYear).filter(bucket => bucket.value > 2019)
}
// Eleventy data entry point: all books plus per-year groupings.
export default async function () {
  const all = await fetchAllBooks()
  return { all, years: sortBooksByYear(all) }
}

46
src/data/genres.js Normal file
View file

@ -0,0 +1,46 @@
import { createClient } from '@supabase/supabase-js'
import slugify from 'slugify'
import { parseCountryField } from '../../config/utilities/index.js'
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
// Fetch all genres with their nested artists; parses each artist's country
// and attaches a slugified genre URL. Returns [] on error.
const fetchGenresWithArtists = async () => {
  const { data, error } = await supabase
    .from('genres')
    .select(`
      name,
      description,
      total_plays,
      wiki_link,
      artists (
        mbid,
        name_string,
        total_plays,
        country,
        description,
        favorite
      )
    `)
    .order('id', { ascending: true })
  if (error) {
    console.error('Error fetching genres with artists:', error)
    return []
  }
  for (const genre of data) {
    genre['artists'] = genre['artists'].map(artist => ({
      ...artist,
      country: parseCountryField(artist['country'])
    }))
    genre['url'] = `/music/genres/${slugify(genre['name'].toLowerCase())}`
  }
  return data
}

// Eleventy data entry point.
export default async function () {
  return await fetchGenresWithArtists()
}

48
src/data/globals.js Normal file
View file

@ -0,0 +1,48 @@
import { createClient } from '@supabase/supabase-js'
const SUPABASE_URL = process.env['SUPABASE_URL']
const SUPABASE_KEY = process.env['SUPABASE_KEY']
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
// Fetch the site-wide globals record and flatten each file relation down to
// its filename. Returns {} on error or when no rows exist.
const fetchGlobals = async () => {
  const { data, error } = await supabase
    .from('globals')
    .select(`
      *,
      favicon_ico(filename_disk),
      favicon_svg(filename_disk),
      opengraph_default(filename_disk),
      feed_image(filename_disk),
      apple_touch_icon(filename_disk),
      about(filename_disk),
      logo_the_claw(filename_disk)
    `)
  if (error) {
    console.error('Error fetching globals:', error)
    return {}
  }

  // Take the last row, as the original `data.pop()` did — but guard against
  // an empty result set, which previously yielded undefined and crashed the
  // key-processing loop below. `.at(-1)` also avoids mutating `data`.
  const globalData = data.at(-1)
  if (!globalData) {
    console.error('Error fetching globals: no rows returned')
    return {}
  }

  // File relations to flatten from { filename_disk } to a plain string.
  const keysToProcess = [
    'favicon_ico',
    'favicon_svg',
    'opengraph_default',
    'feed_image',
    'apple_touch_icon',
    'about',
    'logo_the_claw'
  ]
  keysToProcess.forEach(key => {
    if (globalData[key] && globalData[key].filename_disk) {
      globalData[key] = globalData[key].filename_disk
    }
  })
  return globalData
}

// Eleventy data entry point.
export default async function () {
  return await fetchGlobals()
}

56
src/data/links.js Normal file
View file

@ -0,0 +1,56 @@
import { createClient } from '@supabase/supabase-js'
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const PAGE_SIZE = 50
// Return the tag names attached to one link via the links_tags join table.
const fetchTagsForLink = async (linkId) => {
  const { data, error } = await supabase
    .from('links_tags')
    .select('tags(id, name)')
    .eq('links_id', linkId)
  if (error) {
    console.error(`Error fetching tags for link ${linkId}:`, error)
    return []
  }
  return data.map(({ tags }) => tags.name)
}
// Fetch all shared links (paged, newest first) with author info and tags.
const fetchAllLinks = async () => {
  const links = []
  let page = 0
  let hasMore = true
  while (hasMore) {
    const { data, error } = await supabase
      .from('links')
      .select('*, authors (name, url, mastodon)')
      .order('date', { ascending: false })
      .range(page * PAGE_SIZE, (page + 1) * PAGE_SIZE - 1)
    if (error) {
      console.error('Error fetching links:', error)
      return links
    }
    // A short page means this is the final batch.
    if (data.length < PAGE_SIZE) hasMore = false
    for (const link of data) {
      link['tags'] = await fetchTagsForLink(link.id)
      link['type'] = 'link'
      links.push(link)
    }
    page++
  }
  return links
}

// Eleventy data entry point.
export default async function () {
  return await fetchAllLinks()
}

103
src/data/movies.js Normal file
View file

@ -0,0 +1,103 @@
import { createClient } from '@supabase/supabase-js'
import { DateTime } from 'luxon'
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const PAGE_SIZE = 1000
// Return the tag names attached to one movie via the movies_tags join table.
const fetchTagsForMovie = async (movieId) => {
  const { data, error } = await supabase
    .from('movies_tags')
    .select('tags(id, name)')
    .eq('movies_id', movieId)
  if (error) {
    console.error(`Error fetching tags for movie ${movieId}:`, error)
    return []
  }
  return data.map(({ tags }) => tags.name)
}
// Fetch every movie (paged, most recently watched first) and attach tags.
const fetchAllMovies = async () => {
  const movies = []
  for (let rangeStart = 0; ; rangeStart += PAGE_SIZE) {
    const { data, error } = await supabase
      .from('movies')
      .select(`
        id,
        tmdb_id,
        last_watched,
        title,
        year,
        collected,
        plays,
        favorite,
        star_rating,
        description,
        review,
        art(filename_disk),
        backdrop(filename_disk)
      `)
      .order('last_watched', { ascending: false })
      .range(rangeStart, rangeStart + PAGE_SIZE - 1)
    if (error) {
      console.error(error)
      break
    }
    for (const movie of data) {
      movie.tags = await fetchTagsForMovie(movie.id)
      movies.push(movie)
    }
    if (data.length < PAGE_SIZE) break
  }
  return movies
}
// Eleventy data entry point: movie watch history, recents (last 3 years),
// favorites, and collection.
export default async function () {
  const year = DateTime.now().year
  const movies = await fetchAllMovies()
  // Map raw rows to the template shape. `watched` keeps rows with a
  // last_watched value; pass false to get unwatched rows instead.
  const formatMovieData = (movies, watched = true) => movies.map((item) => {
    const movie = {
      title: item['title'],
      lastWatched: item['last_watched'],
      dateAdded: item['last_watched'],
      year: item['year'],
      url: `/watching/movies/${item['tmdb_id']}`,
      // NOTE: the original literal declared `description` twice — a
      // "Watched at" string that was silently overwritten by the
      // `item['description']` entry below. The dead first key is removed.
      // Guard missing art/backdrop rows, which previously rendered as
      // "/undefined".
      image: item?.['art']?.['filename_disk'] ? `/${item['art']['filename_disk']}` : '',
      backdrop: item?.['backdrop']?.['filename_disk'] ? `/${item['backdrop']['filename_disk']}` : '',
      plays: item['plays'],
      collected: item['collected'],
      favorite: item['favorite'],
      rating: item['star_rating'],
      description: item['description'],
      review: item['review'],
      id: item['tmdb_id'],
      type: 'movie',
      tags: item['tags']
    }
    return movie
  }).filter(movie => watched ? movie['lastWatched'] : !movie['lastWatched'])
  const favoriteMovies = movies.filter(movie => movie['favorite'])
  const collectedMovies = movies.filter(movie => movie['collected'])
  // "Recent" = watched within the last three calendar years, newest first.
  const recentlyWatchedMovies = movies.filter(movie => movie['last_watched'] && year - DateTime.fromISO(movie['last_watched']).year <= 3).sort((a, b) => new Date(b['last_watched']) - new Date(a['last_watched']))
  return {
    movies: [...formatMovieData(movies), ...formatMovieData(movies, false)],
    watchHistory: formatMovieData(movies),
    recentlyWatched: formatMovieData(recentlyWatchedMovies),
    favorites: formatMovieData(favoriteMovies).sort((a, b) => a['title'].localeCompare(b['title'])),
    collection: formatMovieData(collectedMovies),
  }
}

151
src/data/music.js Normal file
View file

@ -0,0 +1,151 @@
import { createClient } from '@supabase/supabase-js'
import { DateTime } from 'luxon'
import { sanitizeMediaString, parseCountryField } from '../../config/utilities/index.js'
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
// Fetch all rows of `table` (columns per `fields`) listened to at or after
// `startPeriod` (a luxon DateTime), paging newest-first.
const fetchDataForPeriod = async (startPeriod, fields, table) => {
  const PAGE_SIZE = 1000
  const rows = []
  for (let rangeStart = 0; ; rangeStart += PAGE_SIZE) {
    const { data, error } = await supabase
      .from(table)
      .select(fields)
      .order('listened_at', { ascending: false })
      .gte('listened_at', startPeriod.toSeconds())
      .range(rangeStart, rangeStart + PAGE_SIZE - 1)
    if (error) {
      console.error(error)
      break
    }
    rows.push(...data)
    if (data.length < PAGE_SIZE) break
  }
  return rows
}
// Build an { id: name } lookup for all genres; {} on error.
const fetchGenreMapping = async () => {
  const { data, error } = await supabase
    .from('genres')
    .select('id, name')
  if (error) {
    console.error('Error fetching genres:', error)
    return {}
  }
  return Object.fromEntries(data.map(({ id, name }) => [id, name]))
}
// Aggregate play counts by `groupByField`, producing ranked chart entries.
// `groupByType` is 'artists', 'albums', or 'track' and selects which nested
// relation supplies the mbid/artwork.
const aggregateData = async (data, groupByField, groupByType) => {
  const aggregation = {}
  const genreMapping = await fetchGenreMapping()
  data.forEach(item => {
    const key = item[groupByField]
    if (!aggregation[key]) {
      // A template literal is always truthy, so the original `` `...` || '' ``
      // image fallbacks were dead code and missing artwork rendered as
      // "/undefined"; the conditionals below restore the intended ''.
      if (groupByType === 'track') {
        aggregation[key] = {
          title: item[groupByField],
          plays: 0,
          mbid: item['albums']['mbid'],
          url: `/music/artists/${sanitizeMediaString(item['artist_name'])}-${sanitizeMediaString(parseCountryField(item['artists']['country']))}`,
          image: item['albums']?.['art']?.['filename_disk'] ? `/${item['albums']['art']['filename_disk']}` : '',
          timestamp: item['listened_at'],
          type: groupByType,
          genre: genreMapping[item['artists']['genres']] || ''
        }
      } else {
        aggregation[key] = {
          title: item[groupByField],
          plays: 0,
          mbid: item[groupByType]?.['mbid'] || '',
          url: `/music/artists/${sanitizeMediaString(item['artist_name'])}-${sanitizeMediaString(parseCountryField(item['artists']['country']))}`,
          image: item[groupByType]?.['art']?.['filename_disk'] ? `/${item[groupByType]['art']['filename_disk']}` : '',
          type: groupByType,
          genre: genreMapping[item['artists']['genres']] || ''
        }
      }
      // Tracks and albums also carry the performing artist's name.
      if (groupByType === 'track' || groupByType === 'albums') aggregation[key]['artist'] = item['artist_name']
    }
    aggregation[key].plays++
  })
  // Rank by play count, descending.
  const aggregatedData = Object.values(aggregation).sort((a, b) => b.plays - a.plays)
  aggregatedData.forEach((item, index) => {
    item.rank = index + 1
  })
  return aggregatedData.filter(item => item.plays > 0)
}
// Map raw listen rows to the recent-listens shape used by the templates.
const buildRecents = async (data) => {
  return data.map(listen => ({
    title: listen['track_name'],
    artist: listen['artist_name'],
    url: `/music/artists/${sanitizeMediaString(listen['artist_name'])}-${sanitizeMediaString(parseCountryField(listen['artists']['country']))}`,
    timestamp: listen['listened_at'],
    // The original `` `...` || '' `` fallback was dead (template literals are
    // always truthy); missing artwork now yields '' instead of "/undefined".
    image: listen['albums']?.['art']?.['filename_disk'] ? `/${listen['albums']['art']['filename_disk']}` : ''
  }))
}
// Count plays per genre name across the supplied listens, most-played first.
const aggregateGenres = async (data) => {
  const counts = {}
  const genreMapping = await fetchGenreMapping()
  for (const item of data) {
    const genre = genreMapping[item['artists']['genres']] || ''
    if (!counts[genre]) counts[genre] = { genre, plays: 0 }
    counts[genre]['plays']++
  }
  return Object.values(counts).sort((a, b) => b['plays'] - a['plays'])
}
// Eleventy data entry point: listening stats for three rolling windows
// (week / 30 days / 3 months) plus a recent-listens feed, all derived from
// the `listens` table.
export default async function() {
  const periods = {
    week: DateTime.now().minus({ days: 7 }).startOf('day'), // last week
    month: DateTime.now().minus({ days: 30 }).startOf('day'), // last 30 days
    threeMonth: DateTime.now().minus({ months: 3 }).startOf('day'), // last three months
  }
  const results = {}
  // Shared select: joins artist/album relations for artwork, genre id,
  // country, and mbid.
  const selectFields = `
    track_name,
    artist_name,
    album_name,
    album_key,
    listened_at,
    artists (mbid, art(filename_disk), genres, country),
    albums (mbid, art(filename_disk))
  `
  // One fetch + four aggregations per window; aggregations run sequentially
  // because each awaits the shared genre mapping internally.
  for (const [period, startPeriod] of Object.entries(periods)) {
    const periodData = await fetchDataForPeriod(startPeriod, selectFields, 'listens')
    results[period] = {
      artists: await aggregateData(periodData, 'artist_name', 'artists'),
      albums: await aggregateData(periodData, 'album_name', 'albums'),
      tracks: await aggregateData(periodData, 'track_name', 'track'),
      genres: await aggregateGenres(periodData),
      totalTracks: periodData?.length?.toLocaleString('en-US')
    }
  }
  // Recents: last 7 days of listens, newest first.
  const recentData = await fetchDataForPeriod(DateTime.now().minus({ days: 7 }), selectFields, 'listens')
  results['recent'] = (await buildRecents(recentData)).sort((a, b) => b.timestamp - a.timestamp)
  return results
}

50
src/data/nav.js Normal file
View file

@ -0,0 +1,50 @@
import { createClient } from '@supabase/supabase-js'
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
// Fetch navigation rows (optionally joined to a page record) and group them
// into per-location menus ordered by their `sort` value. Returns null on
// error.
const fetchAllNavigation = async () => {
  const { data, error } = await supabase
    .from('navigation')
    .select(`
      *,
      pages(title, permalink)
    `)
  if (error) {
    console.error('Error fetching navigation data:', error)
    return null
  }
  const menu = {}
  for (const item of data) {
    // Prefer the joined page's title/permalink when a page is linked.
    const source = item.pages ? item.pages : item
    const menuItem = {
      title: source.title,
      permalink: source.permalink,
      icon: item.icon,
      sort: item.sort
    }
    if (!menu[item.menu_location]) menu[item.menu_location] = []
    menu[item.menu_location].push(menuItem)
  }
  for (const location of Object.keys(menu)) {
    menu[location].sort((a, b) => a.sort - b.sort)
  }
  return menu
}

// Eleventy data entry point.
export default async function () {
  return await fetchAllNavigation()
}

83
src/data/pages.js Normal file
View file

@ -0,0 +1,83 @@
import { createClient } from '@supabase/supabase-js'
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const PAGE_SIZE = 50
// Fetch a single block record from `collection`; hero blocks additionally
// join their image filename. Returns null on error.
const fetchBlockData = async (collection, itemId) => {
  const fields = collection === 'hero' ? '*, image(filename_disk)' : '*'
  const { data, error } = await supabase
    .from(collection)
    .select(fields)
    .eq('id', itemId)
    .single()
  if (error) {
    console.error(`Error fetching data from ${collection} for item ${itemId}:`, error)
    return null
  }
  return data
}
// Resolve all content blocks attached to a page, ordered by `sort`.
const fetchBlocksForPage = async (pageId) => {
  const { data, error } = await supabase
    .from('pages_blocks')
    .select('collection, item, sort')
    .eq('pages_id', pageId)
  if (error) {
    console.error(`Error fetching blocks for page ${pageId}:`, error)
    return []
  }
  const blocks = await Promise.all(
    data.map(async ({ collection, item, sort }) => ({
      type: collection,
      sort,
      ...(await fetchBlockData(collection, item))
    }))
  )
  return blocks.sort((a, b) => a.sort - b.sort)
}
// Fetch every page (paged) with its open-graph image filename and resolved
// blocks.
const fetchAllPages = async () => {
  const pages = []
  // Renamed from `page`: the inner `for (const page of data)` loop shadowed
  // the pagination counter, which was confusing (though not incorrect).
  let pageIndex = 0
  let fetchMore = true
  while (fetchMore) {
    const { data, error } = await supabase
      .from('pages')
      .select(`
        *,
        open_graph_image(filename_disk)
      `)
      .range(pageIndex * PAGE_SIZE, (pageIndex + 1) * PAGE_SIZE - 1)
    if (error) {
      console.error('Error fetching pages:', error)
      return pages
    }
    if (data.length < PAGE_SIZE) fetchMore = false
    for (const page of data) {
      page['blocks'] = await fetchBlocksForPage(page['id'])
      // Flatten the joined image relation down to its filename.
      if (page['open_graph_image']) page['open_graph_image'] = page['open_graph_image']['filename_disk']
      pages.push(page)
    }
    pageIndex++
  }
  return pages
}

// Eleventy data entry point.
export default async function () {
  return await fetchAllPages()
}

102
src/data/posts.js Normal file
View file

@ -0,0 +1,102 @@
import { createClient } from '@supabase/supabase-js'
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const PAGE_SIZE = 50
// Fetch a single block record by id from the given collection table;
// null on error.
const fetchBlockData = async (collection, itemId) => {
  const response = await supabase
    .from(collection)
    .select('*')
    .eq('id', itemId)
    .single()
  if (response.error) {
    console.error(`Error fetching data from ${collection} for item ${itemId}:`, response.error)
    return null
  }
  return response.data
}
// Return the tag names attached to one post via the posts_tags join table.
const fetchTagsForPost = async (postId) => {
  const { data, error } = await supabase
    .from('posts_tags')
    .select('tags(id, name)')
    .eq('posts_id', postId)
  if (error) {
    console.error(`Error fetching tags for post ${postId}:`, error)
    return []
  }
  return data.map(({ tags }) => tags.name)
}
// Resolve all content blocks attached to a post, ordered by `sort`.
const fetchBlocksForPost = async (postId) => {
  const { data, error } = await supabase
    .from('posts_blocks')
    .select('collection, item, sort')
    .eq('posts_id', postId)
  if (error) {
    console.error(`Error fetching blocks for post ${postId}:`, error)
    return []
  }
  const blocks = await Promise.all(data.map(async block => {
    const blockData = await fetchBlockData(block.collection, block.item)
    return {
      type: block['collection'],
      sort: block['sort'],
      ...blockData
    }
  }))
  // Order by `sort` for consistency with the equivalent pages.js helper;
  // the original returned blocks in unspecified join-row order despite
  // selecting the sort column.
  return blocks.sort((a, b) => a['sort'] - b['sort'])
}
// Fetch every post (paged, newest first), de-duplicating by slug and
// attaching tags, blocks, and a flattened image filename.
const fetchAllPosts = async () => {
  const posts = []
  const uniqueSlugs = new Set()
  let page = 0
  let fetchMore = true
  while (fetchMore) {
    const { data, error } = await supabase
      .from('posts')
      .select(`
        *,
        image(filename_disk)
      `)
      .order('date', { ascending: false })
      .range(page * PAGE_SIZE, (page + 1) * PAGE_SIZE - 1)
    if (error) {
      console.error('Error fetching posts:', error)
      return posts
    }
    if (data.length < PAGE_SIZE) fetchMore = false
    for (const post of data) {
      // Skip duplicate permalinks (the same slug appearing twice).
      if (uniqueSlugs.has(post['slug'])) continue
      uniqueSlugs.add(post['slug'])
      post['tags'] = await fetchTagsForPost(post['id'])
      post['blocks'] = await fetchBlocksForPost(post['id'])
      if (post?.['image']?.['filename_disk']) post['image'] = post['image']['filename_disk']
      posts.push(post)
    }
    page++
  }
  return posts
}

// Eleventy data entry point.
export default async function () {
  return await fetchAllPosts()
}

34
src/data/robots.js Normal file
View file

@ -0,0 +1,34 @@
import { createClient } from '@supabase/supabase-js'
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_KEY = process.env.SUPABASE_KEY
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const PAGE_SIZE = 100
// Fetch all robot user-agent strings, paging through results PAGE_SIZE at a
// time. Returns null on error.
const fetchAllRobots = async () => {
  let robots = []
  let from = 0
  while (true) {
    const { data, error } = await supabase
      .from('robots')
      .select('user_agent')
      .range(from, from + PAGE_SIZE - 1)
    if (error) {
      console.error('Error fetching robot data:', error)
      return null
    }
    robots = robots.concat(data)
    if (data.length < PAGE_SIZE) break
    // BUG FIX: the original never advanced the range, so any result set
    // larger than PAGE_SIZE re-fetched the first page forever.
    from += PAGE_SIZE
  }
  return robots.map(robot => robot['user_agent'])
}

// Eleventy data entry point.
export default async function () {
  return await fetchAllRobots()
}

12
src/data/status.js Normal file
View file

@ -0,0 +1,12 @@
import EleventyFetch from '@11ty/eleventy-fetch'
// Eleventy data entry point: the most recent omg.lol status for `cory`.
export default async function () {
  const url = 'https://api.omg.lol/address/cory/statuses/'
  // The original chained a no-argument `.catch()`, which is a no-op; a
  // rejection still propagates, so await the fetch directly.
  const status = await EleventyFetch(url, {
    duration: '1h',
    type: 'json',
  })
  return status['response']['statuses'][0]
}

165
src/data/tv.js Normal file
View file

@ -0,0 +1,165 @@
import { createClient } from '@supabase/supabase-js'
const SUPABASE_URL = process.env['SUPABASE_URL']
const SUPABASE_KEY = process.env['SUPABASE_KEY']
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY)
const PAGE_SIZE = 1000
// Fetch every show (paged) with artwork relations and its full episode
// watch history.
const fetchAllShows = async () => {
  const shows = []
  for (let rangeStart = 0; ; rangeStart += PAGE_SIZE) {
    const { data, error } = await supabase
      .from('shows')
      .select(`
        title,
        tmdb_id,
        collected,
        favorite,
        year,
        description,
        review,
        art(filename_disk),
        backdrop(filename_disk),
        episodes (
          episode_number,
          season_number,
          last_watched_at
        )
      `)
      .range(rangeStart, rangeStart + PAGE_SIZE - 1)
    if (error) {
      console.error(error)
      break
    }
    shows.push(...data)
    if (data.length < PAGE_SIZE) break
  }
  return shows
}
// Normalize a raw show row: artwork paths (empty string when the relation is
// missing) and the show's canonical URL.
const prepareShowData = (show) => {
  const art = show['art']?.['filename_disk']
  const backdrop = show['backdrop']?.['filename_disk']
  return {
    ...show,
    image: art ? `/${art}` : '',
    backdrop: backdrop ? `/${backdrop}` : '',
    url: `/watching/shows/${show['tmdb_id']}`,
  }
}
// Flatten a show's episodes, stamping each with show-level metadata so the
// episodes can be processed independently of their parent show.
const prepareEpisodeData = (show) =>
  show['episodes'].map(episode => ({
    ...episode,
    show_title: show['title'],
    show_tmdb_id: show['tmdb_id'],
    collected: show['collected'],
    favorite: show['favorite'],
    image: show['image'],
    backdrop: show['backdrop']
  }))
// Eleventy data entry point: TV watch data. Returns all shows, the full
// per-show episode watch history, a recent slice, and favorites — with
// consecutive episodes of the same show collapsed into a range entry.
export default async function () {
  const rawShows = await fetchAllShows()
  const shows = rawShows.map(prepareShowData)
  const episodes = shows.flatMap(prepareEpisodeData)
  // Most recently watched episode first.
  episodes.sort((a, b) => new Date(b.last_watched_at) - new Date(a.last_watched_at))
  // Group a flat episode list by show, then emit either a single-episode
  // entry or an S/E range entry per show, ordered by most recent watch.
  const formatEpisodeData = (episodes) => {
    const showEpisodesMap = {}
    episodes.forEach(episode => {
      const showTitle = episode['show_title']
      const showTmdbId = episode['show_tmdb_id']
      const episodeNumber = episode['episode_number']
      const seasonNumber = episode['season_number']
      const lastWatchedAt = episode['last_watched_at']
      const collected = episode['collected']
      const favorite = episode['favorite']
      const image = episode['image']
      const backdrop = episode['backdrop']
      if (!showEpisodesMap[showTmdbId]) {
        showEpisodesMap[showTmdbId] = {
          title: showTitle,
          tmdbId: showTmdbId,
          collected,
          favorite,
          dateAdded: lastWatchedAt,
          lastWatchedAt,
          episodes: [],
          image,
          backdrop
        }
      }
      // Input is sorted newest-first, so index 0 of each show's episode
      // array is its most recently watched episode.
      showEpisodesMap[showTmdbId].episodes.push({
        name: showTitle,
        url: `/watching/shows/${showTmdbId}`,
        subtext: `S${seasonNumber}E${episodeNumber}`,
        episode: episodeNumber,
        season: seasonNumber,
        tmdbId: showTmdbId,
        type: 'tv',
        dateAdded: lastWatchedAt,
        lastWatchedAt,
        image,
        backdrop
      })
    })
    // Order shows by their most recently watched episode.
    const sortedShows = Object.values(showEpisodesMap).sort((a, b) => new Date(b.episodes[0]['lastWatchedAt']) - new Date(a.episodes[0]['lastWatchedAt']))
    const episodeData = []
    sortedShows.forEach(show => {
      // First element = newest watch; last element = oldest watch.
      const startingEpisode = show['episodes'][show['episodes'].length - 1]['episode']
      const startingSeason = show['episodes'][show['episodes'].length - 1]['season']
      const endingEpisode = show['episodes'][0]['episode']
      const endingSeason = show['episodes'][0]['season']
      if (show['episodes'].length > 1) {
        // Multiple episodes watched: collapse into one "SxEy - SxEy" range.
        episodeData.push({
          name: show['title'],
          url: `/watching/shows/${show['tmdbId']}`,
          subtext: `S${startingSeason}E${startingEpisode} - S${endingSeason}E${endingEpisode}`,
          startingEpisode,
          startingSeason,
          episode: endingEpisode,
          season: endingSeason,
          tmdbId: show['tmdbId'],
          collected: show['collected'],
          favorite: show['favorite'],
          type: 'tv-range',
          image: show['image'],
          backdrop: show['backdrop']
        })
      } else {
        // Single episode: emit it directly, carrying show-level flags.
        const singleEpisode = show['episodes'][0]
        singleEpisode.collected = show['collected']
        singleEpisode.favorite = show['favorite']
        singleEpisode.image = show['image']
        singleEpisode.backdrop = show['backdrop']
        episodeData.push(singleEpisode)
      }
    })
    return episodeData
  }
  const favoriteShows = shows.filter(show => show['favorite'])
  return {
    shows,
    watchHistory: formatEpisodeData(episodes),
    // 225 most recent episode rows (pre-grouping), not 225 shows.
    recentlyWatched: formatEpisodeData(episodes.slice(0, 225)),
    favorites: formatEpisodeData(favoriteShows.flatMap(prepareEpisodeData)).sort((a, b) => a['name'].localeCompare(b['name']))
  }
}