120 commits
b4a5a81
wip: Lexical prototype: it shows!
Soxasora Aug 30, 2025
a892a84
Merge branch 'master' into rich_text_prototype
Soxasora Sep 8, 2025
5d65fb8
wip: text rendering via lexical, toolbar, experimental barebones form…
Soxasora Sep 9, 2025
a40f1c6
lexical shiki for code highlighting, start lexical with markdown with…
Soxasora Sep 9, 2025
91ca0d0
code structure experiments
Soxasora Sep 10, 2025
c233c1b
Merge branch 'master' into rich_text
Soxasora Sep 10, 2025
2bcd7a4
refactor lexical structure, more styling, re-implement overflowing lo…
Soxasora Sep 11, 2025
098d0f6
DOMPurify for SSR and Client envs
Soxasora Sep 11, 2025
ecc1e77
experimental: store and serve server-sanitized HTML
Soxasora Sep 11, 2025
24b148a
wip: MediaOrLink plugin prototype with HTML and JSON exports
Soxasora Sep 14, 2025
c284b97
wip: Markdown Live Preview (gutter) plugin
Soxasora Sep 15, 2025
1eab10b
wip: Live Markdown, support for item.html SN-wide, delete functionali…
Soxasora Sep 15, 2025
dc0460a
edit functionality, restore an editor state from DB via formik defaul…
Soxasora Sep 17, 2025
ecf2f75
sanitize HTML during validation, lexical support for comments and dis…
Soxasora Sep 17, 2025
615c974
wip: Shiki custom styling for a more Live Markdown; formatting button…
Soxasora Sep 18, 2025
664303f
wip: file upload proof of concept
Soxasora Sep 18, 2025
546a75a
wip: mentions node
Soxasora Sep 18, 2025
6dc044b
add license to placeholder
Soxasora Sep 18, 2025
7be0288
wip: render JSON as JSX via RenderMan (experimental), mentions suppor…
Soxasora Sep 19, 2025
867ae5a
wip: tweet embeds poc
Soxasora Sep 19, 2025
14f9bb5
refactor Embed component, attach to TweetNode
Soxasora Sep 19, 2025
7257516
embeds: youtube, wavlake, spotify, rumble, peertube, nostr; generic C…
Soxasora Sep 19, 2025
a4dc3cc
correct background color for contenteditable=false by default
Soxasora Sep 19, 2025
98a6a18
support file upload progress with XHR
Soxasora Sep 19, 2025
d151b6f
parallel file upload placeholders, preserve scroll on MediaOrLink Med…
Soxasora Sep 19, 2025
fa051a9
wip: Math node via KaTeX; apply SN style to mentions
Soxasora Sep 20, 2025
7c9ce9e
fix mentions placeholder before lexical hydration, render SSR HTML be…
Soxasora Sep 20, 2025
cc4752f
MentionNode markdown transformer, fix textarea resize
Soxasora Sep 20, 2025
803cbc2
generate HTML on SSR via Lexical Headless, remove all html inputs, ad…
Soxasora Sep 21, 2025
8342cfb
agnostic JSDOM implementation, generate a DOM for lexical headless
Soxasora Sep 21, 2025
7f0b773
appropriate fake DOM usage in SSR HTML conversions, remove duplicate …
Soxasora Sep 21, 2025
6d0c46e
switch to linkedom from JSDOM, enhance SSR HTML gen
Soxasora Sep 21, 2025
fbbdd12
SSR Markdown to Lexical converter and wip migration strategy, refacto…
Soxasora Sep 22, 2025
38cd9f4
wip: 1:1 styling, ensure CSS between MD, JSON and HTML
Soxasora Sep 22, 2025
9ce01ee
Lexical support for subs and all types of items, attempt images fix, …
Soxasora Sep 23, 2025
1f305a1
wip: smart ImageNode supporting captions, ImageComponent; filter and …
Soxasora Sep 23, 2025
cab44c4
tweaks to media handling
Soxasora Sep 23, 2025
6021b47
Territory mentions support, links in a new tab, continue 1:1 styling,…
Soxasora Sep 24, 2025
8aacf7a
wip: placeholder for embeds in HTML mode
Soxasora Sep 24, 2025
d3a9761
fix lexical import conflict, dev debug tool to refresh legacy content
Soxasora Sep 25, 2025
4b477c2
markdown transformation for bare links, to embeds, media or link
Soxasora Sep 25, 2025
4e4d288
wip: Resizeable images with carousel support if not editing, experime…
Soxasora Sep 26, 2025
85e5be9
Embed load timeout with error component, touchups to media dimensions
Soxasora Sep 27, 2025
2c6dc95
Merge branch 'master' into rich_text
Soxasora Sep 27, 2025
529b4cc
markdown mode: protect from unwanted removal; preferences context, so…
Soxasora Sep 29, 2025
758109a
tweaks to formik bridge onChange save actions
Soxasora Sep 29, 2025
99e6894
cleanup: text overflow hook
Soxasora Sep 30, 2025
f4486eb
fix embed nodes requirements
Soxasora Sep 30, 2025
64cadd5
MediaNode for images and videos, support video resizing
Soxasora Sep 30, 2025
2f42adc
Link Editor for interactive mode
Soxasora Oct 3, 2025
81380e7
light link editor cleanup, fix link markdown transformations
Soxasora Oct 3, 2025
958d405
paste a link into selection;
Soxasora Oct 3, 2025
945003e
custom link toggle; transform captions into markdown and viceversa, f…
Soxasora Oct 3, 2025
c982ce3
keyboard shortcuts, universal toolbar for markdown and hybrid mode, c…
Soxasora Oct 4, 2025
4198042
tweaks to MEDIA_OR_LINK and LINK regexs, light cleanup
Soxasora Oct 4, 2025
4448b25
shortcut for upload and links, insert dropdown, fileupload plugin and…
Soxasora Oct 5, 2025
0918ce8
native alignments support, tweaks to shortcuts
Soxasora Oct 5, 2025
9f7ce14
restructure components/lexical, remove some unused code, tweaks to to…
Soxasora Oct 5, 2025
1c8ee30
Merge branch 'master' into rich_text
Soxasora Oct 6, 2025
923def0
migrate to Lexical 0.36.2, add simple skeleton
Soxasora Oct 6, 2025
3353ff3
show LinkEditor only if we're actually (still) in a link node, port E…
Soxasora Oct 6, 2025
d3a4ed3
cleanup: avoid using editor reads and updates in listeners, lexical b…
Soxasora Oct 6, 2025
a17e6f9
fix: support resetForm from Formik in formik bridge
Soxasora Oct 6, 2025
cc5ddd6
alignments, indentations, lists support
Soxasora Oct 7, 2025
5d41e17
fix: show correct type on selection; enhance: batch toolbar state upd…
Soxasora Oct 8, 2025
cf25401
extend markdown to support alignments; recognize code blocks, lists a…
Soxasora Oct 8, 2025
36345a5
refactor some of the formatting markdown<->wysiwyg options
Soxasora Oct 9, 2025
aa45884
early tables support; use proper placeholder mechanism; support markd…
Soxasora Oct 10, 2025
22f53cf
clearer shortcuts handling; DX: partially restructure lexical impleme…
Soxasora Oct 10, 2025
9d24185
fix: checklists MD transformer; fix: numbered and check lists toolbar…
Soxasora Oct 10, 2025
afe529c
add Math support via KaTeX, with keyboard shortcuts
Soxasora Oct 10, 2025
0a4d175
primitive table markdown transformer; some bug fixes
Soxasora Oct 10, 2025
86c6fe3
wip: spoiler nodes
Soxasora Oct 11, 2025
22110e7
exp: Floating lite Toolbar on selection; Toolbar UI/UX tweaks
Soxasora Oct 14, 2025
9649f39
refactor Floating Toolbar, prevent toolbar from stealing focus on mou…
Soxasora Oct 14, 2025
2758123
update Lexical to 0.37.0
Soxasora Oct 14, 2025
4dba6e0
html as loading placeholder for LexicalReader, many small fixes
Soxasora Oct 14, 2025
60c3769
fix: highlight MarkdownNode on switch; tweaks to styling; light cleanup
Soxasora Oct 15, 2025
a291f60
separate FloatingToolbar from Toolbar, tweaks to styling, fix re-rend…
Soxasora Oct 16, 2025
c174ad9
wip: local preferences [showToolbar, showFloatingToolbar, startInMark…
Soxasora Oct 16, 2025
a1026c5
wip: preferences button; fix: mode switch keyboard shortcut; light cl…
Soxasora Oct 16, 2025
99aca82
toolbar with overflow for small display sizes; fix: respect dark mode…
Soxasora Oct 16, 2025
3bf8b9d
quick workaround for overflow conflict with dropdowns
Soxasora Oct 16, 2025
27e9016
light cleanup; import re-order
Soxasora Oct 16, 2025
43e37cf
fix lint
Soxasora Oct 17, 2025
f658fac
truncate HTML for FlatComment by sanitizing and parsing it with SSR s…
Soxasora Oct 17, 2025
84ffbad
imgproxy, outlawed support via LexicalItemContext, outlawed HTML supp…
Soxasora Oct 18, 2025
98a9484
imgproxy HTML support
Soxasora Oct 18, 2025
813c82e
rewrite Formik plugin, fix: remove item from local storage if content…
Soxasora Oct 18, 2025
a117cee
refactors, safer SSR, migrate to Lexical Extensions paradigm
Soxasora Oct 18, 2025
07eccf6
fix maxLength lint issues
Soxasora Oct 19, 2025
2eb5843
Merge branch 'master' into rich_text
Soxasora Oct 19, 2025
4e14f45
fix: SSR withDOM wrapper with correct LinkeDOM names; fix: WIP embed …
Soxasora Oct 20, 2025
3cac48a
fix lexical.md structure
Soxasora Oct 20, 2025
eb31ae9
micromark extensions factory based on gfm strikethrough; custom micro…
Soxasora Oct 21, 2025
59cad1d
markdown: parse markdown with micromark, toggle markdown formatting v…
Soxasora Oct 21, 2025
673e57d
gradually switch to Transformer Bridge, detailed micromark return values
Soxasora Oct 22, 2025
846a89d
CodeShiki SN custom extension with dark mode toggling, gradually swit…
Soxasora Oct 22, 2025
c0aaf2d
CodeActions plugin for a nice overlay on code blocks hover; patch Shi…
Soxasora Oct 23, 2025
368c238
override happyDOM used in lexical 0.37.0 for CVE-2025-62410
Soxasora Oct 23, 2025
fdf9c3f
extend CodeActions plugin to editor to choose the language of a code …
Soxasora Oct 23, 2025
35ca4d6
port better_media_recognition
Soxasora Oct 23, 2025
5e36bed
MarkdownMode extension, gradual migration to Lexical Extensions
Soxasora Oct 24, 2025
cfcd800
recognize and persist media types in media nodes; proper media recogn…
Soxasora Oct 24, 2025
a80deed
wip: ensure media checks are done before submitting; exp: useImperati…
Soxasora Oct 25, 2025
0c08793
fix: enforce maxWidth of 500px for MediaNode, don't force the chosen …
Soxasora Oct 26, 2025
af800f7
media: uploaded boolean flag, to distinguish uploaded media nodes fro…
Soxasora Oct 26, 2025
aa1529d
add support for upload fees with FileUploadPlugin, revert uploaded bo…
Soxasora Oct 26, 2025
7045805
feature parity: add support for useQuoteReply
Soxasora Oct 26, 2025
ed5b963
migrate eligible plugins to Lexical Extension; log editor errors inst…
Soxasora Oct 27, 2025
4f35370
support rich mode with useQuoteReply; fix: preserve markdown inside a…
Soxasora Oct 27, 2025
e34536f
media resize enhancements; trying to fix markdown node exit bug; expe…
Soxasora Oct 27, 2025
e94a574
port better_media_recognition
Soxasora Oct 28, 2025
c82bd0a
automatically create a lexical state if only the markdown text has be…
Soxasora Oct 28, 2025
d8e53ae
prepare markdown and execute final media checks server-side, send onl…
Soxasora Oct 28, 2025
3487771
update Lexical to 0.38.1
Soxasora Oct 28, 2025
6c34594
re-establish resetForm for the formik bridge
Soxasora Oct 28, 2025
ec55f10
auto-link in rich mode, transforming in link/embed/media node; refact…
Soxasora Oct 28, 2025
7ebd21e
add support for item mentions
Soxasora Oct 28, 2025
a467fe2
lexical 0.38.1: Editor doesn't need a dynamic loader anymore; fix aut…
Soxasora Oct 29, 2025
4 changes: 4 additions & 0 deletions .env.development
@@ -116,6 +116,10 @@ NEXT_PUBLIC_EXTRA_LONG_POLL_INTERVAL_MS=300000
IMGPROXY_URL_DOCKER=http://imgproxy:8080
MEDIA_URL_DOCKER=http://s3:4566/uploads

# media check with capture container
MEDIA_CHECK_URL_DOCKER=http://capture:5678/media
NEXT_PUBLIC_MEDIA_CHECK_URL=http://localhost:5678/media

# postgres container stuff
POSTGRES_PASSWORD=password
POSTGRES_USER=sn
1 change: 1 addition & 0 deletions .env.production
@@ -7,6 +7,7 @@ NEXTAUTH_URL=https://stacker.news
NEXTAUTH_URL_INTERNAL=http://127.0.0.1:8080/api/auth
NEXT_PUBLIC_AWS_UPLOAD_BUCKET=snuploads
NEXT_PUBLIC_IMGPROXY_URL=https://imgprxy.stacker.news/
NEXT_PUBLIC_MEDIA_CHECK_URL=https://capture.stacker.news/media
NEXT_PUBLIC_MEDIA_DOMAIN=m.stacker.news
PUBLIC_URL=https://stacker.news
SELF_URL=http://127.0.0.1:8080
56 changes: 54 additions & 2 deletions api/resolvers/item.js
@@ -27,6 +27,8 @@ import { GqlAuthenticationError, GqlInputError } from '@/lib/error'
import { verifyHmac } from './wallet'
import { parse } from 'tldts'
import { shuffleArray } from '@/lib/rand'
import { $ssrLexicalHTMLGenerator } from '@/lib/lexical/utils/server/lexicalToHTML'
import { prepareLexicalState } from '@/lib/lexical/utils/server/interpolator'

function commentsOrderByClause (me, models, sort) {
const sharedSortsArray = []
@@ -112,6 +114,7 @@ export async function getItem (parent, { id }, { me, models }) {
activeOrMine(me)
)}`
}, Number(id))

return item
}

@@ -1091,6 +1094,33 @@
})

return result.lastViewedAt
},
executeConversion: async (parent, { itemId, fullRefresh }, { models }) => {
if (process.env.NODE_ENV !== 'development') {
throw new GqlInputError('only allowed in sndev')
}
console.log('executing conversion for itemId', itemId)

const alreadyScheduled = await models.$queryRaw`
SELECT 1
FROM pgboss.job
WHERE name = 'migrateLegacyContent' AND data->>'itemId' = ${itemId}::TEXT AND state <> 'completed'
`
if (alreadyScheduled.length > 0) return false

// singleton job, so that we don't run the same job multiple times
// if on concurrent requests the check above fails
await models.$executeRaw`
INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter, keepuntil, singletonKey)
VALUES ('migrateLegacyContent',
jsonb_build_object('itemId', ${itemId}::INTEGER, 'fullRefresh', ${fullRefresh}::BOOLEAN),
21,
true,
now(),
now() + interval '15 seconds',
'migrateLegacyContent:' || ${itemId}::TEXT)
`
return true
}
},
ItemAct: {
@@ -1516,12 +1546,17 @@ export const updateItem = async (parent, { sub: subName, forward, hash, hmac, ..
item.url = removeTracking(item.url)
}

// create markdown from a lexical state
const { text, lexicalState } = await prepareLexicalState({ text: item.text, lexicalState: item.lexicalState })
item.text = text
item.lexicalState = lexicalState

if (old.bio) {
// prevent editing a bio like a regular item
item = { id: Number(item.id), text: item.text, title: `@${user.name}'s bio` }
item = { id: Number(item.id), text: item.text, lexicalState: item.lexicalState, title: `@${user.name}'s bio` }
} else if (old.parentId) {
// prevent editing a comment like a post
item = { id: Number(item.id), text: item.text, boost: item.boost }
item = { id: Number(item.id), text: item.text, lexicalState: item.lexicalState, boost: item.boost }
} else {
item = { subName, ...item }
item.forwardUsers = await getForwardUsers(models, forward)
@@ -1531,6 +1566,11 @@
// never change author of item
item.userId = old.userId

// sanitize html
// if the html conversion fails, we'll use the lexicalState directly
// this might be a problem for instant content
item.html = $ssrLexicalHTMLGenerator(item.lexicalState)

const resultItem = await performPaidAction('ITEM_UPDATE', item, { models, me, lnd })

resultItem.comments = []
@@ -1545,6 +1585,13 @@ export const createItem = async (parent, { forward, ...item }, { me, models, lnd
item.userId = me ? Number(me.id) : USER_ID.anon

item.forwardUsers = await getForwardUsers(models, forward)

// create markdown from a lexical state
const { text, lexicalState } = await prepareLexicalState({ text: item.text, lexicalState: item.lexicalState })
item.text = text
item.lexicalState = lexicalState

// TODO: we could probably gather them from the lexical state
item.uploadIds = uploadIdsFromText(item.text)

if (item.url && !isJob(item)) {
@@ -1562,6 +1609,11 @@
// mark item as created with API key
item.apiKey = me?.apiKey

// sanitize html
// if the html conversion fails, we'll use the lexicalState directly
// this might be a problem for instant content
item.html = $ssrLexicalHTMLGenerator(item.lexicalState)

const resultItem = await performPaidAction('ITEM_CREATE', item, { models, me, lnd })

resultItem.comments = []
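
For context, a minimal sketch of how the dev-only executeConversion mutation above might be triggered against a local sndev instance. The /api/graphql endpoint path and the localhost:3000 origin are assumptions for illustration, not part of the diff; the return semantics follow the resolver (false when a migrateLegacyContent job is already pending for the item, true when a new job is scheduled).

// Example (not part of the diff): ask the server to schedule legacy content conversion for one item
const res = await fetch('http://localhost:3000/api/graphql', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    query: 'mutation ($itemId: ID!, $fullRefresh: Boolean!) { executeConversion(itemId: $itemId, fullRefresh: $fullRefresh) }',
    variables: { itemId: 1, fullRefresh: false }
  })
})
const { data } = await res.json()
// true if the job was scheduled, false if one was already queued for this item
console.log('scheduled:', data.executeConversion)
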
13 changes: 13 additions & 0 deletions api/resolvers/sub.js
@@ -7,6 +7,7 @@ import performPaidAction from '../paidAction'
import { GqlAuthenticationError, GqlInputError } from '@/lib/error'
import { uploadIdsFromText } from './upload'
import { Prisma } from '@prisma/client'
import { prepareLexicalState } from '@/lib/lexical/utils/server/interpolator'

export async function getSub (parent, { name }, { models, me }) {
if (!name) return null
@@ -363,6 +364,12 @@

async function createSub (parent, data, { me, models, lnd }) {
try {
// QUIRK
// if we have a lexicalState, we'll convert it to markdown to fit the schema
if (data.lexicalState) {
const { text } = prepareLexicalState({ lexicalState: data.lexicalState })
data.desc = text
}
return await performPaidAction('TERRITORY_CREATE', data, { me, models, lnd })
} catch (error) {
if (error.code === 'P2002') {
@@ -390,6 +397,12 @@ async function updateSub (parent, { oldName, ...data }, { me, models, lnd }) {
}

try {
// QUIRK
// if we have a lexicalState, we'll convert it to markdown to fit the schema
if (data.lexicalState) {
const { text } = prepareLexicalState({ lexicalState: data.lexicalState })
data.desc = text
}
return await performPaidAction('TERRITORY_UPDATE', { oldName, ...data }, { me, models, lnd })
} catch (error) {
if (error.code === 'P2002') {
8 changes: 4 additions & 4 deletions api/resolvers/user.js
@@ -756,19 +756,19 @@

return Number(photoId)
},
upsertBio: async (parent, { text }, { me, models, lnd }) => {
upsertBio: async (parent, { text, lexicalState }, { me, models, lnd }) => {
if (!me) {
throw new GqlAuthenticationError()
}

await validateSchema(bioSchema, { text })
await validateSchema(bioSchema, { text, lexicalState })

const user = await models.user.findUnique({ where: { id: me.id } })

if (user.bioId) {
return await updateItem(parent, { id: user.bioId, bio: true, text, title: `@${user.name}'s bio` }, { me, models, lnd })
return await updateItem(parent, { id: user.bioId, bio: true, text, lexicalState, title: `@${user.name}'s bio` }, { me, models, lnd })
} else {
return await createItem(parent, { bio: true, text, title: `@${user.name}'s bio` }, { me, models, lnd })
return await createItem(parent, { bio: true, text, lexicalState, title: `@${user.name}'s bio` }, { me, models, lnd })
}
},
generateApiKey: async (parent, { id }, { models, me }) => {
15 changes: 9 additions & 6 deletions api/typeDefs/item.js
@@ -46,26 +46,27 @@ export default gql`
subscribeItem(id: ID): Item
deleteItem(id: ID): Item
upsertLink(
id: ID, sub: String, title: String!, url: String!, text: String, boost: Int, forward: [ItemForwardInput],
id: ID, sub: String, title: String!, url: String!, text: String, lexicalState: String, boost: Int, forward: [ItemForwardInput],
hash: String, hmac: String): ItemPaidAction!
upsertDiscussion(
id: ID, sub: String, title: String!, text: String, boost: Int, forward: [ItemForwardInput],
id: ID, sub: String, title: String!, text: String, lexicalState: String, boost: Int, forward: [ItemForwardInput],
hash: String, hmac: String): ItemPaidAction!
upsertBounty(
id: ID, sub: String, title: String!, text: String, bounty: Int, boost: Int, forward: [ItemForwardInput],
id: ID, sub: String, title: String!, text: String, lexicalState: String, bounty: Int, boost: Int, forward: [ItemForwardInput],
hash: String, hmac: String): ItemPaidAction!
upsertJob(
id: ID, sub: String!, title: String!, company: String!, location: String, remote: Boolean,
text: String!, url: String!, boost: Int, status: String, logo: Int): ItemPaidAction!
text: String!, lexicalState: String, url: String!, boost: Int, status: String, logo: Int): ItemPaidAction!
upsertPoll(
id: ID, sub: String, title: String!, text: String, options: [String!]!, boost: Int, forward: [ItemForwardInput], pollExpiresAt: Date,
id: ID, sub: String, title: String!, text: String, lexicalState: String, options: [String!]!, boost: Int, forward: [ItemForwardInput], pollExpiresAt: Date,
randPollOptions: Boolean, hash: String, hmac: String): ItemPaidAction!
updateNoteId(id: ID!, noteId: String!): Item!
upsertComment(id: ID, text: String!, parentId: ID, boost: Int, hash: String, hmac: String): ItemPaidAction!
upsertComment(id: ID, text: String, lexicalState: String, parentId: ID, boost: Int, hash: String, hmac: String): ItemPaidAction!
act(id: ID!, sats: Int, act: String, hasSendWallet: Boolean): ItemActPaidAction!
pollVote(id: ID!): PollVotePaidAction!
toggleOutlaw(id: ID!): Item!
updateCommentsViewAt(id: ID!, meCommentsViewedAt: Date!): Date
executeConversion(itemId: ID!, fullRefresh: Boolean!): Boolean!
}

type PollVoteResult {
@@ -120,6 +121,8 @@ export default gql`
url: String
searchText: String
text: String
lexicalState: String
html: String
parentId: Int
parent: Item
root: Item
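
To illustrate the new optional lexicalState argument threaded through the item mutations above, a hedged sketch of an upsertDiscussion call. The @apollo/client import and the selection set on ItemPaidAction are assumptions based on how existing mutations are typically used in the codebase, not part of the diff.

// Example (not part of the diff): submit a discussion with both the derived markdown
// text and the JSON-serialized Lexical editor state (the server regenerates text from
// lexicalState via prepareLexicalState anyway)
import { gql } from '@apollo/client'

const UPSERT_DISCUSSION = gql`
  mutation upsertDiscussion($sub: String, $title: String!, $text: String, $lexicalState: String) {
    upsertDiscussion(sub: $sub, title: $title, text: $text, lexicalState: $lexicalState) {
      result { id }
    }
  }
`

// lexicalState is passed as a string, e.g. JSON.stringify(editor.getEditorState().toJSON())
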
6 changes: 4 additions & 2 deletions api/typeDefs/sub.js
@@ -18,7 +18,7 @@ export default gql`
}

extend type Mutation {
upsertSub(oldName: String, name: String!, desc: String, baseCost: Int!,
upsertSub(oldName: String, name: String!, desc: String, lexicalState: String, baseCost: Int!,
replyCost: Int!,
postTypes: [String!]!,
billingType: String!, billingAutoRenew: Boolean!,
@@ -27,7 +27,7 @@
toggleMuteSub(name: String!): Boolean!
toggleSubSubscription(name: String!): Boolean!
transferTerritory(subName: String!, userName: String!): Sub
unarchiveTerritory(name: String!, desc: String, baseCost: Int!,
unarchiveTerritory(name: String!, desc: String, lexicalState: String, baseCost: Int!,
replyCost: Int!, postTypes: [String!]!,
billingType: String!, billingAutoRenew: Boolean!,
moderated: Boolean!, nsfw: Boolean!): SubPaidAction!
@@ -39,6 +39,8 @@
userId: Int!
user: User!
desc: String
lexicalState: String
html: String
updatedAt: Date!
postTypes: [String!]!
allowFreebies: Boolean!
2 changes: 1 addition & 1 deletion api/typeDefs/user.js
@@ -45,7 +45,7 @@ export default gql`
setSettings(settings: SettingsInput!): User
cropPhoto(photoId: ID!, cropData: CropData): String!
setPhoto(photoId: ID!): Int!
upsertBio(text: String!): ItemPaidAction!
upsertBio(text: String!, lexicalState: String!): ItemPaidAction!
setWalkthrough(tipPopover: Boolean, upvotePopover: Boolean): Boolean
unlinkAuth(authType: String!): AuthMethods!
linkUnverifiedEmail(email: String!): Boolean
8 changes: 8 additions & 0 deletions capture/index.js
@@ -1,5 +1,7 @@
import express from 'express'
import puppeteer from 'puppeteer'
import mediaCheck from './media-check.js'
import cors from 'cors'

const captureUrl = process.env.CAPTURE_URL || 'http://host.docker.internal:3000/'
const port = process.env.PORT || 5678
@@ -55,6 +57,12 @@ app.get('/health', (req, res) => {
res.status(200).end()
})

app.get('/media/:url', cors({
origin: process.env.NEXT_PUBLIC_URL,
methods: ['GET', 'OPTIONS'],
credentials: false
}), mediaCheck)

app.get('/*', async (req, res) => {
const url = new URL(req.originalUrl, captureUrl)
const timeLabel = `${Date.now()}-${url.href}`
105 changes: 105 additions & 0 deletions capture/media-check.js
@@ -0,0 +1,105 @@
import { filetypemime } from 'magic-bytes.js'

const TIMEOUT_HEAD = 2000
const TIMEOUT_GET = 10000
const BYTE_LIMIT = 8192

export function isImageMime (mime) { return typeof mime === 'string' && mime.startsWith('image/') }

export function isVideoMime (mime) { return typeof mime === 'string' && mime.startsWith('video/') }

// adapted from lib/time.js
function timeoutSignal (timeout) {
const controller = new AbortController()

if (timeout) {
setTimeout(() => {
controller.abort(new Error(`timeout after ${timeout / 1000}s`))
}, timeout)
}

return controller.signal
}

const requiresAuth = (res) => res.status === 401 || res.status === 403

async function headMime (url, timeout = TIMEOUT_HEAD) {
const res = await fetch(url, { method: 'HEAD', signal: timeoutSignal(timeout) })
// bail on auth or forbidden
if (requiresAuth(res)) return null

return res.headers.get('content-type')
}

async function readMagicBytes (url, { timeout = TIMEOUT_GET, byteLimit = BYTE_LIMIT } = {}) {
const res = await fetch(url, {
method: 'GET',
// accept image and video, but not other types
headers: { Range: `bytes=0-${byteLimit - 1}`, Accept: 'image/*,video/*;q=0.9,*/*;q=0.8' },
signal: timeoutSignal(timeout)
})
// bail on auth or forbidden
if (requiresAuth(res)) return { bytes: null, headers: res.headers }

// stream a small chunk if possible, otherwise read buffer
if (res.body?.getReader) {
const reader = res.body.getReader()
let received = 0
const chunks = []
try {
while (received < byteLimit) {
const { done, value } = await reader.read()
if (done) break
chunks.push(value)
received += value.byteLength
}
} finally {
try { reader.releaseLock?.() } catch {}
try { res.body?.cancel?.() } catch {}
}
const buf = new Uint8Array(received)
let offset = 0
for (const c of chunks) {
buf.set(c, offset)
offset += c.byteLength
}
return { bytes: buf, headers: res.headers }
} else {
const ab = await res.arrayBuffer()
const buf = new Uint8Array(ab.slice(0, byteLimit))
return { bytes: buf, headers: res.headers }
}
}

export default async function mediaCheck (req, res) {
// express automatically decodes the values in req.params (using decodeURIComponent)
let url = req.params.url
if (typeof url !== 'string' || !/^(https?:\/\/)/.test(url)) {
return res.status(400).json({ error: 'Invalid URL' })
}

try {
// in development, the capture container can't reach the public media url,
// so we need to replace it with its docker equivalent, e.g. http://s3:4566/uploads
if (url.startsWith(process.env.NEXT_PUBLIC_MEDIA_URL) && process.env.NODE_ENV === 'development') {
url = url.replace(process.env.NEXT_PUBLIC_MEDIA_URL, process.env.MEDIA_URL_DOCKER)
}

// trying with HEAD first, as it's the cheapest option
try {
const ct = await headMime(url)
if (isImageMime(ct) || isVideoMime(ct)) {
return res.status(200).json({ mime: ct, isImage: isImageMime(ct), isVideo: isVideoMime(ct) })
}
} catch {}

// otherwise, read the first bytes
const { bytes, headers } = await readMagicBytes(url)
const mimes = bytes ? filetypemime(bytes) : null
const mime = mimes?.[0] ?? headers.get('content-type') ?? null
return res.status(200).json({ mime, isImage: isImageMime(mime), isVideo: isVideoMime(mime) })
} catch (err) {
console.log('media check error:', err)
return res.status(500).json({ mime: null, isImage: false, isVideo: false })
}
}
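
A small usage sketch for the new media check endpoint, based on the /media/:url route registered in capture/index.js and the NEXT_PUBLIC_MEDIA_CHECK_URL values added to the env files; the calling code itself is illustrative and not part of the diff.

// Example (not part of the diff): ask the capture container whether a URL points at an
// image or a video before treating it as media. In development the base URL is
// http://localhost:5678/media per .env.development.
async function checkMedia (url) {
  const base = process.env.NEXT_PUBLIC_MEDIA_CHECK_URL
  // the target URL travels as a single route param, so it must be URI-encoded;
  // express decodes req.params automatically on the other side
  const res = await fetch(`${base}/${encodeURIComponent(url)}`)
  if (!res.ok) return { mime: null, isImage: false, isVideo: false }
  return res.json() // { mime, isImage, isVideo }
}

// usage: const { isImage, isVideo } = await checkMedia('https://example.com/cat.png')
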