diff --git a/.ai-audit.sh b/.ai-audit.sh deleted file mode 100644 index 1911795f..00000000 --- a/.ai-audit.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -# AI Operations Audit Log -# Logs every git operation attempted by the AI assistant - -TIMESTAMP=$(date '+%Y-%m-%d %H:%M:%S') -BRANCH=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "unknown") -COMMAND="$*" -FILES_CHANGED=$(git status --porcelain 2>/dev/null | wc -l) - -LOG_ENTRY="[$TIMESTAMP] BRANCH=$BRANCH | COMMAND=$COMMAND | FILES_STAGED=$FILES_CHANGED" - -# Write to audit log -echo "$LOG_ENTRY" >> .ai-operations.log - -# Also print for visibility -echo "$LOG_ENTRY" diff --git a/.augment-guidelines b/.augment/.augment-guidelines similarity index 100% rename from .augment-guidelines rename to .augment/.augment-guidelines diff --git a/.github/scripts/fetch-forum-data.js b/.github/scripts/fetch-forum-data.js new file mode 100644 index 00000000..fa6f6fc8 --- /dev/null +++ b/.github/scripts/fetch-forum-data.js @@ -0,0 +1,198 @@ +const https = require("https"); +const fs = require("fs"); + +// Fetch JSON from URL +function fetchJSON(url) { + return new Promise((resolve, reject) => { + https + .get(url, (res) => { + let data = ""; + res.on("data", (chunk) => { + data += chunk; + }); + res.on("end", () => { + try { + resolve(JSON.parse(data)); + } catch (e) { + reject(e); + } + }); + }) + .on("error", reject); + }); +} + +// Check if topic is old pinned +function isOldPinned(topic) { + const pinned = topic.pinned === true || topic.pinned_globally === true; + if (!pinned) return false; + const created = new Date(topic.created_at); + const now = new Date(); + const ageDays = (now - created) / (1000 * 60 * 60 * 24); + return ageDays > 30; +} + +// Clean and format HTML +function cleanAndFormatHTML(html) { + let cleanHTML = html; + + // Remove anchor navigation links + cleanHTML = cleanHTML.replace( + /<a[^>]*name="[^"]*"[^>]*class="anchor"[^>]*>.*?<\/a>/g, + "" + ); + + // Clean up headings + cleanHTML = 
cleanHTML.replace(/<h1[^>]*>(.*?)<\/h1>/g, "<h1>$1</h1>"); + cleanHTML = cleanHTML.replace(/<h2[^>]*>(.*?)<\/h2>/g, "<h2>$1</h2>"); + cleanHTML = cleanHTML.replace(/<h3[^>]*>(.*?)<\/h3>/g, "<h3>$1</h3>"); + cleanHTML = cleanHTML.replace(/<h[4-6][^>]*>(.*?)<\/h[4-6]>/g, "<h4>$1</h4>"); + + // Clean up images and their references + cleanHTML = cleanHTML.replace(/<a[^>]*class="lightbox"[^>]*>.*?<\/a>/g, ""); + cleanHTML = cleanHTML.replace( + /<div[^>]*class="lightbox-wrapper"[^>]*>.*?<\/div>/g, + "" + ); + cleanHTML = cleanHTML.replace(/<img[^>]*>/g, ""); + cleanHTML = cleanHTML.replace(/\[!\[.*?\]\(.*?\)\]\(.*?\)/g, ""); + cleanHTML = cleanHTML.replace(/image\d+×\d+\s+[\d.]+\s*[KM]B/gi, ""); + + // Keep paragraphs, lists, emphasis, code + cleanHTML = cleanHTML.replace(/

<p[^>]*>/g, "<p>"); + cleanHTML = cleanHTML.replace(/<\/p>/g, "</p>"); + cleanHTML = cleanHTML.replace(/<ul[^>]*>/g, "<ul>"); + cleanHTML = cleanHTML.replace(/<\/ul>/g, "</ul>"); + cleanHTML = cleanHTML.replace(/<ol[^>]*>/g, "<ol>"); + cleanHTML = cleanHTML.replace(/<\/ol>/g, "</ol>"); + cleanHTML = cleanHTML.replace(/<li[^>]*>/g, "<li>"); + cleanHTML = cleanHTML.replace(/<\/li>/g, "</li>"); + cleanHTML = cleanHTML.replace( + /<strong[^>]*>(.*?)<\/strong>/g, + "<strong>$1</strong>" + ); + cleanHTML = cleanHTML.replace(/<em[^>]*>(.*?)<\/em>/g, "<em>$1</em>"); + cleanHTML = cleanHTML.replace(/<code[^>]*>(.*?)<\/code>/g, "<code>$1</code>"); + + // Simplify links + cleanHTML = cleanHTML.replace( + /<a[^>]*href="([^"]*)"[^>]*>(.*?)<\/a>/g, + '<a href="$1">$2</a>' + ); + + // Decode HTML entities + cleanHTML = cleanHTML.replace(/&amp;/g, "&"); + cleanHTML = cleanHTML.replace(/&lt;/g, "<"); + cleanHTML = cleanHTML.replace(/&gt;/g, ">"); + cleanHTML = cleanHTML.replace(/&quot;/g, '"'); + cleanHTML = cleanHTML.replace(/&#39;/g, "'"); + cleanHTML = cleanHTML.replace(/&nbsp;/g, " "); + + // Clean up whitespace + cleanHTML = cleanHTML.replace(/\s+/g, " "); + cleanHTML = cleanHTML.replace(/<p>
      \s*<\/p>/g, ""); + + return cleanHTML.trim(); +} + +async function main() { + console.log("Fetching latest topics..."); + const latestData = await fetchJSON("https://forum.livepeer.org/latest.json"); + + const topics = latestData.topic_list?.topics || []; + console.log(`Found ${topics.length} topics`); + + // Filter out old pinned topics + const filteredTopics = topics.filter((t) => !isOldPinned(t)); + console.log(`After filtering: ${filteredTopics.length} topics`); + + // Get top 4 + const top4 = filteredTopics.slice(0, 4); + console.log(`Processing top 4 topics...`); + + const processedTopics = []; + + for (const topic of top4) { + console.log(`Processing topic ${topic.id}: ${topic.title}`); + + // Fetch full topic data + const topicData = await fetchJSON( + `https://forum.livepeer.org/t/${topic.id}.json` + ); + + // Extract first post + const firstPost = topicData.post_stream?.posts?.find( + (p) => p.post_number === 1 + ); + + if (!firstPost) { + console.log(` No first post found, skipping`); + continue; + } + + const htmlContent = cleanAndFormatHTML(firstPost.cooked || ""); + const datePosted = topic.created_at + ? 
new Date(topic.created_at).toLocaleDateString("en-US", { + year: "numeric", + month: "short", + day: "numeric", + }) + : ""; + + processedTopics.push({ + title: topic.title, + href: `https://forum.livepeer.org/t/${topic.id}`, + author: `By ${firstPost.name || firstPost.username || "Unknown"} (@${ + firstPost.username || "unknown" + })`, + content: htmlContent, + replyCount: (topic.posts_count || 1) - 1, + datePosted: datePosted, + }); + } + + console.log(`Processed ${processedTopics.length} topics`); + + // Generate JavaScript export with exact formatting + let jsExport = "export const forumData = [\n"; + + processedTopics.forEach((item, index) => { + jsExport += " {\n"; + jsExport += ` title: "${item.title + .replace(/\\/g, "\\\\") + .replace(/"/g, '\\"')}",\n`; + jsExport += ` href: "${item.href}",\n`; + jsExport += ` author: "${item.author + .replace(/\\/g, "\\\\") + .replace(/"/g, '\\"')}",\n`; + + // Content with proper escaping and indentation + const escapedContent = item.content + .replace(/\\/g, "\\\\") + .replace(/"/g, '\\"') + .replace(/\n/g, " "); + + jsExport += ` content:\n "${escapedContent}",\n`; + jsExport += ` replyCount: ${item.replyCount},\n`; + jsExport += ` datePosted: "${item.datePosted}",\n`; + jsExport += " }"; + + if (index < processedTopics.length - 1) { + jsExport += ","; + } + jsExport += "\n"; + }); + + jsExport += "];\n"; + + // Write to file + const outputPath = "snippets/automations/forum/forumData.jsx"; + fs.mkdirSync("snippets/automations/forum", { recursive: true }); + fs.writeFileSync(outputPath, jsExport); + console.log(`Written to ${outputPath}`); +} + +main().catch((err) => { + console.error("Error:", err); + process.exit(1); +}); diff --git a/.github/scripts/fetch-ghost-blog-data.js b/.github/scripts/fetch-ghost-blog-data.js new file mode 100644 index 00000000..44e0d2f0 --- /dev/null +++ b/.github/scripts/fetch-ghost-blog-data.js @@ -0,0 +1,101 @@ +const https = require("https"); +const fs = require("fs"); + +// Fetch JSON 
from URL +function fetchJSON(url) { + return new Promise((resolve, reject) => { + https + .get(url, (res) => { + let data = ""; + res.on("data", (chunk) => { + data += chunk; + }); + res.on("end", () => { + try { + resolve(JSON.parse(data)); + } catch (e) { + reject(e); + } + }); + }) + .on("error", reject); + }); +} + +// Safe HTML escape - only escape backticks for template literals +function safeHTML(html) { + return (html || "").replace(/`/g, "\\`"); +} + +// Format date +function formatDate(iso) { + return new Date(iso).toLocaleDateString("en-US", { + month: "short", + day: "numeric", + year: "numeric", + }); +} + +async function main() { + console.log("Fetching Ghost blog posts..."); + + const apiUrl = + "https://livepeer-studio.ghost.io/ghost/api/content/posts/?key=eaf54ba5c9d4ab35ce268663b0&limit=4&include=tags,authors"; + + const response = await fetchJSON(apiUrl); + + if (!response.posts || response.posts.length === 0) { + console.log("No posts found"); + return; + } + + console.log(`Found ${response.posts.length} posts`); + + // Process posts + const posts = response.posts.map((p) => ({ + title: p.title, + href: p.url, + author: p.primary_author?.name + ? 
`By ${p.primary_author.name}` + : "By Livepeer Team", + content: safeHTML(p.html), + datePosted: formatDate(p.published_at), + img: p.feature_image || "", + excerpt: safeHTML(p.excerpt), + readingTime: p.reading_time || 0, + })); + + // Generate JavaScript export with template literals + let jsExport = "export const ghostData = [\n"; + + posts.forEach((post, index) => { + jsExport += "{\n"; + jsExport += ` title: \`${post.title}\`,\n`; + jsExport += ` href: \`${post.href}\`,\n`; + jsExport += ` author: \`${post.author}\`,\n`; + jsExport += ` content: \`${post.content}\`,\n`; + jsExport += ` datePosted: \`${post.datePosted}\`,\n`; + jsExport += ` img: \`${post.img}\`,\n`; + jsExport += ` excerpt: \`${post.excerpt}\`,\n`; + jsExport += ` readingTime: ${post.readingTime}\n`; + jsExport += "}"; + + if (index < posts.length - 1) { + jsExport += ","; + } + jsExport += "\n"; + }); + + jsExport += "];\n"; + + // Write to file + const outputPath = "snippets/automations/ghost/ghostBlogData.jsx"; + fs.mkdirSync("snippets/automations/ghost", { recursive: true }); + fs.writeFileSync(outputPath, jsExport); + console.log(`Written to ${outputPath}`); +} + +main().catch((err) => { + console.error("Error:", err); + process.exit(1); +}); diff --git a/.github/scripts/fetch-youtube-data.js b/.github/scripts/fetch-youtube-data.js new file mode 100644 index 00000000..63d35ddd --- /dev/null +++ b/.github/scripts/fetch-youtube-data.js @@ -0,0 +1,122 @@ +const https = require("https"); +const fs = require("fs"); + +const YOUTUBE_API_KEY = process.env.YOUTUBE_API_KEY; +const CHANNEL_ID = process.env.CHANNEL_ID || "UCzfHtZnmUzMbJDxGCwIgY2g"; + +function httpsGet(url) { + return new Promise((resolve, reject) => { + https + .get(url, (res) => { + let data = ""; + res.on("data", (chunk) => (data += chunk)); + res.on("end", () => resolve(JSON.parse(data))); + }) + .on("error", reject); + }); +} + +function parseDuration(duration) { + const match = 
duration.match(/PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?/); + if (!match) return 0; + + const hours = parseInt(match[1] || 0); + const minutes = parseInt(match[2] || 0); + const seconds = parseInt(match[3] || 0); + + return hours * 3600 + minutes * 60 + seconds; +} + +function escapeForJSX(str) { + return str + .replace(/\\/g, "\\\\") + .replace(/'/g, "\\'") + .replace(/"/g, '\\"') + .replace(/\n/g, " ") + .replace(/\r/g, "") + .replace(/\t/g, " "); +} + +async function main() { + // Step 1: Get recent videos + console.log("Fetching recent videos..."); + const searchUrl = `https://www.googleapis.com/youtube/v3/search?part=snippet&channelId=${CHANNEL_ID}&maxResults=50&order=date&type=video&key=${YOUTUBE_API_KEY}`; + const searchResults = await httpsGet(searchUrl); + + if (!searchResults.items || searchResults.items.length === 0) { + console.log("No videos found"); + return; + } + + // Step 2: Get video details for each video + console.log( + `Found ${searchResults.items.length} videos, fetching details...` + ); + const videoIds = searchResults.items.map((item) => item.id.videoId).join(","); + const detailsUrl = `https://www.googleapis.com/youtube/v3/videos?part=contentDetails,snippet&id=${videoIds}&key=${YOUTUBE_API_KEY}`; + const detailsResults = await httpsGet(detailsUrl); + + // Step 3: Process and filter videos + const videos = []; + for (const video of detailsResults.items) { + const duration = video.contentDetails.duration; + const durationSeconds = parseDuration(duration); + const snippet = video.snippet; + + // Check if it's a livestream + const isLivestream = + snippet.liveBroadcastContent === "live" || + snippet.liveBroadcastContent === "upcoming" || + duration === "PT0S" || + snippet.title.toLowerCase().includes("watercooler") || + snippet.title.toLowerCase().includes("fireside"); + + // Filter out Shorts (≤60 seconds and not livestreams) + const isShort = + durationSeconds <= 60 && durationSeconds > 0 && !isLivestream; + + if (!isShort) { + videos.push({ + 
title: snippet.title, + href: `https://www.youtube.com/watch?v=${video.id}`, + author: `By ${snippet.channelTitle || "Livepeer"}`, + content: (snippet.description || "").substring(0, 500), + publishedDate: new Date(snippet.publishedAt).toLocaleDateString( + "en-US", + { month: "short", day: "numeric", year: "numeric" } + ), + duration: duration, + thumbnailUrl: snippet.thumbnails.high.url, + }); + } + } + + console.log(`Filtered to ${videos.length} non-Short videos`); + + // Step 4: Generate JSX content + const jsxContent = `export const youtubeData = [ +${videos + .map( + (v) => ` { + title: '${escapeForJSX(v.title)}', + href: '${v.href}', + author: '${v.author}', + content: '${escapeForJSX(v.content)}...', + publishedDate: '${v.publishedDate}', + duration: '${v.duration}', + thumbnailUrl: '${v.thumbnailUrl}' + }` + ) + .join(",\n")} +]; +`; + + // Step 5: Write to file + fs.writeFileSync("snippets/automations/youtube/youtubeData.jsx", jsxContent); + console.log("Successfully wrote youtubeData.jsx"); +} + +main().catch((err) => { + console.error("Error:", err); + process.exit(1); +}); diff --git a/.github/workflows/update-blog-data.yml b/.github/workflows/update-blog-data.yml new file mode 100644 index 00000000..cabd9ff6 --- /dev/null +++ b/.github/workflows/update-blog-data.yml @@ -0,0 +1,60 @@ +name: Update Blog and Forum Data + +on: + schedule: + - cron: "0 0 * * *" # Runs daily at midnight UTC + workflow_dispatch: # Allows manual trigger from GitHub UI + +jobs: + update-data: + runs-on: ubuntu-latest + + permissions: + contents: write # Required to push changes + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Fetch Ghost blog data + run: | + curl -f -o ghost-data.json "https://livepeer.org/ghost/api/content/posts/?key=YOUR_CONTENT_API_KEY&limit=all&include=tags,authors" || echo "[]" > ghost-data.json + continue-on-error: true + + - name: Fetch Forum data + run: | + curl -f -o 
forum-data.json "https://forum.livepeer.org/latest.json" || echo "[]" > forum-data.json + continue-on-error: true + + - name: Update Ghost data file + run: | + echo "export const ghostData = " > snippets/automations/blog/ghostBlogData.jsx + cat ghost-data.json >> snippets/automations/blog/ghostBlogData.jsx + echo ";" >> snippets/automations/blog/ghostBlogData.jsx + + - name: Update Forum data file + run: | + echo "export const forumData = " > snippets/automations/forum/forumData.jsx + cat forum-data.json >> snippets/automations/forum/forumData.jsx + echo ";" >> snippets/automations/forum/forumData.jsx + + - name: Check for changes + id: git-check + run: | + git diff --exit-code snippets/automations/ || echo "changed=true" >> $GITHUB_OUTPUT + + - name: Commit and push if changed + if: steps.git-check.outputs.changed == 'true' + run: | + git config --global user.name 'github-actions[bot]' + git config --global user.email 'github-actions[bot]@users.noreply.github.com' + git add snippets/automations/blog/ghostBlogData.jsx + git add snippets/automations/forum/forumData.jsx + git commit -m "chore: update blog and forum data [skip ci]" + git push + + - name: Cleanup + run: | + rm -f ghost-data.json forum-data.json diff --git a/.github/workflows/update-forum-data.yml b/.github/workflows/update-forum-data.yml new file mode 100644 index 00000000..91e658b9 --- /dev/null +++ b/.github/workflows/update-forum-data.yml @@ -0,0 +1,38 @@ +# NOTE: THIS GITHUB ACTION WILL ONLY RUN ON MAIN BRANCH. +# N8N IS BEING USING AS AN ALTERNATIVE UNTIL THEN. 
+# N8N workflow is in /snippets/automations/n8n-workflows/forum-to-mintlify-latest-topics.json +name: Update Forum Data + +on: + schedule: + # Run daily at 00:00 UTC + - cron: "0 0 * * *" + workflow_dispatch: # Allow manual trigger + +jobs: + update-forum-data: + runs-on: ubuntu-latest + + steps: + - name: Checkout docs repository + uses: actions/checkout@v4 + with: + repository: livepeer/docs + ref: docs-v2-preview + token: ${{ secrets.DOCS_V2 }} + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: "18" + + - name: Fetch and process forum data + run: | + node .github/scripts/fetch-forum-data.js + + - name: Commit and push if changed + run: | + git config user.name "GitHub Action" + git config user.email "action@github.com" + git add snippets/automations/forum/forumData.jsx + git diff --quiet && git diff --staged --quiet || (git commit -m "Update forum data - $(date -u +"%Y-%m-%dT%H:%M:%SZ")" && git push) diff --git a/.github/workflows/update-ghost-blog-data.yml b/.github/workflows/update-ghost-blog-data.yml new file mode 100644 index 00000000..b3d44c1b --- /dev/null +++ b/.github/workflows/update-ghost-blog-data.yml @@ -0,0 +1,35 @@ +# NOTE: THIS GITHUB ACTION WILL ONLY RUN ON MAIN BRANCH. +# N8N IS BEING USING AS AN ALTERNATIVE UNTIL THEN. 
+# N8N workflow is in /snippets/automations/n8n-workflows/ghost-to-mintlify.json +name: Update Ghost Blog Data + +on: + schedule: + - cron: "0 0 * * *" + workflow_dispatch: + +jobs: + update-ghost-data: + runs-on: ubuntu-latest + + steps: + - name: Checkout docs repository + uses: actions/checkout@v4 + with: + token: ${{ secrets.DOCS_V2 }} + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: "18" + + - name: Fetch and process Ghost blog data + run: | + node .github/scripts/fetch-ghost-data.js + + - name: Commit and push if changed + run: | + git config user.name "GitHub Action" + git config user.email "action@github.com" + git add snippets/automations/ghost/ghostBlogData.jsx + git diff --quiet && git diff --staged --quiet || (git commit -m "Update Ghost blog data - $(date -u +"%Y-%m-%dT%H:%M:%SZ")" && git push) diff --git a/.github/workflows/update-youtube-data.yml b/.github/workflows/update-youtube-data.yml new file mode 100644 index 00000000..05dfd5e1 --- /dev/null +++ b/.github/workflows/update-youtube-data.yml @@ -0,0 +1,158 @@ +# NOTE: THIS GITHUB ACTION WILL ONLY RUN ON MAIN BRANCH. +# N8N IS BEING USING AS AN ALTERNATIVE UNTIL THEN. +# N8N workflow is in /snippets/automations/n8n-workflows/youtube-to-mintlify.json +# You will need to Add YOUTUBE_API_KEY secret in repo settings (Settings → Secrets → Actions) for this github action to work. 
+ +name: Update YouTube Data + +on: + schedule: + - cron: "0 0 * * 0" # Weekly on Sunday at midnight UTC + workflow_dispatch: # Allow manual trigger + +jobs: + update-youtube: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + ref: main + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + + - name: Fetch and process YouTube videos + env: + YOUTUBE_API_KEY: ${{ secrets.YOUTUBE_API_KEY }} + CHANNEL_ID: UCzfHtZnmUzMbJDxGCwIgY2g + run: | + node << 'EOF' + const https = require('https'); + const fs = require('fs'); + + const YOUTUBE_API_KEY = process.env.YOUTUBE_API_KEY; + const CHANNEL_ID = process.env.CHANNEL_ID; + + function httpsGet(url) { + return new Promise((resolve, reject) => { + https.get(url, (res) => { + let data = ''; + res.on('data', (chunk) => data += chunk); + res.on('end', () => resolve(JSON.parse(data))); + }).on('error', reject); + }); + } + + function parseDuration(duration) { + const match = duration.match(/PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?/); + if (!match) return 0; + + const hours = parseInt(match[1] || 0); + const minutes = parseInt(match[2] || 0); + const seconds = parseInt(match[3] || 0); + + return hours * 3600 + minutes * 60 + seconds; + } + + function escapeForJSX(str) { + return str + .replace(/\\/g, '\\\\') + .replace(/'/g, "\\'") + .replace(/"/g, '\\"') + .replace(/\n/g, ' ') + .replace(/\r/g, '') + .replace(/\t/g, ' '); + } + + async function main() { + // Step 1: Get recent videos + console.log('Fetching recent videos...'); + const searchUrl = `https://www.googleapis.com/youtube/v3/search?part=snippet&channelId=${CHANNEL_ID}&maxResults=50&order=date&type=video&key=${YOUTUBE_API_KEY}`; + const searchResults = await httpsGet(searchUrl); + + if (!searchResults.items || searchResults.items.length === 0) { + console.log('No videos found'); + return; + } + + // Step 2: Get video details for each video + 
console.log(`Found ${searchResults.items.length} videos, fetching details...`); + const videoIds = searchResults.items.map(item => item.id.videoId).join(','); + const detailsUrl = `https://www.googleapis.com/youtube/v3/videos?part=contentDetails,snippet&id=${videoIds}&key=${YOUTUBE_API_KEY}`; + const detailsResults = await httpsGet(detailsUrl); + + // Step 3: Process and filter videos + const videos = []; + for (const video of detailsResults.items) { + const duration = video.contentDetails.duration; + const durationSeconds = parseDuration(duration); + const snippet = video.snippet; + + // Check if it's a livestream + const isLivestream = snippet.liveBroadcastContent === 'live' || + snippet.liveBroadcastContent === 'upcoming' || + duration === 'PT0S' || + snippet.title.toLowerCase().includes('watercooler') || + snippet.title.toLowerCase().includes('fireside'); + + // Filter out Shorts (≤60 seconds and not livestreams) + const isShort = durationSeconds <= 60 && durationSeconds > 0 && !isLivestream; + + if (!isShort) { + videos.push({ + title: snippet.title, + href: `https://www.youtube.com/watch?v=${video.id}`, + author: `By ${snippet.channelTitle || 'Livepeer'}`, + content: (snippet.description || '').substring(0, 500), + publishedDate: new Date(snippet.publishedAt).toLocaleDateString('en-US', { month: 'short', day: 'numeric', year: 'numeric' }), + duration: duration, + thumbnailUrl: snippet.thumbnails.high.url + }); + } + } + + console.log(`Filtered to ${videos.length} non-Short videos`); + + // Step 4: Generate JSX content + const jsxContent = `export const youtubeData = [ + ${videos.map(v => ` { + title: '${escapeForJSX(v.title)}', + href: '${v.href}', + author: '${v.author}', + content: '${escapeForJSX(v.content)}...', + publishedDate: '${v.publishedDate}', + duration: '${v.duration}', + thumbnailUrl: '${v.thumbnailUrl}' + }`).join(',\n')} + ]; + `; + + // Step 5: Write to file + fs.writeFileSync('snippets/automations/youtube/youtubeData.jsx', jsxContent); + 
console.log('Successfully wrote youtubeData.jsx'); + } + + main().catch(err => { + console.error('Error:', err); + process.exit(1); + }); + EOF + + - name: Check for changes + id: git-check + run: | + git diff --exit-code snippets/automations/youtube/youtubeData.jsx || echo "changed=true" >> $GITHUB_OUTPUT + + - name: Commit and push if changed + if: steps.git-check.outputs.changed == 'true' + run: | + git config user.name "GitHub Actions Bot" + git config user.email "actions@github.com" + git add snippets/automations/youtube/youtubeData.jsx + git commit -m "Update YouTube videos - $(date -u +"%Y-%m-%dT%H:%M:%SZ")" + git push diff --git a/.gitignore b/.gitignore index e7e1da61..90d473fe 100644 --- a/.gitignore +++ b/.gitignore @@ -22,6 +22,9 @@ pnpm-lock.yaml .env .env.*local +# Google OAuth secrets +**/client_secret*.json + # ------------------------------------ # Logs # ------------------------------------ diff --git a/.mintignore b/.mintignore new file mode 100644 index 00000000..e69de29b diff --git a/docs.json b/docs.json index 15de3b4a..be521d2b 100644 --- a/docs.json +++ b/docs.json @@ -39,20 +39,21 @@ }, { "group": "Livepeer", - "icon": "/snippets/assets/logos/Livepeer-Logo-Symbol-Light.svg", + "icon": "/snippets/assets/logos/Livepeer-Logo-Symbol-Theme.svg", "pages": [ - "v2/pages/00_home/introduction/livepeer-story", - "v2/pages/00_home/introduction/livepeer-vision", - "v2/pages/00_home/introduction/livepeer-future", - "v2/pages/00_home/introduction/livepeer-ecosystem" + "v2/pages/00_home/introduction/vision", + "v2/pages/00_home/introduction/evolution", + "v2/pages/00_home/introduction/roadmap", + "v2/pages/00_home/introduction/ecosystem" ] }, { "group": "Showcase", "icon": "clapperboard-play", "pages": [ - "v2/pages/00_home/project-showcase/projects-built-on-livepeer", - "v2/pages/00_home/project-showcase/livepeer-applications", + "v2/pages/00_home/project-showcase/why-livepeer", + "v2/pages/00_home/project-showcase/showcase", + 
"v2/pages/00_home/project-showcase/applications", "v2/pages/00_home/project-showcase/industry-verticals" ] } diff --git a/mintOld.json b/mintOld.json deleted file mode 100644 index ab684bf8..00000000 --- a/mintOld.json +++ /dev/null @@ -1,941 +0,0 @@ -{ - "$schema": "https://mintlify.com/schema.json", - "name": "Livepeer Docs", - "logo": { - "dark": "/logo/dark.svg", - "light": "/logo/light.svg" - }, - "redirects": [ - { - "source": "/guides/developing/quickstart", - "destination": "/developers/quick-start" - }, - { - "source": "/guides/overview", - "destination": "/developers/guides/overview" - }, - { - "source": "/guides/developing/player", - "destination": "/developers/guides/playback-an-asset" - }, - { - "source": "/guides/developing/create-a-livestream", - "destination": "/developers/guides/create-livestream" - }, - { - "source": "/guides/developing/stream-via-obs", - "destination": "/developers/guides/stream-via-obs" - }, - { - "source": "/developing/stream-via-browser", - "destination": "/developers/guides/livestream-from-browser" - }, - { - "source": "/guides/developing/upload-a-video-asset", - "destination": "/developers/guides/upload-video-asset" - }, - { - "source": "/guides/developing/mint-a-video-nft", - "destination": "/developers/guides/mint-video-nft" - }, - { - "source": "/guides/developing/dstorage-playback", - "destination": "/developers/guides/dstorage-playback" - }, - { - "source": "/developers/guides/dstorage-playback", - "destination": "/developers/guides/upload-video-asset" - }, - { - "source": "/guides/developing/access-control", - "destination": "/developers/guides/access-control-webhooks" - }, - { - "source": "/guides/developing/access-control-vod", - "destination": "/developers/guides/access-control-webhooks" - }, - { - "source": "/guides/developing/encrypted-vod", - "destination": "/developers/guides/encrypted-asset" - }, - { - "source": "/guides/developing/listen-for-webhooks", - "destination": 
"/developers/guides/setup-and-listen-to-webhooks" - }, - { - "source": "/guides/developing/multistream", - "destination": "/developers/guides/multistream" - }, - { - "source": "/guides/developing/monitor-stream-health", - "destination": "/developers/guides/monitor-stream-health" - }, - { - "source": "/guides/developing/viewer-engagement", - "destination": "/developers/guides/get-engagement-analytics-via-api" - }, - { - "source": "/guides/developing/transcode-video-storj", - "destination": "/developers/guides/transcode-video-storj" - }, - { - "source": "/guides/developing/transcode-video-w3s", - "destination": "/developers/guides/transcode-video-w3s" - }, - { - "source": "/tutorials/developing/optimize-latency", - "destination": "/developers/guides/optimize-latency-of-a-livestream" - }, - { - "source": "/tutorials/developing/analyze-engagement-timeplus", - "destination": "/developers/guides/get-engagement-analytics-via-timeplus" - }, - { - "source": "/tutorials/developing/visualize-engagement-metrics-grafana", - "destination": "/developers/guides/get-engagement-analytics-via-grafana" - }, - { - "source": "/tutorials/developing/token-gate-videos-using-guildxyz", - "destination": "/developers/tutorials/token-gate-videos-with-lit" - }, - { - "source": "/tutorials/developing/token-gate-videos-using-lit", - "destination": "/developers/tutorials/token-gate-videos-with-lit" - }, - { - "source": "/tutorials/developing/build-decentralized-video-app-with-fvm", - "destination": "/developers/tutorials/decentralized-app-with-fvm" - }, - { - "source": "/tutorials/developing/upload-playback-videos-on-ipfs-4everland", - "destination": "/developers/tutorials/upload-playback-videos-4everland" - }, - { - "source": "/tutorials/developing/upload-playback-videos-on-ipfs", - "destination": "/developers/tutorials/upload-playback-videos-on-ipfs" - }, - { - "source": "/tutorials/developing/upload-playback-videos-on-arweave", - "destination": 
"/developers/tutorials/upload-playback-videos-on-arweave" - }, - { - "source": "/reference/api", - "destination": "/api-reference/overview/introduction" - }, - { - "source": "/reference/deployed-contract-addresses", - "destination": "/references/contract-addresses" - }, - { - "source": "/reference/example-applications", - "destination": "/references/example-applications" - }, - { - "source": "/reference/api-support-matrix", - "destination": "/references/api-support-matrix" - }, - { - "source": "/reference/go-livepeer", - "destination": "/references/go-livepeer/bandwidth-requirements" - }, - { - "source": "/reference/go-livepeer/cli-reference", - "destination": "/references/go-livepeer/cli-reference" - }, - { - "source": "/reference/go-livepeer/gpu-support", - "destination": "/references/go-livepeer/gpu-support" - }, - { - "source": "/reference/go-livepeer/hardware-requirements", - "destination": "/references/go-livepeer/hardware-requirements" - }, - { - "source": "/reference/go-livepeer/bandwidth-requirements", - "destination": "/references/go-livepeer/bandwidth-requirements" - }, - { - "source": "/reference/go-livepeer/prometheus-metrics", - "destination": "/references/go-livepeer/prometheus-metrics" - }, - { - "source": "/guides/delegating/bridge-lpt-to-arbitrum", - "destination": "/delegators/guides/bridge-lpt-to-arbitrum" - }, - { - "source": "/guides/delegating/migrate-stake-to-arbitrum", - "destination": "/delegators/guides/migrate-stake-to-arbitrum" - }, - { - "source": "/delegators/reference/yield-calculation", - "destination": "/delegators/guides/yield-calculation" - }, - { - "source": "/guides/orchestrating/get-started", - "destination": "/orchestrators/guides/get-started" - }, - { - "source": "/guides/orchestrating/install-go-livepeer", - "destination": "/orchestrators/guides/install-go-livepeer" - }, - { - "source": "/guides/orchestrating/connect-to-arbitrum", - "destination": "/orchestrators/guides/connect-to-arbitrum" - }, - { - "source": 
"/guides/orchestrating/configure-reward-calling", - "destination": "/orchestrators/guides/configure-reward-calling" - }, - { - "source": "/guides/orchestrating/set-session-limits", - "destination": "/orchestrators/guides/set-session-limits" - }, - { - "source": "/guides/orchestrating/set-pricing", - "destination": "/orchestrators/guides/set-pricing" - }, - { - "source": "/guides/orchestrating/benchmark-transcoding", - "destination": "/orchestrators/guides/benchmark-transcoding" - }, - { - "source": "/guides/orchestrating/assess-capabilities", - "destination": "/orchestrators/guides/assess-capabilities" - }, - { - "source": "/guides/orchestrating/monitor-metrics", - "destination": "/orchestrators/guides/monitor-metrics" - }, - { - "source": "/guides/orchestrating/vote", - "destination": "/orchestrators/guides/vote" - }, - { - "source": "/guides/orchestrating/dual-mine", - "destination": "/orchestrators/guides/dual-mine" - }, - { - "source": "/guides/orchestrating/o-t-split", - "destination": "/orchestrators/guides/o-t-split" - }, - { - "source": "/guides/orchestrating/migrate-to-arbitrum", - "destination": "/orchestrators/guides/migrate-to-arbitrum" - }, - { - "source": "/guides/orchestrating/migrate-from-contract-wallet", - "destination": "/orchestrators/guides/migrate-from-contract-wallet" - }, - { - "source": "/guides/orchestrating/gateway-introspection", - "destination": "/orchestrators/guides/gateway-introspection" - }, - { - "source": "/guides/orchestrating/troubleshoot", - "destination": "/orchestrators/guides/troubleshoot" - }, - { - "source": "/reference/react", - "destination": "/react/getting-started" - }, - { - "source": "/reference/react/getting-started", - "destination": "/react/getting-started" - }, - { - "source": "/reference/react/client", - "destination": "/react/getting-started" - }, - { - "source": "/reference/react/LivepeerConfig", - "destination": "/sdks/react/migration/3.x/LivepeerConfig" - }, - { - "source": "/reference/react/Player", - 
"destination": "/react/player/Root" - }, - { - "source": "/reference/react/Broadcast", - "destination": "/react/broadcast/Root" - }, - { - "source": "/reference/react/providers/studio", - "destination": "/sdks/react/migration/3.x/providers/studio" - }, - { - "source": "/reference/react/asset/useAsset", - "destination": "/sdks/react/migration/3.x/asset/useAsset" - }, - { - "source": "/reference/react/asset/useCreateAsset", - "destination": "/sdks/react/migration/3.x/asset/useCreateAsset" - }, - { - "source": "/reference/react/asset/useAssetMetrics", - "destination": "/sdks/react/migration/3.x/asset/useAssetMetrics" - }, - { - "source": "/reference/react/asset/useUpdateAsset", - "destination": "/sdks/react/migration/3.x/asset/useUpdateAsset" - }, - { - "source": "/reference/react/stream/useStream", - "destination": "/sdks/react/migration/3.x/stream/useStream" - }, - { - "source": "/reference/react/stream/useStreamSession", - "destination": "/sdks/react/migration/3.x/stream/useStreamSession" - }, - { - "source": "/reference/react/stream/useStreamSessions", - "destination": "/sdks/react/migration/3.x/stream/useStreamSessions" - }, - { - "source": "/reference/react/stream/useCreateStream", - "destination": "/sdks/react/migration/3.x/stream/useCreateStream" - }, - { - "source": "/reference/react/stream/useUpdateStream", - "destination": "/sdks/react/migration/3.x/stream/useUpdateStream" - }, - { - "source": "/reference/react/playback/usePlaybackInfo", - "destination": "/sdks/react/migration/3.x/playback/usePlaybackInfo" - }, - { - "source": "/reference/react/constants/abis", - "destination": "/sdks/react/migration/3.x/constants/abis" - }, - { - "source": "/reference/react/constants/contract-addresses", - "destination": "/sdks/react/migration/3.x/constants/contract-addresses" - } - ], - "favicon": "/favicon.png", - "colors": { - "primary": "#18794E", - "light": "#2b9a66", - "dark": "#18794E" - }, - "theme": "prism", - "feedback": { - "suggestEdit": true, - "raiseIssue": 
true, - "thumbsRating": true - }, - "openapi": "./openapi.yaml", - "api": { - "baseUrl": "https://livepeer.studio/api" - }, - "modeToggle": { - "default": "dark" - }, - "topbarLinks": [ - { - "name": "Discord", - "url": "https://discord.gg/livepeer" - } - ], - "versions": ["Developers", "Delegators", "Orchestrators", "Gateways"], - "topbarCtaButton": { - "name": "Dashboard", - "url": "https://livepeer.studio" - }, - "topAnchor": { - "name": "Documentation", - "icon": "code" - }, - "anchors": [ - { - "name": "API Reference", - "icon": "rectangle-terminal", - "url": "api-reference" - }, - { - "name": "SDKs", - "icon": "brackets-curly", - "url": "sdks" - }, - { - "name": "AI Video (Beta)", - "icon": "microchip-ai", - "iconType": "regular", - "url": "ai" - }, - { - "name": "What's New", - "icon": "rocket", - "url": "https://livepeer.canny.io/changelog" - }, - { - "name": "Community", - "icon": "discord", - "url": "https://discord.gg/livepeer", - "iconType": "brands" - } - ], - "navigation": [ - { - "group": "Getting Started", - "pages": [ - "developers/introduction", - "developers/quick-start", - "developers/livepeer-studio-cli" - ] - }, - { - "group": "Guides", - "pages": [ - "developers/guides/overview", - { - "group": "Assets", - "icon": "video", - "pages": [ - "developers/guides/upload-video-asset", - "developers/guides/playback-an-asset", - "developers/guides/listen-to-asset-events", - "developers/guides/encrypted-asset", - "developers/guides/thumbnails-vod" - ] - }, - { - "group": "Livestream", - "icon": "camera", - "pages": [ - "developers/guides/create-livestream", - "developers/guides/playback-a-livestream", - "developers/guides/stream-via-obs", - "developers/guides/livestream-from-browser", - "developers/guides/optimize-latency-of-a-livestream", - "developers/guides/monitor-stream-health", - "developers/guides/listen-to-stream-events", - "developers/guides/multistream", - "developers/guides/clip-a-livestream", - "developers/guides/thumbnails-live" - ] - }, - 
{ - "group": "Access control", - "icon": "lock", - "pages": [ - "developers/guides/access-control-webhooks", - "developers/guides/access-control-jwt" - ] - }, - { - "group": "Webhooks", - "icon": "bell", - "pages": ["developers/guides/setup-and-listen-to-webhooks"] - }, - { - "group": "Transcode API", - "icon": "photo-film", - "pages": [ - "developers/guides/transcode-video-storj", - "developers/guides/transcode-video-w3s" - ] - }, - { - "group": "Viewership Metrics", - "icon": "chart-bar", - "pages": [ - "developers/guides/get-engagement-analytics-via-api", - "developers/guides/get-engagement-analytics-via-grafana", - "developers/guides/get-engagement-analytics-via-timeplus" - ] - }, - { - "group": "Projects", - "icon": "folder-open", - "pages": ["developers/guides/managing-projects"] - }, - { - "group": "Integrations", - "icon": "puzzle-piece", - "pages": [ - "developers/tutorials/decentralized-app-with-fvm", - "developers/tutorials/token-gate-videos-with-lit", - { - "group": "Storage Provider Integration", - "pages": [ - "developers/tutorials/upload-playback-videos-4everland", - "developers/tutorials/upload-playback-videos-on-arweave", - "developers/tutorials/upload-playback-videos-on-ipfs" - ] - } - ] - } - ], - "version": "Developers" - }, - { - "group": "Guides", - "pages": [ - "delegators/guides/bridge-lpt-to-arbitrum", - "delegators/guides/migrate-stake-to-arbitrum", - "delegators/guides/yield-calculation" - ], - "version": "Delegators" - }, - { - "group": "Guides", - "pages": [ - "orchestrators/guides/get-started", - "orchestrators/guides/install-go-livepeer", - "orchestrators/guides/connect-to-arbitrum", - "orchestrators/guides/configure-reward-calling", - "orchestrators/guides/set-session-limits", - "orchestrators/guides/set-pricing", - "orchestrators/guides/benchmark-transcoding", - "orchestrators/guides/assess-capabilities", - "orchestrators/guides/monitor-metrics", - "orchestrators/guides/vote", - "orchestrators/guides/dual-mine", - 
"orchestrators/guides/o-t-split", - "orchestrators/guides/migrate-to-arbitrum", - "orchestrators/guides/migrate-from-contract-wallet", - "orchestrators/guides/gateway-introspection", - "orchestrators/guides/troubleshoot" - ], - "version": "Orchestrators" - }, - { - "group": "Guides", - "pages": [ - "gateways/guides/gateway-overview", - "gateways/guides/docker-install", - "gateways/guides/linux-install", - "gateways/guides/windows-install", - "gateways/guides/transcoding-options", - "gateways/guides/fund-gateway", - "gateways/guides/publish-content", - "gateways/guides/playback-content" - ], - "version": "Gateways" - }, - { - "group": "AI Video", - "pages": [ - "ai/introduction", - "ai/whats-new", - { - "group": "AI Pipelines", - "icon": "wand-magic-sparkles", - "iconType": "solid", - "pages": [ - "ai/pipelines/overview", - "ai/pipelines/audio-to-text", - "ai/pipelines/image-to-image", - "ai/pipelines/image-to-text", - "ai/pipelines/image-to-video", - "ai/pipelines/llm", - "ai/pipelines/segment-anything-2", - "ai/pipelines/text-to-image", - "ai/pipelines/text-to-speech", - "ai/pipelines/upscale" - ] - }, - { - "group": "Setup an AI Orchestrator", - "icon": "robot", - "iconType": "solid", - "pages": [ - "ai/orchestrators/get-started", - "ai/orchestrators/models-config", - "ai/orchestrators/models-download", - "ai/orchestrators/start-orchestrator", - "ai/orchestrators/ai-worker", - "ai/orchestrators/benchmarking", - "ai/orchestrators/onchain" - ] - }, - { - "group": "Setup an AI Gateway", - "icon": "signal-stream", - "iconType": "solid", - "pages": [ - "ai/gateways/get-started", - "ai/gateways/start-gateway", - "ai/gateways/onchain" - ] - }, - { - "group": "AI Builders", - "icon": "screwdriver-wrench", - "iconType": "solid", - "pages": [ - "ai/builders/get-started", - "ai/builders/gateways", - "ai/builders/showcase" - ] - }, - { - "group": "How to Contribute", - "icon": "heart", - "iconType": "solid", - "pages": ["ai/contributors/coming-soon"] - }, - { - "group": 
"SDKs", - "icon": "brackets-curly", - "pages": [ - "ai/sdks/overview", - "ai/sdks/go", - "ai/sdks/javascript", - "ai/sdks/python" - ] - }, - { - "group": "AI API Reference", - "icon": "rectangle-terminal", - "pages": [ - "ai/api-reference/overview", - "ai/api-reference/audio-to-text", - "ai/api-reference/image-to-image", - "ai/api-reference/image-to-text", - "ai/api-reference/image-to-video", - "ai/api-reference/llm", - "ai/api-reference/segment-anything-2", - "ai/api-reference/text-to-image", - "ai/api-reference/text-to-speech", - "ai/api-reference/upscale" - ] - } - ] - }, - { - "group": "References", - "pages": [ - "references/api-support-matrix", - { - "group": "Livepeer Node Software", - "icon": "golang", - "pages": [ - "references/go-livepeer/bandwidth-requirements", - "references/go-livepeer/cli-reference", - "references/go-livepeer/gpu-support", - "references/go-livepeer/hardware-requirements", - "references/go-livepeer/prometheus-metrics" - ] - }, - "references/contract-addresses", - "references/example-applications", - "references/awesome-livepeer", - { - "group": "FAQs", - "icon": "book", - "pages": [ - "references/knowledge-base/livestream", - "references/knowledge-base/playback", - "references/knowledge-base/vod" - ] - } - ] - }, - { - "group": "Overview", - "pages": ["sdks/introduction"] - }, - { - "group": "Server-side SDKs", - "pages": ["sdks/javascript", "sdks/go", "sdks/python"] - }, - { - "group": "React Components", - "icon": "circle-play", - "pages": [ - "sdks/react/getting-started", - { - "group": "Player", - "icon": "circle-play", - "pages": [ - "sdks/react/player/Root", - "sdks/react/player/Container", - "sdks/react/player/Video", - "sdks/react/player/Error", - "sdks/react/player/Loading", - "sdks/react/player/Portal", - "sdks/react/player/Poster", - { - "group": "Controls", - "pages": [ - "sdks/react/player/Controls", - "sdks/react/player/Clip", - "sdks/react/player/Fullscreen", - "sdks/react/player/Live", - 
"sdks/react/player/PictureInPicture", - "sdks/react/player/Play", - "sdks/react/player/RateSelect", - "sdks/react/player/Seek", - "sdks/react/player/Time", - "sdks/react/player/VideoQualitySelect", - "sdks/react/player/Volume" - ] - }, - { - "group": "Functions", - "pages": [ - "sdks/react/player/get-src", - "sdks/react/player/useMediaContext" - ] - } - ] - }, - { - "group": "Broadcast", - "icon": "signal-stream", - "pages": [ - "sdks/react/broadcast/Root", - "sdks/react/broadcast/Container", - "sdks/react/broadcast/Video", - "sdks/react/broadcast/Enabled", - "sdks/react/broadcast/Error", - "sdks/react/broadcast/Loading", - "sdks/react/broadcast/Portal", - { - "group": "Controls", - "pages": [ - "sdks/react/broadcast/Controls", - "sdks/react/broadcast/Audio", - "sdks/react/broadcast/Camera", - "sdks/react/broadcast/Fullscreen", - "sdks/react/broadcast/PictureInPicture", - "sdks/react/broadcast/Screenshare", - "sdks/react/broadcast/Source", - "sdks/react/broadcast/Status" - ] - }, - { - "group": "Functions", - "pages": [ - "sdks/react/broadcast/get-ingest", - "sdks/react/broadcast/useBroadcastContext" - ] - } - ] - }, - { - "group": "Examples", - "icon": "clipboard", - "pages": ["sdks/react/Player", "sdks/react/Broadcast"] - }, - { - "group": "Migration", - "icon": "right-left", - "pages": [ - "sdks/react/migration/migration-4.x", - { - "group": "Livepeer React (3.x and below)", - "pages": [ - "sdks/react/migration/3.x/getting-started", - "sdks/react/migration/3.x/client", - "sdks/react/migration/3.x/LivepeerConfig", - "sdks/react/migration/3.x/Player", - "sdks/react/migration/3.x/Broadcast", - { - "group": "Asset", - "pages": [ - "sdks/react/migration/3.x/asset/useCreateAsset", - "sdks/react/migration/3.x/asset/useAsset", - "sdks/react/migration/3.x/asset/useUpdateAsset", - "sdks/react/migration/3.x/asset/useAssetMetrics" - ] - }, - { - "group": "Stream", - "pages": [ - "sdks/react/migration/3.x/stream/useCreateStream", - 
"sdks/react/migration/3.x/stream/useStream", - "sdks/react/migration/3.x/stream/useUpdateStream", - "sdks/react/migration/3.x/stream/useStreamSession", - "sdks/react/migration/3.x/stream/useStreamSessions" - ] - }, - { - "group": "Playback", - "pages": ["sdks/react/migration/3.x/playback/usePlaybackInfo"] - }, - { - "group": "Constants", - "pages": [ - "sdks/react/migration/3.x/constants/abis", - "sdks/react/migration/3.x/constants/contract-addresses" - ] - } - ] - } - ] - } - ] - }, - { - "group": "Overview", - "pages": [ - "api-reference/overview/introduction", - "api-reference/overview/authentication" - ] - }, - { - "group": "APIs", - "pages": [ - { - "group": "Asset", - "icon": "video", - "pages": [ - "api-reference/asset/overview", - "api-reference/asset/upload", - "api-reference/asset/upload-via-url", - "api-reference/asset/get", - "api-reference/asset/update", - "api-reference/asset/delete", - "api-reference/asset/get-all" - ] - }, - { - "group": "Livestream", - "icon": "camera", - "pages": [ - "api-reference/stream/overview", - "api-reference/stream/create", - "api-reference/stream/get", - "api-reference/stream/update", - "api-reference/stream/terminate", - "api-reference/stream/add-multistream-target", - "api-reference/stream/delete-multistream-target", - "api-reference/stream/delete", - "api-reference/stream/get-all", - "api-reference/stream/create-clip", - "api-reference/stream/get-clip" - ] - }, - { - "group": "Generate", - "icon": "microchip-ai", - "pages": [ - "api-reference/generate/overview", - "api-reference/generate/audio-to-text", - "api-reference/generate/text-to-image", - "api-reference/generate/image-to-image", - "api-reference/generate/image-to-video", - "api-reference/generate/llm", - "api-reference/generate/segment-anything-2", - "api-reference/generate/upscale" - ] - }, - { - "group": "Multistream target", - "icon": "arrows-split-up-and-left", - "pages": [ - "api-reference/multistream/overview", - "api-reference/multistream/create", - 
"api-reference/multistream/get", - "api-reference/multistream/update", - "api-reference/multistream/delete", - "api-reference/multistream/get-all" - ] - }, - { - "group": "Session", - "icon": "film", - "pages": [ - "api-reference/session/overview", - "api-reference/session/get", - "api-reference/session/get-all", - "api-reference/session/get-recording", - "api-reference/session/get-clip" - ] - }, - { - "group": "Access control", - "icon": "lock", - "pages": [ - "api-reference/signing-key/overview", - "api-reference/signing-key/create", - "api-reference/signing-key/get", - "api-reference/signing-key/update", - "api-reference/signing-key/delete", - "api-reference/signing-key/get-all" - ] - }, - { - "group": "Webhook", - "icon": "bell", - "pages": [ - "api-reference/webhook/overview", - "api-reference/webhook/create", - "api-reference/webhook/get", - "api-reference/webhook/update", - "api-reference/webhook/delete", - "api-reference/webhook/get-all" - ] - }, - { - "group": "Task", - "icon": "gear", - "pages": [ - "api-reference/task/overview", - "api-reference/task/get-all", - "api-reference/task/get" - ] - }, - { - "group": "Playback", - "icon": "play", - "pages": [ - "api-reference/playback/overview", - "api-reference/playback/get" - ] - }, - { - "group": "Transcode", - "icon": "photo-film", - "pages": [ - "api-reference/transcode/overview", - "api-reference/transcode/create" - ] - }, - { - "group": "Viewership", - "icon": "chart-bar", - "pages": [ - "api-reference/viewership/get-realtime-viewership", - "api-reference/viewership/get-viewership-metrics", - "api-reference/viewership/get-usage-metrics", - "api-reference/viewership/get-public-total-views", - "api-reference/viewership/get-creators-metrics" - ] - } - ] - } - ], - "footerSocials": [ - { - "type": "website", - "url": "https://livepeer.org" - }, - { - "type": "github", - "url": "https://github.com/livepeer" - }, - { - "type": "twitter", - "url": "https://twitter.com/livepeer" - }, - { - "type": "discord", - 
"url": "https://discord.gg/livepeer" - }, - { - "type": "website", - "url": "https://forum.livepeer.org" - } - ], - "search": { - "prompt": "Need help? Ask our AI" - }, - "analytics": { - "ga4": { - "measurementId": "G-P1Z15F6NX4" - } - } -} diff --git a/snippets/assets/domain/01_ABOUT/Building the Decentralized Generative AI Tech Stack.png b/snippets/assets/domain/01_ABOUT/Building the Decentralized Generative AI Tech Stack.png deleted file mode 100644 index 2d77a6e3..00000000 Binary files a/snippets/assets/domain/01_ABOUT/Building the Decentralized Generative AI Tech Stack.png and /dev/null differ diff --git a/snippets/assets/domain/01_ABOUT/Eric Shreck Gif.gif b/snippets/assets/domain/01_ABOUT/Eric Shreck Gif.gif deleted file mode 100644 index d046da7d..00000000 Binary files a/snippets/assets/domain/01_ABOUT/Eric Shreck Gif.gif and /dev/null differ diff --git "a/snippets/assets/domain/01_ABOUT/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" "b/snippets/assets/domain/01_ABOUT/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" deleted file mode 100644 index 282155e3..00000000 Binary files "a/snippets/assets/domain/01_ABOUT/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" and /dev/null differ diff --git a/snippets/assets/domain/01_ABOUT/api-reference.jpg b/snippets/assets/domain/01_ABOUT/api-reference.jpg deleted file mode 100644 index 13440be7..00000000 Binary files a/snippets/assets/domain/01_ABOUT/api-reference.jpg and /dev/null differ diff --git a/snippets/assets/domain/01_ABOUT/hosted.jpg b/snippets/assets/domain/01_ABOUT/hosted.jpg deleted file mode 100644 index d5f275eb..00000000 Binary files a/snippets/assets/domain/01_ABOUT/hosted.jpg and /dev/null differ diff --git a/snippets/assets/domain/01_ABOUT/image (1).png b/snippets/assets/domain/01_ABOUT/image (1).png deleted file 
mode 100644 index 0ced5059..00000000 Binary files a/snippets/assets/domain/01_ABOUT/image (1).png and /dev/null differ diff --git a/snippets/assets/domain/01_ABOUT/no-code.jpg b/snippets/assets/domain/01_ABOUT/no-code.jpg deleted file mode 100644 index fdf54211..00000000 Binary files a/snippets/assets/domain/01_ABOUT/no-code.jpg and /dev/null differ diff --git a/snippets/assets/domain/02_COMMUNITY/Building the Decentralized Generative AI Tech Stack.png b/snippets/assets/domain/02_COMMUNITY/Building the Decentralized Generative AI Tech Stack.png deleted file mode 100644 index 2d77a6e3..00000000 Binary files a/snippets/assets/domain/02_COMMUNITY/Building the Decentralized Generative AI Tech Stack.png and /dev/null differ diff --git a/snippets/assets/domain/02_COMMUNITY/Eric Shreck Gif.gif b/snippets/assets/domain/02_COMMUNITY/Eric Shreck Gif.gif deleted file mode 100644 index d046da7d..00000000 Binary files a/snippets/assets/domain/02_COMMUNITY/Eric Shreck Gif.gif and /dev/null differ diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_90_Youtube.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_90_Youtube.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_90_Youtube.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_90_Youtube.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Blogging.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Blogging.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Blogging.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Blogging.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Calendar.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Calendar.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Calendar.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Calendar.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Discord.png 
b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Discord.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Discord.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Discord.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Events.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Events.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Events.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Events.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Follow.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Follow.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Follow.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Follow.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Forum.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Forum.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Forum.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Forum.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Github.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Github.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Github.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Github.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_LinkedIn.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_LinkedIn.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_LinkedIn.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_LinkedIn.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Medium.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Medium.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Medium.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Medium.png diff --git 
a/snippets/assets/domain/02_COMMUNITY/Hero_Meeting.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Meeting.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Meeting.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Meeting.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Newsletter.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Newsletter.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Newsletter.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Newsletter.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Reddit.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Reddit.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Reddit.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Reddit.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Telegram.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Telegram.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Telegram.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Telegram.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Telegran.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Telegran.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Telegran.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Telegran.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_X (1).png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_X (1).png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_X (1).png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_X (1).png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_X.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_X.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_X.png rename to 
snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_X.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Yotube.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Yotube.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Yotube.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Yotube.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Youtube.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Youtube.png similarity index 100% rename from snippets/assets/domain/02_COMMUNITY/Hero_Youtube.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Youtube.png diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Discord (1).png b/snippets/assets/domain/02_COMMUNITY/Hero_Discord (1).png deleted file mode 100644 index 97a50e7d..00000000 Binary files a/snippets/assets/domain/02_COMMUNITY/Hero_Discord (1).png and /dev/null differ diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Follow (1).png b/snippets/assets/domain/02_COMMUNITY/Hero_Follow (1).png deleted file mode 100644 index aad9b419..00000000 Binary files a/snippets/assets/domain/02_COMMUNITY/Hero_Follow (1).png and /dev/null differ diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_LinkedIn (1).png b/snippets/assets/domain/02_COMMUNITY/Hero_LinkedIn (1).png deleted file mode 100644 index 98a4ff31..00000000 Binary files a/snippets/assets/domain/02_COMMUNITY/Hero_LinkedIn (1).png and /dev/null differ diff --git a/snippets/assets/domain/02_COMMUNITY/Hero_Youtube (1).png b/snippets/assets/domain/02_COMMUNITY/Hero_Youtube (1).png deleted file mode 100644 index 7374586e..00000000 Binary files a/snippets/assets/domain/02_COMMUNITY/Hero_Youtube (1).png and /dev/null differ diff --git "a/snippets/assets/domain/02_COMMUNITY/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" "b/snippets/assets/domain/02_COMMUNITY/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s 
Future in the Age of Real-Time AI Video.png" deleted file mode 100644 index 282155e3..00000000 Binary files "a/snippets/assets/domain/02_COMMUNITY/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" and /dev/null differ diff --git a/snippets/assets/domain/02_COMMUNITY/api-reference.jpg b/snippets/assets/domain/02_COMMUNITY/api-reference.jpg deleted file mode 100644 index 13440be7..00000000 Binary files a/snippets/assets/domain/02_COMMUNITY/api-reference.jpg and /dev/null differ diff --git a/snippets/assets/domain/02_COMMUNITY/hosted.jpg b/snippets/assets/domain/02_COMMUNITY/hosted.jpg deleted file mode 100644 index d5f275eb..00000000 Binary files a/snippets/assets/domain/02_COMMUNITY/hosted.jpg and /dev/null differ diff --git a/snippets/assets/domain/02_COMMUNITY/image (1).png b/snippets/assets/domain/02_COMMUNITY/image (1).png deleted file mode 100644 index 0ced5059..00000000 Binary files a/snippets/assets/domain/02_COMMUNITY/image (1).png and /dev/null differ diff --git a/snippets/assets/domain/02_COMMUNITY/image.png b/snippets/assets/domain/02_COMMUNITY/image.png deleted file mode 100644 index 196b4389..00000000 Binary files a/snippets/assets/domain/02_COMMUNITY/image.png and /dev/null differ diff --git a/snippets/assets/domain/02_COMMUNITY/no-code.jpg b/snippets/assets/domain/02_COMMUNITY/no-code.jpg deleted file mode 100644 index fdf54211..00000000 Binary files a/snippets/assets/domain/02_COMMUNITY/no-code.jpg and /dev/null differ diff --git a/snippets/assets/domain/03_DEVELOPERS/Building the Decentralized Generative AI Tech Stack.png b/snippets/assets/domain/03_DEVELOPERS/Building the Decentralized Generative AI Tech Stack.png deleted file mode 100644 index 2d77a6e3..00000000 Binary files a/snippets/assets/domain/03_DEVELOPERS/Building the Decentralized Generative AI Tech Stack.png and /dev/null differ diff --git a/snippets/assets/domain/03_DEVELOPERS/Eric Shreck Gif.gif 
b/snippets/assets/domain/03_DEVELOPERS/Eric Shreck Gif.gif deleted file mode 100644 index d046da7d..00000000 Binary files a/snippets/assets/domain/03_DEVELOPERS/Eric Shreck Gif.gif and /dev/null differ diff --git "a/snippets/assets/domain/03_DEVELOPERS/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" "b/snippets/assets/domain/03_DEVELOPERS/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" deleted file mode 100644 index 282155e3..00000000 Binary files "a/snippets/assets/domain/03_DEVELOPERS/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" and /dev/null differ diff --git a/snippets/assets/domain/03_DEVELOPERS/api-reference.jpg b/snippets/assets/domain/03_DEVELOPERS/api-reference.jpg deleted file mode 100644 index 13440be7..00000000 Binary files a/snippets/assets/domain/03_DEVELOPERS/api-reference.jpg and /dev/null differ diff --git a/snippets/assets/domain/03_DEVELOPERS/comfystream.png b/snippets/assets/domain/03_DEVELOPERS/comfystream.png deleted file mode 100644 index d144cc94..00000000 Binary files a/snippets/assets/domain/03_DEVELOPERS/comfystream.png and /dev/null differ diff --git a/snippets/assets/domain/03_DEVELOPERS/hosted.jpg b/snippets/assets/domain/03_DEVELOPERS/hosted.jpg deleted file mode 100644 index d5f275eb..00000000 Binary files a/snippets/assets/domain/03_DEVELOPERS/hosted.jpg and /dev/null differ diff --git a/snippets/assets/domain/03_DEVELOPERS/image (1).png b/snippets/assets/domain/03_DEVELOPERS/image (1).png deleted file mode 100644 index 0ced5059..00000000 Binary files a/snippets/assets/domain/03_DEVELOPERS/image (1).png and /dev/null differ diff --git a/snippets/assets/domain/03_DEVELOPERS/image.png b/snippets/assets/domain/03_DEVELOPERS/image.png deleted file mode 100644 index 196b4389..00000000 Binary files a/snippets/assets/domain/03_DEVELOPERS/image.png and 
/dev/null differ diff --git a/snippets/assets/domain/03_DEVELOPERS/no-code.jpg b/snippets/assets/domain/03_DEVELOPERS/no-code.jpg deleted file mode 100644 index fdf54211..00000000 Binary files a/snippets/assets/domain/03_DEVELOPERS/no-code.jpg and /dev/null differ diff --git a/snippets/assets/domain/05_ORCHESTRATORS/Building the Decentralized Generative AI Tech Stack.png b/snippets/assets/domain/05_ORCHESTRATORS/Building the Decentralized Generative AI Tech Stack.png deleted file mode 100644 index 2d77a6e3..00000000 Binary files a/snippets/assets/domain/05_ORCHESTRATORS/Building the Decentralized Generative AI Tech Stack.png and /dev/null differ diff --git a/snippets/assets/domain/05_ORCHESTRATORS/Eric Shreck Gif.gif b/snippets/assets/domain/05_ORCHESTRATORS/Eric Shreck Gif.gif deleted file mode 100644 index d046da7d..00000000 Binary files a/snippets/assets/domain/05_ORCHESTRATORS/Eric Shreck Gif.gif and /dev/null differ diff --git "a/snippets/assets/domain/05_ORCHESTRATORS/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" "b/snippets/assets/domain/05_ORCHESTRATORS/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" deleted file mode 100644 index 282155e3..00000000 Binary files "a/snippets/assets/domain/05_ORCHESTRATORS/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" and /dev/null differ diff --git a/snippets/assets/domain/05_ORCHESTRATORS/api-reference.jpg b/snippets/assets/domain/05_ORCHESTRATORS/api-reference.jpg deleted file mode 100644 index 13440be7..00000000 Binary files a/snippets/assets/domain/05_ORCHESTRATORS/api-reference.jpg and /dev/null differ diff --git a/snippets/assets/domain/05_ORCHESTRATORS/hosted.jpg b/snippets/assets/domain/05_ORCHESTRATORS/hosted.jpg deleted file mode 100644 index d5f275eb..00000000 Binary files a/snippets/assets/domain/05_ORCHESTRATORS/hosted.jpg 
and /dev/null differ diff --git a/snippets/assets/domain/05_ORCHESTRATORS/image (1).png b/snippets/assets/domain/05_ORCHESTRATORS/image (1).png deleted file mode 100644 index 0ced5059..00000000 Binary files a/snippets/assets/domain/05_ORCHESTRATORS/image (1).png and /dev/null differ diff --git a/snippets/assets/domain/05_ORCHESTRATORS/image.png b/snippets/assets/domain/05_ORCHESTRATORS/image.png deleted file mode 100644 index 196b4389..00000000 Binary files a/snippets/assets/domain/05_ORCHESTRATORS/image.png and /dev/null differ diff --git a/snippets/assets/domain/05_ORCHESTRATORS/no-code.jpg b/snippets/assets/domain/05_ORCHESTRATORS/no-code.jpg deleted file mode 100644 index fdf54211..00000000 Binary files a/snippets/assets/domain/05_ORCHESTRATORS/no-code.jpg and /dev/null differ diff --git a/snippets/automationData/blog/ghostBlogData.jsx b/snippets/automationData/blog/ghostBlogData.jsx deleted file mode 100644 index 3cf1fd8f..00000000 --- a/snippets/automationData/blog/ghostBlogData.jsx +++ /dev/null @@ -1,191 +0,0 @@ -export const ghostData = [ - { - title: `A Real-time Update to the Livepeer Network Vision`, - href: `https://blog.livepeer.org/a-real-time-update-to-the-livepeer-network-vision/`, - author: `By Livepeer Team`, - content: `

      For the past year, the Livepeer Ecosystem has been guided by the Cascade vision:  a path to transition from a pure streaming and transcoding infrastructure, to an infrastructure that could succeed at providing compute for the future of real-time AI video. The latest Livepeer quarterly report from Messari highlights that this transition is paying off, with network fees up 3x from this time last year, and over 72% of the fees now driven via AI inference. This is exemplified by the growing inspirational examples emerging from Daydream powered real-time AI, and real-time Agent avatar generation through Embody and the Agent SPE.

      Source: Livepeer Q3 2025 Report by Messari

      This shift has been an ecosystem wide effort – ranging from branding and communications, to productization and go to market, to hardware upgrades for orchestrators. It has successfully shifted the project under an updated mission and direction, however it has still left ambiguity in terms of what the Livepeer network itself offers as killer value propositions to new builders outside of the existing ecosystem. Is it a GPU cloud? A transcoding infra? An API engine? Now that there are signs of validation and accelerated momentum around an exciting opportunity, it’s time to really hone in on a refined vision for the future of the Livepeer network as a product itself. 

      The market for video is set to massively expand

      The concept of live video itself is expanding well beyond a simple single stream of video captured from a camera. Now entire worlds and scenes are generated or enhanced in real-time via AI assistance, leading to more immersive and interactive experiences than possible via old-school streaming alone. For a taste of the future, see the following examples:

      1. The future of gaming will be AI generated video and worlds in real-time:
      -
      1. Video streams can be analyzed and data leveraged programmatically in real-time, for instant insight generation and decision making:
      -
      1. Real-time style transfer can enable avatars and agents to participate in the global economy:
      -

      Video world models and real-time AI video are merging, as they both use AI to generate frame-by-frame video output with low latency on the fly, based on user input and AI inference. This requires a tremendous amount of GPU compute, and requires an amazing low latency video streaming and compute stack – two areas in which the Livepeer network and community thrive, and two areas to which the many other generic GPU inference providers in the market bring no unique skillset, experience, or software advantage. 

      The big opportunity for the Livepeer network is to be the leading AI Infrastructure For Real-Time Video.
      From interactive live streaming to generative world models, Livepeer’s open-access, low-latency network of GPUs will be the best compute solution for cutting edge AI video workflows. 

      World models are a game changing category, and Livepeer is well suited to offer a unique and differentiated product here, that serves a huge market of diverse and varying use cases. These range from creative entertainment, to gaming, to robotics, to data analysis, to monitoring and security, to synthetic data generation for AGI itself.

      While an ambitious stretch, Nvidia executives responsible for the category have even projected that due to the impact in robotics, the economic opportunity for world models could exceed $100 trillion, or approximately the size of the entire global economic output itself!  

      What does it mean to productize the Livepeer network to succeed as a valuable infrastructure in this category?

      From a simplified viewpoint, it needs to deliver on the following:

      1. Ability for users to deploy real-time AI workflows to the Livepeer network and request inference on them

      2. Industry leading latency for providing inference on real-time AI and world model workflows.

      3. Cost effective scalability – users can pay as they go to scale up and down capacity and the network automagically delivers the scale required.

      Imagine a gaming platform is powering world-model generated games using their unique workflows that generate game levels or areas in a certain style by combining several real-time models, LLMs, and style transfer mechanisms. Each game its powering has users exploring and creating their own corners of the interactive worlds, based on prompts and gameplay inputs. Every gamer that joins a game represents a new stream of AI video compute, and the Livepeer network is the backing infrastructure that provides the compute for this video world generation, leveraging hundreds or thousands of GPUs concurrently.

      For this to be possible the Livepeer network needs to enable that game platform to deploy their game generation workflow. It needs to offer low latency on the inference that runs this workflow, relative to the generic GPU compute clouds. The pricing needs to be competitive vs alternative options in the market for this GPU compute. And the network needs to allow this company to scale up and down the number of GPUs that are currently live ready to accept new real-time inference streams based on the number of users currently live on the games it is powering.

      All of this is possible on the Livepeer network, and it isn’t far away from where we are now. If we work to build, test, and iterate on the Livepeer network itself towards supporting the latency and scale required for these types of workflows, we’ll be set up to power them.
      Now multiply this example gaming company by the high number of diverse industries and verticals that real-time AI and world models will touch. Each category can have one or multiple companies competing to leverage this scalable and cost effective infrastructure for unique go to markets targeting different segments. And they can all be powered by the Livepeer network’s unique value propositions.

      Livepeer’s core network is strategically positioned

      What are these value propositions that make the Livepeer network differentiated relative to alternative options in the market? I’d argue that there are three primary, table stakes, must-have value propositions if Livepeer is to succeed. 

      1. Industry standard low latency infrastructure specializing in real-time AI and world model workflows: First of all, the network needs to let its users deploy custom workflows. Inference alone on base models is not enough and does not represent scaled demand. Users want to take base models, chain them together with other models and pre/post processors, and create unique and specialized capabilities. When one of these capabilities is defined as a workflow, that is the unit that needs to be deployed as a job on the Livepeer network, and the network needs to be able to run inference on it. Secondly, for these real-time interactive use cases, latency matters a lot. Generic GPU clouds don’t offer the specialized low latency video stacks to ingest, process, and serve video with optimal latency, but Livepeer does. And Livepeer needs to benchmark itself to have lower or equal latency to alternative GPU clouds for these particular real-time and world model use cases.

      2. Cost effective scalability: GPU provisioning, reservations, and competing for scarce supply procurement creates major challenges for AI companies – often overpaying for GPUs that sit idle most of the time in order to guarantee the capacity that they need. The Livepeer network’s value proposition is that users should be able to “automagically” scale up almost instantly and pay on demand for the compute that they use, rather than having to pre-pay for reservations and let capacity sit idle. This is enabled by Livepeer taking advantage of otherwise existing idle longtail compute through its open marketplace, and its supply side incentives. The Livepeer network needs to be more cost effective than alternative GPU clouds within this category - with impacts comparable to the 10x+ cost reduction already demonstrated in live video transcoding delivered by the network.

      3. Community driven, open source, open access: The Livepeer project and software stack is open source. Users can control, update, and contribute to the software they are using. They also can be owners in the infrastructure itself through the Livepeer Token, and can benefit from the network’s improvements and adoption, creating a network effect. The community that cares about its success and pushes it forward collectively, can be a superpower, relative to the uncertain and shaky relationship between builders and centralized platform providers, who have a history of getting rugged based on limitations to access, changes in functionality, or discontinuity of the platforms. Anyone can build on the Livepeer network regardless of location, jurisdiction, use case, or central party control.

      The above are primary value propositions that should appeal to nearly all users. And we must work to close the gaps to live up to those value props before we could successfully hope to go to market and attract new vertical-specific companies to build directly on top of the network. Luckily, in addition to all of Livepeer’s streaming users, we have a great realtime AI design partner in Daydream, which is already going to market around creative real-time AI, using the network, and contributing to its development to live up to these requirements. While building with this design partner, the ecosystem should be working to productize to live up to these promises in a more generic perspective – it should be setting up benchmarks, testing frameworks, and building mechanisms for scaling up supply ahead of demand, so that it can represent this power to the world alongside successful Daydream case studies.

      Opportunities to push towards this vision

      To truly live up to these value propositions, there are a number of opportunities for the community to focus on in order to close some key gaps. There are many details to come in more technical posts laying out roadmaps and execution frameworks, but at a high level, consider a series of milestones that take the network as a product from technically functional, to production usable, to extensible, to infinitely scalable:

      1. Network MVP - Measure what matters: Establish key network performance SLAs, measure latency and performance benchmarks, and enhance the low latency client to support realtime AI workflows above industry grade standards.
      2. Network as a Product - Self adaptability and scalability: Network delivers against these SLAs and core value props for supported realtime AI workflows. Selection algorithms, failovers and redundancy, and competitive market price discovery established for realtime AI.
      3. Extensibility - Toolkit for community to deploy workflows and provision resources: Workflow deployment and signaling, LPT incentive updates to ensure compute supply for popular AI workflows exceeds demand.
      4. Parallel Scalability: Manage clusters of resources on the network for parallel workflow execution, truly unlocking job types beyond single-GPU inference. 

      Many teams within the ecosystem, from the Foundation, to Livepeer Inc, to various SPEs have already started operationalizing around how they’ll be contributing to milestones 1 and 2 to upgrade the network to deliver against these key realtime AI value propositions. 

      Conclusion and Livepeer’s opportunity

       The market for the opportunity to be the GPU infrastructure that powers real-time AI and world models is absolutely massive – the compute requirements are tremendous - 1000x that of AI text or images - and real-time interaction with media represents a new platform that will affect all of the above-mentioned industries. The Livepeer network can be the infrastructure that powers it. How we plan to close the needed gaps and achieve this will be the subject of an upcoming post. But when we do prove these value propositions, Livepeer will have a clear path to 100x the demand on the network.

      The likely target market users for the network are those startups that are building out vertical specific businesses on top of real-time AI and world model workflows. The ecosystem should look to enable one (or multiple!) startups in each category going after building real-time AI platforms that serve gaming, that serve robotics, that serve synthetic data generation, that serve monitoring and analysis, and all the additional relevant categories. The network’s value propositions will hopefully speak for themselves, but in the early stages of this journey, it is likely the ecosystem will want to use incentives (like investment or credits) to bootstrap these businesses into existence. Each will represent a chance at success, and will bring more demand and proof.

      Ultimately, many users of these platforms may choose to build direct on the network themselves. Similarly to how startups start to build on platforms like Heroku, Netlify, or Vercel, and then as they scale and need more control and cost savings they build direct on AWS, and then ultimately move to their own datacenters after reaching even more scale – users of Daydream or a real-time Agent platform built on Livepeer, may ultimately choose to run their own gateways to recognize the cost savings and control and full feature set that comes from doing so. This is a good thing! As it represents even more usage and scale for the network, more proof that as an infrastructure the Livepeer network has product market fit, and that it can absorb all workflows directly. The businesses built on top will provide their own vertical specific bundles of features and services that onboard that vertical specific capacity, but they’ll be complemented by and enabled by the Livepeer Network’s superpowers.

      While there’s a lot of work ahead, the Livepeer community has already stepped up to cover tremendous ground on this mission. At the moment by already powering millions of minutes of real-time AI inference per week, by our orchestrators already upgrading their capacity and procurement mechanisms to provide real-time AI-capable compute, and by the Foundation groups already working to evaluate the network’s incentives and cryptoeconomics to sustainably fund and reward those contributing to this effort, we’re set up well to capture this enormous opportunity!

      `, - datePosted: `Nov 13, 2025`, - img: `https://blog.livepeer.org/content/images/2025/11/LP_Blog-Header_Nov25_01_moshed-1.png`, - excerpt: `For the past year, the Livepeer Ecosystem has been guided by the Cascade vision:  a path to transition from a pure streaming and transcoding infrastructure, to an infrastructure that could succeed at providing compute for the future of real-time AI video. The latest Livepeer quarterly report from Messari highlights that this transition is paying off, with network fees up 3x from this time last year, and over 72% of the fees now driven via AI inference. This is exemplified by the growing inspirat`, - readingTime: 9, - }, - { - title: `Livepeer Onchain Builders - Streamplace: Building the Video Backbone of Decentralized Social`, - href: `https://blog.livepeer.org/livepeer-onchain-builders-streamplace-building-the-video-backbone-of-decentralized-social/`, - author: `By Livepeer Team`, - content: `

      Welcome to Livepeer Onchain Builders, a new content series spotlighting the Special Purpose Entities (SPEs) funded by the Livepeer onchain treasury. SPEs are working groups funded by the community treasury to work on specific tasks and are accountable to the community for their delivery. These deep dives will explore how each initiative is driving protocol usage, expanding infrastructure, and pushing the boundaries of what’s possible in decentralized video and AI.

      Streamplace is an open-source video streaming platform designed to power decentralized social applications with real-time, creator-first infrastructure. It aims to make livestreaming and video hosting as seamless as TikTok or YouTube, but built on open protocols and self-sovereign identity.

      What makes it ambitious? Streamplace is not only building full-stack video infra for federated social networks, it's doing so in a way that prioritizes interoperability, scalability, and public goods. From developer SDKs to end-user apps, Streamplace is building an entire ecosystem.

      What is an SPE? 

      A Special Purpose Entity (SPE) is a focused, community-funded team contributing to the Livepeer ecosystem. SPEs are typically mission-driven groups that operate independently to build infrastructure, applications, or tooling that expand and improve the Livepeer protocol. These teams are funded through proposals to the onchain treasury and are accountable to the community.

      SPEs are necessary for the ecosystem because no single team can build every part of a decentralized protocol. SPEs decentralize development, fund public goods, and allow the community to direct resources where they're most needed.

      Why do they matter to delegators and stakeholders? Because SPEs grow in usage. More usage = more fees = more rewards. Delegators benefit when the protocol succeeds, and SPEs are among the most direct ways to make that happen.

      From Aquareum to Streamplace

      A clear goal drives the team behind Streamplace: to build the foundational video infrastructure for the next generation of decentralized social platforms. These platforms, such as Farcaster and the AT Protocol, promise user-owned identity and interoperability, but have thus far lacked robust support for live and on-demand video.

      Streamplace solves this by providing a full-stack, developer-friendly video layer that anyone can plug into. It's a bold attempt to make decentralized video feel as native and easy as its Web2 counterparts.

      Streamplace started as Aquareum, a project with the same mission and team. This evolution into Streamplace is a rebranding, not a restart, building on past momentum with a sharper focus.

      Their vision is to give every user the ability to publish, stream, and remix content with the same ease as TikTok or YouTube, but backed by self-sovereign identity and decentralized networks.

      Streamplace homepage

      The first proposal delivered:

      • A unified Aquareum node: bundling the Livepeer stack with indexing and playback.
      • App releases on iOS, Android, and Web.
      • Native integrations with AT Protocol and Farcaster.
      • Support for C2PA metadata and content provenance.

      Now, Streamplace continues that momentum with 100,000 LPT in treasury funding and a clear mandate to scale.

      Streamplace Grafana dashboard

      Why Streamplace Matters

      Video is the heart of online social interaction. Yet decentralized social networks have lagged in providing seamless, user-friendly video experiences. Streamplace addresses this by:

      • Transcoding every livestream through Livepeer, providing decentralized, low-cost processing for global delivery.
      • Powering partner platforms like Skylight Social, a TikTok alternative backed by Mark Cuban, that recently hit #1 in entertainment on the App Store.
      • Making it dead-simple to stream or host video through single-binary nodes that anyone can deploy.
      • Championing public goods, 100% of their code is open source, with a commitment to infrastructure, not monetization lock-in.

      Decentralized social, spanning protocols like Farcaster, AT Protocol, and Bluesky, represents a movement toward user-owned networks and open standards. These networks are gaining traction, but video remains a missing layer. That’s where Streamplace comes in.

      Video is essential because it's the most engaging, expressive medium for creators and communities. And as these decentralized platforms scale, having real-time, composable video becomes non-negotiable.

      Streamplace positions itself as the default video infra layer for this new social stack, and with every stream transcoded through Livepeer, it's also a major driver of protocol usage and visibility.

      What Streamplace 2.0 Will Deliver

      This new phase of work, funded by the Livepeer treasury, focuses on scale, performance, and ecosystem integration:

      Infrastructure Enhancements

      • Expand server capacity to support growing user bases like Skylight.
      • Harden video nodes for reliability under real-world load.
      • Deliver high-quality performance on all platforms: Web, iOS, Android.

      Protocol and Developer Growth

      • Deepen native integration with AT Protocol.
      • Build SDKs and NPM packages to embed Streamplace easily into other apps.
      • Ship VOD functionality and new moderation tools.

      Community-First Ethos

      • Launch creator monetization models and stream incentive programs.
      • Empower streamers with self-hosted app capabilities ("Twitch, but it's your own app").
      • Maintain full transparency and livestream development.

      The Livepeer Angle

      Livepeer's decentralized video infrastructure powers every second of video on Streamplace. That means more work for orchestrators, more fees flowing through the protocol, and more incentive for high-quality node operation.

      Streamplace strengthens the Livepeer ecosystem in three key ways:

      • Demand generation: Real-world usage at scale means more consistent transcoding work.
      • Protocol visibility: High-impact apps like Skylight drive awareness of Livepeer beyond its native circles.
      • Infrastructure robustness: Streamplace's nodes enhance the distributed capacity of the Livepeer network.

      Without Livepeer, a decentralized video stack like Streamplace wouldn’t be possible. And without ambitious apps like Streamplace, Livepeer wouldn’t have the same opportunity to prove its value at scale.

      Final Thoughts

      Streamplace is a keystone piece of open video infrastructure and a cornerstone in the emerging world of decentralized social media. By fusing creator-first tooling with Livepeer’s scalable infrastructure, it offers a glimpse into what the open internet can become.

      As decentralized protocols shift from vision to adoption, the need for native video is urgent. Streamplace, with the support of the Livepeer treasury and a relentless commitment to open-source infrastructure, is meeting that need head-on.

      If you're a developer, creator, or community builder, now is the time to get involved.

      Do you want to contribute to Streamplace's success? Explore the open roles here.

      Interested in building or contributing to the Livepeer ecosystem? Learn more about current and past SPEs, open opportunities, and how to submit your own proposal here.

      Follow along, fork the code, or join a stream — the future of social video is open.

      Streamplace App

      Streamplace Proposal

      Aquareum Proposal


      Livepeer is a decentralized video infrastructure network for live and on-demand streaming. It has integrated AI Video Compute capabilities (Livepeer AI) by harnessing its massive GPU network and is now building the future of real-time AI video.

      Twitter | Discord | Website

      `, - datePosted: `Aug 14, 2025`, - img: `https://blog.livepeer.org/content/images/2025/08/Onchain-Builders-Streamplace.jpg`, - excerpt: `Welcome to Livepeer Onchain Builders, a new content series spotlighting the Special Purpose Entities (SPEs) funded by the Livepeer onchain treasury. SPEs are working groups funded by the community treasury to work on specific tasks and are accountable to the community for their delivery. These deep dives will explore how each initiative is driving protocol usage, expanding infrastructure, and pushing the boundaries of what’s possible in decentralized video and AI. - -Streamplace is an open-source `, - readingTime: 5, - }, - { - title: `Builder Story: dotsimulate x Daydream`, - href: `https://blog.livepeer.org/builder-story-dotsimulate-x-daydream/`, - author: `By Livepeer Team`, - content: `

      Building StreamDiffusionTD Operator - a Real-Time Generative Video Operator for TouchDesigner, Powered by the Daydream API

      Creator:
      Lyell Hintz (@dotsimulate)
      Operator: StreamDiffusionTD
      Backends Supported: Local + Daydream (Livepeer)

      -
      - -
      - -
      -
      -
      - - - 0:00 -
      - /0:34 -
      - - - - - -
      -
      -
      - -

      Overview

      StreamDiffusionTD is a TouchDesigner operator that connects real-time inputs like audio, sensors, and camera feeds to StreamDiffusion, enabling live generative visuals controlled in real time. With the Daydream API, it adds remote inference capabilities on top of the existing local GPU inference and unlocks more flexibility for users.

      Built by Lyell Hintz, a technical artist and TouchDesigner developer, the operator is used in live shows, installations, and experimental workflows.

      Why It Was Built

      Lyell began working on the operator a few hours after StreamDiffusion was released on GitHub. He wanted to use it in TouchDesigner - a powerful tool for real time interactive content creation.

      “TouchDesigner is the only place this could be controlled from… it can hook into everything else.”

      From the start, he avoided creating a “black box.” The operator exposes core parameters like prompt, seed, and ControlNet weights, allowing users to adjust values and see results immediately.

      Key Features

      • Real-time video generation
      • Prompt and seed morphing
      • Dynamic ControlNet weighting
      • Live input support: audio, sensors, camera
      • Local GPU and Daydream backend options
      • Instant visual feedback in TouchDesigner
      -
      - -
      - -
      -
      -
      - - - 0:00 -
      - /0:26 -
      - - - - - -
      -
      -
      - -

      Daydream API Integration

      StreamDiffusionTD works with the Daydream API, which allows the operator to run on a remote GPU backend. This eliminates the major barrier of requiring a high-end PC with an NVIDIA RTX 4090 to run StreamDiffusion at professional quality, unlocking the flexibility to run it from any location, on any device form factor.

      Just drop in your API key and hit “Start Stream.” The backend handles orchestration, model hosting, and frame delivery, so builders can stay focused on their creative and technical workflows.

      Setup takes less than 1 minute and once installed, the configuration is remembered for future use. Daydream’s API brings new features to StreamDiffusion:

      • Multi-controlnet: Mixing different controlnets for better artistic control
      • IPAdapter: Use images as powerful style guides
      • TensorRT: Better frame rate for smooth video output

      Daydream is adding support for more real time video generation models, and developers can request features, suggest improvements, or build on top of the API itself. It aligns with the values of open tooling and community-led infrastructure.

      How Artists can use StreamDiffusionTD in TouchDesigner

      • Audio-reactive visuals for concerts
      • Camera-driven generative visuals
      • Real-time visuals for LED walls and stages
      • TouchDesigner automation workflows

      Because it's built inside TouchDesigner, the operator can be extended using Python, MIDI, OSC, or any other input TouchDesigner supports.

      Current State

      The operator is live and ready to use, with active development underway for new features and improved performance. It’s a great time to jump in, explore, and help shape what comes next.

      Try it Yourself

      Operator Access: patreon.com/dotsimulate
      Community and Support: discord.gg/daydreamlive
      API Keys can be requested here

      `, - datePosted: `Aug 5, 2025`, - img: `https://blog.livepeer.org/content/images/2025/08/DD_Builder-Story_dotsimulate_01.png`, - excerpt: `Building StreamDiffusionTD Operator - a Real-Time Generative Video Operator for TouchDesigner, Powered by the Daydream API - -Creator: Lyell Hintz (@dotsimulate) -Operator: StreamDiffusionTD -Backends Supported: Local + Daydream (Livepeer) - - - - - - - - - - - - - - - - - - - - - - - - -0:00 - -/0:34 - - -1× - - - - - - - - - - - - - - - - - -Overview - -StreamDiffusionTD is a TouchDesigner operator that connects real-time inputs like audio, sensors, and camera feeds to StreamDiffusion, enabling live generative visuals controlled in real time. Wit`, - readingTime: 2, - }, - { - title: `Livepeer Incorporated! (and realtime AI)`, - href: `https://blog.livepeer.org/livepeer-incorporated-and-realtime-ai/`, - author: `By Livepeer Team`, - content: `

      Written by Doug Petkanics, Co-founder and CEO at Livepeer Inc

      The past 18 months have been an energizing time to be in the Livepeer Ecosystem. An onchain treasury was introduced to fund public goods via community governance, the community has coalesced around Livepeer’s opportunity to be the leading infrastructure for realtime AI video, and fees and usage of the network have been steadily increasing due to this focus. The Livepeer Foundation has recently launched to steward the 10+ entities in the ecosystem that are core contributors to the project, and is unlocking even more funding around the opportunities recommended in the project’s strategic pillars.

      With so much core development, marketing, and growth driven by the ecosystem at large, the company that I co-founded and operate, Livepeer Incorporated, has had the opportunity to shift its focus to what we deem to be the highest priority area of the project where we feel uniquely suited to make an outsized impact: executing a high conviction go to market motion in an attempt to dramatically grow demand on the Livepeer network. We, like many in the ecosystem, are fully bought in to the realtime AI video vision laid out in Livepeer Cascade, and are solely focused on productization to find product market fit for the Livepeer network as the leading infrastructure in the coming world of live video AI. Here is a bit about what Livepeer Inc is focused on, and almost equally as importantly, what we are not focused on in the coming 12 months.

      Product Market Fit for Realtime AI Video 

      As mentioned, the number one priority is to prove that the Livepeer network has product market fit as an infrastructure that runs the latest and greatest in realtime AI video workflows for developers. To do this, we’ll focus on three core things:

      1. Contribute to core network development to ensure Livepeer is an infrastructure that can run realtime AI video workflows.
      2. Build the developer APIs to run these workflows that developers use to build them into applications. This is a natural extension of Livepeer Studio.
      3. Cultivate the leading realtime AI video community. Researchers, builders, and creators interested in this coming category need a home. They will provide the moat that ensures that an open, community led infrastructure will always be more responsive, cost effective, and full featured than centralized alternatives.

      We’re going to provide the full stack product, engineering, community, and go to market motion to validate product market fit for this opportunity. This will drive significant fees and growth into the Livepeer network. We’re aligned as large LPT token holders and want the network to succeed - which represents a far bigger opportunity for Livepeer Inc than any revenue related opportunity via SaaS services in the short term. Let’s grow those network fees!

      What Livepeer Inc is Not Focused On

      While there are many potential products and go to markets that can be executed upon under an ambitious vision of being the world’s open video infrastructure, a single company is more likely to succeed by focusing on only one opportunity at a time. Many alternative demand generating bets will be better served by other self-motivated actors in the ecosystem - especially as the open source software around Livepeer, and the broader ecosystem has matured to the point of providing reliable access points for different categories of use cases. Regarding Livepeer Inc’s learnings on some of these categories:

      • Transcoding alone has been proven out technically and economically, however the market hasn’t accepted the standalone infrastructure without significant productization, support, SLAs, and enterprise services around it.
      • Similarly, when bundled with end to end streaming, the offering isn’t significantly differentiated in a crowded and consolidating market. 
      • Livepeer Studio will continue to support existing users at the enterprise level that pay for these surrounding services, while passing the transcoding jobs through to the Livepeer network, but due to the long sales cycle and slow growth, it will not be actively competing to grow this source of demand. 
      • The ecosystem can support aspiring users of transcoding and streaming via projects like Streamplace, the Frameworks SPE, and their supporting teams. One of the core pillars of the Livepeer Foundation’s GTM recommendations is to tackle being the open video infrastructure for web3 social and decentralized streaming, so the ecosystem will prioritize support. This includes aspiring web3-centric streaming users, who culturally align with the values of the project community, but to date have not shown significant growth nor driven significant fees to the network. There’s an opportunity for these projects to crack this nut and help these users grow, if they deem it to be worth the effort!
      • There are also additional bets that the ecosystem is interested in around the realtime AI mission. These are laid out by the Livepeer Foundation’s GTM Strategy post. Visual avatars for live AI agents is one example. Realtime video analysis and understanding are others. These areas do overlap with the broad theme that Livepeer Inc is focused on - running realtime AI models on live video on the Livepeer network. However as Inc pursues creative AI use cases initially to inspire the broader world in what’s possible, we welcome others in the ecosystem building commercial entities to go after these opportunities. And we will certainly collaborate. If the ecosystem efforts make technical progress, but stop short of commercializing and going to market, these are areas for collaboration with Inc to consider productizing for commercial purposes. 

      A Simplified View: Foundation and Inc

      While the above contains a lot of details about realtime AI and specific demand generating bets on the Livepeer network, there’s a simplified view:

      • The Livepeer Foundation will steward the Livepeer community, project marketing, and public goods funding to enable recommendations on the project roadmap.
      • Livepeer Inc will focus on driving demand to the network by building the realtime AI products, go to market services, and AI community - initially in the creative realtime AI video space.

      If you’re interested in building within this ecosystem, there are lots of opportunities that both contribute to the core development and operations of the project in service of the realtime AI mission, but also to develop companies that service additional markets not currently being focused on. Hopefully the above post gives you a view into what some of those opportunities and gaps are. Then check out the Livepeer Foundation’s recent forum posts on tactical recommendations, and raise your hand to get involved in the ones of interest.

      `, - datePosted: `Jul 31, 2025`, - img: `https://blog.livepeer.org/content/images/2025/07/e.png`, - excerpt: `Written by Doug Petkanics, Co-founder and CEO at Livepeer Inc - -The past 18 months have been an energizing time to be in the Livepeer Ecosystem. An onchain treasury was introduced to fund public goods via community governance, the community has coalesced around Livepeer’s opportunity to be the leading infrastructure for realtime AI video, and fees and usage of the network have been steadily increasing due to this focus. The Livepeer Foundation has recently launched to steward the 10+ entities in `, - readingTime: 5, - }, -]; diff --git a/snippets/automationData/forum/forumData.jsx b/snippets/automationData/forum/forumData.jsx deleted file mode 100644 index 5026ebdd..00000000 --- a/snippets/automationData/forum/forumData.jsx +++ /dev/null @@ -1,38 +0,0 @@ -export const forumData = [ - { - title: "It's time to ACT! Accumulation & the Treasury Ceiling", - href: "https://forum.livepeer.org/t/3153", - author: "By b3nnn (@b3nnn)", - content: - "

      The onchain treasury was designed to provide sustainable public goods funding. It has supported many important and strategic contributions to the Livepeer Ecosystem. The AI SPE, Streamplace, Agent SPE and Cloud have all received funds and made important contributions. And through our onchain governance, the community have shown time and again their thoughtfulness and care for getting decisions right. Your desire to align decisions with long-term health has made us a shining example of simple but effective governance and how people can work together onchain.

      The treasury is key to supporting strategic investments to improve UX for stakeholders, effectively manage protocol security, and fund other capital and resource needs for this exciting phase of the project.

      As of now, the onchain treasury is currently not accumulating LPT. It was designed not to accept unlimited funding, hit the initial value set as the ceiling, and reset treasury contributions to 0% on or around 31st of March this year. There are a backlog of upcoming projects on highly strategic initiatives that will need treasury support, and we will all feel better about how to allocate funds if we have certainty that new funds are coming into the treasury.

      I intend to post a LIP to turn on the treasury rewards again at their initial values:

      • treasuryRewardCutRate: 10%

      • treasuryBalanceCeiling: 750000 LPT

      The value of 750000 LPT is currently set as the ceiling so would not be updated in the formal proposal

      For what it’s worth, my personal bias is to increase one of these values, but I’m happy to punt that discussion to another day. Having seen the exciting things in the background that will require treasury support in coming weeks, the most pressing item for us as a community is to start getting the treasury repopulated.

      I’ll be on the watercooler next week to discuss and am happy to set up an office hours to discuss direct if there is support for that. I look forward to proposing this for a community vote . If you have any input on the contribution percentage that goes into my proposal, please also share your input here.

      ", - replyCount: 7, - datePosted: "Dec 3, 2025", - }, - { - title: "Pre-proposal: IDOL - Improving Dex / Onchain Liquidity", - href: "https://forum.livepeer.org/t/3151", - author: "By b3nnn (@b3nnn)", - content: - '
      TLDR

      We propose to address known UX issues and ease and costs to participate by increasing DEX liquidity. Arrakis offers an optimal solution for our specific needs, and we are requesting 250,000 LPT for deployment to a Uniswap v4 pool which will significantly reduce slippage for ecosystem participants

      Motivation

      The Capital Markets Advisory board made improving onchain liquidity a tactical recommendation, specifically citing:

      • Low liquidity levels on our DEX pools (primarily Uniswap on Arbitrum). This creates high slippage when trying to transact with any size, and might refrain larger stakeholders or participants from buying LPT

      • The much higher ratio of available liquidity on centralized exchanges compared to DEXs drives participants to rely on centralized platforms, exposing them to the inherent risks associated with centralized providers

      • Further, centralised exchanges often don’t support L2 withdrawals. This results in delayed bridging and withdrawal processing between L1 & L2, impairing overall UX and the efficiency of orchestrators as it relates to capital allocation

      In short, improved L2 Dex liquidity is essential for both current and future participants in Livepeer.

      Recommended Solution

      How to address our challenges is relatively straightforward to describe:

      • Increase the amount of liquidity on targeted DEX pool/s

      • Ensure the solution is executing against this goal as agreed

      • Use funds wisely, ensuring a good balance between what we pay and what we receive

      Any solution will require liquidity from the on-chain treasury to start bootstrapping an optimal asset mix. In addition to this liquidity requirement, using a traditional market maker is likely a major expense (in the range of $15-20K per month). While traditional market makers can do a good job in actively managing liquidity, especially on centralised exchanges, they often present new or additional challenges:

      • Market makers typically operate through asset loan agreements, using our capital to actively manage liquidity across venues. While this model provides flexibility and professional management, it can make visibility into how and where assets are deployed more challenging.

      • Compared to centralized venues, on-chain liquidity provision is often less economically attractive for market makers. As a result, they may prioritize other strategies or venues where returns are higher, which can limit incentives to deepen on-chain liquidity.

      • Ensuring that capital is being used effectively by traditional market makers remains challenging, as it requires clear visibility into capital deployment and a deep understanding of the alternative strategies they pursue.

      While none of this is insurmountable, it requires significant thought, effort and time to ensure oversight and manage risk.

      Arrakis pro is an ideal solution to address these challenges.

      Arrakis specifically addresses each of these challenges because:

      • It is built specifically for managing onchain liquidity on DEXs

      • The assets are stored in a vault controlled by a multisig made up of Livepeer Foundation members. This means the treasury, via the Foundation, can withdraw and return the liquidity at any time

      • Because it is onchain, and through the features provided in Arrakis pro, we can check and confirm at any time where our assets are and what strategies are being applied.

      • It rebalances positions by setting up ranges / limit orders, no swaps involved. The solution algorithmically minimises price impact given the allocated capital and bootstraps base asset liquidity without causing negative selling pressure.

      • Arrakis leverages sophisticated algorithms to increase capital efficiency for the deployed capital and reduce slippage for traders on the DEX pools.

      Arrakis vaults hold ~$170M TVL and the team actively manages the on-chain liquidity for over 100 protocols. Projects such as MakerDAO, Lido, Morpho, Gelato, Redstone, Wormhole, Across, Euler, Usual, Syrup, Venice.ai, Ether.fi, etc. are benefiting from the high capital efficiency and cost effectiveness for DEX liquidity optimization enabled by Arrakis PRO.

      For more information regarding Arrakis and Arrakis Pro, feel free to have a look at their docs or join their community:

      Arrakis | Twitter | Resources

      In addition, the team are present here and will address any questions directly - hello @Arrakis

      The Ask

      We want to significantly decrease slippage and costs for orchestrators and other participants to interact with the network through onchain liquidity.

      We are asking for 250,000 LPT (approx. $1M in USD value) to be held in a multisig controlled by the Livepeer Foundation, to be deployed via an onchain vault with Arrakis as a concentrated pool on Uniswap v4.

      Management of concentrated liquidity on Uniswap V4 allows for larger trades with minimal price impact, improving the overall trading experience. Savings to participants are substantial at approx. $1500 in slippage reduction on a $25,000 sale of LPT (estimate based on data below).

      Comparison of current and estimated price impact (after successful ETH liquidity bootstrapping) for buying LPT and ETH across different amounts

      Specification for Livepeer
      1. The Arrakis team uses the existing LPT/ETH pool on the 0.3% fee tier for UniswapV4

      2. Arrakis then deploys a dedicated vault managed by the Arrakis Pro smart contract for this LPT/ETH Uniswap pool.

      3. The Livepeer Foundation team establish a ⅔ Multisig for custody of the funds. If the proposal passes, funds are transferred onchain to this multisig account

      4. Through this Livepeer Foundation multisig, we deposit $1 million worth of $LPT into the Arrakis Pro vault. Transfers in and out of the vault are controlled by the multisig, meaning they cannot be deployed or moved by Arrakis elsewhere

      5. Arrakis Pro will allocate the provided liquidity in a concentrated and fully active market making strategy to facilitate trading on UniswapV4.

      6. The strategy initially operates to bootstrap ETH to establish a 50/50 inventory ratio over the first months. The primary objective is to create price stability by generating deep liquidity and reaching an even inventory over time.

      For the services provided, Arrakis charges the following fees:

      Arrakis Asset-under-Management (AUM) fee: 1% per year, waived for the first 6 months

      Arrakis performance fee: 50% of trading fees the vault generates

      FAQ

      What are the risks of this model?

      • Deploying funds to DEX pools bears smart contract risk and general market risk (e.g. token exposure, impermanent loss). Arrakis smart contracts have been audited by leading security firms and currently secure +$150M TVL (https://docs.arrakis.finance/text/resources/audits.html)

      What happens to the capital required?

      • The capital required is deployed by the Livepeer DAO, via a Foundation controlled multisig, to a self-custodial smart contract vault and can be withdrawn at any point in time. Arrakis does not hold custody, nor control the funds deployed outside of the mandate to manage DEX liquidity on Uniswap V4 for the respective trading pair.

      Will this impact the current liquidity on CEXs?

      • Arrakis mandate is to gradually improve on-chain markets and provide deeper liquidity for the respective pair over time on DEX markets. CEX markets will not be affected.

      How does the Arrakis model differ from standard AMMs (like Uniswap v3)?

      • Arrakis provides a sophisticated on-chain market making service, running dedicated algorithmic market making strategies.

      • Instead of manually deploying funds into the CLAMM pool, Arrakis algorithmically rebalances the position and runs active liquidity management strategies.

      Will our liquidity still be actively managed, or will it be passively allocated in a vault?

      • Close to 100% of the liquidity deployed with an Arrakis vault is actively deployed to the Uniswap CLAMM pool and provides liquidity. Small shares of liquidity remain in the vault as token reserves for rebalancing purposes.

      How is the strategy for the vault determined — who sets the parameters, and how often are they rebalanced?

      • Arrakis quant team fine tunes the strategies and engages in periodic review cycles along with 24h-365day monitoring and alerting.

      Who controls or can modify the AMM strategy parameters?

      • Arrakis strategies are designed, deployed and maintained by professional quant traders. The Foundation can be involved in discussion in regular intervals as needed to further align on achieving the stated goals.

      Will the community have visibility into performance and strategy updates?

      • The Foundation delegates will receive access to a custom real time analytics dashboard and can share periodic updates to the forum for the community.

      What happens to the liquidity if the vault underperforms or becomes unbalanced?

      • Liquidity is actively rebalanced towards a 50:50 ratio by placing one sided limit maker orders. In adverse market scenarios strategies will adjust to certain market volatility settings.

      How do fees compare to centralized market makers?

      • Centralized market makers work in two models: a) Loan & Option b) Retainer Fix Fee payment. Arrakis works on a profit sharing of trading fees earned (50% captured by the Livepeer DAO, 50% retained by Arrakis for the services provided)

      How will LP performance be measured?

      • LP performance will be measured by market depth, price impact, slippage improvement, total volumes facilitated.

      What happens after funds are returned?

      • It’s important to note that the liquidity in the vault can remain deployed indefinitely, but also returned to the onchain treasury or control by the voters at any time. As funds will now be held in both ETH and LPT, the community can be involved in discussions about how returned funds are stored or used.

      This is a large proportion of the current treasury. What gives?

      • We recognise that this is a large ask relative to the current size and value of the treasury. The size and value of the treasury will be addressed in a separate proposal. As it relates to this proposal, consider that we will reduce slippage costs by approx 2-3X on every dex transaction. The ROI on this proposal will be quite substantial.
      ', - replyCount: 3, - datePosted: "Dec 1, 2025", - }, - { - title: "Transformation SPE Release Notes", - href: "https://forum.livepeer.org/t/3142", - author: "By Mehrdad (@Mehrdad)", - content: - "

      Release notes are a way to share work being completed by the Transformation SPE and its various contributors. Dive in and explore what has been happening and please reach out or reply with any questions and we will happily expand further.

      ", - replyCount: 2, - datePosted: "Nov 10, 2025", - }, - { - title: "Transcoder Campaign: organic-node.eth", - href: "https://forum.livepeer.org/t/1970", - author: "By Ron (@ron)", - content: - "

      Hello fellow video enthusiast and web3 supporters,

      Thanks for your time in reading my post. (organic-node.eth) Node has been active for about 6 months and everyday has been a great learning experience. My node has been highly reliable with 4 Orchestrators across the globe with possibility to expand more depending on the demand. If you are looking to get in touch with me please reach out to me on discord Organic-Node#9009.

      It gives me great pleasure when looking at lenstube videos, thinking that some of these videos may have been transcoded by my Orch. Stakes and delegators enjoy passive income with my low reward cuts and low fee cut and help support robust Orch for fairer web3 platforms

      Stake here:
      (organic-node.eth)

      ", - replyCount: 1, - datePosted: "Dec 6, 2022", - }, -]; diff --git a/snippets/automationData/README.mdx b/snippets/automations/README.mdx similarity index 100% rename from snippets/automationData/README.mdx rename to snippets/automations/README.mdx diff --git a/snippets/automations/blog/ghostBlogData.jsx b/snippets/automations/blog/ghostBlogData.jsx new file mode 100644 index 00000000..f9f512fd --- /dev/null +++ b/snippets/automations/blog/ghostBlogData.jsx @@ -0,0 +1,191 @@ +export const ghostData = [ +{ + title: `AI X Open Media Forum: Building New Wave Creativity`, + href: `https://blog.livepeer.org/ai-x-open-media-forum-building-new-wave-creativity/`, + author: `By Livepeer Team`, + content: `

      The AI x Open Media Forum, hosted by the Livepeer Foundation and Refraction during Devconnect Buenos Aires, brought together artists, technologists, curators, protocol designers, founders and researchers at a moment when media is being reshaped at its foundations. Real-time AI has moved from experimental edges into active use, influencing how creative work is made, how it circulates, how it is authenticated and how value flows through entire ecosystems.

      The Forum was designed as a symposium rather than a conventional conference. Instead of panels, participants sat together in tightly focused groups, comparing lived experience with emerging technical capabilities and identifying where the next wave of open media infrastructure must come from. The premise was simple:

      If AI is rewriting the conditions of cultural production, the people building the tools and the people using them need to be in the same room.

      Across the day, it became clear that AI has begun to reconfigure creative labour. Participants described shifts in authorship, changes in access to tools and compute and growing pressure to navigate accelerated production cycles. The discussions documented in this report trace how these changes are being felt on the ground and outline the early primitives that may support an open, verifiable and creatively expansive media ecosystem.

      I. Methodology and framing questions for the forum 

      The Forum opened with a set of framing questions that clarified the core pressures at the intersection of AI and culture. They were selected because they touch the foundations of creative practice, technical design and the incentives that organise contemporary media systems. These questions served as a shared structure for the day, guiding both creative and technical groups toward the points where their worlds intersect most directly.

      These questions created a common orientation for participants with very different backgrounds. Artists used them to describe how these pressures appear in their work. Technologists used them to identify where current systems break and where new primitives might be possible. The result was a focused dialogue in which creative insight and technical reasoning informed one another. As the day progressed, these initial questions became more specific, grounded in concrete examples and shaped by the experiences of the people who are building and creating with AI right now.

      II. Creative track: New wave creativity in the age of AI

      The creative discussions opened a clear window into how AI is reshaping cultural practice. Artists, designers and musicians described shifts they are already living through: changes in authorship, new pressures around speed, and the expanding role of computation in what can be made and shared. Their experiences formed the human foundation for understanding the technical challenges that surfaced later in the day.

      1. The persistence of authorship and the idea of “code”

      One of the most important contributions came from a Venezuelan 3D artist who articulated how personal history and cultural memory form a kind of creative signature. They described this as their “code”: a composite of experience, environment and emotional texture that cannot be reduced to visual style alone.

      Argentine Daydream ambassador Franco presents his work

      “My code is my personal language, shaped by the places I come from,” they explained. “I photograph the decadence of Venezuela and turn it into something romantic. AI can remix it, but it cannot replace where I’m from.”

      This idea resonated widely across the room. Participants recognised that while AI can convincingly emulate aesthetics, it cannot reconstruct lived experience. The concern is not simply stylistic mimicry; it is the potential erosion of the cultural grounding that gives creative work its meaning.

      Serpentine Gallery curator Alice Scope added context from contemporary art: “Some artists will use these tools to push aesthetic extremes. Others will return to minimalism. That tension has always driven art history.” The consensus was that AI is entering a lineage of tools that have historically reshaped creative practice, but its scale introduces new stakes around identity and authorship.

      2. Compute access as a determinant of creative possibility

      A structural insight emerged as creators discussed their workflows: access to compute is not evenly distributed. Several participants from Latin America and other regions described how GPU scarcity and cost have become the limiting factor in pursuing their practice.

      One participant underscored the issue: “I couldn’t do what I do without Daydream. GPUs are too expensive here. This is the only way I can work at the level I want.”

      This was not framed as a complaint but as a recognition that compute access is now a primary determinant of who can participate in emerging creative forms. It became clear that compute, not talent or tools, is increasingly the gatekeeper of participation. This topic resurfaced repeatedly across both tracks and became one of the keystones of the entire Forum.

      3. Discovery systems and the changing behaviour of audiences

      Creators then turned to the challenge of reaching audiences. Traditional distribution remains shaped by opaque algorithms and engagement-driven incentives, often misaligned with the values and intentions of artists.

      Almond Hernandez from Base described the dilemma: “If you remove algorithms entirely, you place the burden of discovery back on users. But if you keep them, they can distort culture. We need ways for people to shape their own feeds.”

      This tension produced no single consensus, but it clarified a shared frustration: discovery should not force creators into optimising for platform dynamics. Instead, systems must emerge where identity, provenance and community input meaningfully influence what is surfaced.

      Friends With Benefits CEO Greg Breznitz articulated the broader implication: “Culture and technology cannot be separated anymore. What gets rewarded changes the art that gets made.” The group recognised that discovery systems are not neutral and actively shape the evolution of cultural forms.

      4. How AI is reshaping the creative process from the inside

      Refraction founder Malcolm Levy and Serpentine Gallery curator Alice Scope

      Perhaps the most nuanced discussion centred on how AI alters creative labour. Participants avoided easy dichotomies of “AI as threat” versus “AI as tool.” Instead, they articulated a more layered understanding: AI accelerates exploration but also compresses the time available for deeper creative development.

      Franco noted that the pressure to produce quickly “can corrupt the process,” a sentiment echoed by musicians and digital artists who described being pulled toward workflows optimised for speed, not refinement.

      A music platform founder contextualised this through the lens of distribution: “Platforms can train bots to listen to the AI music they create, just to farm plays.” This raised concerns about synthetic ecosystems that siphon attention away from human artists.

      Yet the group also acknowledged that AI unlocks new capacities. It lowers technical barriers, enabling more people to express ideas without specialised training. For many, it expands the field of imagination.

      Malcolm Levy of Refraction offered a framing rooted in art history: “Every movement in art is shaped by the tools of its time. Digital art was marginal until suddenly it wasn’t. AI will be the same. What matters is who shapes it.”

      Across this discussion, an essential truth emerged: AI does not eliminate creativity. It redistributes the labour involved, elevates the importance of intention and shifts the points at which authorship is asserted.

      III. Technical track: Shaping the infrastructure for trust, agency and scale

      While the Creative Track articulated what must be protected and what must remain possible, the Technical Track explored how to design systems that support those needs.

      1. Provenance as foundational infrastructure

      The technical discussion on provenance opened with a recognition that no single method can guarantee trust in an AI-saturated media environment. Participants approached provenance as an infrastructure layer that must operate across the entire lifecycle of media creation. They examined device-level capture signals, cryptographic attestations, model watermarking, social proof, dataset lineage and content signatures, emphasising that each approach addresses a different vector of uncertainty.

      The importance of this layered approach became clear through the most grounded example offered during the session. A team building a voice-data contribution platform described their experience collecting human audio samples. Even after implementing voice-signature checks and running deepfake detectors, they found that “about ten percent of the data was actually faked.” Contributors were training small voice models on their own samples and then using those models to fake additional submissions. “Validation needs human listeners, model detection and economic incentives working together,” they explained. It illustrated a key point: provenance is a dynamic adversarial problem and must be treated as such.

      This example shifted the discussion from idealised architectures to applied constraints. Participants concluded that provenance must be multi-layered, adversarially robust and economically grounded. A validator network that incorporates human judgment, machine detection and stake-based incentives was seen as a promising direction, not because it solves provenance outright but because it distributes trust across diverse mechanisms rather than centralising it in a single authority or detector. In a digital landscape stricken with antiquated copyright frameworks that hinder both the creation, dissemination and remuneration of artistic works, a multi-nodal, human-centric approach to provenance feels refreshing, urgent and necessary. 

      The discussion also connected provenance to discovery and reputation. If identity and content lineage can be verified at creation time, those signals can later inform how media is surfaced, filtered or contextualised. Provenance, in this framing, is not only about defending against deepfakes but about enabling a more trustworthy environment for cultural production, circulation and monetisation.

      2. Infrastructure for global creativity: compute, identity and discovery as interdependent primitives

      Over the course of the day, participants identified a pattern: compute, provenance and discovery are not separate concerns. They form an interdependent system that determines:

      Compute inequality emerged again as a core issue. Without access to real-time inference, creators are excluded from participating in emerging media forms. Provenance systems ensure that outputs can be trusted, and discovery mechanisms determine whether meaningful work reaches an audience.

      This preceded a rich conversation about discovery architecture. What if users could port their data across platforms to surface relevant content, instead of the platforms selling this data back to users? 

      Participants explored how portable identity, content signatures, verifiable histories and community-shaped surfacing could form a new discovery layer that operates independently of platform-level ranking algorithms. In this model, discovery becomes a protocol rather than a product: a configurable, interoperable layer where authorship, reputation and provenance act as first-class signals.

      Building open media requires a tightly interwoven stack. Compute enables creation; provenance secures identity and authorship; discovery amplifies credible work in ways that reflect the values of specific communities rather than a single optimisation function. 

      Treating these components as independent problems would reproduce the failures of existing platforms. Treating them as interdependent primitives opens the possibility for a healthier and more diverse media ecosystem.

      IV. Synthesis

      When the creative and Technical tracks were read side by side, several coherent themes emerged.

      VI. Conclusion

      The Forum made clear that the future of media will depend on coordination between creative and technical communities.

      Artists articulated what must be preserved: identity, context, agency and the integrity of the creative process. Technologists outlined the systems that can support those needs at scale.

      This event functioned as a working laboratory. The insights surfaced here will inform follow-up research, prototypes and collaborative development. Livepeer and Refraction will continue publishing materials from the Forum and supporting teams exploring these early ideas.

      Open media will not emerge from a single protocol or organisation, but from a community building the foundation together.

      `, + datePosted: `Dec 29, 2025`, + img: `https://blog.livepeer.org/content/images/2025/12/Header.png`, + excerpt: `The AI x Open Media Forum, hosted by the Livepeer Foundation and Refraction during Devconnect Buenos Aires, brought together artists, technologists, curators, protocol designers, founders and researchers at a moment when media is being reshaped at its foundations. Real-time AI has moved from experimental edges into active use, influencing how creative work is made, how it circulates, how it is authenticated and how value flows through entire ecosystems. + +The Forum was designed as a symposium rat`, + readingTime: 8 +}, +{ + title: `A Real-time Update to the Livepeer Network Vision`, + href: `https://blog.livepeer.org/a-real-time-update-to-the-livepeer-network-vision/`, + author: `By Livepeer Team`, + content: `

      For the past year, the Livepeer Ecosystem has been guided by the Cascade vision:  a path to transition from a pure streaming and transcoding infrastructure, to an infrastructure that could succeed at providing compute for the future of real-time AI video. The latest Livepeer quarterly report from Messari highlights that this transition is paying off, with network fees up 3x from this time last year, and over 72% of the fees now driven via AI inference. This is exemplified by the growing inspirational examples emerging from Daydream powered real-time AI, and real-time Agent avatar generation through Embody and the Agent SPE.

      Source: Livepeer Q3 2025 Report by Messari

      This shift has been an ecosystem wide effort – ranging from branding and communications, to productization and go to market, to hardware upgrades for orchestrators. It has successfully shifted the project under an updated mission and direction, however it has still left ambiguity in terms of what the Livepeer network itself offers as killer value propositions to new builders outside of the existing ecosystem. Is it a GPU cloud? A transcoding infra? An API engine? Now that there are signs of validation and accelerated momentum around an exciting opportunity, it’s time to really hone in on a refined vision for the future of the Livepeer network as a product itself. 

      The market for video is set to massively expand

      The concept of live video itself is expanding well beyond a simple single stream of video captured from a camera. Now entire worlds and scenes are generated or enhanced in real-time via AI assistance, leading to more immersive and interactive experiences than possible via old-school streaming alone. For a taste of the future, see the following examples:

      1. The future of gaming will be AI generated video and worlds in real-time:
      +
      1. Video streams can be analyzed and data leveraged programmatically in real-time, for instant insight generation and decision making:
      +
      1. Real-time style transfer can enable avatars and agents to participate in the global economy:
      +

      Video world models and real-time AI video are merging, as they both use AI to generate frame-by-frame video output with low latency on the fly, based on user input and AI inference. This requires a tremendous amount of GPU compute, and requires an amazing low latency video streaming and compute stack – two areas in which the Livepeer network and community thrive, and two areas to which the many other generic GPU inference providers in the market bring no unique skillset, experience, or software advantage. 

      The big opportunity for the Livepeer network is to be the leading AI Infrastructure For Real-Time Video.
      From interactive live streaming to generative world models, Livepeer’s open-access, low-latency network of GPUs will be the best compute solution for cutting edge AI video workflows. 

      World models are a game changing category, and Livepeer is well suited to offer a unique and differentiated product here, that serves a huge market of diverse and varying use cases. These range from creative entertainment, to gaming, to robotics, to data analysis, to monitoring and security, to synthetic data generation for AGI itself.

      While an ambitious stretch, Nvidia executives responsible for the category have even projected that due to the impact in robotics, the economic opportunity for world models could exceed $100 trillion, or approximately the size of the entire global economic output itself!  

      What does it mean to productize the Livepeer network to succeed as a valuable infrastructure in this category?

      From a simplified viewpoint, it needs to deliver on the following:

      1. Ability for users to deploy real-time AI workflows to the Livepeer network and request inference on them

      2. Industry leading latency for providing inference on real-time AI and world model workflows.

      3. Cost effective scalability – users can pay as they go to scale up and down capacity and the network automagically delivers the scale required.

      Imagine a gaming platform is powering world-model generated games using their unique workflows that generate game levels or areas in a certain style by combining several real-time models, LLMs, and style transfer mechanisms. Each game it’s powering has users exploring and creating their own corners of the interactive worlds, based on prompts and gameplay inputs. Every gamer that joins a game represents a new stream of AI video compute, and the Livepeer network is the backing infrastructure that provides the compute for this video world generation, leveraging hundreds or thousands of GPUs concurrently.

      For this to be possible the Livepeer network needs to enable that game platform to deploy their game generation workflow. It needs to offer low latency on the inference that runs this workflow, relative to the generic GPU compute clouds. The pricing needs to be competitive vs alternative options in the market for this GPU compute. And the network needs to allow this company to scale up and down the number of GPUs that are currently live ready to accept new real-time inference streams based on the number of users currently live on the games it is powering.

      All of this is possible on the Livepeer network, and it isn’t far away from where we are now. If we work to build, test, and iterate on the Livepeer network itself towards supporting the latency and scale required for these types of workflows, we’ll be set up to power them.
      Now multiply this example gaming company by the high number of diverse industries and verticals that real-time AI and world models will touch. Each category can have one or multiple companies competing to leverage this scalable and cost effective infrastructure for unique go to markets targeting different segments. And they can all be powered by the Livepeer network’s unique value propositions.

      Livepeer’s core network is strategically positioned

      What are these value propositions that make the Livepeer network differentiated relative to alternative options in the market? I’d argue that there are three primary, table stakes, must-have value propositions if Livepeer is to succeed. 

      1. Industry standard low latency infrastructure specializing in real-time AI and world model workflows: First of all, the network needs to let its users deploy custom workflows. Inference alone on base models is not enough and does not represent scaled demand. Users want to take base models, chain them together with other models and pre/post processors, and create unique and specialized capabilities. When one of these capabilities is defined as a workflow, that is the unit that needs to be deployed as a job on the Livepeer network, and the network needs to be able to run inference on it. Secondly, for these real-time interactive use cases, latency matters a lot. Generic GPU clouds don’t offer the specialized low latency video stacks to ingest, process, and serve video with optimal latency, but Livepeer does. And Livepeer needs to benchmark itself to have lower or equal latency to alternative GPU clouds for these particular real-time and world model use cases.

      2. Cost effective scalability: GPU provisioning, reservations, and competing for scarce supply procurement creates major challenges for AI companies – often overpaying for GPUs that sit idle most of the time in order to guarantee the capacity that they need. The Livepeer network’s value proposition is that users should be able to “automagically” scale up almost instantly and pay on demand for the compute that they use, rather than having to pre-pay for reservations and let capacity sit idle. This is enabled by Livepeer taking advantage of otherwise existing idle longtail compute through its open marketplace, and its supply side incentives. The Livepeer network needs to be more cost effective than alternative GPU clouds within this category - with impacts comparable to the 10x+ cost reduction already demonstrated in live video transcoding delivered by the network.

      3. Community driven, open source, open access: The Livepeer project and software stack is open source. Users can control, update, and contribute to the software they are using. They also can be owners in the infrastructure itself through the Livepeer Token, and can benefit from the network’s improvements and adoption, creating a network effect. The community that cares about its success and pushes it forward collectively, can be a superpower, relative to the uncertain and shaky relationship between builders and centralized platform providers, who have a history of getting rugged based on limitations to access, changes in functionality, or discontinuity of the platforms. Anyone can build on the Livepeer network regardless of location, jurisdiction, use case, or central party control.

      The above are primary value propositions that should appeal to nearly all users. And we must work to close the gaps to live up to those value props before we could successfully hope to go to market and attract new vertical-specific companies to build directly on top of the network. Luckily, in addition to all of Livepeer’s streaming users, we have a great realtime AI design partner in Daydream, which is already going to market around creative real-time AI, using the network, and contributing to its development to live up to these requirements. While building with this design partner, the ecosystem should be working to productize to live up to these promises in a more generic perspective – it should be setting up benchmarks, testing frameworks, and building mechanisms for scaling up supply ahead of demand, so that it can represent this power to the world alongside successful Daydream case studies.

      Opportunities to push towards this vision

      To truly live up to these value propositions, there are a number of opportunities for the community to focus on in order to close some key gaps. There are many details to come in more technical posts laying out roadmaps and execution frameworks, but at a high level, consider a series of milestones that take the network as a product from technically functional, to production usable, to extensible, to infinitely scalable:

      1. Network MVP - Measure what matters: Establish key network performance SLAs, measure latency and performance benchmarks, and enhance the low latency client to support realtime AI workflows above industry grade standards.
      2. Network as a Product - Self adaptability and scalability: Network delivers against these SLAs and core value props for supported realtime AI workflows. Selection algorithms, failovers and redundancy, and competitive market price discovery established for realtime AI.
      3. Extensibility - Toolkit for community to deploy workflows and provision resources: Workflow deployment and signaling, LPT incentive updates to ensure compute supply for popular AI workflows exceeds demand.
      4. Parallel Scalability: Manage clusters of resources on the network for parallel workflow execution, truly unlocking job types beyond single-GPU inference. 

      Many teams within the ecosystem, from the Foundation, to Livepeer Inc, to various SPEs have already started operationalizing around how they’ll be contributing to milestones 1 and 2 to upgrade the network to deliver against these key realtime AI value propositions. 

      Conclusion and Livepeer’s opportunity

      The market for the opportunity to be the GPU infrastructure that powers real-time AI and world models is absolutely massive – the compute requirements are tremendous – 1000x that of AI text or images – and real-time interaction with media represents a new platform that will affect all of the above-mentioned industries. The Livepeer network can be the infrastructure that powers it. How we plan to close the needed gaps and achieve this will be the subject of an upcoming post. But when we do prove these value propositions, Livepeer will have a clear path to 100x the demand on the network.

      The likely target market users for the network are those startups that are building out vertical specific businesses on top of real-time AI and world model workflows. The ecosystem should look to enable one (or multiple!) startups in each category going after building real-time AI platforms that serve gaming, that serve robotics, that serve synthetic data generation, that serve monitoring and analysis, and all the additional relevant categories. The network’s value propositions will hopefully speak for themselves, but in the early stages of this journey, it is likely the ecosystem will want to use incentives (like investment or credits) to bootstrap these businesses into existence. Each will represent a chance at success, and will bring more demand and proof.

      Ultimately, many users of these platforms may choose to build direct on the network themselves. Similarly to how startups start to build on platforms like Heroku, Netlify, or Vercel, and then as they scale and need more control and cost savings they build direct on AWS, and then ultimately move to their own datacenters after reaching even more scale – users of Daydream or a real-time Agent platform built on Livepeer, may ultimately choose to run their own gateways to recognize the cost savings and control and full feature set that comes from doing so. This is a good thing! As it represents even more usage and scale for the network, more proof that as an infrastructure the Livepeer network has product market fit, and that it can absorb all workflows directly. The businesses built on top will provide their own vertical specific bundles of features and services that onboard that vertical specific capacity, but they’ll be complemented by and enabled by the Livepeer Network’s superpowers.

      While there’s a lot of work ahead, the Livepeer community has already stepped up to cover tremendous ground on this mission. At the moment by already powering millions of minutes of real-time AI inference per week, by our orchestrators already upgrading their capacity and procurement mechanisms to provide real-time AI-capable compute, and by the Foundation groups already working to evaluate the network’s incentives and cryptoeconomics to sustainably fund and reward those contributing to this effort, we’re set up well to capture this enormous opportunity!

      `, + datePosted: `Nov 13, 2025`, + img: `https://blog.livepeer.org/content/images/2025/11/LP_Blog-Header_Nov25_01_moshed-1.png`, + excerpt: `For the past year, the Livepeer Ecosystem has been guided by the Cascade vision:  a path to transition from a pure streaming and transcoding infrastructure, to an infrastructure that could succeed at providing compute for the future of real-time AI video. The latest Livepeer quarterly report from Messari highlights that this transition is paying off, with network fees up 3x from this time last year, and over 72% of the fees now driven via AI inference. This is exemplified by the growing inspirat`, + readingTime: 9 +}, +{ + title: `Livepeer Onchain Builders - Streamplace: Building the Video Backbone of Decentralized Social`, + href: `https://blog.livepeer.org/livepeer-onchain-builders-streamplace-building-the-video-backbone-of-decentralized-social/`, + author: `By Livepeer Team`, + content: `

      Welcome to Livepeer Onchain Builders, a new content series spotlighting the Special Purpose Entities (SPEs) funded by the Livepeer onchain treasury. SPEs are working groups funded by the community treasury to work on specific tasks and are accountable to the community for their delivery. These deep dives will explore how each initiative is driving protocol usage, expanding infrastructure, and pushing the boundaries of what’s possible in decentralized video and AI.

      Streamplace is an open-source video streaming platform designed to power decentralized social applications with real-time, creator-first infrastructure. It aims to make livestreaming and video hosting as seamless as TikTok or YouTube, but built on open protocols and self-sovereign identity.

      What makes it ambitious? Streamplace is not only building full-stack video infra for federated social networks, it's doing so in a way that prioritizes interoperability, scalability, and public goods. From developer SDKs to end-user apps, Streamplace is building an entire ecosystem.

      What is an SPE? 

      A Special Purpose Entity (SPE) is a focused, community-funded team contributing to the Livepeer ecosystem. SPEs are typically mission-driven groups that operate independently to build infrastructure, applications, or tooling that expand and improve the Livepeer protocol. These teams are funded through proposals to the onchain treasury and are accountable to the community.

      SPEs are necessary for the ecosystem because no single team can build every part of a decentralized protocol. SPEs decentralize development, fund public goods, and allow the community to direct resources where they're most needed.

      Why do they matter to delegators and stakeholders? Because SPEs grow in usage. More usage = more fees = more rewards. Delegators benefit when the protocol succeeds, and SPEs are among the most direct ways to make that happen.

      From Aquareum to Streamplace

      A clear goal drives the team behind Streamplace: to build the foundational video infrastructure for the next generation of decentralized social platforms. These platforms, such as Farcaster and the AT Protocol, promise user-owned identity and interoperability, but have thus far lacked robust support for live and on-demand video.

      Streamplace solves this by providing a full-stack, developer-friendly video layer that anyone can plug into. It's a bold attempt to make decentralized video feel as native and easy as its Web2 counterparts.

      Streamplace started as Aquareum, a project with the same mission and team. This evolution into Streamplace is a rebranding, not a restart, building on past momentum with a sharper focus.

      Their vision is to give every user the ability to publish, stream, and remix content with the same ease as TikTok or YouTube, but backed by self-sovereign identity and decentralized networks.

      Streamplace homepage

      The first proposal delivered:

      • A unified Aquareum node: bundling the Livepeer stack with indexing and playback.
      • App releases on iOS, Android, and Web.
      • Native integrations with AT Protocol and Farcaster.
      • Support for C2PA metadata and content provenance.

      Now, Streamplace continues that momentum with 100,000 LPT in treasury funding and a clear mandate to scale.

      Streamplace Grafana dashboard

      Why Streamplace Matters

      Video is the heart of online social interaction. Yet decentralized social networks have lagged in providing seamless, user-friendly video experiences. Streamplace addresses this by:

      • Transcoding every livestream through Livepeer, providing decentralized, low-cost processing for global delivery.
      • Powering partner platforms like Skylight Social, a TikTok alternative backed by Mark Cuban, that recently hit #1 in entertainment on the App Store.
      • Making it dead-simple to stream or host video through single-binary nodes that anyone can deploy.
      • Championing public goods, 100% of their code is open source, with a commitment to infrastructure, not monetization lock-in.

      Decentralized social, spanning protocols like Farcaster, AT Protocol, and Bluesky, represents a movement toward user-owned networks and open standards. These networks are gaining traction, but video remains a missing layer. That’s where Streamplace comes in.

      Video is essential because it's the most engaging, expressive medium for creators and communities. And as these decentralized platforms scale, having real-time, composable video becomes non-negotiable.

      Streamplace positions itself as the default video infra layer for this new social stack, and with every stream transcoded through Livepeer, it's also a major driver of protocol usage and visibility.

      What Streamplace 2.0 Will Deliver

      This new phase of work, funded by the Livepeer treasury, focuses on scale, performance, and ecosystem integration:

      Infrastructure Enhancements

      • Expand server capacity to support growing user bases like Skylight.
      • Harden video nodes for reliability under real-world load.
      • Deliver high-quality performance on all platforms: Web, iOS, Android.

      Protocol and Developer Growth

      • Deepen native integration with AT Protocol.
      • Build SDKs and NPM packages to embed Streamplace easily into other apps.
      • Ship VOD functionality and new moderation tools.

      Community-First Ethos

      • Launch creator monetization models and stream incentive programs.
      • Empower streamers with self-hosted app capabilities ("Twitch, but it's your own app").
      • Maintain full transparency and livestream development.

      The Livepeer Angle

      Livepeer's decentralized video infrastructure powers every second of video on Streamplace. That means more work for orchestrators, more fees flowing through the protocol, and more incentive for high-quality node operation.

      Streamplace strengthens the Livepeer ecosystem in three key ways:

      • Demand generation: Real-world usage at scale means more consistent transcoding work.
      • Protocol visibility: High-impact apps like Skylight drive awareness of Livepeer beyond its native circles.
      • Infrastructure robustness: Streamplace's nodes enhance the distributed capacity of the Livepeer network.

      Without Livepeer, a decentralized video stack like Streamplace wouldn’t be possible. And without ambitious apps like Streamplace, Livepeer wouldn’t have the same opportunity to prove its value at scale.

      Final Thoughts

      Streamplace is a keystone piece of open video infrastructure and a cornerstone in the emerging world of decentralized social media. By fusing creator-first tooling with Livepeer’s scalable infrastructure, it offers a glimpse into what the open internet can become.

      As decentralized protocols shift from vision to adoption, the need for native video is urgent. Streamplace, with the support of the Livepeer treasury and a relentless commitment to open-source infrastructure, is meeting that need head-on.

      If you're a developer, creator, or community builder, now is the time to get involved.

      Do you want to contribute to Streamplace's success? Explore the open roles here.

      Interested in building or contributing to the Livepeer ecosystem? Learn more about current and past SPEs, open opportunities, and how to submit your own proposal here.

      Follow along, fork the code, or join a stream — the future of social video is open.

      Streamplace App

      Streamplace Proposal

      Aquareum Proposal


      Livepeer is a decentralized video infrastructure network for live and on-demand streaming. It has integrated AI Video Compute capabilities (Livepeer AI) by harnessing its massive GPU network and is now building the future of real-time AI video.

      Twitter | Discord | Website

      `, + datePosted: `Aug 14, 2025`, + img: `https://blog.livepeer.org/content/images/2025/08/Onchain-Builders-Streamplace.jpg`, + excerpt: `Welcome to Livepeer Onchain Builders, a new content series spotlighting the Special Purpose Entities (SPEs) funded by the Livepeer onchain treasury. SPEs are working groups funded by the community treasury to work on specific tasks and are accountable to the community for their delivery. These deep dives will explore how each initiative is driving protocol usage, expanding infrastructure, and pushing the boundaries of what’s possible in decentralized video and AI. + +Streamplace is an open-source `, + readingTime: 5 +}, +{ + title: `Builder Story: dotsimulate x Daydream`, + href: `https://blog.livepeer.org/builder-story-dotsimulate-x-daydream/`, + author: `By Livepeer Team`, + content: `

      Building StreamDiffusionTD Operator - a Real-Time Generative Video Operator for TouchDesigner, Powered by the Daydream API

      Creator:
      Lyell Hintz (@dotsimulate)
      Operator: StreamDiffusionTD
      Backends Supported: Local + Daydream (Livepeer)

      +
      + +
      + +
      +
      +
      + + + 0:00 +
      + /0:34 +
      + + + + + +
      +
      +
      + +

      Overview

      StreamDiffusionTD is a TouchDesigner operator that connects real-time inputs like audio, sensors, and camera feeds to StreamDiffusion, enabling live generative visuals controlled in real time. With the Daydream API, it adds remote inference capabilities on top of the existing local GPU inference and unlocks more flexibility for users.

      Built by Lyell Hintz, a technical artist and TouchDesigner developer, the operator is used in live shows, installations, and experimental workflows.

      Why It Was Built

      Lyell began working on the operator a few hours after StreamDiffusion was released on GitHub. He wanted to use it in TouchDesigner - a powerful tool for real-time interactive content creation.

      “TouchDesigner is the only place this could be controlled from… it can hook into everything else.”

      From the start, he avoided creating a “black box.” The operator exposes core parameters like prompt, seed, and ControlNet weights, allowing users to adjust values and see results immediately.

      Key Features

      • Real-time video generation
      • Prompt and seed morphing
      • Dynamic ControlNet weighting
      • Live input support: audio, sensors, camera
      • Local GPU and Daydream backend options
      • Instant visual feedback in TouchDesigner
      +
      + +
      + +
      +
      +
      + + + 0:00 +
      + /0:26 +
      + + + + + +
      +
      +
      + +

      Daydream API Integration

      StreamDiffusionTD works with the Daydream API, which allows the operator to run on a remote GPU backend. This eliminates the major barrier of requiring a high-end PC with an NVIDIA RTX 4090 to run StreamDiffusion at professional quality, unlocking the flexibility to run it from any location, on any device form factor.

      Just drop in your API key and hit “Start Stream.” The backend handles orchestration, model hosting, and frame delivery, so builders can stay focused on their creative and technical workflows.

      Setup takes less than 1 minute, and once installed, the configuration is remembered for future use. Daydream’s API brings new features to StreamDiffusion:

      • Multi-controlnet: Mixing different controlnets for better artistic control
      • IPAdapter: Use images as powerful style guides
      • TensorRT: Better frame rate for smooth video output

      Daydream is adding support for more real time video generation models, and developers can request features, suggest improvements, or build on top of the API itself. It aligns with the values of open tooling and community-led infrastructure.

      How Artists can use StreamDiffusionTD in TouchDesigner

      • Audio-reactive visuals for concerts
      • Camera-driven generative visuals
      • Real-time visuals for LED walls and stages
      • TouchDesigner automation workflows

      Because it's built inside TouchDesigner, the operator can be extended using Python, MIDI, OSC, or any other input TouchDesigner supports.

      Current State

      The operator is live and ready to use, with active development underway for new features and improved performance. It’s a great time to jump in, explore, and help shape what comes next.

      Try it Yourself

      Operator Access: patreon.com/dotsimulate
      Community and Support: discord.gg/daydreamlive
      API Keys can be requested here

      `, + datePosted: `Aug 5, 2025`, + img: `https://blog.livepeer.org/content/images/2025/08/DD_Builder-Story_dotsimulate_01.png`, + excerpt: `Building StreamDiffusionTD Operator - a Real-Time Generative Video Operator for TouchDesigner, Powered by the Daydream API + +Creator: Lyell Hintz (@dotsimulate) +Operator: StreamDiffusionTD +Backends Supported: Local + Daydream (Livepeer) + + + + + + + + + + + + + + + + + + + + + + + + +0:00 + +/0:34 + + +1× + + + + + + + + + + + + + + + + + +Overview + +StreamDiffusionTD is a TouchDesigner operator that connects real-time inputs like audio, sensors, and camera feeds to StreamDiffusion, enabling live generative visuals controlled in real time. Wit`, + readingTime: 2 +} +]; \ No newline at end of file diff --git a/snippets/automations/discord/discordAnnouncementsData.jsx b/snippets/automations/discord/discordAnnouncementsData.jsx new file mode 100644 index 00000000..2672ccb3 --- /dev/null +++ b/snippets/automations/discord/discordAnnouncementsData.jsx @@ -0,0 +1,63 @@ +export const discordAnnouncementsData = [ + { + id: "1463397885272920138", + content: + "📣 __The CloudSPE proposal is live.__ 🗳️ 📣

      The proposal funds Cloud SPE to build a focused MVP for standardized, publicly observable network performance, reliability, and demand metrics, making the network measurable and comparable while laying the groundwork for future SLA-aware routing and scaling.

      Vote Yes ✅ or No ❌ [here](https://explorer.livepeer.org/treasury/47675980806842999962173227987422002121354040219792725319563843023665050472833)", + author: "AlisonWonderland", + timestamp: "2026-01-19T18:27:40.785000+00:00", + url: "https://discord.com/channels/423160867534929930/428351836609576972/1462876182298103963", + }, + { + id: "1463397844890288351", + content: + "📣 __Vote now on the Protocol R&D SPE__ 🗳️ 📣

      All network value depends on protocol security. The proposal argues for a dedicated, continuously staffed function for protocol security, upgrades, and core improvements, replacing the current ad hoc model with a single accountable structure.

      Vote Yes ✅ or No ❌ [here](https://explorer.livepeer.org/treasury/67253869199932483234551664403036205881217777786063955710174984983936506090761)", + author: "AlisonWonderland", + timestamp: "2026-01-15T16:42:42.059000+00:00", + url: "https://discord.com/channels/423160867534929930/428351836609576972/1461400212063916114", + }, +]; + +export const DiscordAnnouncementsOld = () => { + const announcements = discordAnnouncementsData; + + return ( +
      +
      +

      Latest Livepeer Announcements

      +

      From Discord

      +
      +
      + {announcements.map((announcement) => ( +
      +
      + {announcement.author} + + +
      + + ))} +
      +
      + ); +}; diff --git a/snippets/automationData/forum/Hero_Livepeer_Forum.png b/snippets/automations/forum/Hero_Livepeer_Forum.png similarity index 100% rename from snippets/automationData/forum/Hero_Livepeer_Forum.png rename to snippets/automations/forum/Hero_Livepeer_Forum.png diff --git a/snippets/automations/forum/forumData.jsx b/snippets/automations/forum/forumData.jsx new file mode 100644 index 00000000..70ed6e6a --- /dev/null +++ b/snippets/automations/forum/forumData.jsx @@ -0,0 +1,34 @@ +export const forumData = [ + { + title: 'Metrics and SLA Foundations for NaaP', + href: 'https://forum.livepeer.org/t/3189', + author: 'By speedybird (@speedybird)', + content: '

      Thank you to everyone who reviewed the earlier pre-proposal and shared detailed feedback in the forum and during the Watercooler. The concerns raised around scope, cost, architectural risk, and MVP clarity were well-founded and directly informed this revision.

      This updated pre-proposal reflects a deliberate reset toward a smaller, clearer Network-as-a-Product MVP. The scope has been significantly narrowed, the budget reduced, and the architecture simplified to prioritize time-to-value, reuse of existing Livepeer infrastructure, and immediate usefulness to gateways, orchestrators, and ecosystem teams.

      Below is the revised pre-proposal. We welcome the community’s review and feedback on the updated scope, design, and framing. We will be present on this coming Monday’s Water Cooler for discussion.


      Cloud SPE Pre-Proposal: Network-as-a-Product (NaaP) MVP – SLA Metrics, Analytics, and Public Infrastructure

      Abstract

      This pre-proposal seeks treasury funding for the Livepeer Cloud Special Purpose Entity (SPE) to design, build, and operate a focused Network-as-a-Product (NaaP) MVP for SLA metrics, analytics, and public visibility.

      The objective of this work is to make the Livepeer network measurable, comparable, and trustworthy at a network level by delivering a small but complete set of standardized performance, reliability, and demand metrics. These metrics will be publicly observable and designed to support gateway providers, orchestrators, and ecosystem builders evaluating Livepeer as production infrastructure.

      This MVP intentionally prioritizes time-to-value, architectural simplicity, and reuse of existing Livepeer infrastructure, while establishing a durable foundation for future SLA-aware routing, scaling, and productization efforts led by Livepeer Inc, the Livepeer Foundation, and the community.


      Rationale

      As Livepeer advances toward the Network-as-a-Product vision, predictable service characteristics and transparent performance signals become essential. While the network supports real workloads today, participants lack a shared, network-wide view of performance, reliability, and demand that can be used to assess suitability for production use.

      Community discussions around earlier drafts of this initiative strongly aligned on the problem, while raising important concerns around scope, cost, architectural risk, and MVP clarity. This pre-proposal reflects that feedback by narrowing focus to a practical MVP that:

      • Demonstrates clear value with minimal complexity
      • Leverages existing data sources and pipelines wherever possible
      • Avoids protocol changes, enforcement mechanisms, or premature decentralization
      • Produces immediately usable outputs for real network participants

      Key challenges addressed by this proposal include:

      • Fragmented metrics: Existing performance and reliability data is dispersed across systems and difficult for non-core teams to consume.
      • Limited network-level visibility: Gateway providers and orchestrators cannot easily compare performance across regions, workloads, or peers.
      • Adoption friction: Without transparent, shared metrics, external developers and partners struggle to evaluate Livepeer for serious workloads.
      • Missing foundation for NaaP evolution: Future SLA-aware routing, scaling, and automation require a trusted measurement layer first.

      The Cloud SPE is well positioned to deliver this work as neutral, public infrastructure, building on its prior experience operating gateways, test tooling, dashboards, and analytics for the Livepeer network.

      Importantly, this proposal does not attempt to enforce SLAs, modify protocol incentives, or introduce new routing logic. Its purpose is to establish shared measurement and learning infrastructure as a prerequisite for those future decisions.


      Deliverables

      The NaaP MVP will deliver a constrained, end-to-end metrics system focused on observability and learning, inspired by the NaaP product MVP and Foundation roadmap.

      1. Core SLA Metrics (MVP Scope)
      • A standardized set of network, performance, and reliability metrics sufficient to evaluate orchestrator and GPU behavior across workflows.
      • Metrics sourced primarily from job tester gateway and orchestrator-emitted telemetry, with targeted additions only when other Gateways opt-in.
      2. Network Test & Verification Signals
      • Operation of one or more reference load-test gateways to generate consistent, reproducible performance signals for live AI video pipelines.
      • Public test scenarios (aka test datasets) designed to reflect real workloads while remaining transparent and community-verifiable. These will be captured in Github.
      • Test results contributed into the same analytics layer as organic network traffic to enable comparison (when other Gateways participate).
      3. Analytics & Aggregation Layer
      • Lightweight ETL and aggregation pipelines to transform raw metrics into network-level views.
      • Computation of a small number of derived indicators as outlined in the Metrics Catalog
      • Data structured for efficient querying without requiring dashboards to load raw job data.
      4. Public Dashboard & APIs
      • A standalone public dashboard presenting live and historical metrics.
      • Public, read-only APIs for aggregate SLA scores and hardware.
      • Clear paths for gateways and ecosystem teams to consume the data directly or mirror it into their own analytics systems.
      5. Operations & Stewardship
      • Ongoing operation of testing, analytics, and dashboard infrastructure.
      • Maintenance, monitoring, and community support for the MVP for 1 year.

      Any scope not outlined here is not part of the Deliverables and out of the scope of this proposal.


      Key Milestones

      Milestone 1 – Metrics Collection & Aggregation

      • Define and implement the minimal metrics set
      • Aggregate existing telemetry into a unified analytics layer
      • A basic dashboard showing sample data flowing end to end

      Milestone 2 – Test Signals & Derived Analytics

      • Deploy reference load-test gateways
      • Launch a public dashboard with core views
      • APIs for ecosystem consumption

      Milestone 3 – Stabilization & Review

      • Harden infrastructure for reliability and cost efficiency
      • Document metrics, assumptions, and known gaps
      • Review outcomes with the community to determine next steps

      Timeline

      Delivery is anticipated to take approximately six months (and already underway as of November 2025). This is dependent on the team’s development velocity and subject to change. Preliminary design and validation work has begun to reduce delivery risk.

      • November 2025 - Work began on the original proposal and discovery process
      • February 2026 – Milestone 1: Metrics Collection & Aggregation
      • March 2026 – Milestone 2: Test Signals & Derived Analytics
      • April 2026 – Milestone 3: Stabilization & Review

      Budget

      Total Requested Budget: $90,000

      This budget supports:

      • Engineering work to aggregate, validate, and expose SLA-relevant metrics
      • Development of Load Testing Gateway (AI Job Tester + Gateway enhancements) and Network Data Scraper
      • Development of minimal analytics and public-facing dashboards
      • Development of DevOps infrastructure and automation
      • Operation of testing, analytics, and storage infrastructure for approximately one year
      • Ongoing maintenance, documentation, and community support

      The budget is intentionally sized for a thin but complete MVP, designed to validate assumptions, inform future investment, and avoid long-term commitments before value is demonstrated.


      Closing Note

      This pre-proposal reflects extensive community and Livepeer Inc feedback and represents a deliberate step toward a simpler, clearer, and more actionable NaaP MVP.

      By focusing on shared measurement rather than enforcement or protocol change, this work aims to give the Livepeer ecosystem a common understanding of network behavior today — and a solid foundation for deciding what to build next.

      ', + replyCount: 6, + datePosted: 'Jan 9, 2026', + }, + { + title: 'Monthly Reporting', + href: 'https://forum.livepeer.org/t/3191', + author: 'By Mehrdad (@Mehrdad)', + content: '

      Hey everyone,

      We’re introducing a simple monthly update template for all SPEs and funded projects that makes it easy for people to see the value you’re creating. These reports will help get fast feedback on what you’ve shipped, create visibility around new features and put all the relevant evidence needed for future proposals in one place.

      What we’re asking you to do

      • At the end of each month or at the end of a milestone (whichever comes first), post an update as a reply in your original proposal thread on the forum.
      • Use the following minimal template. You can adapt the wording, but please keep all the sections so we have consistent information across projects:

      It should take ~5 minutes to produce. The key is that the update is:

      • Short and easy to scan
      • Linked to concrete artefacts (PRs, repos, docs, deployments, etc.)
      • Clearly tied back to the proposal’s milestones and goals

      As a reference, you can look at the ongoing Explorer RFP updates, which follow a similar pattern.


      Here’s the template:

      [Project / Team Name] — Update #[N]

      Period: [Start date – End date]

      Status: [On track/At Risk/Missed*]

      Status guidance:
      On track - Everything is fine, progressing as expected
      At risk - Needs attention, elements are blocked or there is a risk a deadline is missed.
      Missed - Deadline has been missed

      Summary (1 to 2 sentences):

      One-line description of what was completed during this period and why it matters. Focus on outcomes, not process.

      Completed Deliverables:

      List concrete outputs with links. Group by milestone if applicable.

      • [Deliverable name or milestone]
        • Short description of what shipped or was completed
        • Link(s): PRs, issues, docs, releases, dashboards

      ETA for Next Update: [Date]

      Planned by Next Update:

      • Brief bullet list of what is expected to be completed by the next report.

      If anything about this format doesn’t work for your project, reach out so we can adjust while still keeping reports comparable across the ecosystem.

      ', + replyCount: 0, + datePosted: 'Jan 14, 2026', + }, + { + title: 'Proposal: LIP 101 - Restart treasury reward cut', + href: 'https://forum.livepeer.org/t/3161', + author: 'By b3nnn (@b3nnn)', + content: '
      TLDR

      We propose to reactivate treasury rewards to ensure available funds for strategic ecosystem projects:

      • treasuryRewardCutRate: 10%
      Motivation

      The treasury is key to supporting strategic investments to improve UX for stakeholders, effectively manage protocol security, and fund other capital and resource needs for this exciting phase of the project. As of now, the onchain treasury is currently not accumulating LPT due to hitting the “treasury ceiling” and resetting the treasury reward cut to 0%.

      We have a number of strategic proposals around managing capital, supporting network security, and funding continued development and optimisation of the network either live or in backlog ready to be proposed. We need certainty that the treasury will be repopulating before making important decisions around these proposals.

      Rationale

      The treasury is key to supporting strategic ecosystem investments and public goods. As such, the desired treasuryRewardCutRate should reflect the value that best supports us in achieving those ecosystem goals.

      Through open discussion in the forum and the most recent Watercooler, focus was on rapid action to restart the treasury cut. In the interests of moving rapidly, and given there was little to no discussion about changing the parameter, we propose to restart round-based treasury rewards at 10%. This is the same rate as during the life of the onchain treasury.

      Technical Specification

      Implementation of this LIP will update the target parameter with the new value:

      Parameter Value
      treasuryRewardCutRate 10%

      For a more detailed technical review of the parameter you can review its instantiation in the original LIP here

      Implementation

      The security committee, as owners of the protocol, invoke the function and set the value as per the results of the vote.

      Testing

      Testing is not required. The Security Committee will ensure the value of this parameter is set to 1e26 (1 followed by 26 zeroes).

      ', + replyCount: 2, + datePosted: 'Dec 11, 2025', + }, + { + title: 'Proposal - Protocol R&D Special Purpose Entity', + href: 'https://forum.livepeer.org/t/3160', + author: 'By Rick (@rickstaa)', + content: '
      Abstract

      All network value depends on protocol security.

      Protocol security requires dedicated capacity to detect issues early, resolve them quickly and deploy upgrades with confidence. The current model depends on limited, distributed resources that cannot consistently support these demands. The Protocol R&D Special Purpose Entity (SPE) resolves this by establishing a professional, continuously staffed function responsible for vulnerability triage, safe upgrade preparation, and shipping additional protocol features like a reliable testnet for rigorous validation and development.

      This proposal funds the SPE for an initial six-month term. It brings together a contracted security and engineering partner, under the governance of Livepeer Foundation and Livepeer Inc. The SPE creates a single, accountable structure that protects the protocol, reduces operational risk and enables faster, safer delivery of protocol improvements as the network continues to scale.

      Mission

      The mission of the Protocol R&D SPE is to provide the most secure, resilient and continuously improving protocol foundations possible for Livepeer, at the best possible price-to-value ratio.

      Rationale

      The protocol supports significant on-chain value which continues to grow through the expansion of services to real-time video AI inference. Protecting this requires consistent access to security and engineering expertise. The current model, while effective at securing the protocol since inception, relies on Livepeer Inc and places a significant load on the security committee. This constrains core feature development and protocol progress. Having a dedicated security partner reduces the load on the security committee and frees them for other obligations, while increasing the speed at which we can improve network security.

      Core to this SPE is the engagement of a Protocol Engineering & Security Partner (Sidestream) to provide a dedicated, multi-disciplinary team. They provide first‑response to Immunefi-identified vulnerabilities and implement audited on‑chain patches and upgrades. Immunefi has been a massive success in terms of the mission, keeping the protocol safe at modest cost—historically about $75–100k per year in bounty payouts—while helping protect tens of millions in protocol value. The Partner works in close coordination with the Security Committee, which retains review and execution authority for upgrades and emergency patches.

      These steps reduce our reliance on more constrained support and move toward a stable, accountable model for protocol security. The SPE creates a durable, well-defined structure for protocol stewardship as the network decentralizes. It gives the community a clear point of accountability for security and core maintenance, which reduces operational risk and supports the reliable functioning of the protocol over the long term.

      Deliverables

      The Protocol R&D SPE improves operational responsibilities, fast and continuous response, ships already‑built but not‑yet‑deployed features in the protocol R&D pipeline, and launches and maintains a public testnet and DevEx toolkit to speed up future development.

      (1) Core Protocol Security Operations

      Goal: Maintain continuous protocol security coverage and rapid incident response through the Immunefi bounty program and close coordination with the Security Committee.

      Outputs: The SPE will manage the Immunefi process as first responder for vulnerability reports. The Partner will reproduce, validate, and propose patches within defined response windows, in coordination with the Foundation Technical Lead and the Security Committee for review and deployment. Quarterly readiness reviews will strengthen detection, response time, and coordination.

      Success Indicators: Continuous Immunefi coverage with valid reports acknowledged within 24 hours and triaged within one week. Critical issues are resolved or escalated for deployment within agreed timelines. The SPE operates the response process independently while the Security Committee maintains oversight.

      (2) Ship Backlog Features and Build the R&D Pipeline

      Goal: Deliver the high-priority protocol upgrades from the existing backlog while building the foundation for a sustainable and iterative R&D process.

      Outputs: The SPE will complete and deploy existing features nearing readiness for mainnet release—such as the Reward Call Delegate, Ticket Distinction, and stability patches. The specific upgrades shipped each release cycle will be selected through a lightweight triage process established by the SPE, supported by the Foundation protocol engineer as the role comes online.

      Success Indicators: At least one backlog feature or patch deployed to mainnet per release cycle. Lightweight triage and delivery process is established and used to prioritize and ship work. The Foundation protocol engineer is hired and supporting development and coordination by the end of Q1 2026.

      (3) Public Testnet and Developer Infrastructure

      Goal: Deliver and maintain the testnet and tooling needed for reliable validation, audits, and developer experimentation, supporting both protocol and client development.

      Outputs: The SPE will operate a continuously available public testnet with faucet access, CI integration, and simulation tooling. Clear developer documentation and workflows will make it easier to run local or private devnets and test upgrades or integrations before mainnet deployment.

      Success Indicators: Public testnet operational with ≥99% uptime and integrated into CI and simulation workflows. Developer and client teams actively use the infrastructure for validation and testing.

      Key Milestones
      Milestone Target Completion Description
      Partner Onboarding Completed Q4 2025 Protocol Engineering & Security Partner contracted and operational, and security and triage procedures aligned with the Security Committee.
      Continuous Immunefi Vulnerability Response All of H1 2026 Maintain full first-response capability for Immunefi reports: reproduce issues, propose fixes, coordinate Security Committee review, and ensure continuous coverage.
      Public Testnet Live Q1 2026 Launch a stable, persistent public testnet with faucet, CI integration, and reproducible deployment tooling.
      Triage Pipeline Established & First Upgrade Shipped Q1 2026 Lightweight triage process established and validated through at least one feature or protocol upgrade shipped to mainnet.
      Triage Pipeline Updated & Additional Upgrade Shipped Q2 2026 Triage pipeline updated, with at least one additional upgrade triaged and deployed to mainnet.
      Six-Month Review & Renewal Assessment Q2 2026 Performance and financial review concluded by the SPE Board; results shared publicly and renewal proposal prepared.
      SPE Governance Structure

      The Protocol R&D SPE is managed and governed by the Livepeer Foundation and Livepeer Security Committee. Through their collaboration, they enable the work of the Protocol Engineering & Security Partner.

      The exact operations of security practices are not shared here.
      SPE funds are held in a secure multisig SAFE with a threshold of known, trusted signers from the Foundation and the Security Committee, following standard security practices.

      The SPE will operate transparently through quarterly public reporting, open development and open access to non-sensitive work.

      Roles & Responsibilities
      Body / Role Responsibilities Scope Funding Source
      Security Committee Review and execute upgrades and patches as a final security checkpoint Security oversight; upgrade authorization & execution. Livepeer Inc.
      Foundation Coordinate roadmap and delivery, manage funds and payouts for Immunefi, audits and security partner milestones Program and roadmap management, coordination and treasury/ops. Foundation
      Protocol Engineering & Security Partner First responder for patches, implementer of new protocol features, audited upgrades, and patches, and build/maintenance of testnet and tooling components On-chain development, security response, contract CI/tooling, on-chain testnet components. SPE
      Budget

      The Protocol R&D SPE seeks the equivalent of $360,000. This ensures 24/7 responsiveness from the team in addition to their core security development work.

      The budget includes a line item for audits to ensure that significant protocol changes and new implementations receive appropriate security review before deployment. Other necessary costs for executing this SPE, such as the Foundation’s protocol engineer, infrastructure, and operations, are covered separately by the Foundation.

      A core responsibility of the SPE is managing the Immunefi bounty program. The Livepeer Foundation will cover Immunefi payouts in the short term to avoid withdrawing capital from the treasury until necessary. This approach allows treasury capital to continue supporting other strategic initiatives across the ecosystem. As part of the Foundation, I can share that we are glad to support this active capital management to advance Livepeer’s collective goals.

      Projected Spending:
      Category LPT USD Description
      Protocol Engineering & Security Partner (team) N $300,000 Six-month engagement focused on security response, prioritized backlog features, and on-chain testnet ops.
      Audits & External Reviews N $60,000 Third-party security reviews (reserve-based)
      Total Initial Request N $360,000
      Key Terms
      Term Definition
      Protocol R&D SPE A Special Purpose Entity funded by the Livepeer Treasury to manage protocol research, development, and security operations.
      Protocol Engineering & Security Partner The contracted team responsible for hands-on protocol development, audits, and vulnerability response under the SPE framework.
      Security Committee Oversight body responsible for reviewing protocol upgrades, validating critical patches, and guiding decentralization of security responsibilities.
      Immunefi Program Livepeer’s bug-bounty initiative that incentivizes whitehat researchers to identify and responsibly disclose vulnerabilities in the protocol. Managed under the SPE to ensure continuous coverage and rapid triage.
      Triage Pipeline The structured process for evaluating, prioritizing, and implementing protocol work, including community proposals (LIPs) and vulnerability reports, through coordinated specification, review, and deployment stages.
      Public Testnet A continuously maintained network environment mirroring mainnet, used for protocol validation, client testing, and developer experimentation before production deployments.
      DevEx Tooling Developer-experience infrastructure, including CI pipelines, simulations, and documentation, enabling contributors to test and validate protocol upgrades efficiently and safely.
      SPE Board The governance body composed of representatives from the Foundation, Security Committee, and Livepeer Inc., responsible for approvals, budget oversight, and performance reviews.
      Audits Independent security reviews performed by external experts to assess the safety, correctness, and performance of protocol changes before deployment.
      Multisig SAFE A secure multi-signature wallet used for custody and management of SPE funds, requiring approval from designated Foundation and Security Committee signers.
      ', + replyCount: 9, + datePosted: 'Dec 11, 2025', + } +]; diff --git a/snippets/automationData/globals/README.mdx b/snippets/automations/globals/README.mdx similarity index 92% rename from snippets/automationData/globals/README.mdx rename to snippets/automations/globals/README.mdx index 78ea95bd..d4f94f09 100644 --- a/snippets/automationData/globals/README.mdx +++ b/snippets/automations/globals/README.mdx @@ -66,7 +66,7 @@ jobs: - name: Read current version from globals.jsx id: current_version run: | - CURRENT=$(grep -oP 'LatestRelease:\s*["'\''\"]?\K[^"'\'']+' snippets/automationData/globals/globals.jsx || echo "") + CURRENT=$(grep -oP 'LatestRelease:\s*["'\''\"]?\K[^"'\'']+' snippets/automations/globals/globals.jsx || echo "") echo "current=${CURRENT}" >> $GITHUB_OUTPUT echo "Current version: ${CURRENT}" @@ -74,14 +74,14 @@ jobs: if: steps.get_release.outputs.release != steps.current_version.outputs.current run: | # Create backup - cp snippets/automationData/globals/globals.jsx snippets/automationData/globals/globals.jsx.bak + cp snippets/automations/globals/globals.jsx snippets/automations/globals/globals.jsx.bak # Update the LatestRelease value - sed -i "s/LatestRelease:[[:space:]]*[\"'][^\"']*[\"']/LatestRelease: \"${{ steps.get_release.outputs.release }}\"/" snippets/automationData/globals/globals.jsx + sed -i "s/LatestRelease:[[:space:]]*[\"'][^\"']*[\"']/LatestRelease: \"${{ steps.get_release.outputs.release }}\"/" snippets/automations/globals/globals.jsx # Verify the change echo "Updated content:" - grep "LatestRelease" snippets/automationData/globals/globals.jsx + grep "LatestRelease" snippets/automations/globals/globals.jsx - name: Commit and push if changed if: steps.get_release.outputs.release != steps.current_version.outputs.current diff --git a/snippets/automationData/globals/globals.jsx b/snippets/automations/globals/globals.jsx similarity index 100% rename from snippets/automationData/globals/globals.jsx rename to 
snippets/automations/globals/globals.jsx diff --git a/snippets/automationData/globals/globals.mdx b/snippets/automations/globals/globals.mdx similarity index 100% rename from snippets/automationData/globals/globals.mdx rename to snippets/automations/globals/globals.mdx diff --git a/snippets/automations/luma/lumaEventsData.jsx b/snippets/automations/luma/lumaEventsData.jsx new file mode 100644 index 00000000..6443e8e0 --- /dev/null +++ b/snippets/automations/luma/lumaEventsData.jsx @@ -0,0 +1,187 @@ +export const lumaEventsData = { + lastUpdated: "2026-01-21T08:39:40.878Z", + upcoming: [ + ], + past: [ + { + title: "AI x Open Media Forum presented by Livepeer, co-curated with Refraction", + date: "November 18, 2025", + location: "https://luma.com/event/evt-KWn61dZNxwOf7tP", + url: "https://luma.com/9q0swwro" + }, + { + title: "SLC Livepeer Delegator Workshops", + date: "July 26, 2025", + location: "https://luma.com/event/evt-wRQfFL4REh1KEwm", + url: "https://luma.com/wfdaaujk" + }, + { + title: "Virtual Livepeer Delegator Workshop", + date: "July 23, 2025", + location: "https://luma.com/event/evt-j9zlkAhOTSKbtYU", + url: "https://luma.com/2si5dp2x" + }, + { + title: "Abuja Livepeer Delegator Workshop", + date: "July 19, 2025", + location: "https://luma.com/event/evt-OPud7laxPHK87V7", + url: "https://luma.com/2bl3t9jn" + }, + { + title: "Livepeer Treasury Talk 💰", + date: "July 7, 2025", + location: "https://luma.com/event/evt-1yHgJArDXMmyB3j", + url: "https://luma.com/n7rpu9wt" + }, + { + title: "The Brunch™ (Cannes) - Builder Brunch at ETHCC", + date: "July 3, 2025", + location: "https://luma.com/event/evt-eqtiphMEMwFuHdp", + url: "https://luma.com/xzbn0cxc" + }, + { + title: "Live AI Fashion Hackathon", + date: "June 10, 2025", + location: "https://luma.com/event/evt-y9bE78VDvyVyFPF", + url: "https://luma.com/tijlbvq6" + }, + { + title: "Livepeer Open Ecosystem Call", + date: "June 6, 2025", + location: "https://luma.com/event/evt-z4GLweG2CVSW81e", + url: 
"https://luma.com/6ckodf8u" + }, + { + title: "Livepeer Core Dev Call", + date: "May 15, 2025", + location: "https://luma.com/event/evt-wEQR5bO6XaRN3aO", + url: "https://luma.com/1nn2dunw" + }, + { + title: "Daydream Creator Sessions", + date: "May 8, 2025", + location: "https://luma.com/event/evt-PxASpZkEiflGNde", + url: "https://luma.com/5dl1e8ds" + }, + { + title: "IRL Daydream in Greenpoint with Maachew Bentley (063N13)", + date: "May 1, 2025", + location: "Ponyboy, 632 Manhattan Ave, Brooklyn, NY 11222, USA", + url: "https://luma.com/bl9x3zz9" + }, + { + title: "Real-Time Video AI @GenART NYU with ComfyUI & Livepeer", + date: "April 18, 2025", + location: "370 Jay St 4th floor, Brooklyn, NY 11201, USA", + url: "https://luma.com/wyvt8b4k" + }, + { + title: "Open Source AI Meetup Amsterdam", + date: "March 21, 2025", + location: "Mauritskade 57, 1092 AD Amsterdam, Netherlands", + url: "https://luma.com/zgm3iz35" + }, + { + title: "Workflow Competition: Innovating Realtime Video AI", + date: "March 17, 2025", + location: "https://luma.com/event/evt-IdZR5WmEE8NDpPC", + url: "https://luma.com/ztyb4wr4" + }, + { + title: "ComfyUI Official Meetup - Austin AI Film Fest Edition", + date: "March 14, 2025", + location: "AT&T Hotel and Conference Center, 1900 University Ave, Austin, TX 78705, USA", + url: "https://luma.com/nkiothz3" + }, + { + title: "Open Source & Creative AI: Using ComfyUI for Real Time Video AI", + date: "February 25, 2025", + location: "Code Talent, 3412 Blake St, Denver, CO 80205, USA", + url: "https://luma.com/dkuob1j4" + }, + { + title: "ComfyUI Official NYC February Meet-Up", + date: "February 19, 2025", + location: "https://luma.com/event/evt-Ho2RAER8bUJ0V9Q", + url: "https://luma.com/ettshrqa" + }, + { + title: "Real-Time Video AI @GenART NYU with ComfyUI & Livepeer", + date: "January 31, 2025", + location: "370 Jay St 4th floor, Brooklyn, NY 11201, USA", + url: "https://luma.com/cene9t4y" + }, + { + title: "ComfyUI Hacker Program Demo Day", + 
date: "January 31, 2025", + location: "https://luma.com/event/evt-jovMI8YYwF57G0H", + url: "https://luma.com/5fe2977r" + }, + { + title: "Whats New @Livepeer", + date: "January 29, 2025", + location: "https://luma.com/event/evt-wQHF1QiB98kQ9uW", + url: "https://luma.com/opmnkhna" + }, + { + title: "Livepeer: StreamDiffusion Workshop", + date: "January 27, 2025", + location: "https://luma.com/event/evt-KlsC8BJyisKKsAU", + url: "https://luma.com/yl91e6yy" + }, + { + title: "Weekly Water Cooler Chat", + date: "December 23, 2024", + location: "https://luma.com/event/evt-MMk14m6djg9XwQD", + url: "https://luma.com/qpvkmiyq" + }, + { + title: "ComfyStream Contributors Workshop", + date: "December 19, 2024", + location: "https://luma.com/event/evt-e4CLbc5vMwUeH9S", + url: "https://luma.com/8lt1q50y" + }, + { + title: "AI Video Hackathon: Finale and Prizegiving", + date: "November 26, 2024", + location: "https://luma.com/event/evt-eciLN0qY3oNVRQz", + url: "https://luma.com/E0466_2889" + }, + { + title: "AI Community Research Report: ComfyUI Case Study", + date: "November 5, 2024", + location: "https://luma.com/event/evt-U8GgnmpFsM6WzYb", + url: "https://luma.com/ltaqk21p" + }, + { + title: "Livepeer AI Orchestrator Logo Generation", + date: "October 30, 2024", + location: "https://luma.com/event/evt-3fl2yqHXznPAs26", + url: "https://luma.com/5tg36ots" + }, + { + title: "AI Startup Program Demo Day", + date: "October 9, 2024", + location: "https://luma.com/event/evt-BDB36ZqZBbjwCUS", + url: "https://luma.com/mhr5reat" + }, + { + title: "LIMITLESS: TOKEN-POWERED AI", + date: "September 17, 2024", + location: "ArtScience Museum, 6 Bayfront Ave, Singapore 018974", + url: "https://luma.com/xqvgrmuv" + }, + { + title: "Happy Hour w/ Livepeer", + date: "July 11, 2024", + location: "Reset, Rue de Ligne 8, 1000 Bruxelles, Belgium", + url: "https://luma.com/j8rw4jva" + }, + { + title: "GEN VIDEO Summit - The Future of Decentralized AI Media & Streaming", + date: "May 23, 2024", + 
location: "NEST Schank- und Speisewirtschaft, Görlitzer Str. 52, 10997 Berlin, Germany", + url: "https://luma.com/4ochjrc3" + } + ] +}; diff --git a/snippets/automations/scripts/n8n/Discord_Announce_to_Mintlify.json b/snippets/automations/scripts/n8n/Discord_Announce_to_Mintlify.json new file mode 100644 index 00000000..ae1bf811 --- /dev/null +++ b/snippets/automations/scripts/n8n/Discord_Announce_to_Mintlify.json @@ -0,0 +1,316 @@ +{ + "name": "Discord_Announce_to_Mintlify", + "nodes": [ + { + "parameters": { + "rule": { + "interval": [ + {} + ] + } + }, + "id": "38f769ec-ef3c-41d6-9805-81f98b0e86e6", + "name": "Schedule Trigger", + "type": "n8n-nodes-base.scheduleTrigger", + "typeVersion": 1.2, + "position": [ + -576, + -336 + ] + }, + { + "parameters": { + "resource": "message", + "operation": "getAll", + "guildId": { + "__rl": true, + "value": "={{ $json.discordServerID }}", + "mode": "id" + }, + "channelId": { + "__rl": true, + "value": "={{ $json.discordChannelID }}", + "mode": "id" + }, + "limit": 50, + "options": {} + }, + "id": "c463d2b2-caca-423a-aaa3-b1f4a80e21d8", + "name": "Get Discord Messages", + "type": "n8n-nodes-base.discord", + "typeVersion": 2, + "position": [ + -192, + -336 + ], + "webhookId": "1a6cec03-797e-4a28-b0a0-0c7d848eddb3", + "credentials": { + "discordBotApi": { + "id": "w1Jsx7w9upr3KgFD", + "name": "Discord Bot account" + } + } + }, + { + "parameters": { + "conditions": { + "options": { + "caseSensitive": true, + "leftValue": "", + "typeValidation": "strict" + }, + "conditions": [ + { + "id": "filter-recent", + "leftValue": "={{ new Date($json.timestamp).getTime() }}", + "rightValue": "={{ Date.now() - (24 * 60 * 60 * 1000) }}", + "operator": { + "type": "number", + "operation": "gt" + } + } + ], + "combinator": "and" + }, + "options": {} + }, + "id": "23a51220-2e7b-454c-886d-1dfb1701c009", + "name": "Filter Recent Messages", + "type": "n8n-nodes-base.filter", + "typeVersion": 2, + "position": [ + 16, + -336 + ] + }, + { + 
"parameters": { + "jsCode": "const announcements = [];\n\nfor (const item of $input.all()) {\n const message = item.json;\n \n // Skip non-normal messages (type 12 is channel follow notification)\n if (message.type !== 0) continue;\n \n // Extract content from message snapshots (cross-posted messages)\n let content = message.content;\n if (!content && message.message_snapshots && message.message_snapshots.length > 0) {\n content = message.message_snapshots[0].message.content;\n }\n \n // Skip if still no content\n if (!content) continue;\n \n // Get original message reference for better URL\n const originalGuildId = message.message_reference?.guild_id || message.guild_id;\n const originalChannelId = message.message_reference?.channel_id || message.channel_id;\n const originalMessageId = message.message_reference?.message_id || message.id;\n \n announcements.push({\n id: message.id,\n content: content,\n author: message.author.global_name || message.author.username,\n timestamp: message.message_snapshots && message.message_snapshots.length > 0 \n ? 
message.message_snapshots[0].message.timestamp \n : message.timestamp,\n url: `https://discord.com/channels/${originalGuildId}/${originalChannelId}/${originalMessageId}`,\n attachments: message.attachments || [],\n embeds: message.embeds || []\n });\n}\n\n// Sort by timestamp, newest first\nannouncements.sort((a, b) => new Date(b.timestamp) - new Date(a.timestamp));\n\nreturn [{ json: { announcements } }];" + }, + "id": "6337e61c-c39e-4db0-b742-07a99bddf5dd", + "name": "Process Announcements", + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [ + 224, + -336 + ] + }, + { + "parameters": { + "jsCode": "const announcements = $input.first().json.announcements;\n\n// Helper function to escape JSX content\nfunction escapeJSX(str) {\n if (!str) return '';\n return str\n .replace(/&/g, '&')\n .replace(//g, '>')\n .replace(/\"/g, '"')\n .replace(/'/g, ''')\n .replace(/\\{/g, '{')\n .replace(/\\}/g, '}');\n}\n\n// Helper function to format Discord markdown to HTML\nfunction formatContent(content) {\n if (!content) return '';\n \n let formatted = escapeJSX(content);\n \n // Convert Discord markdown\n formatted = formatted\n .replace(/\\*\\*(.+?)\\*\\*/g, '$1') // Bold\n .replace(/\\*(.+?)\\*/g, '$1') // Italic\n .replace(/\\n/g, '
      '); // Line breaks\n \n return formatted;\n}\n\n// Generate JSX content\nconst jsxContent = `export const DiscordAnnouncements = () => {\n const announcements = [\n${announcements.map(ann => ` {\n id: \"${ann.id}\",\n content: \"${formatContent(ann.content)}\",\n author: \"${escapeJSX(ann.author)}\",\n timestamp: \"${ann.timestamp}\",\n url: \"${ann.url}\"\n }`).join(',\\n')}\n ];\n\n return (\n
      \n
      \n

      Latest Livepeer Announcements

      \n

      From Discord

      \n
      \n
      \n {announcements.map((announcement) => (\n
      \n
      \n {announcement.author}\n \n \n
      \n \n ))}\n
      \n
      \n );\n};\n`;\n\nreturn [{ json: { content: jsxContent, announcements } }];" + }, + "id": "013f9f4a-baca-4fb0-ac7e-c51965c4f55e", + "name": "Generate JSX", + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [ + 416, + -336 + ] + }, + { + "parameters": { + "resource": "file", + "operation": "edit", + "owner": { + "__rl": true, + "value": "={{ $json.githubOwner }}", + "mode": "" + }, + "repository": { + "__rl": true, + "value": "={{ $json.githubRepo }}", + "mode": "" + }, + "filePath": "={{ $json.githubFilePath }}", + "fileContent": "={{ $('Generate JSX').item.json.content }}", + "commitMessage": "=commitMessage: `chore: create Discord announcements file from workflow - ${new Date().toISOString()}`", + "additionalParameters": { + "branch": { + "branch": "={{ $json.githubBranch }}" + } + } + }, + "id": "98eb0352-31ab-4a2e-b9d7-9070f459917b", + "name": "Update GitHub File", + "type": "n8n-nodes-base.github", + "typeVersion": 1, + "position": [ + 1040, + -400 + ], + "webhookId": "a1db9fa1-0d11-4d5c-89c8-28f69cbfb60e", + "credentials": { + "githubApi": { + "id": "vAAQD9gcQcGNKMOH", + "name": "Github Livepeer/docs Write Commit Token" + } + } + }, + { + "parameters": { + "assignments": { + "assignments": [ + { + "id": "github_owner", + "name": "githubOwner", + "value": "livepeer", + "type": "string" + }, + { + "id": "github_repo", + "name": "githubRepo", + "value": "docs", + "type": "string" + }, + { + "id": "github_path", + "name": "githubFilePath", + "value": "snippets/automations/discord/discordAnnouncementsData.jsx", + "type": "string" + }, + { + "id": "077c994c-4563-4210-8690-3b00fe4dba99", + "name": "githubBranch", + "value": "docs-v2-preview", + "type": "string" + }, + { + "id": "293846b3-b346-4a17-96fc-880b2917db8d", + "name": "discordServerID", + "value": "1066890817425387581", + "type": "string" + }, + { + "id": "5cf8e964-1dad-40bd-9813-1b23ecc6e10e", + "name": "discordChannelID", + "value": "1463391944746078319", + "type": "string" + } + ] 
+ }, + "options": {} + }, + "name": "Config", + "type": "n8n-nodes-base.set", + "typeVersion": 3.3, + "position": [ + -384, + -336 + ], + "id": "a0ccaed1-687b-4ac9-8f5a-50ff9d10cd21" + }, + { + "parameters": { + "mode": "combine", + "combineBy": "combineByPosition", + "options": {} + }, + "type": "n8n-nodes-base.merge", + "typeVersion": 3.2, + "position": [ + 784, + -496 + ], + "id": "ec413328-ecc7-46ed-8f7a-10cb3eb00c77", + "name": "Merge" + } + ], + "pinData": {}, + "connections": { + "Schedule Trigger": { + "main": [ + [ + { + "node": "Config", + "type": "main", + "index": 0 + } + ] + ] + }, + "Get Discord Messages": { + "main": [ + [ + { + "node": "Filter Recent Messages", + "type": "main", + "index": 0 + } + ] + ] + }, + "Filter Recent Messages": { + "main": [ + [ + { + "node": "Process Announcements", + "type": "main", + "index": 0 + } + ] + ] + }, + "Process Announcements": { + "main": [ + [ + { + "node": "Generate JSX", + "type": "main", + "index": 0 + } + ] + ] + }, + "Generate JSX": { + "main": [ + [ + { + "node": "Merge", + "type": "main", + "index": 1 + } + ] + ] + }, + "Config": { + "main": [ + [ + { + "node": "Get Discord Messages", + "type": "main", + "index": 0 + }, + { + "node": "Merge", + "type": "main", + "index": 0 + } + ] + ] + }, + "Merge": { + "main": [ + [ + { + "node": "Update GitHub File", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "active": false, + "settings": { + "executionOrder": "v1" + }, + "versionId": "27728702-e1aa-40f2-877b-ba59e857eb82", + "meta": { + "templateCredsSetupCompleted": true, + "instanceId": "b77ce31e344450acd6ad20bd6cde79f46c1e07d55921483122bfba86e8350352" + }, + "id": "zmXdoAYwgqwSESAV", + "tags": [] +} \ No newline at end of file diff --git a/snippets/automations/scripts/n8n/Forum-To-Mintlify-Latest-Topics.json b/snippets/automations/scripts/n8n/Forum-To-Mintlify-Latest-Topics.json new file mode 100644 index 00000000..e6048601 --- /dev/null +++ 
b/snippets/automations/scripts/n8n/Forum-To-Mintlify-Latest-Topics.json @@ -0,0 +1,324 @@ +{ + "name": "Forum-To-Mintlify-Latest-Topics", + "nodes": [ + { + "parameters": { + "functionCode": "return items.map(item => {\n const topic = item.json;\n const first = topic.post_stream?.posts?.find(p => p.post_number === 1);\n return {\n json: {\n id: topic.id,\n title: topic.title,\n url: `https://forum.livepeer.org/t/${topic.id}`,\n authorName: first?.name || first?.username || \"Unknown\",\n authorUsername: first?.username || \"unknown\",\n body: first?.cooked || \"\",\n replyCount: topic.posts_count - 1 || 0, // Subtract 1 for original post\n createdAt: topic.created_at || first?.created_at || \"\",\n updatedAt: topic.updated_at || first?.updated_at || \"\"\n }\n };\n});" + }, + "id": "9c954e20-38a6-4f89-b661-9653e835fe49", + "name": "Extract Original Post w/ Author", + "type": "n8n-nodes-base.function", + "position": [ + 2560, + 720 + ], + "typeVersion": 1 + }, + { + "parameters": { + "resource": "file", + "operation": "edit", + "owner": { + "__rl": true, + "value": "DeveloperAlly", + "mode": "list", + "cachedResultName": "DeveloperAlly", + "cachedResultUrl": "https://github.com/DeveloperAlly" + }, + "repository": { + "__rl": true, + "value": "livepeer-automations", + "mode": "list", + "cachedResultName": "livepeer-automations", + "cachedResultUrl": "https://github.com/DeveloperAlly/livepeer-automations" + }, + "filePath": "data/forumData.jsx", + "fileContent": "={{ $json.fileContent }}", + "commitMessage": "=Update forum data - {{ $now.toISO() }}" + }, + "type": "n8n-nodes-base.github", + "typeVersion": 1.1, + "position": [ + 3712, + 848 + ], + "id": "84e56137-4d69-49a2-8ae7-d914c49776e6", + "name": "Edit a file", + "webhookId": "0a16afd5-8684-4178-bff3-e0eaea0c81bb", + "credentials": { + "githubApi": { + "id": "jjy0epl4eqPHYqlG", + "name": "GitHub account" + } + } + }, + { + "parameters": { + "mode": "combine", + "combineBy": "combineByPosition", + "options": {} + 
}, + "type": "n8n-nodes-base.merge", + "typeVersion": 3.2, + "position": [ + 2944, + 848 + ], + "id": "1c6bc37f-5c4b-4a7a-ae7e-bef6e2d02e1e", + "name": "Merge" + }, + { + "parameters": { + "functionCode": "const list = items[0].json.topic_list?.topics || [];\nreturn list.map(t => ({ json: t }));" + }, + "id": "1d258870-9cbf-4779-851d-1cd3e0d04716", + "name": "Extract All Topics", + "type": "n8n-nodes-base.function", + "position": [ + 1792, + 736 + ], + "typeVersion": 1 + }, + { + "parameters": { + "url": "https://forum.livepeer.org/latest.json", + "options": {} + }, + "id": "4b036856-5604-481a-b608-9fb47b6e3160", + "name": "Fetch Latest Topics", + "type": "n8n-nodes-base.httpRequest", + "position": [ + 1536, + 736 + ], + "typeVersion": 1 + }, + { + "parameters": { + "functionCode": "function isOldPinned(t) {\n const pinned = t.json.pinned === true || t.json.pinned_globally === true;\n if (!pinned) return false;\n const created = new Date(t.json.created_at);\n const now = new Date();\n const ageDays = (now - created) / (1000 * 60 * 60 * 24);\n return ageDays > 30;\n}\n\nlet topics = items.filter(t => !isOldPinned(t));\nconst top4 = topics.slice(0, 4);\nreturn top4;" + }, + "id": "3a2a14b5-c878-47ad-bd45-001e4d48942a", + "name": "Filter Top 4 (Exclude Old Pinned)", + "type": "n8n-nodes-base.function", + "position": [ + 2032, + 736 + ], + "typeVersion": 1 + }, + { + "parameters": { + "url": "=https://forum.livepeer.org/t/{{$json.id}}", + "options": { + "fullResponse": false + } + }, + "id": "95915c7f-1c6e-433c-817f-5c92a11b7d11", + "name": "Fetch Topic JSON", + "type": "n8n-nodes-base.httpRequest", + "position": [ + 2304, + 720 + ], + "typeVersion": 1 + }, + { + "parameters": { + "url": "=https://forum.livepeer.org/raw/{{$json.id }}/1", + "options": { + "response": { + "response": { + "responseFormat": "text" + } + } + } + }, + "type": "n8n-nodes-base.httpRequest", + "typeVersion": 4.2, + "position": [ + 2304, + 896 + ], + "id": "c04b94f3-7c8e-4c4c-8bee-3e51eca30e7b", 
+ "name": "Fetch Topic Raw" + }, + { + "parameters": { + "functionCode": "return [{ json: { topics: items.map(i => i.json) } }];" + }, + "id": "9c693984-c37c-47e7-bfea-b4cc57b3d8c2", + "name": "Aggregate Topics", + "type": "n8n-nodes-base.function", + "position": [ + 3248, + 848 + ], + "typeVersion": 1 + }, + { + "parameters": { + "triggerTimes": { + "item": [ + {} + ] + } + }, + "id": "b650689a-f9d1-4751-803e-d689e63d6a67", + "name": "Run Daily", + "type": "n8n-nodes-base.cron", + "position": [ + 1280, + 736 + ], + "typeVersion": 1 + }, + { + "parameters": { + "jsCode": "const topics = items[0].json.topics;\nconst forumData = [];\n\n// HTML cleaner function - keeps basic HTML formatting\nfunction cleanAndFormatHTML(html) {\n let cleanHTML = html;\n \n // Remove anchor navigation links\n cleanHTML = cleanHTML.replace(/]*name=\"[^\"]*\"[^>]*class=\"anchor\"[^>]*>.*?<\\/a>/g, '');\n \n // Clean up headings\n cleanHTML = cleanHTML.replace(/]*>(.*?)<\\/h1>/g, '

      $1

      ');\n cleanHTML = cleanHTML.replace(/]*>(.*?)<\\/h2>/g, '

      $1

      ');\n cleanHTML = cleanHTML.replace(/]*>(.*?)<\\/h3>/g, '
      $1
      ');\n cleanHTML = cleanHTML.replace(/]*>(.*?)<\\/h[4-6]>/g, '
      $1
      ');\n \n // Clean up images and their references\n cleanHTML = cleanHTML.replace(/]*class=\"lightbox\"[^>]*>.*?<\\/a>/g, ''); // Remove lightbox wrappers\n cleanHTML = cleanHTML.replace(/]*class=\"lightbox-wrapper\"[^>]*>.*?<\\/div>/g, ''); // Remove lightbox divs\n cleanHTML = cleanHTML.replace(/]*>/g, ''); // Remove img tags\n cleanHTML = cleanHTML.replace(/\\[!\\[.*?\\]\\(.*?\\)\\]\\(.*?\\)/g, ''); // Remove markdown image links\n cleanHTML = cleanHTML.replace(/image\\d+×\\d+\\s+[\\d.]+\\s*[KM]B/gi, ''); // Remove image size text\n \n // Keep paragraphs, lists, emphasis, code\n cleanHTML = cleanHTML.replace(/

      /g, '

      ');\n cleanHTML = cleanHTML.replace(/<\\/p>/g, '

      ');\n cleanHTML = cleanHTML.replace(/
        /g, '
          ');\n cleanHTML = cleanHTML.replace(/<\\/ul>/g, '
        ');\n cleanHTML = cleanHTML.replace(/
      • /g, '
      • ');\n cleanHTML = cleanHTML.replace(/<\\/li>/g, '
      • ');\n cleanHTML = cleanHTML.replace(/(.*?)<\\/strong>/g, '$1');\n cleanHTML = cleanHTML.replace(/(.*?)<\\/em>/g, '$1');\n cleanHTML = cleanHTML.replace(/(.*?)<\\/code>/g, '$1');\n \n // Simplify links\n cleanHTML = cleanHTML.replace(/]*href=\"([^\"]*)\"[^>]*>(.*?)<\\/a>/g, '$2');\n \n // Decode HTML entities\n cleanHTML = cleanHTML.replace(/&/g, '&');\n cleanHTML = cleanHTML.replace(/</g, '<');\n cleanHTML = cleanHTML.replace(/>/g, '>');\n cleanHTML = cleanHTML.replace(/"/g, '\"');\n cleanHTML = cleanHTML.replace(/'/g, \"'\");\n cleanHTML = cleanHTML.replace(/ /g, ' ');\n \n // Clean up whitespace\n cleanHTML = cleanHTML.replace(/\\s+/g, ' ');\n cleanHTML = cleanHTML.replace(/

        \\s*<\\/p>/g, '');\n \n cleanHTML = cleanHTML.trim();\n \n return cleanHTML;\n}\n\nfor (const t of topics) {\n // Convert to clean HTML\n const htmlContent = cleanAndFormatHTML(t.body);\n \n // Format the date nicely\n const datePosted = t.createdAt ? new Date(t.createdAt).toLocaleDateString('en-US', {\n year: 'numeric',\n month: 'short',\n day: 'numeric'\n }) : '';\n \n forumData.push({\n title: t.title,\n href: t.url,\n author: `By ${t.authorName} (@${t.authorUsername})`,\n content: htmlContent, // Clean HTML\n replyCount: t.replyCount || 0,\n datePosted: datePosted\n });\n}\n\n// Generate the JavaScript export string\nlet jsExport = 'export const forumData = [\\n';\nforumData.forEach((item, index) => {\n jsExport += ' {\\n';\n \n // Title\n jsExport += ` title: '${item.title.replace(/\\\\/g, '\\\\\\\\').replace(/'/g, \"\\\\'\")}',\\n`;\n \n // URL\n jsExport += ` href: '${item.href}',\\n`;\n \n // Author\n jsExport += ` author: '${item.author.replace(/\\\\/g, '\\\\\\\\').replace(/'/g, \"\\\\'\")}',\\n`;\n \n // Content - HTML, properly escaped for JS string\n const escapedContent = item.content\n .replace(/\\\\/g, '\\\\\\\\')\n .replace(/'/g, \"\\\\'\")\n .replace(/\"/g, '\\\\\"')\n .replace(/\\n/g, '\\\\n')\n .replace(/\\r/g, '\\\\r')\n .replace(/\\t/g, '\\\\t');\n jsExport += ` content: '${escapedContent}',\\n`;\n \n // Reply count\n jsExport += ` replyCount: ${item.replyCount},\\n`;\n \n // Date posted\n jsExport += ` datePosted: '${item.datePosted}',\\n`;\n \n jsExport += ' }';\n if (index < forumData.length - 1) {\n jsExport += ',';\n }\n jsExport += '\\n';\n});\njsExport += '];\\n';\n\nreturn [{ json: { fileContent: jsExport } }];" + }, + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [ + 3488, + 848 + ], + "id": "738c8086-e2fd-4815-a78a-021c0539c69d", + "name": "Build ForumData.jsx [mdx content]" + } + ], + "pinData": {}, + "connections": { + "Extract Original Post w/ Author": { + "main": [ + [ + { + "node": "Merge", + "type": 
"main", + "index": 0 + } + ] + ] + }, + "Edit a file": { + "main": [ + [] + ] + }, + "Merge": { + "main": [ + [ + { + "node": "Aggregate Topics", + "type": "main", + "index": 0 + } + ] + ] + }, + "Extract All Topics": { + "main": [ + [ + { + "node": "Filter Top 4 (Exclude Old Pinned)", + "type": "main", + "index": 0 + } + ] + ] + }, + "Fetch Latest Topics": { + "main": [ + [ + { + "node": "Extract All Topics", + "type": "main", + "index": 0 + } + ] + ] + }, + "Filter Top 4 (Exclude Old Pinned)": { + "main": [ + [ + { + "node": "Fetch Topic JSON", + "type": "main", + "index": 0 + }, + { + "node": "Fetch Topic Raw", + "type": "main", + "index": 0 + } + ] + ] + }, + "Fetch Topic JSON": { + "main": [ + [ + { + "node": "Extract Original Post w/ Author", + "type": "main", + "index": 0 + } + ] + ] + }, + "Fetch Topic Raw": { + "main": [ + [ + { + "node": "Merge", + "type": "main", + "index": 1 + } + ] + ] + }, + "Aggregate Topics": { + "main": [ + [ + { + "node": "Build ForumData.jsx [mdx content]", + "type": "main", + "index": 0 + } + ] + ] + }, + "Run Daily": { + "main": [ + [ + { + "node": "Fetch Latest Topics", + "type": "main", + "index": 0 + } + ] + ] + }, + "Build ForumData.jsx [mdx content]": { + "main": [ + [ + { + "node": "Edit a file", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "active": false, + "settings": { + "executionOrder": "v1" + }, + "versionId": "dc5aba83-7f21-405f-b960-6d7ded2b952e", + "meta": { + "templateCredsSetupCompleted": true, + "instanceId": "b77ce31e344450acd6ad20bd6cde79f46c1e07d55921483122bfba86e8350352" + }, + "id": "qBcNA3S15BdUz55M", + "tags": [] +} \ No newline at end of file diff --git a/snippets/automations/scripts/n8n/Ghost-to-Mintlify.json b/snippets/automations/scripts/n8n/Ghost-to-Mintlify.json new file mode 100644 index 00000000..ebae0b6f --- /dev/null +++ b/snippets/automations/scripts/n8n/Ghost-to-Mintlify.json @@ -0,0 +1,157 @@ +{ + "name": "Ghost-to-Mintlify", + "nodes": [ + { + "parameters": { + "rule": { + 
"interval": [ + {} + ] + } + }, + "type": "n8n-nodes-base.scheduleTrigger", + "typeVersion": 1.2, + "position": [ + 0, + 0 + ], + "id": "4ba480b5-a326-4d11-92f9-5432b6246edb", + "name": "Schedule Trigger" + }, + { + "parameters": { + "url": "https://livepeer-studio.ghost.io/ghost/api/content/posts/", + "sendQuery": true, + "queryParameters": { + "parameters": [ + { + "name": "=key", + "value": "eaf54ba5c9d4ab35ce268663b0" + }, + { + "name": "limit", + "value": "4" + }, + { + "name": "include", + "value": "tags, authors" + } + ] + }, + "options": {} + }, + "type": "n8n-nodes-base.httpRequest", + "typeVersion": 4.2, + "position": [ + 208, + 0 + ], + "id": "383d6d68-71ce-424b-82b8-c1ae57655488", + "name": "HTTP Request" + }, + { + "parameters": { + "jsCode": "function safeHTML(html) {\n // Escape ONLY backticks\n return (html || \"\").replace(/`/g, \"\\\\`\");\n}\n\nfunction formatDate(iso) {\n return new Date(iso).toLocaleDateString(\"en-US\", {\n month: \"short\",\n day: \"numeric\",\n year: \"numeric\"\n });\n}\n\nconst posts = $json.posts.map(p => ({\n title: p.title,\n href: p.url,\n author: p.primary_author?.name \n ? 
`By ${p.primary_author.name}`\n : \"By Livepeer Team\",\n\n // SAFE VERSION — template literal inside code export\n content: safeHTML(p.html),\n\n datePosted: formatDate(p.published_at),\n feature_image: p.feature_image,\n excerpt: safeHTML(p.excerpt),\n reading_time: p.reading_time,\n}));\n\n\nconst js = `export const ghostData = [\n${posts.map(post => `{\n title: \\`${post.title}\\`,\n href: \\`${post.href}\\`,\n author: \\`${post.author}\\`,\n content: \\`${post.content}\\`,\n datePosted: \\`${post.datePosted}\\`,\n img: \\`${post.feature_image || \"\"}\\`,\n excerpt: \\`${post.excerpt}\\`,\n readingTime: ${post.reading_time}\n}` ).join(\",\\n\")}\n];`;\n\nreturn [{ json: { js } }];" + }, + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [ + 416, + 0 + ], + "id": "8df60f75-86a6-4433-a76f-5da0c1711f4f", + "name": "Format Data For Mintlify", + "alwaysOutputData": false + }, + { + "parameters": { + "resource": "file", + "operation": "edit", + "owner": { + "__rl": true, + "value": "DeveloperAlly", + "mode": "list", + "cachedResultName": "DeveloperAlly", + "cachedResultUrl": "https://github.com/DeveloperAlly" + }, + "repository": { + "__rl": true, + "value": "livepeer-automations", + "mode": "list", + "cachedResultName": "livepeer-automations", + "cachedResultUrl": "https://github.com/DeveloperAlly/livepeer-automations" + }, + "filePath": "data/ghostBlogData.jsx", + "fileContent": "={{ $json.js }}", + "commitMessage": "=Update Blog Data {{ $now.toISO() }}", + "additionalParameters": { + "branch": { + "branch": "main" + } + } + }, + "type": "n8n-nodes-base.github", + "typeVersion": 1.1, + "position": [ + 624, + 0 + ], + "id": "12da7005-8b77-44f6-bb8d-1b3cf61b2db7", + "name": "Edit a file", + "webhookId": "3002edb1-3d17-44c0-be7d-e526f4aa14ad", + "credentials": { + "githubApi": { + "id": "jjy0epl4eqPHYqlG", + "name": "GitHub account" + } + } + } + ], + "pinData": {}, + "connections": { + "Schedule Trigger": { + "main": [ + [ + { + "node": "HTTP 
Request", + "type": "main", + "index": 0 + } + ] + ] + }, + "HTTP Request": { + "main": [ + [ + { + "node": "Format Data For Mintlify", + "type": "main", + "index": 0 + } + ] + ] + }, + "Format Data For Mintlify": { + "main": [ + [ + { + "node": "Edit a file", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "active": false, + "settings": { + "executionOrder": "v1" + }, + "versionId": "541aa7a4-475b-40b4-8f0e-3ab5ebfe6b98", + "meta": { + "templateCredsSetupCompleted": true, + "instanceId": "b77ce31e344450acd6ad20bd6cde79f46c1e07d55921483122bfba86e8350352" + }, + "id": "5uLNIqPAxnTXwOnE", + "tags": [] +} \ No newline at end of file diff --git a/snippets/automations/scripts/n8n/Luma-To-Mintlify.json b/snippets/automations/scripts/n8n/Luma-To-Mintlify.json new file mode 100644 index 00000000..bee02941 --- /dev/null +++ b/snippets/automations/scripts/n8n/Luma-To-Mintlify.json @@ -0,0 +1,296 @@ +{ + "name": "My workflow", + "nodes": [ + { + "parameters": { + "rule": { + "interval": [ + { + "field": "weeks" + } + ] + } + }, + "name": "Schedule Trigger", + "type": "n8n-nodes-base.scheduleTrigger", + "typeVersion": 1, + "position": [ + 64, + -96 + ], + "id": "c69f62f0-871a-49f7-870a-062016aaae16" + }, + { + "parameters": { + "url": "=https://api2.luma.com/ics/get?entity=calendar&id={{ $json.lumaCalID }}", + "options": {} + }, + "name": "Fetch iCal", + "type": "n8n-nodes-base.httpRequest", + "typeVersion": 4.1, + "position": [ + 432, + -96 + ], + "id": "2a8ebed6-0dd4-405c-af8a-c41102d2046e" + }, + { + "parameters": { + "jsCode": "const icalData = $input.item.json.data;\n\n// Extract all VEVENT blocks\nconst eventBlocks = icalData.match(/BEGIN:VEVENT[\\s\\S]*?END:VEVENT/g) || [];\n\nconst events = eventBlocks.map(block => {\n const getField = (field) => {\n const match = block.match(new RegExp(`${field}:(.*?)(?:\\n[A-Z]|\\nEND:)`, 's'));\n return match ? 
match[1].replace(/\\n /g, '').trim() : '';\n };\n \n const parseDate = (dateStr) => {\n // Format: 20240523T090033Z\n const year = dateStr.slice(0, 4);\n const month = dateStr.slice(4, 6);\n const day = dateStr.slice(6, 8);\n const hour = dateStr.slice(9, 11);\n const min = dateStr.slice(11, 13);\n return new Date(`${year}-${month}-${day}T${hour}:${min}:00Z`);\n };\n\n const startStr = getField('DTSTART');\n const endStr = getField('DTEND');\n const summary = getField('SUMMARY');\n const description = getField('DESCRIPTION');\n const location = getField('LOCATION');\n const uid = getField('UID').split('@')[0];\n \n // Extract luma URL from description\n const lumaUrl = description.match(/https:\\/\\/luma\\.com\\/\\w+/)?.[0] || '';\n\n return {\n title: summary,\n start: parseDate(startStr),\n end: parseDate(endStr),\n description: description.split('\\n\\n')[0], // First paragraph only\n location: location,\n url: lumaUrl,\n uid: uid\n };\n});\n\n// Sort by date (newest first for display)\nconst sorted = events.sort((a, b) => b.start - a.start);\n\nreturn [{ json: { events: sorted } }];" + }, + "name": "Parse iCal", + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [ + 624, + -96 + ], + "id": "110949d1-b6b9-467e-bea4-7157c672a129" + }, + { + "parameters": { + "resource": "file", + "operation": "edit", + "owner": { + "__rl": true, + "value": "={{ $json.githubOwner }}", + "mode": "" + }, + "repository": { + "__rl": true, + "value": "={{ $json.githubRepo }}", + "mode": "" + }, + "filePath": "={{ $json.githubFilePath }}", + "fileContent": "={{ $json.content }}", + "commitMessage": "`Chore: Update Livepeer events from Luma - ${new Date().toISOString()}`", + "additionalParameters": { + "branch": { + "branch": "={{ $json.githubBranch }}" + } + } + }, + "name": "Update GitHub", + "type": "n8n-nodes-base.github", + "typeVersion": 1, + "position": [ + 1712, + -96 + ], + "id": "6ea25cad-b0ce-46e2-8e2d-df449de5650e", + "webhookId": 
"4ad2e461-cf8b-438d-8723-03a405599e22", + "credentials": { + "githubApi": { + "id": "vAAQD9gcQcGNKMOH", + "name": "Github Livepeer/docs Write Commit Token" + } + } + }, + { + "parameters": { + "assignments": { + "assignments": [ + { + "id": "github_owner", + "name": "githubOwner", + "value": "livepeer", + "type": "string" + }, + { + "id": "github_repo", + "name": "githubRepo", + "value": "docs", + "type": "string" + }, + { + "id": "github_path", + "name": "githubFilePath", + "value": "snippets/automations/luma/lumaEventsData.jsx", + "type": "string" + }, + { + "id": "077c994c-4563-4210-8690-3b00fe4dba99", + "name": "githubBranch", + "value": "docs-v2-preview", + "type": "string" + }, + { + "id": "28db75b7-87d1-4ad7-982d-c7c114bb9386", + "name": "lumaCal", + "value": "https://api2.luma.com/ics/get?entity=calendar&id=cal-X93qV3PuUH0wq0f", + "type": "string" + }, + { + "id": "c3e05cc9-c4c2-482c-8fcb-498f68cb3839", + "name": "lumaCalID", + "value": "cal-X93qV3PuUH0wq0f", + "type": "string" + } + ] + }, + "options": {} + }, + "name": "Config", + "type": "n8n-nodes-base.set", + "typeVersion": 3.3, + "position": [ + 256, + -96 + ], + "id": "a9e89dab-422c-4952-a0dd-60a9ae45f9d1" + }, + { + "parameters": { + "mode": "combine", + "combineBy": "combineByPosition", + "options": {} + }, + "type": "n8n-nodes-base.merge", + "typeVersion": 3.2, + "position": [ + 1632, + -352 + ], + "id": "2884d9f4-db09-4594-8016-886a4833d387", + "name": "Merge", + "executeOnce": true + }, + { + "parameters": { + "jsCode": "const events = $input.item.json.events;\n\nconst now = new Date();\nconst upcoming = events.filter(e => new Date(e.start) >= now);\nconst past = events.filter(e => new Date(e.start) < now);\n\nconst formatDate = (dateStr) => {\n const date = new Date(dateStr);\n return date.toLocaleDateString('en-US', { \n year: 'numeric', \n month: 'long', \n day: 'numeric',\n hour: '2-digit',\n minute: '2-digit',\n timeZoneName: 'short'\n });\n};\n\nconst formatDateShort = (dateStr) => {\n 
const date = new Date(dateStr);\n return date.toLocaleDateString('en-US', { \n year: 'numeric', \n month: 'long', \n day: 'numeric'\n });\n};\n\nlet jsx = `export const lumaEventsData = {\\n`;\njsx += ` lastUpdated: \"${new Date().toISOString()}\",\\n`;\njsx += ` upcoming: [\\n`;\n\nupcoming.forEach((event, idx) => {\n jsx += ` {\\n`;\n jsx += ` title: \"${event.title.replace(/\"/g, '\\\\\"')}\",\\n`;\n jsx += ` date: \"${formatDate(event.start)}\",\\n`;\n jsx += ` location: \"${event.location.replace(/\"/g, '\\\\\"')}\",\\n`;\n jsx += ` url: \"${event.url}\",\\n`;\n jsx += ` description: \"${event.description.split('\\\\n\\\\n')[0].replace(/\"/g, '\\\\\"').replace(/\\n/g, ' ')}\"\\n`;\n jsx += ` }${idx < upcoming.length - 1 ? ',' : ''}\\n`;\n});\n\njsx += ` ],\\n`;\njsx += ` past: [\\n`;\n\npast.forEach((event, idx) => {\n jsx += ` {\\n`;\n jsx += ` title: \"${event.title.replace(/\"/g, '\\\\\"')}\",\\n`;\n jsx += ` date: \"${formatDateShort(event.start)}\",\\n`;\n jsx += ` location: \"${event.location.replace(/\"/g, '\\\\\"')}\",\\n`;\n jsx += ` url: \"${event.url}\"\\n`;\n jsx += ` }${idx < past.length - 1 ? ',' : ''}\\n`;\n});\n\njsx += ` ]\\n`;\njsx += `};\\n`;\n\nreturn [{ \n json: { \n content: jsx,\n filename: 'lumaEventsData.jsx'\n } \n}];" + }, + "name": "Generate JSX", + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [ + 1424, + -96 + ], + "id": "d2e845cd-bbab-4f8b-88d6-1a5497541fdd" + }, + { + "parameters": { + "url": "https://luma.com/livepeer", + "options": {} + }, + "type": "n8n-nodes-base.httpRequest", + "typeVersion": 4.2, + "position": [ + 832, + 160 + ], + "id": "af12e7c9-8c57-4381-aa62-29f247c4da1b", + "name": "HTTP Request", + "disabled": true + }, + { + "parameters": { + "jsCode": "const events = $input.first().json.events;\nconst html = $input.last().json.data;\n\n// Extract __NEXT_DATA__ which has event images\nconst match = html.match(/