Next.js Tutorial
This guide contains Next.js code snippets for working with video files in Ragie via the TypeScript SDK.
Video Upload
As with any file, the first step is to ingest the data into Ragie.
Request
import { Ragie } from "ragie";

const ragie = new Ragie({
  auth: process.env.RAGIE_API_KEY,
});

// "blob" is the video data you already have in memory (e.g. from an upload form)
const file = new File([blob], "presentation.mp4");

const result = await ragie.documents.create({
  file: file,
  mode: { video: "audio_video" },
});
Response
{
  "status": "partitioning",
  "id": "id123",
  "created_at": "2025-05-15T21:19:16.701463Z",
  "updated_at": "2025-05-15T21:19:18.075945Z",
  "name": "presentation.mp4",
  "metadata": {},
  "partition": "default",
  "chunk_count": null,
  "external_id": null,
  "page_count": null
}
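In a Next.js app, the upload will typically happen server-side so your Ragie API key stays private. Below is a minimal sketch of a route handler that accepts a multipart form upload and ingests it into Ragie; the /api/ragie/upload path and the "file" form field are illustrative assumptions, not something prescribed by the SDK.

// app/api/ragie/upload/route.ts (hypothetical path, shown for illustration)
import { NextRequest } from "next/server";
import { Ragie } from "ragie";

export async function POST(request: NextRequest) {
  // Read the uploaded video from the multipart form body.
  const formData = await request.formData();
  const file = formData.get("file") as File | null;
  if (!file) {
    return new Response("Missing file", { status: 400 });
  }

  const ragie = new Ragie({ auth: process.env.RAGIE_API_KEY });
  const document = await ragie.documents.create({
    file,
    mode: { video: "audio_video" },
  });

  return Response.json(document);
}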
Once the document status is “ready”, we will be able to perform a retrieval using the document.
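Video processing can take a while, so you may want to poll the document until it reaches that status. Below is a minimal sketch, assuming the SDK's documents.get method and the result.id returned by the upload above.

// Poll until Ragie has finished processing the video (a simple sketch;
// production code would likely add a timeout and backoff).
let document = await ragie.documents.get({ documentId: result.id });
while (document.status !== "ready") {
  await new Promise((resolve) => setTimeout(resolve, 5000));
  document = await ragie.documents.get({ documentId: result.id });
}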
Retrieval
Request
const ragie = new Ragie({
  auth: process.env.RAGIE_API_KEY,
});

const response = await ragie.retrievals.retrieve({
  query: "What are the 3 main points of the presentation?",
});
Response
{
  "scored_chunks": [
    {
      "text": "{\"video_description\": \"The 3 main...\"}",
      "score": 0.16783216783216784,
      "id": "id123",
      "index": 0,
      "metadata": {
        "end_time": 15,
        "start_time": 0
      },
      "document_id": "docId",
      "document_name": "presentation.mp4",
      "document_metadata": {},
      "links": {
        "self": {
          "href": "https://api.ragie.ai/documents/docId/chunks/chunkId",
          "type": "application/json"
        },
        "self_text": {
          "href": "https://api.ragie.ai/documents/docId/chunks/chunkId/content?media_type=text/plain-text",
          "type": "text/plain-text"
        },
        "document": {
          "href": "https://api.ragie.ai/documents/docId",
          "type": "application/json"
        },
        "document_text": {
          "href": "https://api.ragie.ai/documents/docId/content?media_type=text/plain-text",
          "type": "text/plain-text"
        },
        "self_audio_stream": {
          "href": "https://api.ragie.ai/documents/docId/chunks/chunkId/content?media_type=audio/mpeg",
          "type": "audio/mpeg"
        },
        "self_audio_download": {
          "href": "https://api.ragie.ai/documents/docId/chunks/chunkId/content?media_type=audio/mpeg&download=true",
          "type": "audio/mpeg"
        },
        "document_audio_stream": {
          "href": "https://api.ragie.ai/documents/docId/content?media_type=audio/mpeg",
          "type": "audio/mpeg"
        },
        "document_audio_download": {
          "href": "https://api.ragie.ai/documents/docId/content?media_type=audio/mpeg&download=true",
          "type": "audio/mpeg"
        },
        "self_video_stream": {
          "href": "https://api.ragie.ai/documents/docId/chunks/chunkId/content?media_type=video/mp4",
          "type": "video/mp4"
        },
        "self_video_download": {
          "href": "https://api.ragie.ai/documents/docId/chunks/chunkId/content?media_type=video/mp4&download=true",
          "type": "video/mp4"
        },
        "document_video_stream": {
          "href": "https://api.ragie.ai/documents/docId/content?media_type=video/mp4",
          "type": "video/mp4"
        },
        "document_video_download": {
          "href": "https://api.ragie.ai/documents/docId/content?media_type=video/mp4&download=true",
          "type": "video/mp4"
        }
      }
    },
    ...
Now that we have retrieved the data from the video, we can pass these chunks to an LLM for generation.
Generation Example
Here we use generateText and openai from Vercel’s AI SDK.
import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';

const response = await ragie.retrievals.retrieve({
  query: "What are the 3 main points of the presentation?",
});

// Collect the retrieved chunks along with useful metadata for the prompt.
const sources = response.scoredChunks.map((chunk) => {
  return {
    ...chunk.documentMetadata,
    text: chunk.text,
    documentName: chunk.documentName,
    streamUrl: chunk.links.self_video_stream?.href,
    startTime: chunk.metadata?.start_time,
    endTime: chunk.metadata?.end_time,
  };
});

const { text } = await generateText({
  model: openai('gpt-4o'),
  prompt: `Answer the user question using the provided sources: ${JSON.stringify(sources)}`,
});

console.log(text);
Streaming Video
You can use the provided self_video_stream.href to stream the chunk’s video into a <video> player.
const streamUrl = chunk.links.self_video_stream?.href;

<video src={getRagieStreamPath(streamUrl)} />
Note that you will need to use a proxy URL in the src prop to provide Bearer authentication with your Ragie API key. Here is a basic implementation.
export const getRagieStreamPath = (streamUrl: string) => {
  // Encode the upstream URL so its own query string survives as a single parameter.
  return `/api/ragie/stream?url=${encodeURIComponent(streamUrl)}`;
};
// app/api/ragie/stream/route.ts
import { NextRequest } from "next/server";

// Important: The Next.js edge runtime strips "simple headers" like "Range" from the request,
// so we need to use the Node.js runtime to preserve them.
export const runtime = "nodejs";

export async function GET(request: NextRequest) {
  const url = request.nextUrl.searchParams.get("url");
  if (!url) {
    return new Response("Missing url parameter", { status: 400 });
  }

  try {
    const ragieApiKey = process.env.RAGIE_API_KEY;

    // Forward Range if present
    const reqRange = request.headers.get("range");

    // Propagate stream cancel from player
    const controller = new AbortController();
    request.signal.addEventListener("abort", () => controller.abort());

    const upstreamResponse = await fetch(url, {
      headers: {
        authorization: `Bearer ${ragieApiKey}`,
        ...(reqRange ? { Range: reqRange } : {}),
      },
      signal: controller.signal,
    });

    // Stream the upstream response directly back to the client, preserving status, headers, etc.
    const passedThroughHeaders = [
      "Content-Type",
      "Accept-Ranges",
      "Content-Length",
      "Content-Range",
      "Transfer-Encoding",
    ];
    const headers = new Headers();
    passedThroughHeaders.forEach((header) => {
      const value = upstreamResponse.headers.get(header);
      if (value) {
        headers.set(header, value);
      }
    });

    return new Response(upstreamResponse.body, {
      status: upstreamResponse.status,
      headers,
    });
  } catch (error) {
    return new Response("Error fetching video stream", { status: 500 });
  }
}
For a more robust example of incorporating audio and video files into retrievals, generations, and streams, see our open-source project Base Chat.
Downloading Video
If instead you would like to download the video:
const downloadUrl = chunk.links.self_video_download?.href;
function DownloadVideoButton({ videoUrl }: { videoUrl: string }) {
  return (
    <a href={videoUrl} download>
      <button>Download Video</button>
    </a>
  );
}
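As with streaming, the download link points at the Ragie API and requires Bearer authentication, so a plain anchor to downloadUrl will not work directly from the browser. One option, sketched below and assuming the getRagieStreamPath proxy helper from the streaming section, is to route the download through the same proxy:

function ProxiedDownloadVideoButton({ videoUrl }: { videoUrl: string }) {
  // Route the request through the authenticated proxy so the Ragie API key
  // never reaches the browser.
  return (
    <a href={getRagieStreamPath(videoUrl)} download>
      <button>Download Video</button>
    </a>
  );
}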