// utils/llm.ts
1import { load } from "@std/dotenv";
2
// Load environment variables from the local .env file (Deno @std/dotenv).
// Top-level await: module evaluation blocks until the file is read.
const env = await load({ envPath: ".env" });

// NOTE(review): no guard here — if MISTRAL_APIKEY is absent from .env this
// is `undefined`, and requests go out with "Bearer undefined"; consider
// failing fast at startup. TODO confirm intended behavior.
const mistralApiKey = env["MISTRAL_APIKEY"];
const mistralApiUrl = "https://api.mistral.ai/v1/chat/completions";
7
8export const callLLM = async (prompt: string) => {
9 const response = await fetch(mistralApiUrl, {
10 method: "POST",
11 headers: {
12 "Content-Type": "application/json",
13 Authorization: `Bearer ${mistralApiKey}`,
14 },
15 body: JSON.stringify({
16 model: "mistral-small-latest",
17 response_format: { type: "json_object" },
18 messages: [{ role: "user", content: prompt }],
19 }),
20 });
21
22 const data = await response.json();
23 return data.choices[0].message.content;
24};