deepseek integrated successfully

This commit is contained in:
echo 2025-11-24 19:25:34 +01:00
parent 0896dd46d1
commit bf532ba35d
4 changed files with 155 additions and 51 deletions

3
apps/admin/.env.example Normal file
View File

@ -0,0 +1,3 @@
# DeepSeek AI API Key
# Get your API key from https://platform.deepseek.com/
DEEPSEEK_API_KEY=your_deepseek_api_key_here

Binary file not shown.

View File

@ -3,7 +3,7 @@ import { getDatabase } from "@/lib/database";
export async function POST(req: Request) {
try {
const { userId } = await req.json();
const { userId, useExternalModel } = await req.json();
if (!userId) {
return NextResponse.json({ error: "User ID is required" }, { status: 400 });
@ -21,7 +21,7 @@ export async function POST(req: Request) {
);
}
// Construct prompt for Ollama
// Construct prompt
const prompt = `
You are a professional fitness trainer and nutritionist.
Generate a detailed daily recommendation for a user with the following profile:
@ -42,14 +42,94 @@ export async function POST(req: Request) {
}
`;
// Call Ollama
let parsedResponse;
if (useExternalModel) {
// Use DeepSeek AI
const deepseekApiKey = process.env.DEEPSEEK_API_KEY;
if (!deepseekApiKey) {
return NextResponse.json(
{ error: "DeepSeek API key not configured" },
{ status: 500 }
);
}
console.log("Using DeepSeek AI model...");
const deepseekResponse = await fetch("https://api.deepseek.com/v1/chat/completions", {
method: "POST",
headers: {
"Content-Type": "application/json",
"Authorization": `Bearer ${deepseekApiKey}`,
},
body: JSON.stringify({
model: "deepseek-chat",
messages: [
{
role: "system",
content: "You are a professional fitness trainer and nutritionist. Always respond with valid JSON only, no markdown or code blocks."
},
{
role: "user",
content: prompt
}
],
temperature: 0.7,
max_tokens: 1000,
}),
});
if (!deepseekResponse.ok) {
const errorText = await deepseekResponse.text();
console.error("DeepSeek API error:", errorText);
return NextResponse.json(
{ error: "Failed to generate recommendation from DeepSeek AI" },
{ status: 500 }
);
}
const deepseekData = await deepseekResponse.json();
console.log("Raw DeepSeek Response:", deepseekData);
try {
const content = deepseekData.choices[0].message.content;
let cleanResponse = content.trim();
// Remove markdown code blocks if present
if (cleanResponse.startsWith("```json")) {
cleanResponse = cleanResponse.replace(/^```json\s*/, "").replace(/\s*```$/, "");
} else if (cleanResponse.startsWith("```")) {
cleanResponse = cleanResponse.replace(/^```\s*/, "").replace(/\s*```$/, "");
}
// Find the first '{' and last '}' to extract the JSON object
const firstBrace = cleanResponse.indexOf("{");
const lastBrace = cleanResponse.lastIndexOf("}");
if (firstBrace !== -1 && lastBrace !== -1) {
cleanResponse = cleanResponse.substring(firstBrace, lastBrace + 1);
}
parsedResponse = JSON.parse(cleanResponse);
} catch (e) {
console.error("Failed to parse DeepSeek response:", deepseekData);
return NextResponse.json(
{ error: "Invalid response format from DeepSeek AI" },
{ status: 500 }
);
}
} else {
// Use local Ollama
console.log("Using local Ollama model...");
const ollamaResponse = await fetch("http://localhost:11434/api/generate", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
model: "gemma3:latest", // Make sure this model is pulled
model: "gemma3:latest",
prompt: prompt,
stream: false,
format: "json",
@ -59,17 +139,15 @@ export async function POST(req: Request) {
if (!ollamaResponse.ok) {
console.error("Ollama API error:", await ollamaResponse.text());
return NextResponse.json(
{ error: "Failed to generate recommendation from AI service" },
{ error: "Failed to generate recommendation from Ollama" },
{ status: 500 }
);
}
const aiData = await ollamaResponse.json();
console.log("Raw AI Response:", aiData.response);
console.log("Raw Ollama Response:", aiData.response);
let parsedResponse;
try {
// Helper to clean up the response
let cleanResponse = aiData.response.trim();
// Remove markdown code blocks if present
@ -89,19 +167,19 @@ export async function POST(req: Request) {
parsedResponse = JSON.parse(cleanResponse);
} catch (e) {
// Fallback if model doesn't return perfect JSON despite instruction
console.error("Failed to parse AI response:", aiData.response);
console.error("Failed to parse Ollama response:", aiData.response);
return NextResponse.json(
{ error: "Invalid response format from AI model" },
{ error: "Invalid response format from Ollama" },
{ status: 500 }
);
}
}
// Save to database
const recommendation = await db.createRecommendation({
id: crypto.randomUUID(),
userId,
fitnessProfileId: profile.userId, // Using userId as ID for now since it's 1:1
fitnessProfileId: profile.userId,
type: 'ai_plan',
content: parsedResponse.recommendationText,
activityPlan: parsedResponse.activityPlan,

View File

@ -26,6 +26,7 @@ export default function RecommendationsPage() {
const [pendingRecommendations, setPendingRecommendations] = useState<any[]>([]);
const [loading, setLoading] = useState(true);
const [generating, setGenerating] = useState<string | null>(null);
const [useExternalModel, setUseExternalModel] = useState(false);
useEffect(() => {
fetchData();
@ -56,7 +57,7 @@ export default function RecommendationsPage() {
const res = await fetch("/api/recommendations/generate", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ userId }),
body: JSON.stringify({ userId, useExternalModel }),
});
if (!res.ok) {
@ -143,7 +144,29 @@ export default function RecommendationsPage() {
return (
<div className="container mx-auto py-10 px-4">
<h1 className="text-3xl font-bold mb-8">AI Recommendations</h1>
<div className="flex justify-between items-center mb-8">
<h1 className="text-3xl font-bold">AI Recommendations</h1>
{/* Model Selection Toggle */}
<div className="flex items-center gap-3 bg-white px-4 py-2 rounded-lg shadow">
<span className="text-sm font-medium text-gray-700">
{useExternalModel ? "DeepSeek AI" : "Local Ollama"}
</span>
<button
onClick={() => setUseExternalModel(!useExternalModel)}
className={`relative inline-flex h-6 w-11 items-center rounded-full transition-colors ${useExternalModel ? "bg-blue-600" : "bg-gray-300"
}`}
>
<span
className={`inline-block h-4 w-4 transform rounded-full bg-white transition-transform ${useExternalModel ? "translate-x-6" : "translate-x-1"
}`}
/>
</button>
<span className="text-xs text-gray-500">
{useExternalModel ? "External" : "Local"}
</span>
</div>
</div>
<div className="grid grid-cols-1 lg:grid-cols-2 gap-8">
{/* Generate Section */}