Implemented comprehensive Romanian Biblical Guide web app:

- Next.js 15 with App Router and TypeScript
- Material UI 7.3.2 for modern, responsive design
- PostgreSQL database with Prisma ORM
- Complete Bible reader with book/chapter navigation
- AI-powered biblical chat with Romanian responses
- Prayer wall for community prayer requests
- Advanced Bible search with filters and highlighting
- Sample Bible data imported from API.Bible
- All API endpoints created and working
- Professional Material UI components throughout
- Responsive layout with navigation and theme

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
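For context on how a chat endpoint might consume the helper defined in the TypeScript file below, here is a minimal App Router route sketch. The route path, request shape, and import path are assumptions for illustration, not code from this commit.

```ts
// Hypothetical app/api/chat/route.ts – illustrative only; the real route may differ.
import { NextResponse } from 'next/server'
import { generateChatResponse } from '@/lib/azure-openai' // assumed module path

export async function POST(request: Request) {
  // Assumed request body: { messages: [...], verseContext?: string }
  const { messages, verseContext } = await request.json()

  try {
    const reply = await generateChatResponse(messages, verseContext)
    return NextResponse.json({ reply })
  } catch {
    // generateChatResponse logs the underlying Azure error before rethrowing
    return NextResponse.json(
      { error: 'Eroare la generarea răspunsului AI' }, // "Error generating the AI response"
      { status: 500 }
    )
  }
}
```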
import { AzureOpenAI } from 'openai'
import type { ChatCompletionMessageParam } from 'openai/resources/chat/completions'

// Required configuration (from environment variables):
//   AZURE_OPENAI_KEY, AZURE_OPENAI_ENDPOINT – Azure OpenAI credentials
//   AZURE_OPENAI_DEPLOYMENT                 – deployment name (defaults to 'gpt-4')
//   OLLAMA_API_URL                          – local Ollama instance used for embeddings
const client = new AzureOpenAI({
  apiKey: process.env.AZURE_OPENAI_KEY!,
  apiVersion: '2024-02-01',
  endpoint: process.env.AZURE_OPENAI_ENDPOINT!,
})

// Generates a Romanian-language Bible-study reply, optionally grounded in a verse context.
export async function generateChatResponse(
  messages: ChatCompletionMessageParam[],
  verseContext?: string
) {
  try {
    // System prompt (Romanian): "You are a Bible-study assistant that answers in Romanian.
    // Always provide Scripture references for your answers. Be respectful and give
    // well-grounded biblical answers."
    const systemPrompt = `Ești un asistent pentru studiul Bibliei care răspunde în română. Întotdeauna oferă referințe din Scriptură pentru răspunsurile tale. Fii respectuos și oferă răspunsuri biblice fundamentate. ${verseContext ? `Context: ${verseContext}` : ''}`

    const response = await client.chat.completions.create({
      // With Azure OpenAI, the model field names the deployment.
      model: process.env.AZURE_OPENAI_DEPLOYMENT || 'gpt-4',
      messages: [
        { role: 'system', content: systemPrompt },
        ...messages
      ],
      temperature: 0.7,
      max_tokens: 1000
    })

    return response.choices[0].message.content
  } catch (error) {
    console.error('Azure OpenAI error:', error)
    // "Error while generating the AI response"
    throw new Error('Eroare la generarea răspunsului AI')
  }
}

// Generates an embedding vector for the given text via a local Ollama instance.
export async function generateEmbedding(text: string): Promise<number[]> {
  try {
    if (!process.env.OLLAMA_API_URL) {
      throw new Error('OLLAMA_API_URL not configured')
    }

    const response = await fetch(`${process.env.OLLAMA_API_URL}/api/embeddings`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        model: 'nomic-embed-text',
        prompt: text
      })
    })

    if (!response.ok) {
      throw new Error('Failed to generate embedding')
    }

    const data = await response.json()
    return data.embedding
  } catch (error) {
    console.error('Embedding generation error:', error)
    // Return an empty array if the embedding service is not available
    return []
  }
}
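Note that generateEmbedding degrades gracefully: it returns an empty array when Ollama is unreachable, so callers need a fallback path. As one possible use, here is a sketch of ranking verses by cosine similarity against a query embedding; the EmbeddedVerse shape, the findSimilarVerses helper, and the idea of precomputing verse embeddings are assumptions for illustration, not part of this commit.

```ts
// Sketch only: ranking pre-embedded verses by cosine similarity to a query.
import { generateEmbedding } from '@/lib/azure-openai' // assumed module path

interface EmbeddedVerse {
  reference: string   // e.g. "Ioan 3:16"
  text: string
  embedding: number[] // precomputed with generateEmbedding(text)
}

// Cosine similarity of two equal-length vectors; returns 0 for zero-norm input.
function cosineSimilarity(a: number[], b: number[]): number {
  let dot = 0
  let normA = 0
  let normB = 0
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i]
    normA += a[i] * a[i]
    normB += b[i] * b[i]
  }
  const norm = Math.sqrt(normA) * Math.sqrt(normB)
  return norm === 0 ? 0 : dot / norm
}

export async function findSimilarVerses(query: string, verses: EmbeddedVerse[], topK = 5) {
  const queryEmbedding = await generateEmbedding(query)
  // An empty result means the embedding service was unavailable; skip semantic ranking.
  if (queryEmbedding.length === 0) return []

  return verses
    .map(verse => ({ ...verse, score: cosineSimilarity(queryEmbedding, verse.embedding) }))
    .sort((a, b) => b.score - a.score)
    .slice(0, topK)
}
```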