Commit · 9cb9fcf
1 Parent(s): 3de23a5
FIX: RAG system import and API integration fixes - Fixed RAG imports, updated API endpoints and frontend integration
Files changed:
- backend/api/routes.py  +2 -2
- backend/models.py  +3 -0
- frontend/src/hooks/useChat.ts  +4 -2
- frontend/src/pages/Playground.tsx  +20 -15
- rag_system.py  +2 -3
- static/assets/index-cf882c3b.js  +0 -0
- static/assets/index-cf882c3b.js.map  +0 -0
- static/index.html  +1 -1
backend/api/routes.py  CHANGED

@@ -217,8 +217,8 @@ async def generate_text(request: PromptRequest):
     enhanced_system_prompt = request.system_prompt
 
     # Check if RAG is available and should be used
-    use_rag =
-    retrieval_count =
+    use_rag = request.use_rag or False
+    retrieval_count = request.retrieval_count or 3
 
     if RAG_AVAILABLE and use_rag:
         rag_system = get_rag_system()
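In generate_text, the two new request values feed the existing RAG branch. The hunk ends at get_rag_system(), so the continuation below is only an illustrative sketch: the retrieve method, the prompt field, and the way retrieved chunks are appended to enhanced_system_prompt are assumptions, not code from this commit.

    # Hypothetical continuation of the RAG branch in generate_text()
    # (method and field names below are illustrative, not from the commit)
    if RAG_AVAILABLE and use_rag:
        rag_system = get_rag_system()
        # Retrieve the top-k chunks for the user prompt; "retrieve" is an assumed method name,
        # and "request.prompt" assumes PromptRequest carries the user prompt.
        chunks = rag_system.retrieve(request.prompt, k=retrieval_count)
        if chunks:
            # Assumes chunks are plain strings; fold them into the system prompt.
            context = "\n\n".join(chunks)
            enhanced_system_prompt = (
                (enhanced_system_prompt or "")
                + "\n\nUse the following context to answer:\n"
                + context
            )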
backend/models.py  CHANGED

@@ -16,6 +16,9 @@ class PromptRequest(BaseModel):
     model_name: Optional[str] = None
     temperature: Optional[float] = 0.7
     max_new_tokens: Optional[int] = 1024
+    # RAG configuration
+    use_rag: Optional[bool] = False
+    retrieval_count: Optional[int] = 3
 
 
 class PromptResponse(BaseModel):
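Both new fields are Optional with safe defaults, so requests from older clients still validate and simply run without RAG. A minimal sketch of the behaviour, trimmed to the fields visible in this hunk (the real PromptRequest has additional fields such as the prompt itself):

    from typing import Optional
    from pydantic import BaseModel

    class PromptRequest(BaseModel):          # trimmed to the fields shown in the diff
        model_name: Optional[str] = None
        temperature: Optional[float] = 0.7
        max_new_tokens: Optional[int] = 1024
        # RAG configuration
        use_rag: Optional[bool] = False
        retrieval_count: Optional[int] = 3

    req = PromptRequest(model_name="my-model")
    print(req.use_rag, req.retrieval_count)  # False 3 -- old clients get RAG disabled by default
    # Because the fields are Optional, an explicit JSON null arrives as None, which is why
    # routes.py coerces with "request.use_rag or False" / "request.retrieval_count or 3".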
frontend/src/hooks/useChat.ts  CHANGED

@@ -128,7 +128,7 @@ export function useChat(options: UseChatOptions = {}) {
   }, [currentSessionId])
 
   // Send message
-  const sendMessage = useCallback(async (assistantInfo?: AssistantInfo) => {
+  const sendMessage = useCallback(async (assistantInfo?: AssistantInfo, ragConfig?: { useRag: boolean, retrievalCount: number }) => {
     if (!input.trim() || status.isLoading) return
 
     let sessionId = currentSessionId

@@ -176,7 +176,9 @@ export function useChat(options: UseChatOptions = {}) {
           system_prompt: systemPrompt || null,
           model_name: selectedModel,
           temperature,
-          max_new_tokens: maxTokens
+          max_new_tokens: maxTokens,
+          use_rag: ragConfig?.useRag || false,
+          retrieval_count: ragConfig?.retrievalCount || 3
         }),
       })
frontend/src/pages/Playground.tsx  CHANGED

@@ -90,6 +90,10 @@ export function Playground() {
     setMaxTokens
   } = useChat()
 
+  // RAG configuration state
+  const [ragEnabled, setRagEnabled] = useState(false)
+  const [retrievalCount, setRetrievalCount] = useState(3)
+
   // UI state - sidebar collapse states
   const [sessionsCollapsed, setSessionsCollapsed] = useState(false)
   const [configCollapsed, setConfigCollapsed] = useState(false)

@@ -623,7 +627,8 @@
               e.preventDefault()
               if (!selectedModel || !models.find(m => m.model_name === selectedModel)) return
               const assistantInfo = getCurrentAssistantInfo()
-
+              const ragConfig = { useRag: ragEnabled, retrievalCount }
+              await sendMessage(assistantInfo, ragConfig)
             }}
             isGenerating={isLoading}
             stop={stopGeneration}

@@ -718,6 +723,10 @@
             <TabsContent value="documents" className="p-6 space-y-6 m-0 h-full overflow-y-auto">
               <DocumentsTab
                 isLoading={isLoading}
+                ragEnabled={ragEnabled}
+                setRagEnabled={setRagEnabled}
+                retrievalCount={retrievalCount}
+                setRetrievalCount={setRetrievalCount}
               />
             </TabsContent>
           </div>

@@ -1376,14 +1385,20 @@ function SystemInstructionsTab({
 
 // Documents Tab Component for RAG functionality
 function DocumentsTab({
-  isLoading
+  isLoading,
+  ragEnabled,
+  setRagEnabled,
+  retrievalCount,
+  setRetrievalCount
 }: {
   isLoading: boolean
+  ragEnabled: boolean
+  setRagEnabled: (enabled: boolean) => void
+  retrievalCount: number
+  setRetrievalCount: (count: number) => void
 }) {
   const [uploadedFiles, setUploadedFiles] = useState<any[]>([])
   const [isUploading, setIsUploading] = useState(false)
-  const [ragEnabled, setRagEnabled] = useState(false)
-  const [retrievalCount, setRetrievalCount] = useState(3)
 
   // Load existing documents on component mount
   useEffect(() => {

@@ -1418,27 +1433,21 @@
     if (!files) return
 
     setIsUploading(true)
-    console.log('Starting file upload process...', files.length, 'files')
 
     try {
       const formData = new FormData()
 
       for (const file of Array.from(files)) {
-        console.log('Adding file to FormData:', file.name, 'type:', file.type)
         formData.append('files', file)
       }
 
-      console.log('Making API request to /rag/upload...')
       const response = await fetch('/rag/upload', {
         method: 'POST',
         body: formData,
       })
 
-      console.log('Upload response status:', response.status)
-
       if (response.ok) {
         const result = await response.json()
-        console.log('Upload result:', result)
 
         // Add successfully processed files to the list
         const newFiles = result.results

@@ -1453,19 +1462,15 @@
             chunks: r.chunks
           }))
 
-          console.log('Successfully processed files:', newFiles)
           setUploadedFiles(prev => [...prev, ...newFiles])
 
           // Show errors for failed uploads
           const failedUploads = result.results.filter((r: any) => !r.success)
           if (failedUploads.length > 0) {
             console.error('Some files failed to upload:', failedUploads)
-          } else {
-            console.log('All files uploaded successfully!')
           }
         } else {
-
-          console.error('Upload failed:', response.status, response.statusText, errorText)
+          console.error('Upload failed:', response.statusText)
         }
 
         // Reset input
rag_system.py  CHANGED

@@ -17,9 +17,8 @@ try:
     from langchain_huggingface import HuggingFaceEmbeddings
     from langchain_community.vectorstores import FAISS
     from langchain.schema import Document
-except ImportError
-    print(
-    print("Install with: pip install langchain langchain-community langchain-huggingface pypdf python-docx faiss-cpu sentence-transformers")
+except ImportError:
+    print("LangChain not installed. Install with: pip install langchain langchain-community langchain-huggingface pypdf python-docx faiss-cpu sentence-transformers")
 
 class SimpleRAGSystem:
     def __init__(self):
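The guarded import is the usual optional-dependency pattern. This hunk does not show whether rag_system.py records the failure, but the RAG_AVAILABLE check in backend/api/routes.py suggests a module-level flag roughly like the sketch below; the flag name LANGCHAIN_AVAILABLE and its placement are assumptions, not code from this commit.

    # Sketch of the optional-dependency guard; LANGCHAIN_AVAILABLE is an assumed flag name.
    try:
        from langchain_huggingface import HuggingFaceEmbeddings
        from langchain_community.vectorstores import FAISS
        from langchain.schema import Document
        LANGCHAIN_AVAILABLE = True
    except ImportError:
        LANGCHAIN_AVAILABLE = False
        print("LangChain not installed. Install with: pip install langchain "
              "langchain-community langchain-huggingface pypdf python-docx "
              "faiss-cpu sentence-transformers")
    # Callers (e.g. get_rag_system) can then refuse to build the vector store
    # when the flag is False instead of crashing at import time.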
static/assets/index-cf882c3b.js  ADDED

The diff for this file is too large to render.

static/assets/index-cf882c3b.js.map  ADDED

The diff for this file is too large to render.
static/index.html  CHANGED

@@ -5,7 +5,7 @@
     <link rel="icon" type="image/svg+xml" href="/vite.svg" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <title>Edge LLM</title>
-    <script type="module" crossorigin src="/assets/index-
+    <script type="module" crossorigin src="/assets/index-cf882c3b.js"></script>
     <link rel="stylesheet" href="/assets/index-4cff54ff.css">
   </head>
   <body>