- import json
- import logging
- import mimetypes
- import os
- import shutil
- import uuid
- from datetime import datetime
- from pathlib import Path
- from typing import Iterator, List, Optional, Sequence, Union
- from fastapi import (
- Depends,
- FastAPI,
- File,
- Form,
- HTTPException,
- UploadFile,
- Request,
- status,
- APIRouter,
- )
- from fastapi.middleware.cors import CORSMiddleware
- from fastapi.concurrency import run_in_threadpool
- from pydantic import BaseModel
- import tiktoken
- from langchain.text_splitter import RecursiveCharacterTextSplitter, TokenTextSplitter
- from langchain_core.documents import Document
- from open_webui.models.files import FileModel, Files
- from open_webui.models.knowledge import Knowledges
- from open_webui.storage.provider import Storage
- from open_webui.retrieval.vector.connector import VECTOR_DB_CLIENT
- # Document loaders
- from open_webui.retrieval.loaders.main import Loader
- from open_webui.retrieval.loaders.youtube import YoutubeLoader
- # Web search engines
- from open_webui.retrieval.web.main import SearchResult
- from open_webui.retrieval.web.utils import get_web_loader
- from open_webui.retrieval.web.brave import search_brave
- from open_webui.retrieval.web.kagi import search_kagi
- from open_webui.retrieval.web.mojeek import search_mojeek
- from open_webui.retrieval.web.bocha import search_bocha
- from open_webui.retrieval.web.duckduckgo import search_duckduckgo
- from open_webui.retrieval.web.google_pse import search_google_pse
- from open_webui.retrieval.web.jina_search import search_jina
- from open_webui.retrieval.web.searchapi import search_searchapi
- from open_webui.retrieval.web.serpapi import search_serpapi
- from open_webui.retrieval.web.searxng import search_searxng
- from open_webui.retrieval.web.serper import search_serper
- from open_webui.retrieval.web.serply import search_serply
- from open_webui.retrieval.web.serpstack import search_serpstack
- from open_webui.retrieval.web.tavily import search_tavily
- from open_webui.retrieval.web.bing import search_bing
- from open_webui.retrieval.web.exa import search_exa
- from open_webui.retrieval.utils import (
- get_embedding_function,
- get_model_path,
- query_collection,
- query_collection_with_hybrid_search,
- query_doc,
- query_doc_with_hybrid_search,
- )
- from open_webui.utils.misc import (
- calculate_sha256_string,
- )
- from open_webui.utils.auth import get_admin_user, get_verified_user
- from open_webui.config import (
- ENV,
- RAG_EMBEDDING_MODEL_AUTO_UPDATE,
- RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
- RAG_RERANKING_MODEL_AUTO_UPDATE,
- RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
- UPLOAD_DIR,
- DEFAULT_LOCALE,
- )
- from open_webui.env import (
- SRC_LOG_LEVELS,
- DEVICE_TYPE,
- DOCKER,
- )
- from open_webui.constants import ERROR_MESSAGES
- log = logging.getLogger(__name__)
- log.setLevel(SRC_LOG_LEVELS["RAG"])
- ##########################################
- #
- # Utility functions
- #
- ##########################################
- def get_ef(
- engine: str,
- embedding_model: str,
- auto_update: bool = False,
- ):
- ef = None
- if embedding_model and engine == "":
- from sentence_transformers import SentenceTransformer
- try:
- ef = SentenceTransformer(
- get_model_path(embedding_model, auto_update),
- device=DEVICE_TYPE,
- trust_remote_code=RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
- )
- except Exception as e:
- log.debug(f"Error loading SentenceTransformer: {e}")
- return ef
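- # Note (editor's comment): an empty `engine` string selects the local
- # SentenceTransformer path above; for the "openai" and "ollama" engines no local
- # model is loaded here and embeddings are produced remotely via
- # get_embedding_function (see update_embedding_config below).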
- def get_rf(
- reranking_model: str,
- auto_update: bool = False,
- ):
- rf = None
- if reranking_model:
- if any(model in reranking_model for model in ["jinaai/jina-colbert-v2"]):
- try:
- from open_webui.retrieval.models.colbert import ColBERT
- rf = ColBERT(
- get_model_path(reranking_model, auto_update),
- env="docker" if DOCKER else None,
- )
- except Exception as e:
- log.error(f"ColBERT: {e}")
- raise Exception(ERROR_MESSAGES.DEFAULT(e))
- else:
- import sentence_transformers
- try:
- rf = sentence_transformers.CrossEncoder(
- get_model_path(reranking_model, auto_update),
- device=DEVICE_TYPE,
- trust_remote_code=RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
- )
- except Exception as e:
- log.error(f"CrossEncoder error: {e}")
- raise Exception(ERROR_MESSAGES.DEFAULT("CrossEncoder error"))
- return rf
- ##########################################
- #
- # API routes
- #
- ##########################################
- router = APIRouter()
- class CollectionNameForm(BaseModel):
- collection_name: Optional[str] = None
- class ProcessUrlForm(CollectionNameForm):
- url: str
- class SearchForm(CollectionNameForm):
- query: str
- @router.get("/")
- async def get_status(request: Request):
- return {
- "status": True,
- "chunk_size": request.app.state.config.CHUNK_SIZE,
- "chunk_overlap": request.app.state.config.CHUNK_OVERLAP,
- "template": request.app.state.config.RAG_TEMPLATE,
- "embedding_engine": request.app.state.config.RAG_EMBEDDING_ENGINE,
- "embedding_model": request.app.state.config.RAG_EMBEDDING_MODEL,
- "reranking_model": request.app.state.config.RAG_RERANKING_MODEL,
- "embedding_batch_size": request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
- }
- @router.get("/embedding")
- async def get_embedding_config(request: Request, user=Depends(get_admin_user)):
- return {
- "status": True,
- "embedding_engine": request.app.state.config.RAG_EMBEDDING_ENGINE,
- "embedding_model": request.app.state.config.RAG_EMBEDDING_MODEL,
- "embedding_batch_size": request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
- "openai_config": {
- "url": request.app.state.config.RAG_OPENAI_API_BASE_URL,
- "key": request.app.state.config.RAG_OPENAI_API_KEY,
- },
- "ollama_config": {
- "url": request.app.state.config.RAG_OLLAMA_BASE_URL,
- "key": request.app.state.config.RAG_OLLAMA_API_KEY,
- },
- }
- @router.get("/reranking")
- async def get_reranking_config(request: Request, user=Depends(get_admin_user)):
- return {
- "status": True,
- "reranking_model": request.app.state.config.RAG_RERANKING_MODEL,
- }
- class OpenAIConfigForm(BaseModel):
- url: str
- key: str
- class OllamaConfigForm(BaseModel):
- url: str
- key: str
- class EmbeddingModelUpdateForm(BaseModel):
- openai_config: Optional[OpenAIConfigForm] = None
- ollama_config: Optional[OllamaConfigForm] = None
- embedding_engine: str
- embedding_model: str
- embedding_batch_size: Optional[int] = 1
- @router.post("/embedding/update")
- async def update_embedding_config(
- request: Request, form_data: EmbeddingModelUpdateForm, user=Depends(get_admin_user)
- ):
- log.info(
- f"Updating embedding model: {request.app.state.config.RAG_EMBEDDING_MODEL} to {form_data.embedding_model}"
- )
- try:
- request.app.state.config.RAG_EMBEDDING_ENGINE = form_data.embedding_engine
- request.app.state.config.RAG_EMBEDDING_MODEL = form_data.embedding_model
- if request.app.state.config.RAG_EMBEDDING_ENGINE in ["ollama", "openai"]:
- if form_data.openai_config is not None:
- request.app.state.config.RAG_OPENAI_API_BASE_URL = (
- form_data.openai_config.url
- )
- request.app.state.config.RAG_OPENAI_API_KEY = (
- form_data.openai_config.key
- )
- if form_data.ollama_config is not None:
- request.app.state.config.RAG_OLLAMA_BASE_URL = (
- form_data.ollama_config.url
- )
- request.app.state.config.RAG_OLLAMA_API_KEY = (
- form_data.ollama_config.key
- )
- request.app.state.config.RAG_EMBEDDING_BATCH_SIZE = (
- form_data.embedding_batch_size
- )
- request.app.state.ef = get_ef(
- request.app.state.config.RAG_EMBEDDING_ENGINE,
- request.app.state.config.RAG_EMBEDDING_MODEL,
- )
- request.app.state.EMBEDDING_FUNCTION = get_embedding_function(
- request.app.state.config.RAG_EMBEDDING_ENGINE,
- request.app.state.config.RAG_EMBEDDING_MODEL,
- request.app.state.ef,
- (
- request.app.state.config.RAG_OPENAI_API_BASE_URL
- if request.app.state.config.RAG_EMBEDDING_ENGINE == "openai"
- else request.app.state.config.RAG_OLLAMA_BASE_URL
- ),
- (
- request.app.state.config.RAG_OPENAI_API_KEY
- if request.app.state.config.RAG_EMBEDDING_ENGINE == "openai"
- else request.app.state.config.RAG_OLLAMA_API_KEY
- ),
- request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
- )
- return {
- "status": True,
- "embedding_engine": request.app.state.config.RAG_EMBEDDING_ENGINE,
- "embedding_model": request.app.state.config.RAG_EMBEDDING_MODEL,
- "embedding_batch_size": request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
- "openai_config": {
- "url": request.app.state.config.RAG_OPENAI_API_BASE_URL,
- "key": request.app.state.config.RAG_OPENAI_API_KEY,
- },
- "ollama_config": {
- "url": request.app.state.config.RAG_OLLAMA_BASE_URL,
- "key": request.app.state.config.RAG_OLLAMA_API_KEY,
- },
- }
- except Exception as e:
- log.exception(f"Problem updating embedding model: {e}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=ERROR_MESSAGES.DEFAULT(e),
- )
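- # Illustrative request body for POST /embedding/update, derived from
- # EmbeddingModelUpdateForm above (model name, URL, and key values are placeholders):
- #   {
- #     "embedding_engine": "openai",
- #     "embedding_model": "text-embedding-3-small",
- #     "embedding_batch_size": 1,
- #     "openai_config": {"url": "https://api.openai.com/v1", "key": "sk-..."}
- #   }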
- class RerankingModelUpdateForm(BaseModel):
- reranking_model: str
- @router.post("/reranking/update")
- async def update_reranking_config(
- request: Request, form_data: RerankingModelUpdateForm, user=Depends(get_admin_user)
- ):
- log.info(
- f"Updating reranking model: {request.app.state.config.RAG_RERANKING_MODEL} to {form_data.reranking_model}"
- )
- try:
- request.app.state.config.RAG_RERANKING_MODEL = form_data.reranking_model
- try:
- request.app.state.rf = get_rf(
- request.app.state.config.RAG_RERANKING_MODEL,
- True,
- )
- except Exception as e:
- log.error(f"Error loading reranking model: {e}")
- request.app.state.config.ENABLE_RAG_HYBRID_SEARCH = False
- return {
- "status": True,
- "reranking_model": request.app.state.config.RAG_RERANKING_MODEL,
- }
- except Exception as e:
- log.exception(f"Problem updating reranking model: {e}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=ERROR_MESSAGES.DEFAULT(e),
- )
- @router.get("/config")
- async def get_rag_config(request: Request, user=Depends(get_admin_user)):
- return {
- "status": True,
- "pdf_extract_images": request.app.state.config.PDF_EXTRACT_IMAGES,
- "RAG_FULL_CONTEXT": request.app.state.config.RAG_FULL_CONTEXT,
- "enable_google_drive_integration": request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION,
- "content_extraction": {
- "engine": request.app.state.config.CONTENT_EXTRACTION_ENGINE,
- "tika_server_url": request.app.state.config.TIKA_SERVER_URL,
- },
- "chunk": {
- "text_splitter": request.app.state.config.TEXT_SPLITTER,
- "chunk_size": request.app.state.config.CHUNK_SIZE,
- "chunk_overlap": request.app.state.config.CHUNK_OVERLAP,
- },
- "file": {
- "max_size": request.app.state.config.FILE_MAX_SIZE,
- "max_count": request.app.state.config.FILE_MAX_COUNT,
- },
- "youtube": {
- "language": request.app.state.config.YOUTUBE_LOADER_LANGUAGE,
- "translation": request.app.state.YOUTUBE_LOADER_TRANSLATION,
- "proxy_url": request.app.state.config.YOUTUBE_LOADER_PROXY_URL,
- },
- "web": {
- "ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION": request.app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
- "RAG_WEB_SEARCH_FULL_CONTEXT": request.app.state.config.RAG_WEB_SEARCH_FULL_CONTEXT,
- "search": {
- "enabled": request.app.state.config.ENABLE_RAG_WEB_SEARCH,
- "drive": request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION,
- "engine": request.app.state.config.RAG_WEB_SEARCH_ENGINE,
- "searxng_query_url": request.app.state.config.SEARXNG_QUERY_URL,
- "google_pse_api_key": request.app.state.config.GOOGLE_PSE_API_KEY,
- "google_pse_engine_id": request.app.state.config.GOOGLE_PSE_ENGINE_ID,
- "brave_search_api_key": request.app.state.config.BRAVE_SEARCH_API_KEY,
- "kagi_search_api_key": request.app.state.config.KAGI_SEARCH_API_KEY,
- "mojeek_search_api_key": request.app.state.config.MOJEEK_SEARCH_API_KEY,
- "bocha_search_api_key": request.app.state.config.BOCHA_SEARCH_API_KEY,
- "serpstack_api_key": request.app.state.config.SERPSTACK_API_KEY,
- "serpstack_https": request.app.state.config.SERPSTACK_HTTPS,
- "serper_api_key": request.app.state.config.SERPER_API_KEY,
- "serply_api_key": request.app.state.config.SERPLY_API_KEY,
- "tavily_api_key": request.app.state.config.TAVILY_API_KEY,
- "searchapi_api_key": request.app.state.config.SEARCHAPI_API_KEY,
- "searchapi_engine": request.app.state.config.SEARCHAPI_ENGINE,
- "serpapi_api_key": request.app.state.config.SERPAPI_API_KEY,
- "serpapi_engine": request.app.state.config.SERPAPI_ENGINE,
- "jina_api_key": request.app.state.config.JINA_API_KEY,
- "bing_search_v7_endpoint": request.app.state.config.BING_SEARCH_V7_ENDPOINT,
- "bing_search_v7_subscription_key": request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY,
- "exa_api_key": request.app.state.config.EXA_API_KEY,
- "result_count": request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- "concurrent_requests": request.app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS,
- "domain_filter_list": request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- },
- },
- }
- class FileConfig(BaseModel):
- max_size: Optional[int] = None
- max_count: Optional[int] = None
- class ContentExtractionConfig(BaseModel):
- engine: str = ""
- tika_server_url: Optional[str] = None
- class ChunkParamUpdateForm(BaseModel):
- text_splitter: Optional[str] = None
- chunk_size: int
- chunk_overlap: int
- class YoutubeLoaderConfig(BaseModel):
- language: list[str]
- translation: Optional[str] = None
- proxy_url: str = ""
- class WebSearchConfig(BaseModel):
- enabled: bool
- engine: Optional[str] = None
- searxng_query_url: Optional[str] = None
- google_pse_api_key: Optional[str] = None
- google_pse_engine_id: Optional[str] = None
- brave_search_api_key: Optional[str] = None
- kagi_search_api_key: Optional[str] = None
- mojeek_search_api_key: Optional[str] = None
- bocha_search_api_key: Optional[str] = None
- serpstack_api_key: Optional[str] = None
- serpstack_https: Optional[bool] = None
- serper_api_key: Optional[str] = None
- serply_api_key: Optional[str] = None
- tavily_api_key: Optional[str] = None
- searchapi_api_key: Optional[str] = None
- searchapi_engine: Optional[str] = None
- serpapi_api_key: Optional[str] = None
- serpapi_engine: Optional[str] = None
- jina_api_key: Optional[str] = None
- bing_search_v7_endpoint: Optional[str] = None
- bing_search_v7_subscription_key: Optional[str] = None
- exa_api_key: Optional[str] = None
- result_count: Optional[int] = None
- concurrent_requests: Optional[int] = None
- trust_env: Optional[bool] = None
- domain_filter_list: Optional[List[str]] = []
- class WebConfig(BaseModel):
- search: WebSearchConfig
- ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION: Optional[bool] = None
- RAG_WEB_SEARCH_FULL_CONTEXT: Optional[bool] = None
- class ConfigUpdateForm(BaseModel):
- RAG_FULL_CONTEXT: Optional[bool] = None
- pdf_extract_images: Optional[bool] = None
- enable_google_drive_integration: Optional[bool] = None
- file: Optional[FileConfig] = None
- content_extraction: Optional[ContentExtractionConfig] = None
- chunk: Optional[ChunkParamUpdateForm] = None
- youtube: Optional[YoutubeLoaderConfig] = None
- web: Optional[WebConfig] = None
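- # Illustrative (partial) request body for POST /config/update, derived from
- # ConfigUpdateForm above; sections left out keep their current values
- # (example values are placeholders):
- #   {
- #     "pdf_extract_images": true,
- #     "chunk": {"text_splitter": "token", "chunk_size": 1000, "chunk_overlap": 100},
- #     "file": {"max_size": 10, "max_count": 5}
- #   }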
- @router.post("/config/update")
- async def update_rag_config(
- request: Request, form_data: ConfigUpdateForm, user=Depends(get_admin_user)
- ):
- request.app.state.config.PDF_EXTRACT_IMAGES = (
- form_data.pdf_extract_images
- if form_data.pdf_extract_images is not None
- else request.app.state.config.PDF_EXTRACT_IMAGES
- )
- request.app.state.config.RAG_FULL_CONTEXT = (
- form_data.RAG_FULL_CONTEXT
- if form_data.RAG_FULL_CONTEXT is not None
- else request.app.state.config.RAG_FULL_CONTEXT
- )
- request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION = (
- form_data.enable_google_drive_integration
- if form_data.enable_google_drive_integration is not None
- else request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION
- )
- if form_data.file is not None:
- request.app.state.config.FILE_MAX_SIZE = form_data.file.max_size
- request.app.state.config.FILE_MAX_COUNT = form_data.file.max_count
- if form_data.content_extraction is not None:
- log.info(f"Updating text settings: {form_data.content_extraction}")
- request.app.state.config.CONTENT_EXTRACTION_ENGINE = (
- form_data.content_extraction.engine
- )
- request.app.state.config.TIKA_SERVER_URL = (
- form_data.content_extraction.tika_server_url
- )
- if form_data.chunk is not None:
- request.app.state.config.TEXT_SPLITTER = form_data.chunk.text_splitter
- request.app.state.config.CHUNK_SIZE = form_data.chunk.chunk_size
- request.app.state.config.CHUNK_OVERLAP = form_data.chunk.chunk_overlap
- if form_data.youtube is not None:
- request.app.state.config.YOUTUBE_LOADER_LANGUAGE = form_data.youtube.language
- request.app.state.config.YOUTUBE_LOADER_PROXY_URL = form_data.youtube.proxy_url
- request.app.state.YOUTUBE_LOADER_TRANSLATION = form_data.youtube.translation
- if form_data.web is not None:
- request.app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = (
- # Note: When UI "Bypass SSL verification for Websites"=True then ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION=False
- form_data.web.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION
- )
- request.app.state.config.ENABLE_RAG_WEB_SEARCH = form_data.web.search.enabled
- request.app.state.config.RAG_WEB_SEARCH_ENGINE = form_data.web.search.engine
- request.app.state.config.RAG_WEB_SEARCH_FULL_CONTEXT = (
- form_data.web.RAG_WEB_SEARCH_FULL_CONTEXT
- )
- request.app.state.config.SEARXNG_QUERY_URL = (
- form_data.web.search.searxng_query_url
- )
- request.app.state.config.GOOGLE_PSE_API_KEY = (
- form_data.web.search.google_pse_api_key
- )
- request.app.state.config.GOOGLE_PSE_ENGINE_ID = (
- form_data.web.search.google_pse_engine_id
- )
- request.app.state.config.BRAVE_SEARCH_API_KEY = (
- form_data.web.search.brave_search_api_key
- )
- request.app.state.config.KAGI_SEARCH_API_KEY = (
- form_data.web.search.kagi_search_api_key
- )
- request.app.state.config.MOJEEK_SEARCH_API_KEY = (
- form_data.web.search.mojeek_search_api_key
- )
- request.app.state.config.BOCHA_SEARCH_API_KEY = (
- form_data.web.search.bocha_search_api_key
- )
- request.app.state.config.SERPSTACK_API_KEY = (
- form_data.web.search.serpstack_api_key
- )
- request.app.state.config.SERPSTACK_HTTPS = form_data.web.search.serpstack_https
- request.app.state.config.SERPER_API_KEY = form_data.web.search.serper_api_key
- request.app.state.config.SERPLY_API_KEY = form_data.web.search.serply_api_key
- request.app.state.config.TAVILY_API_KEY = form_data.web.search.tavily_api_key
- request.app.state.config.SEARCHAPI_API_KEY = (
- form_data.web.search.searchapi_api_key
- )
- request.app.state.config.SEARCHAPI_ENGINE = (
- form_data.web.search.searchapi_engine
- )
- request.app.state.config.SERPAPI_API_KEY = form_data.web.search.serpapi_api_key
- request.app.state.config.SERPAPI_ENGINE = form_data.web.search.serpapi_engine
- request.app.state.config.JINA_API_KEY = form_data.web.search.jina_api_key
- request.app.state.config.BING_SEARCH_V7_ENDPOINT = (
- form_data.web.search.bing_search_v7_endpoint
- )
- request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY = (
- form_data.web.search.bing_search_v7_subscription_key
- )
- request.app.state.config.EXA_API_KEY = form_data.web.search.exa_api_key
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT = (
- form_data.web.search.result_count
- )
- request.app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS = (
- form_data.web.search.concurrent_requests
- )
- request.app.state.config.RAG_WEB_SEARCH_TRUST_ENV = (
- form_data.web.search.trust_env
- )
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST = (
- form_data.web.search.domain_filter_list
- )
- return {
- "status": True,
- "pdf_extract_images": request.app.state.config.PDF_EXTRACT_IMAGES,
- "RAG_FULL_CONTEXT": request.app.state.config.RAG_FULL_CONTEXT,
- "file": {
- "max_size": request.app.state.config.FILE_MAX_SIZE,
- "max_count": request.app.state.config.FILE_MAX_COUNT,
- },
- "content_extraction": {
- "engine": request.app.state.config.CONTENT_EXTRACTION_ENGINE,
- "tika_server_url": request.app.state.config.TIKA_SERVER_URL,
- },
- "chunk": {
- "text_splitter": request.app.state.config.TEXT_SPLITTER,
- "chunk_size": request.app.state.config.CHUNK_SIZE,
- "chunk_overlap": request.app.state.config.CHUNK_OVERLAP,
- },
- "youtube": {
- "language": request.app.state.config.YOUTUBE_LOADER_LANGUAGE,
- "proxy_url": request.app.state.config.YOUTUBE_LOADER_PROXY_URL,
- "translation": request.app.state.YOUTUBE_LOADER_TRANSLATION,
- },
- "web": {
- "ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION": request.app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
- "RAG_WEB_SEARCH_FULL_CONTEXT": request.app.state.config.RAG_WEB_SEARCH_FULL_CONTEXT,
- "search": {
- "enabled": request.app.state.config.ENABLE_RAG_WEB_SEARCH,
- "engine": request.app.state.config.RAG_WEB_SEARCH_ENGINE,
- "searxng_query_url": request.app.state.config.SEARXNG_QUERY_URL,
- "google_pse_api_key": request.app.state.config.GOOGLE_PSE_API_KEY,
- "google_pse_engine_id": request.app.state.config.GOOGLE_PSE_ENGINE_ID,
- "brave_search_api_key": request.app.state.config.BRAVE_SEARCH_API_KEY,
- "kagi_search_api_key": request.app.state.config.KAGI_SEARCH_API_KEY,
- "mojeek_search_api_key": request.app.state.config.MOJEEK_SEARCH_API_KEY,
- "bocha_search_api_key": request.app.state.config.BOCHA_SEARCH_API_KEY,
- "serpstack_api_key": request.app.state.config.SERPSTACK_API_KEY,
- "serpstack_https": request.app.state.config.SERPSTACK_HTTPS,
- "serper_api_key": request.app.state.config.SERPER_API_KEY,
- "serply_api_key": request.app.state.config.SERPLY_API_KEY,
- "searchapi_api_key": request.app.state.config.SEARCHAPI_API_KEY,
- "searchapi_engine": request.app.state.config.SEARCHAPI_ENGINE,
- "serpapi_api_key": request.app.state.config.SERPAPI_API_KEY,
- "serpapi_engine": request.app.state.config.SERPAPI_ENGINE,
- "tavily_api_key": request.app.state.config.TAVILY_API_KEY,
- "jina_api_key": request.app.state.config.JINA_API_KEY,
- "bing_search_v7_endpoint": request.app.state.config.BING_SEARCH_V7_ENDPOINT,
- "bing_search_v7_subscription_key": request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY,
- "exa_api_key": request.app.state.config.EXA_API_KEY,
- "result_count": request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- "concurrent_requests": request.app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS,
- "trust_env": request.app.state.config.RAG_WEB_SEARCH_TRUST_ENV,
- "domain_filter_list": request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- },
- },
- }
- @router.get("/template")
- async def get_rag_template(request: Request, user=Depends(get_verified_user)):
- return {
- "status": True,
- "template": request.app.state.config.RAG_TEMPLATE,
- }
- @router.get("/query/settings")
- async def get_query_settings(request: Request, user=Depends(get_admin_user)):
- return {
- "status": True,
- "template": request.app.state.config.RAG_TEMPLATE,
- "k": request.app.state.config.TOP_K,
- "r": request.app.state.config.RELEVANCE_THRESHOLD,
- "hybrid": request.app.state.config.ENABLE_RAG_HYBRID_SEARCH,
- }
- class QuerySettingsForm(BaseModel):
- k: Optional[int] = None
- r: Optional[float] = None
- template: Optional[str] = None
- hybrid: Optional[bool] = None
- @router.post("/query/settings/update")
- async def update_query_settings(
- request: Request, form_data: QuerySettingsForm, user=Depends(get_admin_user)
- ):
- request.app.state.config.RAG_TEMPLATE = form_data.template
- request.app.state.config.TOP_K = form_data.k if form_data.k else 4
- request.app.state.config.RELEVANCE_THRESHOLD = form_data.r if form_data.r else 0.0
- request.app.state.config.ENABLE_RAG_HYBRID_SEARCH = (
- form_data.hybrid if form_data.hybrid else False
- )
- return {
- "status": True,
- "template": request.app.state.config.RAG_TEMPLATE,
- "k": request.app.state.config.TOP_K,
- "r": request.app.state.config.RELEVANCE_THRESHOLD,
- "hybrid": request.app.state.config.ENABLE_RAG_HYBRID_SEARCH,
- }
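- # Illustrative request body for POST /query/settings/update, derived from
- # QuerySettingsForm above; k falls back to 4 and r to 0.0 when omitted
- # (values are placeholders):
- #   {"template": "<rag prompt template>", "k": 4, "r": 0.0, "hybrid": false}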
- ####################################
- #
- # Document process and retrieval
- #
- ####################################
- def save_docs_to_vector_db(
- request: Request,
- docs,
- collection_name,
- metadata: Optional[dict] = None,
- overwrite: bool = False,
- split: bool = True,
- add: bool = False,
- user=None,
- ) -> bool:
- def _get_docs_info(docs: list[Document]) -> str:
- docs_info = set()
- # Trying to select relevant metadata identifying the document.
- for doc in docs:
- metadata = getattr(doc, "metadata", {})
- doc_name = metadata.get("name", "")
- if not doc_name:
- doc_name = metadata.get("title", "")
- if not doc_name:
- doc_name = metadata.get("source", "")
- if doc_name:
- docs_info.add(doc_name)
- return ", ".join(docs_info)
- log.info(
- f"save_docs_to_vector_db: document {_get_docs_info(docs)} {collection_name}"
- )
- # Check if entries with the same hash (metadata.hash) already exist
- if metadata and "hash" in metadata:
- result = VECTOR_DB_CLIENT.query(
- collection_name=collection_name,
- filter={"hash": metadata["hash"]},
- )
- if result is not None:
- existing_doc_ids = result.ids[0]
- if existing_doc_ids:
- log.info(f"Document with hash {metadata['hash']} already exists")
- raise ValueError(ERROR_MESSAGES.DUPLICATE_CONTENT)
- if split:
- if request.app.state.config.TEXT_SPLITTER in ["", "character"]:
- text_splitter = RecursiveCharacterTextSplitter(
- chunk_size=request.app.state.config.CHUNK_SIZE,
- chunk_overlap=request.app.state.config.CHUNK_OVERLAP,
- add_start_index=True,
- )
- elif request.app.state.config.TEXT_SPLITTER == "token":
- log.info(
- f"Using token text splitter: {request.app.state.config.TIKTOKEN_ENCODING_NAME}"
- )
- tiktoken.get_encoding(str(request.app.state.config.TIKTOKEN_ENCODING_NAME))
- text_splitter = TokenTextSplitter(
- encoding_name=str(request.app.state.config.TIKTOKEN_ENCODING_NAME),
- chunk_size=request.app.state.config.CHUNK_SIZE,
- chunk_overlap=request.app.state.config.CHUNK_OVERLAP,
- add_start_index=True,
- )
- else:
- raise ValueError(ERROR_MESSAGES.DEFAULT("Invalid text splitter"))
- docs = text_splitter.split_documents(docs)
- if len(docs) == 0:
- raise ValueError(ERROR_MESSAGES.EMPTY_CONTENT)
- texts = [doc.page_content for doc in docs]
- metadatas = [
- {
- **doc.metadata,
- **(metadata if metadata else {}),
- "embedding_config": json.dumps(
- {
- "engine": request.app.state.config.RAG_EMBEDDING_ENGINE,
- "model": request.app.state.config.RAG_EMBEDDING_MODEL,
- }
- ),
- }
- for doc in docs
- ]
- # ChromaDB does not like datetime formats
- # for meta-data so convert them to string.
- for meta in metadatas:
- for key, value in meta.items():
- if isinstance(value, (datetime, list, dict)):
- meta[key] = str(value)
- try:
- if VECTOR_DB_CLIENT.has_collection(collection_name=collection_name):
- log.info(f"collection {collection_name} already exists")
- if overwrite:
- VECTOR_DB_CLIENT.delete_collection(collection_name=collection_name)
- log.info(f"deleting existing collection {collection_name}")
- elif add is False:
- log.info(
- f"collection {collection_name} already exists, overwrite is False and add is False"
- )
- return True
- log.info(f"adding to collection {collection_name}")
- embedding_function = get_embedding_function(
- request.app.state.config.RAG_EMBEDDING_ENGINE,
- request.app.state.config.RAG_EMBEDDING_MODEL,
- request.app.state.ef,
- (
- request.app.state.config.RAG_OPENAI_API_BASE_URL
- if request.app.state.config.RAG_EMBEDDING_ENGINE == "openai"
- else request.app.state.config.RAG_OLLAMA_BASE_URL
- ),
- (
- request.app.state.config.RAG_OPENAI_API_KEY
- if request.app.state.config.RAG_EMBEDDING_ENGINE == "openai"
- else request.app.state.config.RAG_OLLAMA_API_KEY
- ),
- request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
- )
- embeddings = embedding_function(
- list(map(lambda x: x.replace("\n", " "), texts)), user=user
- )
- items = [
- {
- "id": str(uuid.uuid4()),
- "text": text,
- "vector": embeddings[idx],
- "metadata": metadatas[idx],
- }
- for idx, text in enumerate(texts)
- ]
- VECTOR_DB_CLIENT.insert(
- collection_name=collection_name,
- items=items,
- )
- return True
- except Exception as e:
- log.exception(e)
- raise e
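- # Illustrative call (mirrors how process_file below uses this helper; the
- # collection name and metadata values are placeholders):
- #   save_docs_to_vector_db(
- #       request,
- #       docs=docs,
- #       collection_name="file-<file_id>",
- #       metadata={"file_id": "<file_id>", "name": "<filename>", "hash": "<sha256>"},
- #       add=False,
- #       user=user,
- #   )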
- class ProcessFileForm(BaseModel):
- file_id: str
- content: Optional[str] = None
- collection_name: Optional[str] = None
- @router.post("/process/file")
- def process_file(
- request: Request,
- form_data: ProcessFileForm,
- user=Depends(get_verified_user),
- ):
- try:
- file = Files.get_file_by_id(form_data.file_id)
- collection_name = form_data.collection_name
- if collection_name is None:
- collection_name = f"file-{file.id}"
- if form_data.content:
- # Update the content in the file
- # Usage: /files/{file_id}/data/content/update
- VECTOR_DB_CLIENT.delete_collection(collection_name=f"file-{file.id}")
- docs = [
- Document(
- page_content=form_data.content.replace("<br/>", "\n"),
- metadata={
- **file.meta,
- "name": file.filename,
- "created_by": file.user_id,
- "file_id": file.id,
- "source": file.filename,
- },
- )
- ]
- text_content = form_data.content
- elif form_data.collection_name:
- # Check if the file has already been processed and save the content
- # Usage: /knowledge/{id}/file/add, /knowledge/{id}/file/update
- result = VECTOR_DB_CLIENT.query(
- collection_name=f"file-{file.id}", filter={"file_id": file.id}
- )
- if result is not None and len(result.ids[0]) > 0:
- docs = [
- Document(
- page_content=result.documents[0][idx],
- metadata=result.metadatas[0][idx],
- )
- for idx, id in enumerate(result.ids[0])
- ]
- else:
- docs = [
- Document(
- page_content=file.data.get("content", ""),
- metadata={
- **file.meta,
- "name": file.filename,
- "created_by": file.user_id,
- "file_id": file.id,
- "source": file.filename,
- },
- )
- ]
- text_content = file.data.get("content", "")
- else:
- # Process the file and save the content
- # Usage: /files/
- file_path = file.path
- if file_path:
- file_path = Storage.get_file(file_path)
- loader = Loader(
- engine=request.app.state.config.CONTENT_EXTRACTION_ENGINE,
- TIKA_SERVER_URL=request.app.state.config.TIKA_SERVER_URL,
- PDF_EXTRACT_IMAGES=request.app.state.config.PDF_EXTRACT_IMAGES,
- )
- docs = loader.load(
- file.filename, file.meta.get("content_type"), file_path
- )
- docs = [
- Document(
- page_content=doc.page_content,
- metadata={
- **doc.metadata,
- "name": file.filename,
- "created_by": file.user_id,
- "file_id": file.id,
- "source": file.filename,
- },
- )
- for doc in docs
- ]
- else:
- docs = [
- Document(
- page_content=file.data.get("content", ""),
- metadata={
- **file.meta,
- "name": file.filename,
- "created_by": file.user_id,
- "file_id": file.id,
- "source": file.filename,
- },
- )
- ]
- text_content = " ".join([doc.page_content for doc in docs])
- log.debug(f"text_content: {text_content}")
- Files.update_file_data_by_id(
- file.id,
- {"content": text_content},
- )
- hash = calculate_sha256_string(text_content)
- Files.update_file_hash_by_id(file.id, hash)
- try:
- result = save_docs_to_vector_db(
- request,
- docs=docs,
- collection_name=collection_name,
- metadata={
- "file_id": file.id,
- "name": file.filename,
- "hash": hash,
- },
- add=bool(form_data.collection_name),
- user=user,
- )
- if result:
- Files.update_file_metadata_by_id(
- file.id,
- {
- "collection_name": collection_name,
- },
- )
- return {
- "status": True,
- "collection_name": collection_name,
- "filename": file.filename,
- "content": text_content,
- }
- except Exception as e:
- raise e
- except Exception as e:
- log.exception(e)
- if "No pandoc was found" in str(e):
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail=ERROR_MESSAGES.PANDOC_NOT_INSTALLED,
- )
- else:
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail=str(e),
- )
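- # Illustrative request body for POST /process/file, derived from
- # ProcessFileForm above (the id and collection name are placeholders):
- #   {"file_id": "<file_id>", "collection_name": "my-knowledge"}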
- class ProcessTextForm(BaseModel):
- name: str
- content: str
- collection_name: Optional[str] = None
- @router.post("/process/text")
- def process_text(
- request: Request,
- form_data: ProcessTextForm,
- user=Depends(get_verified_user),
- ):
- collection_name = form_data.collection_name
- if collection_name is None:
- collection_name = calculate_sha256_string(form_data.content)
- docs = [
- Document(
- page_content=form_data.content,
- metadata={"name": form_data.name, "created_by": user.id},
- )
- ]
- text_content = form_data.content
- log.debug(f"text_content: {text_content}")
- result = save_docs_to_vector_db(request, docs, collection_name, user=user)
- if result:
- return {
- "status": True,
- "collection_name": collection_name,
- "content": text_content,
- }
- else:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=ERROR_MESSAGES.DEFAULT(),
- )
- @router.post("/process/youtube")
- def process_youtube_video(
- request: Request, form_data: ProcessUrlForm, user=Depends(get_verified_user)
- ):
- try:
- collection_name = form_data.collection_name
- if not collection_name:
- collection_name = calculate_sha256_string(form_data.url)[:63]
- loader = YoutubeLoader(
- form_data.url,
- language=request.app.state.config.YOUTUBE_LOADER_LANGUAGE,
- proxy_url=request.app.state.config.YOUTUBE_LOADER_PROXY_URL,
- )
- docs = loader.load()
- content = " ".join([doc.page_content for doc in docs])
- log.debug(f"text_content: {content}")
- save_docs_to_vector_db(
- request, docs, collection_name, overwrite=True, user=user
- )
- return {
- "status": True,
- "collection_name": collection_name,
- "filename": form_data.url,
- "file": {
- "data": {
- "content": content,
- },
- "meta": {
- "name": form_data.url,
- },
- },
- }
- except Exception as e:
- log.exception(e)
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail=ERROR_MESSAGES.DEFAULT(e),
- )
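- # Illustrative request body for POST /process/youtube, derived from
- # ProcessUrlForm above (the URL is a placeholder):
- #   {"url": "https://www.youtube.com/watch?v=<video_id>"}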
- @router.post("/process/web")
- def process_web(
- request: Request, form_data: ProcessUrlForm, user=Depends(get_verified_user)
- ):
- try:
- collection_name = form_data.collection_name
- if not collection_name:
- collection_name = calculate_sha256_string(form_data.url)[:63]
- loader = get_web_loader(
- form_data.url,
- verify_ssl=request.app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
- requests_per_second=request.app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS,
- )
- docs = loader.load()
- content = " ".join([doc.page_content for doc in docs])
- log.debug(f"text_content: {content}")
- save_docs_to_vector_db(
- request, docs, collection_name, overwrite=True, user=user
- )
- return {
- "status": True,
- "collection_name": collection_name,
- "filename": form_data.url,
- "file": {
- "data": {
- "content": content,
- },
- "meta": {
- "name": form_data.url,
- },
- },
- }
- except Exception as e:
- log.exception(e)
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail=ERROR_MESSAGES.DEFAULT(e),
- )
- def search_web(request: Request, engine: str, query: str) -> list[SearchResult]:
- """Search the web using the selected search engine and return the results as a list of SearchResult objects.
- The `engine` argument picks the provider; each provider requires its corresponding
- credential to be present in the application config (typically sourced from environment variables):
- SEARXNG_QUERY_URL
- GOOGLE_PSE_API_KEY + GOOGLE_PSE_ENGINE_ID
- BRAVE_SEARCH_API_KEY
- KAGI_SEARCH_API_KEY
- MOJEEK_SEARCH_API_KEY
- BOCHA_SEARCH_API_KEY
- SERPSTACK_API_KEY
- SERPER_API_KEY
- SERPLY_API_KEY
- TAVILY_API_KEY
- EXA_API_KEY
- SEARCHAPI_API_KEY + SEARCHAPI_ENGINE (by default `google`)
- SERPAPI_API_KEY + SERPAPI_ENGINE (by default `google`)
- Args:
- engine (str): The search engine to use
- query (str): The query to search for
- """
- # TODO: add playwright to search the web
- if engine == "searxng":
- if request.app.state.config.SEARXNG_QUERY_URL:
- return search_searxng(
- request.app.state.config.SEARXNG_QUERY_URL,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- else:
- raise Exception("No SEARXNG_QUERY_URL found in environment variables")
- elif engine == "google_pse":
- if (
- request.app.state.config.GOOGLE_PSE_API_KEY
- and request.app.state.config.GOOGLE_PSE_ENGINE_ID
- ):
- return search_google_pse(
- request.app.state.config.GOOGLE_PSE_API_KEY,
- request.app.state.config.GOOGLE_PSE_ENGINE_ID,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- else:
- raise Exception(
- "No GOOGLE_PSE_API_KEY or GOOGLE_PSE_ENGINE_ID found in environment variables"
- )
- elif engine == "brave":
- if request.app.state.config.BRAVE_SEARCH_API_KEY:
- return search_brave(
- request.app.state.config.BRAVE_SEARCH_API_KEY,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- else:
- raise Exception("No BRAVE_SEARCH_API_KEY found in environment variables")
- elif engine == "kagi":
- if request.app.state.config.KAGI_SEARCH_API_KEY:
- return search_kagi(
- request.app.state.config.KAGI_SEARCH_API_KEY,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- else:
- raise Exception("No KAGI_SEARCH_API_KEY found in environment variables")
- elif engine == "mojeek":
- if request.app.state.config.MOJEEK_SEARCH_API_KEY:
- return search_mojeek(
- request.app.state.config.MOJEEK_SEARCH_API_KEY,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- else:
- raise Exception("No MOJEEK_SEARCH_API_KEY found in environment variables")
- elif engine == "bocha":
- if request.app.state.config.BOCHA_SEARCH_API_KEY:
- return search_bocha(
- request.app.state.config.BOCHA_SEARCH_API_KEY,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- else:
- raise Exception("No BOCHA_SEARCH_API_KEY found in environment variables")
- elif engine == "serpstack":
- if request.app.state.config.SERPSTACK_API_KEY:
- return search_serpstack(
- request.app.state.config.SERPSTACK_API_KEY,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- https_enabled=request.app.state.config.SERPSTACK_HTTPS,
- )
- else:
- raise Exception("No SERPSTACK_API_KEY found in environment variables")
- elif engine == "serper":
- if request.app.state.config.SERPER_API_KEY:
- return search_serper(
- request.app.state.config.SERPER_API_KEY,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- else:
- raise Exception("No SERPER_API_KEY found in environment variables")
- elif engine == "serply":
- if request.app.state.config.SERPLY_API_KEY:
- return search_serply(
- request.app.state.config.SERPLY_API_KEY,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- else:
- raise Exception("No SERPLY_API_KEY found in environment variables")
- elif engine == "duckduckgo":
- return search_duckduckgo(
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- elif engine == "tavily":
- if request.app.state.config.TAVILY_API_KEY:
- return search_tavily(
- request.app.state.config.TAVILY_API_KEY,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- else:
- raise Exception("No TAVILY_API_KEY found in environment variables")
- elif engine == "searchapi":
- if request.app.state.config.SEARCHAPI_API_KEY:
- return search_searchapi(
- request.app.state.config.SEARCHAPI_API_KEY,
- request.app.state.config.SEARCHAPI_ENGINE,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- else:
- raise Exception("No SEARCHAPI_API_KEY found in environment variables")
- elif engine == "serpapi":
- if request.app.state.config.SERPAPI_API_KEY:
- return search_serpapi(
- request.app.state.config.SERPAPI_API_KEY,
- request.app.state.config.SERPAPI_ENGINE,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- else:
- raise Exception("No SERPAPI_API_KEY found in environment variables")
- elif engine == "jina":
- return search_jina(
- request.app.state.config.JINA_API_KEY,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- )
- elif engine == "bing":
- return search_bing(
- request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY,
- request.app.state.config.BING_SEARCH_V7_ENDPOINT,
- str(DEFAULT_LOCALE),
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- elif engine == "exa":
- return search_exa(
- request.app.state.config.EXA_API_KEY,
- query,
- request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
- request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
- )
- else:
- raise Exception("No search engine API key found in environment variables")
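- # Illustrative call (this is how process_web_search below invokes the helper;
- # the engine must match one of the branches above and have its credential
- # configured; "duckduckgo" needs no key; the query is a placeholder):
- #   results = search_web(request, "duckduckgo", "open webui retrieval")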
- @router.post("/process/web/search")
- async def process_web_search(
- request: Request, form_data: SearchForm, user=Depends(get_verified_user)
- ):
- try:
- log.info(
- f"trying web search with engine {request.app.state.config.RAG_WEB_SEARCH_ENGINE!r} for query {form_data.query!r}"
- )
- web_results = search_web(
- request, request.app.state.config.RAG_WEB_SEARCH_ENGINE, form_data.query
- )
- except Exception as e:
- log.exception(e)
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail=ERROR_MESSAGES.WEB_SEARCH_ERROR(e),
- )
- log.debug(f"web_results: {web_results}")
- try:
- collection_name = form_data.collection_name
- if not collection_name:
- collection_name = f"web-search-{calculate_sha256_string(form_data.query)}"[
- :63
- ]
- urls = [result.link for result in web_results]
- loader = get_web_loader(
- urls,
- verify_ssl=request.app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
- requests_per_second=request.app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS,
- trust_env=request.app.state.config.RAG_WEB_SEARCH_TRUST_ENV,
- )
- docs = await loader.aload()
- if request.app.state.config.RAG_WEB_SEARCH_FULL_CONTEXT:
- return {
- "status": True,
- "docs": [
- {
- "content": doc.page_content,
- "metadata": doc.metadata,
- }
- for doc in docs
- ],
- "filenames": urls,
- "loaded_count": len(docs),
- }
- else:
- await run_in_threadpool(
- save_docs_to_vector_db,
- request,
- docs,
- collection_name,
- overwrite=True,
- user=user,
- )
- return {
- "status": True,
- "collection_name": collection_name,
- "filenames": urls,
- "loaded_count": len(docs),
- }
- except Exception as e:
- log.exception(e)
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail=ERROR_MESSAGES.DEFAULT(e),
- )
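- # Illustrative request body for POST /process/web/search, derived from
- # SearchForm above; collection_name may be omitted to derive one from the
- # query hash (the query is a placeholder):
- #   {"query": "latest fastapi release notes"}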
- class QueryDocForm(BaseModel):
- collection_name: str
- query: str
- k: Optional[int] = None
- r: Optional[float] = None
- hybrid: Optional[bool] = None
- @router.post("/query/doc")
- def query_doc_handler(
- request: Request,
- form_data: QueryDocForm,
- user=Depends(get_verified_user),
- ):
- try:
- if request.app.state.config.ENABLE_RAG_HYBRID_SEARCH:
- return query_doc_with_hybrid_search(
- collection_name=form_data.collection_name,
- query=form_data.query,
- embedding_function=lambda query: request.app.state.EMBEDDING_FUNCTION(
- query, user=user
- ),
- k=form_data.k if form_data.k else request.app.state.config.TOP_K,
- reranking_function=request.app.state.rf,
- r=(
- form_data.r
- if form_data.r
- else request.app.state.config.RELEVANCE_THRESHOLD
- ),
- user=user,
- )
- else:
- return query_doc(
- collection_name=form_data.collection_name,
- query_embedding=request.app.state.EMBEDDING_FUNCTION(
- form_data.query, user=user
- ),
- k=form_data.k if form_data.k else request.app.state.config.TOP_K,
- user=user,
- )
- except Exception as e:
- log.exception(e)
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail=ERROR_MESSAGES.DEFAULT(e),
- )
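- # Illustrative request body for POST /query/doc, derived from QueryDocForm
- # above; k and r fall back to TOP_K and RELEVANCE_THRESHOLD when omitted
- # (collection name and query are placeholders):
- #   {"collection_name": "file-<file_id>", "query": "what does the report conclude?", "k": 4}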
- class QueryCollectionsForm(BaseModel):
- collection_names: list[str]
- query: str
- k: Optional[int] = None
- r: Optional[float] = None
- hybrid: Optional[bool] = None
- @router.post("/query/collection")
- def query_collection_handler(
- request: Request,
- form_data: QueryCollectionsForm,
- user=Depends(get_verified_user),
- ):
- try:
- if request.app.state.config.ENABLE_RAG_HYBRID_SEARCH:
- return query_collection_with_hybrid_search(
- collection_names=form_data.collection_names,
- queries=[form_data.query],
- embedding_function=lambda query: request.app.state.EMBEDDING_FUNCTION(
- query, user=user
- ),
- k=form_data.k if form_data.k else request.app.state.config.TOP_K,
- reranking_function=request.app.state.rf,
- r=(
- form_data.r
- if form_data.r
- else request.app.state.config.RELEVANCE_THRESHOLD
- ),
- )
- else:
- return query_collection(
- collection_names=form_data.collection_names,
- queries=[form_data.query],
- embedding_function=lambda query: request.app.state.EMBEDDING_FUNCTION(
- query, user=user
- ),
- k=form_data.k if form_data.k else request.app.state.config.TOP_K,
- )
- except Exception as e:
- log.exception(e)
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail=ERROR_MESSAGES.DEFAULT(e),
- )
- ####################################
- #
- # Vector DB operations
- #
- ####################################
- class DeleteForm(BaseModel):
- collection_name: str
- file_id: str
- @router.post("/delete")
- def delete_entries_from_collection(form_data: DeleteForm, user=Depends(get_admin_user)):
- try:
- if VECTOR_DB_CLIENT.has_collection(collection_name=form_data.collection_name):
- file = Files.get_file_by_id(form_data.file_id)
- hash = file.hash
- VECTOR_DB_CLIENT.delete(
- collection_name=form_data.collection_name,
- metadata={"hash": hash},
- )
- return {"status": True}
- else:
- return {"status": False}
- except Exception as e:
- log.exception(e)
- return {"status": False}
- @router.post("/reset/db")
- def reset_vector_db(user=Depends(get_admin_user)):
- VECTOR_DB_CLIENT.reset()
- Knowledges.delete_all_knowledge()
- @router.post("/reset/uploads")
- def reset_upload_dir(user=Depends(get_admin_user)) -> bool:
- folder = f"{UPLOAD_DIR}"
- try:
- # Check if the directory exists
- if os.path.exists(folder):
- # Iterate over all the files and directories in the specified directory
- for filename in os.listdir(folder):
- file_path = os.path.join(folder, filename)
- try:
- if os.path.isfile(file_path) or os.path.islink(file_path):
- os.unlink(file_path) # Remove the file or link
- elif os.path.isdir(file_path):
- shutil.rmtree(file_path) # Remove the directory
- except Exception as e:
- log.error(f"Failed to delete {file_path}. Reason: {e}")
- else:
- log.warning(f"The directory {folder} does not exist")
- except Exception as e:
- log.exception(f"Failed to process the directory {folder}. Reason: {e}")
- return True
- if ENV == "dev":
- @router.get("/ef/{text}")
- async def get_embeddings(request: Request, text: Optional[str] = "Hello World!"):
- return {"result": request.app.state.EMBEDDING_FUNCTION(text)}
- class BatchProcessFilesForm(BaseModel):
- files: List[FileModel]
- collection_name: str
- class BatchProcessFilesResult(BaseModel):
- file_id: str
- status: str
- error: Optional[str] = None
- class BatchProcessFilesResponse(BaseModel):
- results: List[BatchProcessFilesResult]
- errors: List[BatchProcessFilesResult]
- @router.post("/process/files/batch")
- def process_files_batch(
- request: Request,
- form_data: BatchProcessFilesForm,
- user=Depends(get_verified_user),
- ) -> BatchProcessFilesResponse:
- """
- Process a batch of files and save them to the vector database.
- """
- results: List[BatchProcessFilesResult] = []
- errors: List[BatchProcessFilesResult] = []
- collection_name = form_data.collection_name
- # Prepare all documents first
- all_docs: List[Document] = []
- for file in form_data.files:
- try:
- text_content = file.data.get("content", "")
- docs: List[Document] = [
- Document(
- page_content=text_content.replace("<br/>", "\n"),
- metadata={
- **file.meta,
- "name": file.filename,
- "created_by": file.user_id,
- "file_id": file.id,
- "source": file.filename,
- },
- )
- ]
- hash = calculate_sha256_string(text_content)
- Files.update_file_hash_by_id(file.id, hash)
- Files.update_file_data_by_id(file.id, {"content": text_content})
- all_docs.extend(docs)
- results.append(BatchProcessFilesResult(file_id=file.id, status="prepared"))
- except Exception as e:
- log.error(f"process_files_batch: Error processing file {file.id}: {str(e)}")
- errors.append(
- BatchProcessFilesResult(file_id=file.id, status="failed", error=str(e))
- )
- # Save all documents in one batch
- if all_docs:
- try:
- save_docs_to_vector_db(
- request=request,
- docs=all_docs,
- collection_name=collection_name,
- add=True,
- user=user,
- )
- # Update all files with collection name
- for result in results:
- Files.update_file_metadata_by_id(
- result.file_id, {"collection_name": collection_name}
- )
- result.status = "completed"
- except Exception as e:
- log.error(
- f"process_files_batch: Error saving documents to vector DB: {str(e)}"
- )
- for result in results:
- result.status = "failed"
- errors.append(
- BatchProcessFilesResult(file_id=result.file_id, error=str(e))
- )
- return BatchProcessFilesResponse(results=results, errors=errors)
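- # Note (editor's comment): each prepared file ends up either "completed" (after a
- # successful batch insert) or "failed"; failures are also mirrored into `errors`,
- # so clients should inspect both lists of BatchProcessFilesResponse.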