main.py

from fastapi import (
    FastAPI,
    Depends,
    HTTPException,
    status,
    UploadFile,
    File,
    Form,
)
from fastapi.middleware.cors import CORSMiddleware
import os, shutil, logging, re

from pathlib import Path
from typing import List, Union, Sequence

from chromadb.utils.batch_utils import create_batches

from langchain_community.document_loaders import (
    WebBaseLoader,
    TextLoader,
    PyPDFLoader,
    CSVLoader,
    BSHTMLLoader,
    Docx2txtLoader,
    UnstructuredEPubLoader,
    UnstructuredWordDocumentLoader,
    UnstructuredMarkdownLoader,
    UnstructuredXMLLoader,
    UnstructuredRSTLoader,
    UnstructuredExcelLoader,
    YoutubeLoader,
)
from langchain.text_splitter import RecursiveCharacterTextSplitter

import validators
import urllib.parse
import socket

from pydantic import BaseModel
from typing import Optional
import mimetypes
import uuid
import json

import sentence_transformers

from apps.web.models.documents import (
    Documents,
    DocumentForm,
    DocumentResponse,
)

from apps.rag.utils import (
    get_model_path,
    get_embedding_function,
    query_doc,
    query_doc_with_hybrid_search,
    query_collection,
    query_collection_with_hybrid_search,
    search_web,
)

from utils.misc import (
    calculate_sha256,
    calculate_sha256_string,
    sanitize_filename,
    extract_folders_after_data_docs,
)
from utils.utils import get_current_user, get_admin_user

from config import (
    SRC_LOG_LEVELS,
    UPLOAD_DIR,
    DOCS_DIR,
    RAG_TOP_K,
    RAG_RELEVANCE_THRESHOLD,
    RAG_EMBEDDING_ENGINE,
    RAG_EMBEDDING_MODEL,
    RAG_EMBEDDING_MODEL_AUTO_UPDATE,
    RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
    ENABLE_RAG_HYBRID_SEARCH,
    ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
    RAG_RERANKING_MODEL,
    PDF_EXTRACT_IMAGES,
    RAG_RERANKING_MODEL_AUTO_UPDATE,
    RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
    RAG_OPENAI_API_BASE_URL,
    RAG_OPENAI_API_KEY,
    DEVICE_TYPE,
    CHROMA_CLIENT,
    CHUNK_SIZE,
    CHUNK_OVERLAP,
    RAG_TEMPLATE,
    ENABLE_RAG_LOCAL_WEB_FETCH,
)

from constants import ERROR_MESSAGES

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])

app = FastAPI()

app.state.TOP_K = RAG_TOP_K
app.state.RELEVANCE_THRESHOLD = RAG_RELEVANCE_THRESHOLD

app.state.ENABLE_RAG_HYBRID_SEARCH = ENABLE_RAG_HYBRID_SEARCH
app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = (
    ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION
)

app.state.CHUNK_SIZE = CHUNK_SIZE
app.state.CHUNK_OVERLAP = CHUNK_OVERLAP

app.state.RAG_EMBEDDING_ENGINE = RAG_EMBEDDING_ENGINE
app.state.RAG_EMBEDDING_MODEL = RAG_EMBEDDING_MODEL
app.state.RAG_RERANKING_MODEL = RAG_RERANKING_MODEL
app.state.RAG_TEMPLATE = RAG_TEMPLATE

app.state.OPENAI_API_BASE_URL = RAG_OPENAI_API_BASE_URL
app.state.OPENAI_API_KEY = RAG_OPENAI_API_KEY

app.state.PDF_EXTRACT_IMAGES = PDF_EXTRACT_IMAGES
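
# Local sentence-transformers models are only loaded when RAG_EMBEDDING_ENGINE is
# "" (the built-in engine); with "ollama" or "openai" the embedding call is
# delegated to those APIs and no local embedder is kept in memory.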
def update_embedding_model(
    embedding_model: str,
    update_model: bool = False,
):
    if embedding_model and app.state.RAG_EMBEDDING_ENGINE == "":
        app.state.sentence_transformer_ef = sentence_transformers.SentenceTransformer(
            get_model_path(embedding_model, update_model),
            device=DEVICE_TYPE,
            trust_remote_code=RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
        )
    else:
        app.state.sentence_transformer_ef = None


def update_reranking_model(
    reranking_model: str,
    update_model: bool = False,
):
    if reranking_model:
        app.state.sentence_transformer_rf = sentence_transformers.CrossEncoder(
            get_model_path(reranking_model, update_model),
            device=DEVICE_TYPE,
            trust_remote_code=RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
        )
    else:
        app.state.sentence_transformer_rf = None


update_embedding_model(
    app.state.RAG_EMBEDDING_MODEL,
    RAG_EMBEDDING_MODEL_AUTO_UPDATE,
)
update_reranking_model(
    app.state.RAG_RERANKING_MODEL,
    RAG_RERANKING_MODEL_AUTO_UPDATE,
)

app.state.EMBEDDING_FUNCTION = get_embedding_function(
    app.state.RAG_EMBEDDING_ENGINE,
    app.state.RAG_EMBEDDING_MODEL,
    app.state.sentence_transformer_ef,
    app.state.OPENAI_API_KEY,
    app.state.OPENAI_API_BASE_URL,
)
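
# CORS is left wide open (allow_origins=["*"]) with credentials allowed;
# restrict the origins list if this router is ever exposed outside the main app.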
origins = ["*"]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


class CollectionNameForm(BaseModel):
    collection_name: Optional[str] = "test"


class UrlForm(CollectionNameForm):
    url: str


class SearchForm(CollectionNameForm):
    query: str


@app.get("/")
async def get_status():
    return {
        "status": True,
        "chunk_size": app.state.CHUNK_SIZE,
        "chunk_overlap": app.state.CHUNK_OVERLAP,
        "template": app.state.RAG_TEMPLATE,
        "embedding_engine": app.state.RAG_EMBEDDING_ENGINE,
        "embedding_model": app.state.RAG_EMBEDDING_MODEL,
        "reranking_model": app.state.RAG_RERANKING_MODEL,
    }


@app.get("/embedding")
async def get_embedding_config(user=Depends(get_admin_user)):
    return {
        "status": True,
        "embedding_engine": app.state.RAG_EMBEDDING_ENGINE,
        "embedding_model": app.state.RAG_EMBEDDING_MODEL,
        "openai_config": {
            "url": app.state.OPENAI_API_BASE_URL,
            "key": app.state.OPENAI_API_KEY,
        },
    }

@app.get("/reranking")
async def get_reranking_config(user=Depends(get_admin_user)):
    return {"status": True, "reranking_model": app.state.RAG_RERANKING_MODEL}

class OpenAIConfigForm(BaseModel):
    url: str
    key: str


class EmbeddingModelUpdateForm(BaseModel):
    openai_config: Optional[OpenAIConfigForm] = None
    embedding_engine: str
    embedding_model: str


@app.post("/embedding/update")
async def update_embedding_config(
    form_data: EmbeddingModelUpdateForm, user=Depends(get_admin_user)
):
    log.info(
        f"Updating embedding model: {app.state.RAG_EMBEDDING_MODEL} to {form_data.embedding_model}"
    )
    try:
        app.state.RAG_EMBEDDING_ENGINE = form_data.embedding_engine
        app.state.RAG_EMBEDDING_MODEL = form_data.embedding_model

        if app.state.RAG_EMBEDDING_ENGINE in ["ollama", "openai"]:
            if form_data.openai_config is not None:
                app.state.OPENAI_API_BASE_URL = form_data.openai_config.url
                app.state.OPENAI_API_KEY = form_data.openai_config.key

        update_embedding_model(app.state.RAG_EMBEDDING_MODEL, True)

        app.state.EMBEDDING_FUNCTION = get_embedding_function(
            app.state.RAG_EMBEDDING_ENGINE,
            app.state.RAG_EMBEDDING_MODEL,
            app.state.sentence_transformer_ef,
            app.state.OPENAI_API_KEY,
            app.state.OPENAI_API_BASE_URL,
        )

        return {
            "status": True,
            "embedding_engine": app.state.RAG_EMBEDDING_ENGINE,
            "embedding_model": app.state.RAG_EMBEDDING_MODEL,
            "openai_config": {
                "url": app.state.OPENAI_API_BASE_URL,
                "key": app.state.OPENAI_API_KEY,
            },
        }
    except Exception as e:
        log.exception(f"Problem updating embedding model: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )

class RerankingModelUpdateForm(BaseModel):
    reranking_model: str


@app.post("/reranking/update")
async def update_reranking_config(
    form_data: RerankingModelUpdateForm, user=Depends(get_admin_user)
):
    log.info(
        f"Updating reranking model: {app.state.RAG_RERANKING_MODEL} to {form_data.reranking_model}"
    )
    try:
        app.state.RAG_RERANKING_MODEL = form_data.reranking_model

        update_reranking_model(app.state.RAG_RERANKING_MODEL, True)

        return {
            "status": True,
            "reranking_model": app.state.RAG_RERANKING_MODEL,
        }
    except Exception as e:
        log.exception(f"Problem updating reranking model: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )


@app.get("/config")
async def get_rag_config(user=Depends(get_admin_user)):
    return {
        "status": True,
        "pdf_extract_images": app.state.PDF_EXTRACT_IMAGES,
        "chunk": {
            "chunk_size": app.state.CHUNK_SIZE,
            "chunk_overlap": app.state.CHUNK_OVERLAP,
        },
        "web_loader_ssl_verification": app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
    }

class ChunkParamUpdateForm(BaseModel):
    chunk_size: int
    chunk_overlap: int


class ConfigUpdateForm(BaseModel):
    pdf_extract_images: Optional[bool] = None
    chunk: Optional[ChunkParamUpdateForm] = None
    web_loader_ssl_verification: Optional[bool] = None


@app.post("/config/update")
async def update_rag_config(form_data: ConfigUpdateForm, user=Depends(get_admin_user)):
    app.state.PDF_EXTRACT_IMAGES = (
        form_data.pdf_extract_images
        if form_data.pdf_extract_images is not None
        else app.state.PDF_EXTRACT_IMAGES
    )

    app.state.CHUNK_SIZE = (
        form_data.chunk.chunk_size
        if form_data.chunk is not None
        else app.state.CHUNK_SIZE
    )
    app.state.CHUNK_OVERLAP = (
        form_data.chunk.chunk_overlap
        if form_data.chunk is not None
        else app.state.CHUNK_OVERLAP
    )

    app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = (
        form_data.web_loader_ssl_verification
        if form_data.web_loader_ssl_verification is not None
        else app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION
    )

    return {
        "status": True,
        "pdf_extract_images": app.state.PDF_EXTRACT_IMAGES,
        "chunk": {
            "chunk_size": app.state.CHUNK_SIZE,
            "chunk_overlap": app.state.CHUNK_OVERLAP,
        },
        "web_loader_ssl_verification": app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
    }

@app.get("/template")
async def get_rag_template(user=Depends(get_current_user)):
    return {
        "status": True,
        "template": app.state.RAG_TEMPLATE,
    }


@app.get("/query/settings")
async def get_query_settings(user=Depends(get_admin_user)):
    return {
        "status": True,
        "template": app.state.RAG_TEMPLATE,
        "k": app.state.TOP_K,
        "r": app.state.RELEVANCE_THRESHOLD,
        "hybrid": app.state.ENABLE_RAG_HYBRID_SEARCH,
    }


class QuerySettingsForm(BaseModel):
    k: Optional[int] = None
    r: Optional[float] = None
    template: Optional[str] = None
    hybrid: Optional[bool] = None
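
# Omitted fields fall back to the hard-coded defaults below (the configured
# RAG_TEMPLATE, k=4, r=0.0, hybrid=False) rather than to the previously set values.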
@app.post("/query/settings/update")
async def update_query_settings(
    form_data: QuerySettingsForm, user=Depends(get_admin_user)
):
    app.state.RAG_TEMPLATE = form_data.template if form_data.template else RAG_TEMPLATE
    app.state.TOP_K = form_data.k if form_data.k else 4
    app.state.RELEVANCE_THRESHOLD = form_data.r if form_data.r else 0.0
    app.state.ENABLE_RAG_HYBRID_SEARCH = form_data.hybrid if form_data.hybrid else False
    return {
        "status": True,
        "template": app.state.RAG_TEMPLATE,
        "k": app.state.TOP_K,
        "r": app.state.RELEVANCE_THRESHOLD,
        "hybrid": app.state.ENABLE_RAG_HYBRID_SEARCH,
    }


class QueryDocForm(BaseModel):
    collection_name: str
    query: str
    k: Optional[int] = None
    r: Optional[float] = None
    hybrid: Optional[bool] = None
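
# Query a single collection: with hybrid search enabled, results are also passed
# through the CrossEncoder reranker and filtered by the relevance threshold r;
# otherwise a plain embedding similarity search is used.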
@app.post("/query/doc")
def query_doc_handler(
    form_data: QueryDocForm,
    user=Depends(get_current_user),
):
    try:
        if app.state.ENABLE_RAG_HYBRID_SEARCH:
            return query_doc_with_hybrid_search(
                collection_name=form_data.collection_name,
                query=form_data.query,
                embedding_function=app.state.EMBEDDING_FUNCTION,
                k=form_data.k if form_data.k else app.state.TOP_K,
                reranking_function=app.state.sentence_transformer_rf,
                r=form_data.r if form_data.r else app.state.RELEVANCE_THRESHOLD,
            )
        else:
            return query_doc(
                collection_name=form_data.collection_name,
                query=form_data.query,
                embedding_function=app.state.EMBEDDING_FUNCTION,
                k=form_data.k if form_data.k else app.state.TOP_K,
            )
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )


class QueryCollectionsForm(BaseModel):
    collection_names: List[str]
    query: str
    k: Optional[int] = None
    r: Optional[float] = None
    hybrid: Optional[bool] = None


@app.post("/query/collection")
def query_collection_handler(
    form_data: QueryCollectionsForm,
    user=Depends(get_current_user),
):
    try:
        if app.state.ENABLE_RAG_HYBRID_SEARCH:
            return query_collection_with_hybrid_search(
                collection_names=form_data.collection_names,
                query=form_data.query,
                embedding_function=app.state.EMBEDDING_FUNCTION,
                k=form_data.k if form_data.k else app.state.TOP_K,
                reranking_function=app.state.sentence_transformer_rf,
                r=form_data.r if form_data.r else app.state.RELEVANCE_THRESHOLD,
            )
        else:
            return query_collection(
                collection_names=form_data.collection_names,
                query=form_data.query,
                embedding_function=app.state.EMBEDDING_FUNCTION,
                k=form_data.k if form_data.k else app.state.TOP_K,
            )
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
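
# Ingestion endpoints: each loads remote content, derives a collection name from a
# SHA-256 hash truncated to 63 characters (Chroma's collection-name limit) when an
# empty collection_name is sent, and overwrites any existing collection of that name.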
@app.post("/youtube")
def store_youtube_video(form_data: UrlForm, user=Depends(get_current_user)):
    try:
        loader = YoutubeLoader.from_youtube_url(form_data.url, add_video_info=False)
        data = loader.load()

        collection_name = form_data.collection_name
        if collection_name == "":
            collection_name = calculate_sha256_string(form_data.url)[:63]

        store_data_in_vector_db(data, collection_name, overwrite=True)
        return {
            "status": True,
            "collection_name": collection_name,
            "filename": form_data.url,
        }
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )


@app.post("/web")
def store_web(form_data: UrlForm, user=Depends(get_current_user)):
    # "https://www.gutenberg.org/files/1727/1727-h/1727-h.htm"
    try:
        loader = get_web_loader(
            form_data.url, verify_ssl=app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION
        )
        data = loader.load()

        collection_name = form_data.collection_name
        if collection_name == "":
            collection_name = calculate_sha256_string(form_data.url)[:63]

        store_data_in_vector_db(data, collection_name, overwrite=True)
        return {
            "status": True,
            "collection_name": collection_name,
            "filename": form_data.url,
        }
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )

def get_web_loader(url: Union[str, Sequence[str]], verify_ssl: bool = True):
    # Check if the URL is valid
    if not validate_url(url):
        raise ValueError(ERROR_MESSAGES.INVALID_URL)
    return WebBaseLoader(url, verify_ssl=verify_ssl)
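
# SSRF guard: when local web fetch is disabled, URLs whose hostnames resolve to
# private IPv4/IPv6 addresses are rejected before WebBaseLoader ever fetches them.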
def validate_url(url: Union[str, Sequence[str]]):
    if isinstance(url, str):
        if isinstance(validators.url(url), validators.ValidationError):
            raise ValueError(ERROR_MESSAGES.INVALID_URL)
        if not ENABLE_RAG_LOCAL_WEB_FETCH:
            # Local web fetch is disabled, filter out any URLs that resolve to private IP addresses
            parsed_url = urllib.parse.urlparse(url)
            # Get IPv4 and IPv6 addresses
            ipv4_addresses, ipv6_addresses = resolve_hostname(parsed_url.hostname)
            # Check if any of the resolved addresses are private
            # This is technically still vulnerable to DNS rebinding attacks, as we don't control WebBaseLoader
            for ip in ipv4_addresses:
                if validators.ipv4(ip, private=True):
                    raise ValueError(ERROR_MESSAGES.INVALID_URL)
            for ip in ipv6_addresses:
                if validators.ipv6(ip, private=True):
                    raise ValueError(ERROR_MESSAGES.INVALID_URL)
        return True
    elif isinstance(url, Sequence):
        return all(validate_url(u) for u in url)
    else:
        return False

def resolve_hostname(hostname):
    # Get address information
    addr_info = socket.getaddrinfo(hostname, None)

    # Extract IP addresses from address information
    ipv4_addresses = [info[4][0] for info in addr_info if info[0] == socket.AF_INET]
    ipv6_addresses = [info[4][0] for info in addr_info if info[0] == socket.AF_INET6]

    return ipv4_addresses, ipv6_addresses


@app.post("/websearch")
def store_websearch(form_data: SearchForm, user=Depends(get_current_user)):
    try:
        try:
            web_results = search_web(form_data.query)
        except Exception as e:
            log.exception(e)
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ERROR_MESSAGES.WEB_SEARCH_ERROR,
            )

        urls = [result.link for result in web_results]
        loader = get_web_loader(urls)
        data = loader.load()

        collection_name = form_data.collection_name
        if collection_name == "":
            collection_name = calculate_sha256_string(form_data.query)[:63]

        store_data_in_vector_db(data, collection_name, overwrite=True)
        return {
            "status": True,
            "collection_name": collection_name,
            "filenames": urls,
        }
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
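
# Chunking and storage helpers: documents are split with RecursiveCharacterTextSplitter
# using the configured chunk size/overlap before being embedded and written to Chroma.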
def store_data_in_vector_db(data, collection_name, overwrite: bool = False) -> bool:
    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=app.state.CHUNK_SIZE,
        chunk_overlap=app.state.CHUNK_OVERLAP,
        add_start_index=True,
    )

    docs = text_splitter.split_documents(data)

    if len(docs) > 0:
        log.info(f"store_data_in_vector_db {docs}")
        return store_docs_in_vector_db(docs, collection_name, overwrite)
    else:
        raise ValueError(ERROR_MESSAGES.EMPTY_CONTENT)

def store_text_in_vector_db(
    text, metadata, collection_name, overwrite: bool = False
) -> bool:
    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=app.state.CHUNK_SIZE,
        chunk_overlap=app.state.CHUNK_OVERLAP,
        add_start_index=True,
    )
    docs = text_splitter.create_documents([text], metadatas=[metadata])
    return store_docs_in_vector_db(docs, collection_name, overwrite)


def store_docs_in_vector_db(docs, collection_name, overwrite: bool = False) -> bool:
    log.info(f"store_docs_in_vector_db {docs} {collection_name}")

    texts = [doc.page_content for doc in docs]
    metadatas = [doc.metadata for doc in docs]

    try:
        if overwrite:
            for collection in CHROMA_CLIENT.list_collections():
                if collection_name == collection.name:
                    log.info(f"deleting existing collection {collection_name}")
                    CHROMA_CLIENT.delete_collection(name=collection_name)

        collection = CHROMA_CLIENT.create_collection(name=collection_name)

        embedding_func = get_embedding_function(
            app.state.RAG_EMBEDDING_ENGINE,
            app.state.RAG_EMBEDDING_MODEL,
            app.state.sentence_transformer_ef,
            app.state.OPENAI_API_KEY,
            app.state.OPENAI_API_BASE_URL,
        )

        embedding_texts = list(map(lambda x: x.replace("\n", " "), texts))
        embeddings = embedding_func(embedding_texts)

        for batch in create_batches(
            api=CHROMA_CLIENT,
            ids=[str(uuid.uuid1()) for _ in texts],
            metadatas=metadatas,
            embeddings=embeddings,
            documents=texts,
        ):
            collection.add(*batch)

        return True
    except Exception as e:
        log.exception(e)
        if e.__class__.__name__ == "UniqueConstraintError":
            return True

        return False
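
# Pick a LangChain loader based on file extension and/or MIME type; anything
# unrecognised falls back to TextLoader with known_type=False.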
def get_loader(filename: str, file_content_type: str, file_path: str):
    file_ext = filename.split(".")[-1].lower()
    known_type = True

    known_source_ext = [
        "go",
        "py",
        "java",
        "sh",
        "bat",
        "ps1",
        "cmd",
        "js",
        "ts",
        "css",
        "cpp",
        "hpp",
        "h",
        "c",
        "cs",
        "sql",
        "log",
        "ini",
        "pl",
        "pm",
        "r",
        "dart",
        "dockerfile",
        "env",
        "php",
        "hs",
        "hsc",
        "lua",
        "nginxconf",
        "conf",
        "m",
        "mm",
        "plsql",
        "perl",
        "rb",
        "rs",
        "db2",
        "scala",
        "bash",
        "swift",
        "vue",
        "svelte",
    ]

    if file_ext == "pdf":
        loader = PyPDFLoader(file_path, extract_images=app.state.PDF_EXTRACT_IMAGES)
    elif file_ext == "csv":
        loader = CSVLoader(file_path)
    elif file_ext == "rst":
        loader = UnstructuredRSTLoader(file_path, mode="elements")
    elif file_ext == "xml":
        loader = UnstructuredXMLLoader(file_path)
    elif file_ext in ["htm", "html"]:
        loader = BSHTMLLoader(file_path, open_encoding="unicode_escape")
    elif file_ext == "md":
        loader = UnstructuredMarkdownLoader(file_path)
    elif file_content_type == "application/epub+zip":
        loader = UnstructuredEPubLoader(file_path)
    elif (
        file_content_type
        == "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
        or file_ext in ["doc", "docx"]
    ):
        loader = Docx2txtLoader(file_path)
    elif file_content_type in [
        "application/vnd.ms-excel",
        "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
    ] or file_ext in ["xls", "xlsx"]:
        loader = UnstructuredExcelLoader(file_path)
    elif file_ext in known_source_ext or (
        file_content_type and file_content_type.find("text/") >= 0
    ):
        loader = TextLoader(file_path, autodetect_encoding=True)
    else:
        loader = TextLoader(file_path, autodetect_encoding=True)
        known_type = False

    return loader, known_type
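
# File upload flow: the upload is written to UPLOAD_DIR, the collection name defaults
# to the SHA-256 of the file contents (truncated to 63 characters), and the parsed
# documents are chunked and embedded into Chroma.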
@app.post("/doc")
def store_doc(
    collection_name: Optional[str] = Form(None),
    file: UploadFile = File(...),
    user=Depends(get_current_user),
):
    # "https://www.gutenberg.org/files/1727/1727-h/1727-h.htm"

    log.info(f"file.content_type: {file.content_type}")
    try:
        unsanitized_filename = file.filename
        filename = os.path.basename(unsanitized_filename)

        file_path = f"{UPLOAD_DIR}/{filename}"

        contents = file.file.read()
        with open(file_path, "wb") as f:
            f.write(contents)

        f = open(file_path, "rb")
        if collection_name is None:
            collection_name = calculate_sha256(f)[:63]
        f.close()

        loader, known_type = get_loader(filename, file.content_type, file_path)
        data = loader.load()

        try:
            result = store_data_in_vector_db(data, collection_name)

            if result:
                return {
                    "status": True,
                    "collection_name": collection_name,
                    "filename": filename,
                    "known_type": known_type,
                }
        except Exception as e:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=e,
            )
    except Exception as e:
        log.exception(e)
        if "No pandoc was found" in str(e):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ERROR_MESSAGES.PANDOC_NOT_INSTALLED,
            )
        else:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ERROR_MESSAGES.DEFAULT(e),
            )

class TextRAGForm(BaseModel):
    name: str
    content: str
    collection_name: Optional[str] = None


@app.post("/text")
def store_text(
    form_data: TextRAGForm,
    user=Depends(get_current_user),
):
    collection_name = form_data.collection_name
    if collection_name is None:
        collection_name = calculate_sha256_string(form_data.content)

    result = store_text_in_vector_db(
        form_data.content,
        metadata={"name": form_data.name, "created_by": user.id},
        collection_name=collection_name,
    )

    if result:
        return {"status": True, "collection_name": collection_name}
    else:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ERROR_MESSAGES.DEFAULT(),
        )
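
# Walk DOCS_DIR and ingest every non-hidden file, registering each one in the
# Documents table with tags derived from its folder path.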
@app.get("/scan")
def scan_docs_dir(user=Depends(get_admin_user)):
    for path in Path(DOCS_DIR).rglob("./**/*"):
        try:
            if path.is_file() and not path.name.startswith("."):
                tags = extract_folders_after_data_docs(path)
                filename = path.name
                file_content_type = mimetypes.guess_type(path)

                f = open(path, "rb")
                collection_name = calculate_sha256(f)[:63]
                f.close()

                loader, known_type = get_loader(
                    filename, file_content_type[0], str(path)
                )
                data = loader.load()

                try:
                    result = store_data_in_vector_db(data, collection_name)

                    if result:
                        sanitized_filename = sanitize_filename(filename)
                        doc = Documents.get_doc_by_name(sanitized_filename)

                        if doc is None:
                            doc = Documents.insert_new_doc(
                                user.id,
                                DocumentForm(
                                    **{
                                        "name": sanitized_filename,
                                        "title": filename,
                                        "collection_name": collection_name,
                                        "filename": filename,
                                        "content": (
                                            json.dumps(
                                                {
                                                    "tags": list(
                                                        map(
                                                            lambda name: {"name": name},
                                                            tags,
                                                        )
                                                    )
                                                }
                                            )
                                            if len(tags)
                                            else "{}"
                                        ),
                                    }
                                ),
                            )
                except Exception as e:
                    log.exception(e)
                    pass

        except Exception as e:
            log.exception(e)

    return True

@app.get("/reset/db")
def reset_vector_db(user=Depends(get_admin_user)):
    CHROMA_CLIENT.reset()


@app.get("/reset")
def reset(user=Depends(get_admin_user)) -> bool:
    folder = f"{UPLOAD_DIR}"
    for filename in os.listdir(folder):
        file_path = os.path.join(folder, filename)
        try:
            if os.path.isfile(file_path) or os.path.islink(file_path):
                os.unlink(file_path)
            elif os.path.isdir(file_path):
                shutil.rmtree(file_path)
        except Exception as e:
            log.error("Failed to delete %s. Reason: %s" % (file_path, e))

    try:
        CHROMA_CLIENT.reset()
    except Exception as e:
        log.exception(e)

    return True