CMakeLists.txt

set(TARGET ext_server)
option(LLAMA_SERVER_VERBOSE "Build verbose logging option for Server" ON)

# Build a shared library on Windows, a static library elsewhere.
if (WIN32)
    add_library(${TARGET} SHARED ext_server.cpp ../llama.cpp/llama.cpp)
else()
    add_library(${TARGET} STATIC ext_server.cpp ../llama.cpp/llama.cpp)
endif()

target_compile_features(${TARGET} PRIVATE cxx_std_11)
target_compile_definitions(${TARGET} PUBLIC LLAMA_SERVER_LIBRARY=1)
target_link_libraries(${TARGET} PRIVATE ggml llava common)
set_target_properties(${TARGET} PROPERTIES POSITION_INDEPENDENT_CODE ON)
target_compile_definitions(${TARGET} PRIVATE SERVER_VERBOSE=$<BOOL:${LLAMA_SERVER_VERBOSE}>)
install(TARGETS ext_server LIBRARY)

# Pull in CUDA headers (and NVML on Windows) when the CUDA toolkit was found.
if (CUDAToolkit_FOUND)
    target_include_directories(${TARGET} PRIVATE ${CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES})
    if (WIN32)
        target_link_libraries(${TARGET} PRIVATE nvml)
    endif()
endif()
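
For context, a parent CMakeLists.txt would typically pull this file in with add_subdirectory and can override the LLAMA_SERVER_VERBOSE option by setting the cache variable first. The snippet below is a minimal sketch under assumptions: the directory names (ext_server, ../llama.cpp) are illustrative, and the ggml, llava, and common targets referenced by target_link_libraries above must already be defined by the llama.cpp build before this directory is added.

# Hypothetical parent CMakeLists.txt (paths assumed for illustration)
set(LLAMA_SERVER_VERBOSE OFF CACHE BOOL "Build verbose logging option for Server" FORCE)
add_subdirectory(../llama.cpp llama.cpp-build)   # provides ggml, llava, common
add_subdirectory(ext_server)                     # the CMakeLists.txt shown above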