* Copy minja from 58f0ca6dd7 * Add --jinja and --chat-template-file flags * Add missing <optional> include * Avoid print in get_hf_chat_template.py * No designated initializers yet * Try and work around msvc++ non-macro max resolution quirk * Update test_chat_completion.py * Wire LLM_KV_TOKENIZER_CHAT_TEMPLATE_N in llama_model_chat_template * Refactor test-chat-template * Test templates w/ minja * Fix deprecation * Add --jinja to llama-run * Update common_chat_format_example to use minja template wrapper * Test chat_template in e2e test * Update utils.py * Update test_chat_completion.py * Update run.cpp * Update arg.cpp * Refactor common_chat_* functions to accept minja template + use_jinja option * Attempt to fix linkage of LLAMA_CHATML_TEMPLATE * Revert LLAMA_CHATML_TEMPLATE refactor * Normalize newlines in test-chat-templates for windows tests * Forward decl minja::chat_template to avoid eager json dep * Flush stdout in chat template before potential crash * Fix copy elision warning * Rm unused optional include * Add missing optional include to server.cpp * Disable jinja test that has a cryptic windows failure * minja: fix vigogne (https://github.com/google/minja/pull/22) * Apply suggestions from code review Co-authored-by: Xuan Son Nguyen <thichthat@gmail.com> Co-authored-by: Georgi Gerganov <ggerganov@gmail.com> * Finish suggested renamings * Move chat_templates inside server_context + remove mutex * Update --chat-template-file w/ recent change to --chat-template * Refactor chat template validation * Guard against missing eos/bos tokens (null token otherwise throws in llama_vocab::impl::token_get_attr) * Warn against missing eos / bos tokens when jinja template references them * rename: common_chat_template[s] * reinstate assert on chat_templates.template_default * Update minja to b8437df626 * Update minja to https://github.com/google/minja/pull/25 * Update minja from https://github.com/google/minja/pull/27 * rm unused optional header --------- Co-authored-by: 
Xuan Son Nguyen <thichthat@gmail.com> Co-authored-by: Georgi Gerganov <ggerganov@gmail.com>
		
			
				
	
	
		
			94 lines
		
	
	
	
		
			2.8 KiB
		
	
	
	
		
			CMake
		
	
	
	
	
	
			
		
		
	
	
			94 lines
		
	
	
	
		
			2.8 KiB
		
	
	
	
		
			CMake
		
	
	
	
	
	
# common

find_package(Threads REQUIRED)

llama_add_compile_flags()

# Build info header
#

# Locate the repository's .git directory so build-info.cpp can be regenerated
# whenever the index changes (i.e. on commits / checkouts).
if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/../.git")
    set(GIT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../.git")

    # Is git submodule (or worktree): .git is a file containing "gitdir: <path>"
    if(NOT IS_DIRECTORY "${GIT_DIR}")
        file(READ "${GIT_DIR}" REAL_GIT_DIR_LINK)
        string(REGEX REPLACE "gitdir: (.*)\n$" "\\1" REAL_GIT_DIR "${REAL_GIT_DIR_LINK}")
        # The gitdir path may be absolute or relative to the parent directory.
        # IS_ABSOLUTE also recognizes Windows drive-letter paths ("C:/..."),
        # which a leading-slash position check would misclassify as relative.
        if(IS_ABSOLUTE "${REAL_GIT_DIR}")
            set(GIT_DIR "${REAL_GIT_DIR}")
        else()
            set(GIT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../${REAL_GIT_DIR}")
        endif()
    endif()

    if(EXISTS "${GIT_DIR}/index")
        set(GIT_INDEX "${GIT_DIR}/index")
    else()
        message(WARNING "Git index not found in git repository.")
        set(GIT_INDEX "")
    endif()
else()
    message(WARNING "Git repository not found; to enable automatic generation of build info, make sure Git is installed and the project is a Git repository.")
    set(GIT_INDEX "")
endif()
 | |
| 
 | |
# Add a custom command to rebuild build-info.cpp when .git/index changes
# NOTE(review): the OUTPUT is written into the source tree, which is normally
# an anti-pattern; it is kept here because build-info.cpp is referenced by the
# build_info library below relative to the source directory.
add_custom_command(
    OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/build-info.cpp"
    COMMENT "Generating build details from Git"
    # Forward compiler/platform identification into the generator script so
    # the stamped build info reflects the toolchain actually in use.
    COMMAND ${CMAKE_COMMAND} -DMSVC=${MSVC} -DCMAKE_C_COMPILER_VERSION=${CMAKE_C_COMPILER_VERSION}
            -DCMAKE_C_COMPILER_ID=${CMAKE_C_COMPILER_ID} -DCMAKE_VS_PLATFORM_NAME=${CMAKE_VS_PLATFORM_NAME}
            -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} -P "${CMAKE_CURRENT_SOURCE_DIR}/cmake/build-info-gen-cpp.cmake"
    WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/.."
    # GIT_INDEX is deliberately unquoted: it may be empty (no git repository
    # found), in which case it must expand to no DEPENDS entry at all.
    DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/build-info.cpp.in" ${GIT_INDEX}
    VERBATIM
)
 | |
# Object library holding the generated build metadata; linked into `common`
# via LLAMA_COMMON_EXTRA_LIBS further down.
add_library(build_info OBJECT build-info.cpp)
if (BUILD_SHARED_LIBS)
    # The object files may end up inside shared libraries, so build them PIC.
    set_target_properties(build_info PROPERTIES POSITION_INDEPENDENT_CODE ON)
endif()
 | |
| 
 | |
set(TARGET common)

# Static library of shared helper code: argument parsing, logging, console
# handling, sampling, chat templating (minja), n-gram caching, speculative
# decoding support.
# NOTE(review): header files are listed alongside the sources, presumably so
# that IDE project generators pick them up — they do not affect compilation.
add_library(${TARGET} STATIC
    arg.cpp
    arg.h
    base64.hpp
    chat-template.hpp
    common.cpp
    common.h
    console.cpp
    console.h
    json-schema-to-grammar.cpp
    json.hpp
    log.cpp
    log.h
    minja.hpp
    ngram-cache.cpp
    ngram-cache.h
    sampling.cpp
    sampling.h
    speculative.cpp
    speculative.h
    )

if (BUILD_SHARED_LIBS)
    # The static archive may be absorbed into shared libraries, so build PIC.
    set_target_properties(${TARGET} PROPERTIES POSITION_INDEPENDENT_CODE ON)
endif()
 | |
| 
 | |
# Extra libraries folded into the final target_link_libraries call below.
set(LLAMA_COMMON_EXTRA_LIBS build_info)

# Use curl to download model url
if (LLAMA_CURL)
    find_package(CURL REQUIRED)
    # PUBLIC: dependents compile curl-gated code paths too.
    target_compile_definitions(${TARGET} PUBLIC LLAMA_USE_CURL)
    # Link the imported target provided by FindCURL (CMake >= 3.12): it carries
    # the include directories and transitive link dependencies, replacing the
    # legacy directory-scoped include_directories(${CURL_INCLUDE_DIRS}) and the
    # redundant second find_library() lookup that followed find_package().
    list(APPEND LLAMA_COMMON_EXTRA_LIBS CURL::libcurl)
endif ()
 | |
| 
 | |
# Public usage requirements for `common`: consumers inherit this directory on
# their include path, the C++17 feature requirement, and the llama / Threads
# link dependencies. build_info (and curl, if enabled) stay private.
target_include_directories(${TARGET} PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
target_compile_features(${TARGET} PUBLIC cxx_std_17)
target_link_libraries(${TARGET}
    PRIVATE
        ${LLAMA_COMMON_EXTRA_LIBS}
    PUBLIC
        llama
        Threads::Threads
)
 |