From f18213d4ee6acac4863297cbc6a3aca51e557717 Mon Sep 17 00:00:00 2001 From: David Koski Date: Tue, 10 Dec 2024 10:26:24 -0800 Subject: [PATCH] workaround for #150 -- important as this model is our new default --- Libraries/MLXLLM/LLMModelFactory.swift | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/Libraries/MLXLLM/LLMModelFactory.swift b/Libraries/MLXLLM/LLMModelFactory.swift index de79e02..10a94cc 100644 --- a/Libraries/MLXLLM/LLMModelFactory.swift +++ b/Libraries/MLXLLM/LLMModelFactory.swift @@ -221,9 +221,20 @@ private struct LLMUserInputProcessor: UserInputProcessor { } func prepare(input: UserInput) throws -> LMInput { - let messages = input.prompt.asMessages() - let promptTokens = try tokenizer.applyChatTemplate(messages: messages) - return LMInput(tokens: MLXArray(promptTokens)) + do { + let messages = input.prompt.asMessages() + let promptTokens = try tokenizer.applyChatTemplate(messages: messages) + return LMInput(tokens: MLXArray(promptTokens)) + } catch { + // #150 -- it might be a TokenizerError.chatTemplate("No chat template was specified") + // but that is not public so just fall back to text + let prompt = input.prompt + .asMessages() + .compactMap { $0["content"] } + .joined(separator: ". ") + let promptTokens = tokenizer.encode(text: prompt) + return LMInput(tokens: MLXArray(promptTokens)) + } }