From b98a4bf65d1e5497599810941055492d734632e5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=9B=B7=E9=9B=A8?=
Date: Wed, 15 Oct 2025 14:42:48 +0800
Subject: [PATCH] =?UTF-8?q?feat:max=5Ftoken=E9=85=8D=E7=BD=AE?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 main_service.py              | 2 ++
 service/cus_vanna_srevice.py | 4 ++--
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/main_service.py b/main_service.py
index baa47a7..58f8ae0 100644
--- a/main_service.py
+++ b/main_service.py
@@ -48,6 +48,8 @@ def create_vana():
             "api_key": config('CHAT_MODEL_API_KEY', default=''),
             "api_base": config('CHAT_MODEL_BASE_URL', default=''),
             "model": config('CHAT_MODEL_NAME', default=''),
+            'temperature':config('CHAT_MODEL_TEMPERATURE', default=0.7, cast=float),
+            'max_tokens':config('CHAT_MODEL_MAX_TOKEN', default=20000),
         },
     )
 
diff --git a/service/cus_vanna_srevice.py b/service/cus_vanna_srevice.py
index db94799..44cec39 100644
--- a/service/cus_vanna_srevice.py
+++ b/service/cus_vanna_srevice.py
@@ -26,7 +26,7 @@ class OpenAICompatibleLLM(VannaBase):
     def __init__(self, client=None, config_file=None):
         VannaBase.__init__(self, config=config_file)
         # default parameters - can be overrided using config
-        self.temperature = 0.5
+        self.temperature = 0.6
         self.max_tokens = 5000
 
         if "temperature" in config_file:
@@ -175,6 +175,7 @@ class OpenAICompatibleLLM(VannaBase):
         print(
             f"Using model {self.config['model']} for {num_tokens} tokens (approx)"
         )
+        print(self.config)
         response = self.client.chat.completions.create(
             model=self.config["model"],
             messages=prompt,
@@ -197,7 +198,6 @@ class OpenAICompatibleLLM(VannaBase):
             stop=None,
             temperature=self.temperature,
         )
-
         for choice in response.choices:
             if "text" in choice:
                 return choice.text