This commit is contained in:
yujj128
2025-10-15 16:52:44 +08:00
2 changed files with 4 additions and 2 deletions

View File

@@ -47,6 +47,8 @@ def create_vana():
     "api_key": config('CHAT_MODEL_API_KEY', default=''),
     "api_base": config('CHAT_MODEL_BASE_URL', default=''),
     "model": config('CHAT_MODEL_NAME', default=''),
+    'temperature':config('CHAT_MODEL_TEMPERATURE', default=0.7, cast=float),
+    'max_tokens':config('CHAT_MODEL_MAX_TOKEN', default=20000),
     },
 )

View File

@@ -26,7 +26,7 @@ class OpenAICompatibleLLM(VannaBase):
     def __init__(self, client=None, config_file=None):
         VannaBase.__init__(self, config=config_file)
         # default parameters - can be overrided using config
-        self.temperature = 0.5
+        self.temperature = 0.6
         self.max_tokens = 5000
         if "temperature" in config_file:
@@ -175,6 +175,7 @@ class OpenAICompatibleLLM(VannaBase):
         print(
             f"Using model {self.config['model']} for {num_tokens} tokens (approx)"
         )
+        print(self.config)
         response = self.client.chat.completions.create(
             model=self.config["model"],
             messages=prompt,
@@ -208,7 +209,6 @@ class OpenAICompatibleLLM(VannaBase):
             stop=None,
             temperature=self.temperature,
         )
         for choice in response.choices:
             if "text" in choice:
                 return choice.text