Taoillium-LLM is our Generative AI module that allows developers to harness the power of advanced language models. With Taoillium-LLM, you can integrate AI functionalities such as natural language understanding, text generation, and sentiment analysis into your applications with ease.
Non-streaming Chat Completions (raw HTTP with `requests`)
import os

import requests

# Chat-completions endpoint (plain URL — angle brackets in docs markup are
# not part of the address and would produce a request to an invalid host).
url = "https://doc-ai.si.online/v1/chat/completions"
headers = {
    # Read the API key from the environment; never hard-code credentials.
    "Authorization": f"Bearer {os.getenv('SI_API_KEY')}",
    "Content-Type": "application/json",
}
payload = {
    "model": "deepseek-ai/DeepSeek-V2-Chat",
    "messages": [
        {"role": "user", "content": "Brainstorm ideas for a new business startup"}
    ],
}
# requests.post is lowercase — `requests.Post` does not exist and raises AttributeError.
response = requests.post(url, headers=headers, json=payload)
# Fail loudly on HTTP errors instead of silently printing an error body.
response.raise_for_status()
# The response body is `.text` (lowercase attribute), not `.Text`.
print(response.text)
Non-streaming Chat Completions (OpenAI SDK)
import os

from openai import OpenAI

# base_url must be a plain URL — the angle brackets from the docs markup
# are not part of it and would break every request.
client = OpenAI(api_key=os.getenv("SI_API_KEY"), base_url="https://doc-ai.si.online/v1")
response = client.chat.completions.create(
    model="deepseek-ai/DeepSeek-V2-Chat",
    messages=[
        {"role": "user", "content": "Brainstorm ideas for a new business startup"}
    ],
    stream=False,
)
# The assistant's text lives in `.content` (lowercase) —
# `.Content` raises AttributeError.
print(response.choices[0].message.content)
Streaming Chat Completions (raw HTTP/SSE with `httpx`)
import json
import os

import httpx

# Chat-completions endpoint (plain URL — angle brackets from the docs
# markup are not part of the address).
url = "https://doc-ai.si.online/v1/chat/completions"
headers = {
    # Read the API key from the environment; never hard-code credentials.
    "Authorization": f"Bearer {os.getenv('SI_API_KEY')}",
    "Content-Type": "application/json",
}
payload = {
    "model": "deepseek-ai/DeepSeek-V2-Chat",
    "messages": [
        {"role": "user", "content": "Brainstorm ideas for a new business startup"}
    ],
    "stream": True,
}
with httpx.Client() as client:
    with client.stream("POST", url, headers=headers, json=payload) as response:
        print("Streaming response:")
        # The server sends Server-Sent Events: each data line is
        # "data: <json>", terminated by "data: [DONE]".
        for line in response.iter_lines():
            data = line.strip()
            if not data:
                continue
            # Remove the SSE "data:" prefix. NOTE: str.lstrip("data: ")
            # is wrong for this — it strips any leading run of the
            # characters {d, a, t, :, space}, corrupting payloads that
            # start with those letters.
            if data.startswith("data:"):
                data = data[len("data:"):].strip()
            if data == "[DONE]":
                break
            delta = json.loads(data)["choices"][0]["delta"]
            # Some chunks (e.g. the initial role-only chunk or the final
            # one) carry no "content" key — skip them instead of raising.
            print(delta.get("content") or "", end="")
Streaming Chat Completions (OpenAI SDK)
import os

from openai import OpenAI

# base_url must be a plain URL — the angle brackets from the docs markup
# are not part of it and would break every request.
client = OpenAI(api_key=os.getenv("SI_API_KEY"), base_url="https://doc-ai.si.online/v1")
response = client.chat.completions.create(
    model="deepseek-ai/DeepSeek-V2-Chat",
    messages=[
        {"role": "user", "content": "Brainstorm ideas for a new business startup"}
    ],
    stream=True,
)
for chunk in response:
    # The incremental text is `.content` (lowercase) — `.Content` raises
    # AttributeError. It is also None on role-only/final chunks, so fall
    # back to the empty string before printing.
    print(chunk.choices[0].delta.content or "", end="")