# Python: call SaveGate through the official OpenAI SDK by pointing it at the SaveGate base URL.
from openai import OpenAI

client = OpenAI(
    api_key="your-savegate-api-key",
    base_url="https://api.savegate.ai/v1"
)

response = client.chat.completions.create(
    model="gpt-5.1",
    messages=[
        {"role": "user", "content": "Hello! How are you?"}
    ]
)

print(response.choices[0].message.content)
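The hard-coded credentials above can also come from the environment; the OpenAI Python SDK reads its key and base URL from environment variables by default. A minimal sketch, assuming your SaveGate key is exported as OPENAI_API_KEY and the gateway URL as OPENAI_BASE_URL:

# A minimal sketch (assumption): export OPENAI_API_KEY="your-savegate-api-key"
# and OPENAI_BASE_URL="https://api.savegate.ai/v1" before running this script.
from openai import OpenAI

client = OpenAI()  # picks up OPENAI_API_KEY and OPENAI_BASE_URL automatically

response = client.chat.completions.create(
    model="gpt-5.1",
    messages=[{"role": "user", "content": "Hello! How are you?"}]
)
print(response.choices[0].message.content)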
# Python: the same endpoint also works through LiteLLM.
import litellm

# Set the API base and key
litellm.api_base = "https://api.savegate.ai/v1"
litellm.api_key = "your-savegate-api-key"

response = litellm.completion(
    model="gpt-5.1",
    messages=[
        {"role": "user", "content": "Hello! How are you?"}
    ]
)

print(response.choices[0].message.content)
// Node.js: use the official OpenAI SDK for JavaScript with the SaveGate base URL.
import OpenAI from 'openai';

const client = new OpenAI({
  apiKey: 'your-savegate-api-key',
  baseURL: 'https://api.savegate.ai/v1'
});

async function main() {
  const response = await client.chat.completions.create({
    model: 'gpt-5.1',
    messages: [
      { role: 'user', content: 'Hello! How are you?' }
    ]
  });
  console.log(response.choices[0].message.content);
}

main();
# Streaming: reuse the client from the Python example above and print tokens as they arrive.
response = client.chat.completions.create(
    model="gpt-5.1",
    messages=[{"role": "user", "content": "Tell me a story"}],
    stream=True
)

for chunk in response:
    if chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="")
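For anything beyond a quick test, wrap requests in error handling. A minimal sketch, assuming SaveGate returns standard HTTP error codes that the OpenAI SDK maps to its usual exception classes (RateLimitError, APIStatusError, APIConnectionError):

# A minimal error-handling sketch; the exception classes come from the OpenAI SDK,
# and the assumption is that SaveGate surfaces standard HTTP status codes.
import openai
from openai import OpenAI

client = OpenAI(
    api_key="your-savegate-api-key",
    base_url="https://api.savegate.ai/v1"
)

try:
    response = client.chat.completions.create(
        model="gpt-5.1",
        messages=[{"role": "user", "content": "Hello! How are you?"}]
    )
    print(response.choices[0].message.content)
except openai.RateLimitError:
    # 429 from the gateway: back off and retry later
    print("Rate limited, retry later")
except openai.APIStatusError as e:
    # Any other non-2xx response
    print(f"API error {e.status_code}: {e}")
except openai.APIConnectionError:
    # Network problem reaching the gateway
    print("Could not reach the API")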