Inference

Use Maniac as an inference provider

The Inference API

Maniac provides an OpenAI-compatible chat completions API that standardizes inference across a wide range of model providers. A broad selection of models is supported out of the box, including frontier foundation models and open-source SLMs. You can view the full list of supported models in the dashboard.

Run Inference

import { Maniac } from "maniac-js"

const maniac = new Maniac()

const response = await maniac.chat.completions.create({
  model: "openai/gpt-5",
  messages: [{role: "user", content: "Hello!"}],
})
from maniac import Maniac

maniac = Maniac()

response = maniac.chat.completions.create(
  model="openai/gpt-5",
  messages=[{"role": "user", "content": "Hello!"}],
)
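
Because the API follows the OpenAI chat completions shape, the generated text is carried on the first choice's message. A minimal sketch for reading it, assuming the Python SDK returns an OpenAI-style response object:

# Print the assistant's reply (assumes an OpenAI-style response object)
print(response.choices[0].message.content)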

Additional Parameters

Model Parameters

import { Maniac } from "maniac-js"

const maniac = new Maniac()

const response = await maniac.chat.completions.create({
  model: "openai/gpt-5",
  messages: [{role: "user", content: "Hello!"}],
  temperature: 0.2,
  max_tokens: 1000,
})
from maniac import Maniac

maniac = Maniac()

response = maniac.chat.completions.create(
  model="openai/gpt-5",
  messages=[{"role": "user", "content": "Hello!"}],
  temperature=0.2,
  max_tokens=1000,
)
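
temperature controls sampling randomness (lower values are more deterministic) and max_tokens caps the length of the completion. A quick sketch for checking how much of that budget a request actually used, assuming the response carries an OpenAI-style usage object:

# Inspect token usage (assumes an OpenAI-style usage object on the response)
usage = response.usage
print(f"prompt: {usage.prompt_tokens}, completion: {usage.completion_tokens}, total: {usage.total_tokens}")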

Structured Responses

import { Maniac } from "maniac-js"

const maniac = new Maniac()

const response = await maniac.chat.completions.create({
  model: "openai/gpt-5",
  messages: [{role: "user", content: "Hello!"}],
  response_format: {
    type: "json_schema",
    json_schema: {
      name: "weather",
      strict: true,
      schema: {
        type: "object",
        properties: {
          location: {
            type: "string",
            description: "City or location name",
          },
        },
        required: ["location"],
        additionalProperties: false,
      },
    },
  },
})
from maniac import Maniac

maniac = Maniac()

response = maniac.chat.completions.create(
  model="openai/gpt-5",
  messages=[{"role": "user", "content": "Hello!"}],
  response_format={
    "type": "json_schema",
    "json_schema": {
      "name": "weather",
      "strict": True,
      "schema": {
        "type": "object",
        "properties": {
          "location": {
            "type": "string",
            "description": "City or location name",
          },
        },
        "required": ["location"],
        "additionalProperties": False,
      },
    },
  },
)
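
With strict JSON schema output, the model returns a JSON string that conforms to the schema, so it can be parsed directly. A minimal sketch, assuming the content is returned on the first choice's message as in the OpenAI response shape:

import json

# Parse the schema-constrained JSON string (assumes OpenAI-style response fields)
weather = json.loads(response.choices[0].message.content)
print(weather["location"])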

Tools

import { Maniac } from "maniac-js"

const maniac = new Maniac()

const response = await maniac.chat.completions.create({
  model: "openai/gpt-5",
  messages: [{role: "user", content: "Hello!"}],
  tools: [
    {
      type: "function",
      function: {
        name: "get_weather",
        description: "Get the current weather report for a location",
        parameters: {
          type: "object",
          properties: {
            location: {
              type: "string",
              description: "The name of the location to get the weather report for",
            },
          },
          required: ["location"],
        },
      },
    },
  ],
})
from maniac import Maniac

maniac = Maniac()

response = maniac.chat.completions.create(
  model="openai/gpt-5",
  messages=[{"role": "user", "content": "Hello!"}],
  tools=[
    {
      "type": "function",
      "function": {
        "name": "get_weather",
        "description": "Get the current weather report for a location",
        "parameters": {
          "type": "object",
          "properties": {
            "location": {
              "type": "string",
              "description": "The name of the location to get the weather report for",
            },
          },
          "required": ["location"],
        },
      },
    },
  ],
)
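
When the model decides to use a tool, an OpenAI-compatible response carries the call on the assistant message instead of plain text. A minimal sketch of handling it, assuming OpenAI-style tool_calls on the first choice and a get_weather function you implement yourself:

import json

# Check whether the model requested a tool call (assumes OpenAI-style response fields)
message = response.choices[0].message
if message.tool_calls:
  call = message.tool_calls[0]
  args = json.loads(call.function.arguments)
  result = get_weather(args["location"])  # your own implementation
  # Send the tool result back so the model can produce a final answer
  followup = maniac.chat.completions.create(
    model="openai/gpt-5",
    messages=[
      {"role": "user", "content": "Hello!"},
      message,
      {"role": "tool", "tool_call_id": call.id, "content": json.dumps(result)},
    ],
  )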

Streaming

import { Maniac } from "maniac-js"

const maniac = new Maniac()

for await (const chunk of maniac.chat.completions.stream({
  model: "openai/gpt-5",
  messages: [{role: "user", content: "Hello!"}]
})) {
  const piece = chunk.choices?.[0]?.delta?.content ?? "";
  if (piece) process.stdout.write(piece);
}
import asyncio
from maniac import Maniac

maniac = Maniac()

async def main():
  response = await maniac.chat.completions.stream(
    model="openai/gpt-5",
    messages=[{"role": "user", "content": "Tell me a story about France"}],
  )
  async for chunk in response:
    piece = chunk.get("choices")[0].get("delta").get("content")
    if piece:
      print(piece, end="")

asyncio.run(main())
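
If you also need the complete reply once streaming finishes, collect the deltas as they arrive instead of only printing them. A minimal sketch in the same dict-access style, assuming the same chunk shape as above:

# Accumulate streamed deltas, then join them into the full reply (same chunk shape as above)
parts = []
async for chunk in response:
  piece = chunk.get("choices")[0].get("delta").get("content")
  if piece:
    parts.append(piece)
full_reply = "".join(parts)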