Skip to content

Mirascope

Tests Coverage Docs PyPI Version Stars License

LLM abstractions that aren't obstructions.

Mirascope is a powerful, flexible, and user-friendly library that simplifies the process of working with LLMs through a unified interface that works across various supported providers, including OpenAI, Anthropic, Mistral, Gemini, Groq, Cohere, LiteLLM, Azure AI, Vertex AI, and Bedrock.

Whether you're generating text, extracting structured information, or developing complex AI-driven agent systems, Mirascope provides the tools you need to streamline your development process and create powerful, robust applications.

30-Second Quickstart

Install Mirascope, specifying the provider(s) you intend to use, and set your API key:

pip install "mirascope[openai]"

export OPENAI_API_KEY=XXXXX
pip install "mirascope[anthropic]"

export ANTHROPIC_API_KEY=XXXXX
pip install "mirascope[mistral]"

export MISTRAL_API_KEY=XXXXX
pip install "mirascope[gemini]"

export GOOGLE_API_KEY=XXXXX
pip install "mirascope[groq]"

export GROQ_API_KEY=XXXXX
pip install "mirascope[cohere]"

export CO_API_KEY=XXXXX
pip install "mirascope[litellm]"

export OPENAI_API_KEY=XXXXX  # set keys for providers you will use
pip install "mirascope[azure]"

export AZURE_INFERENCE_ENDPOINT=XXXXX
export AZURE_INFERENCE_CREDENTIAL=XXXXX
pip install "mirascope[vertex]"

gcloud init
gcloud auth application-default login
pip install "mirascope[bedrock]"

aws configure
pip install "mirascope[openai]"

set OPENAI_API_KEY=XXXXX
pip install "mirascope[anthropic]"

set ANTHROPIC_API_KEY=XXXXX
pip install "mirascope[mistral]"

set MISTRAL_API_KEY=XXXXX
pip install "mirascope[gemini]"

set GOOGLE_API_KEY=XXXXX
pip install "mirascope[groq]"

set GROQ_API_KEY=XXXXX
pip install "mirascope[cohere]"

set CO_API_KEY=XXXXX
pip install "mirascope[litellm]"

set OPENAI_API_KEY=XXXXX  # set keys for providers you will use
pip install "mirascope[azure]"

set AZURE_INFERENCE_ENDPOINT=XXXXX
set AZURE_INFERENCE_CREDENTIAL=XXXXX
pip install "mirascope[vertex]"

gcloud init
gcloud auth application-default login
pip install "mirascope[bedrock]"

aws configure

Make your first call to an LLM to recommend a book for a given genre:

Mirascope

# Mirascope quickstart — shorthand prompt functions.
# Each snippet below is one provider "tab" from the docs and is fully
# self-contained: the same `recommend_book` example is repeated per
# provider; only the decorator module and the model name change.

# --- OpenAI ---
from mirascope.core import openai


@openai.call("gpt-4o-mini")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


response = recommend_book("fantasy")
print(response.content)
# --- Anthropic ---
from mirascope.core import anthropic


@anthropic.call("claude-3-5-sonnet-20240620")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


response = recommend_book("fantasy")
print(response.content)
# --- Mistral ---
from mirascope.core import mistral


@mistral.call("mistral-large-latest")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


response = recommend_book("fantasy")
print(response.content)
# --- Gemini (Google AI) ---
from mirascope.core import gemini


@gemini.call("gemini-1.5-flash")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


response = recommend_book("fantasy")
print(response.content)
# --- Groq ---
from mirascope.core import groq


@groq.call("llama-3.1-70b-versatile")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


response = recommend_book("fantasy")
print(response.content)
# --- Cohere ---
from mirascope.core import cohere


@cohere.call("command-r-plus")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


response = recommend_book("fantasy")
print(response.content)
# --- LiteLLM ---
from mirascope.core import litellm


@litellm.call("gpt-4o-mini")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


response = recommend_book("fantasy")
print(response.content)
# --- Azure AI ---
from mirascope.core import azure


@azure.call("gpt-4o-mini")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


response = recommend_book("fantasy")
print(response.content)
# --- Vertex AI ---
from mirascope.core import vertex


@vertex.call("gemini-1.5-flash")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


response = recommend_book("fantasy")
print(response.content)
# --- Amazon Bedrock ---
from mirascope.core import bedrock


@bedrock.call("anthropic.claude-3-haiku-20240307-v1:0")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


response = recommend_book("fantasy")
print(response.content)
# Same quickstart written with the `Messages` return style: the prompt
# function returns `Messages.User(...)` instead of a bare string. One
# self-contained snippet per provider tab; only the decorator module and
# model name differ.

# --- OpenAI ---
from mirascope.core import Messages, openai


@openai.call("gpt-4o-mini")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


response = recommend_book("fantasy")
print(response.content)
# --- Anthropic ---
from mirascope.core import Messages, anthropic


@anthropic.call("claude-3-5-sonnet-20240620")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


response = recommend_book("fantasy")
print(response.content)
# --- Mistral ---
from mirascope.core import Messages, mistral


@mistral.call("mistral-large-latest")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


response = recommend_book("fantasy")
print(response.content)
# --- Gemini (Google AI) ---
from mirascope.core import Messages, gemini


@gemini.call("gemini-1.5-flash")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


response = recommend_book("fantasy")
print(response.content)
# --- Groq ---
from mirascope.core import Messages, groq


@groq.call("llama-3.1-70b-versatile")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


response = recommend_book("fantasy")
print(response.content)
# --- Cohere ---
from mirascope.core import Messages, cohere


@cohere.call("command-r-plus")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


response = recommend_book("fantasy")
print(response.content)
# --- LiteLLM ---
from mirascope.core import Messages, litellm


@litellm.call("gpt-4o-mini")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


response = recommend_book("fantasy")
print(response.content)
# --- Azure AI ---
from mirascope.core import Messages, azure


@azure.call("gpt-4o-mini")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


response = recommend_book("fantasy")
print(response.content)
# --- Vertex AI ---
from mirascope.core import Messages, vertex


@vertex.call("gemini-1.5-flash")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


response = recommend_book("fantasy")
print(response.content)
# --- Amazon Bedrock ---
from mirascope.core import Messages, bedrock


@bedrock.call("anthropic.claude-3-haiku-20240307-v1:0")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


response = recommend_book("fantasy")
print(response.content)
# Same quickstart written with the `@prompt_template` decorator style:
# the prompt text lives in the decorator and the function body is `...`.
# One self-contained snippet per provider tab; only the decorator module
# and model name differ.

# --- OpenAI ---
from mirascope.core import openai, prompt_template


@openai.call("gpt-4o-mini")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


response = recommend_book("fantasy")
print(response.content)
# --- Anthropic ---
from mirascope.core import anthropic, prompt_template


@anthropic.call("claude-3-5-sonnet-20240620")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


response = recommend_book("fantasy")
print(response.content)
# --- Mistral ---
from mirascope.core import mistral, prompt_template


@mistral.call("mistral-large-latest")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


response = recommend_book("fantasy")
print(response.content)
# --- Gemini (Google AI) ---
from mirascope.core import gemini, prompt_template


@gemini.call("gemini-1.5-flash")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


response = recommend_book("fantasy")
print(response.content)
# --- Groq ---
from mirascope.core import groq, prompt_template


@groq.call("llama-3.1-70b-versatile")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


response = recommend_book("fantasy")
print(response.content)
# --- Cohere ---
from mirascope.core import cohere, prompt_template


@cohere.call("command-r-plus")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


response = recommend_book("fantasy")
print(response.content)
# --- LiteLLM ---
from mirascope.core import litellm, prompt_template


@litellm.call("gpt-4o-mini")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


response = recommend_book("fantasy")
print(response.content)
# --- Azure AI ---
from mirascope.core import azure, prompt_template


@azure.call("gpt-4o-mini")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


response = recommend_book("fantasy")
print(response.content)
# --- Vertex AI ---
from mirascope.core import prompt_template, vertex


@vertex.call("gemini-1.5-flash")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


response = recommend_book("fantasy")
print(response.content)
# --- Amazon Bedrock ---
from mirascope.core import bedrock, prompt_template


@bedrock.call("anthropic.claude-3-haiku-20240307-v1:0")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


response = recommend_book("fantasy")
print(response.content)
# Same quickstart written with the explicit `BaseMessageParam` return
# style: the prompt function returns a list of message-param objects.
# One self-contained snippet per provider tab; only the decorator module
# and model name differ.

# --- OpenAI ---
from mirascope.core import BaseMessageParam, openai


@openai.call("gpt-4o-mini")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


response = recommend_book("fantasy")
print(response.content)
# --- Anthropic ---
from mirascope.core import BaseMessageParam, anthropic


@anthropic.call("claude-3-5-sonnet-20240620")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


response = recommend_book("fantasy")
print(response.content)
# --- Mistral ---
from mirascope.core import BaseMessageParam, mistral


@mistral.call("mistral-large-latest")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


response = recommend_book("fantasy")
print(response.content)
# --- Gemini (Google AI) ---
from mirascope.core import BaseMessageParam, gemini


@gemini.call("gemini-1.5-flash")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


response = recommend_book("fantasy")
print(response.content)
# --- Groq ---
from mirascope.core import BaseMessageParam, groq


@groq.call("llama-3.1-70b-versatile")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


response = recommend_book("fantasy")
print(response.content)
# --- Cohere ---
from mirascope.core import BaseMessageParam, cohere


@cohere.call("command-r-plus")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


response = recommend_book("fantasy")
print(response.content)
# --- LiteLLM ---
from mirascope.core import BaseMessageParam, litellm


@litellm.call("gpt-4o-mini")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


response = recommend_book("fantasy")
print(response.content)
# --- Azure AI ---
from mirascope.core import BaseMessageParam, azure


@azure.call("gpt-4o-mini")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


response = recommend_book("fantasy")
print(response.content)
# --- Vertex AI ---
from mirascope.core import BaseMessageParam, vertex


@vertex.call("gemini-1.5-flash")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


response = recommend_book("fantasy")
print(response.content)
# --- Amazon Bedrock ---
from mirascope.core import BaseMessageParam, bedrock


@bedrock.call("anthropic.claude-3-haiku-20240307-v1:0")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


response = recommend_book("fantasy")
print(response.content)
Official SDK
# Equivalent examples written directly against each provider's official
# SDK (no Mirascope), for comparison with the snippets above.

# --- OpenAI SDK ---
from openai import OpenAI

client = OpenAI()


def recommend_book(genre: str) -> str:
    # One-shot chat completion; str() coerces the SDK's message content
    # (which may be typed as non-str) to a plain string.
    completion = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": f"Recommend a {genre} book"}],
    )
    return str(completion.choices[0].message.content)


output = recommend_book("fantasy")
print(output)
# --- Anthropic SDK ---
from anthropic import Anthropic

client = Anthropic()


def recommend_book(genre: str) -> str:
    # Anthropic's API takes an explicit max_tokens and returns content as
    # a list of typed blocks; only a text block yields output here.
    message = client.messages.create(
        model="claude-3-5-sonnet-20240620",
        messages=[{"role": "user", "content": f"Recommend a {genre} book"}],
        max_tokens=1024,
    )
    block = message.content[0]
    return block.text if block.type == "text" else ""


output = recommend_book("fantasy")
print(output)
# --- Mistral SDK ---
from mistralai.client import MistralClient

client = MistralClient()


def recommend_book(genre: str) -> str:
    """Ask Mistral's large model to recommend a book for *genre*.

    Written directly against the official Mistral SDK (no Mirascope),
    mirroring the structure of the other SDK examples in this section.
    """
    completion = client.chat(
        model="mistral-large-latest",
        messages=[{"role": "user", "content": f"Recommend a {genre} book"}],
    )
    # str() matches the other chat-completion examples (OpenAI, Groq,
    # LiteLLM) and guarantees the annotated `-> str` even if the SDK types
    # `content` as a broader union.
    return str(completion.choices[0].message.content)


output = recommend_book("fantasy")
print(output)
# --- Google Generative AI SDK ---
from google.generativeai import GenerativeModel

client = GenerativeModel("gemini-1.5-flash")


def recommend_book(genre: str) -> str:
    # The pyright ignore is intentional: the SDK's type stub for
    # `contents` is stricter than the plain-dict form used here.
    generation = client.generate_content(
        contents=[{"role": "user", "parts": f"Recommend a {genre} book"}]  # pyright: ignore [reportArgumentType]
    )
    # Text of the first candidate's first content part.
    return generation.candidates[0].content.parts[0].text


output = recommend_book("fantasy")
print(output)
# --- Groq SDK ---
from groq import Groq

client = Groq()


def recommend_book(genre: str) -> str:
    # Groq exposes an OpenAI-compatible chat completions interface; str()
    # coerces the message content to a plain string.
    completion = client.chat.completions.create(
        model="llama-3.1-70b-versatile",
        messages=[{"role": "user", "content": f"Recommend a {genre} book"}],
    )
    return str(completion.choices[0].message.content)


output = recommend_book("fantasy")
print(output)
# --- Cohere SDK ---
from cohere import Client

client = Client()


def recommend_book(genre: str) -> str:
    # Cohere's chat endpoint takes a single `message` string rather than
    # a messages list.
    response = client.chat(
        model="command-r-plus",
        message=f"Recommend a {genre} book",
    )
    return response.text


output = recommend_book("fantasy")
print(output)
# --- LiteLLM SDK ---
from litellm import completion


def recommend_book(genre: str) -> str:
    # litellm.completion routes to the underlying provider (OpenAI here)
    # using an OpenAI-style messages list.
    response = completion(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": f"Recommend a {genre} book"}],
    )
    return str(response.choices[0].message.content)  # type: ignore


output = recommend_book("fantasy")
print(output)
# --- Azure AI Inference SDK ---
from azure.ai.inference import ChatCompletionsClient
from azure.ai.inference.models import ChatRequestMessage
from azure.core.credentials import AzureKeyCredential

# Replace YOUR_ENDPOINT / YOUR_KEY with your deployment's values.
client = ChatCompletionsClient(
    endpoint="YOUR_ENDPOINT", credential=AzureKeyCredential("YOUR_KEY")
)


def recommend_book(genre: str) -> str:
    completion = client.complete(
        model="gpt-4o-mini",
        messages=[
            ChatRequestMessage({"role": "user", "content": f"Recommend a {genre} book"})
        ],
    )
    # Guard the None case explicitly so the function always returns str.
    message = completion.choices[0].message
    return message.content if message.content is not None else ""


output = recommend_book("fantasy")
print(output)
# --- Vertex AI SDK ---
from vertexai.generative_models import GenerativeModel

client = GenerativeModel("gemini-1.5-flash")


def recommend_book(genre: str) -> str:
    generation = client.generate_content(
        contents=[{"role": "user", "parts": f"Recommend a {genre} book"}]
    )
    # Text of the first candidate's first content part.
    return generation.candidates[0].content.parts[0].text  # type: ignore


output = recommend_book("fantasy")
print(output)
# --- AWS Bedrock SDK (boto3) ---
import boto3

# Uses the credentials configured via `aws configure`.
bedrock_client = boto3.client(service_name="bedrock-runtime")


def recommend_book(genre: str) -> str:
    """Recommend a book via Bedrock's Converse API and return the text."""
    # Converse API message shape: content is a list of content blocks.
    messages = [{"role": "user", "content": [{"text": f"Recommend a {genre} book"}]}]
    response = bedrock_client.converse(
        modelId="anthropic.claude-3-haiku-20240307-v1:0",
        messages=messages,
        inferenceConfig={"maxTokens": 1024},
    )
    output_message = response["output"]["message"]
    # Concatenate only the text pieces; non-text content blocks are skipped.
    content = ""
    for content_piece in output_message["content"]:
        if "text" in content_piece:
            content += content_piece["text"]
    return content


output = recommend_book("fantasy")
print(output)

Choose Your Path

Tutorials

  • Quickstart Guide


    Comprehensive overview of core features and building blocks

    Quickstart

  • Structured Outputs


    Explore various techniques for generating structured outputs

    Structured Outputs

  • Dynamic Configuration & Chaining


    Examples ranging from basic usage to more complex chaining techniques

    Dynamic Configuration & Chaining

  • Tools & Agents


    Learn how to define tools for your LLM to build advanced AI agents

    Tools & Agents

Dive Deeper

  • Learn


    In-depth exploration of Mirascope's many features and capabilities

    Learn

  • Tutorials


    Advanced usage patterns and real-world applications

    Tutorials

  • Integrations


    Integrations with third-party tools for enhanced usage

    Integrations

  • API Reference


    Detailed information on classes and functions

    Reference

Next Steps

Why Use Mirascope · Join Our Community · Star the Repo

We're excited to see what you'll build with Mirascope, and we're here to help! Don't hesitate to reach out :)