Skip to main content
This guide provides simple, practical examples for common Claro SDK tasks — perfect for getting started quickly.

Prerequisites

# Install the Claro SDK and make your API key available to the examples below.
pip install baytos-claro
export BAYT_API_KEY="your_api_key_here"

Example 1: Fetch and Display a Prompt

The simplest way to fetch a prompt and display its content:
import os
from baytos.claro import BaytClient

# Authenticate with the key stored in the BAYT_API_KEY environment variable.
api_key = os.environ.get("BAYT_API_KEY")
client = BaytClient(api_key=api_key)

# Fetch published version 1 of the prompt from the workspace.
prompt = client.get_prompt("@workspace/my-prompt:v1")

# Show its metadata, then the main (generator) content.
print(f"Title: {prompt.title}")
print(f"Description: {prompt.description}")
print(f"\nPrompt content:\n{prompt.generator}")
Example output:

Title: Customer Support Assistant
Description: Helpful assistant for customer support queries

Prompt content:
You are a helpful customer support assistant. Your role is to...

Example 2: Use Prompt with OpenAI

Integrate a Claro prompt with OpenAI’s API:
import os
from baytos.claro import BaytClient
from openai import OpenAI

# One client for Claro (prompt storage) and one for OpenAI (inference),
# each authenticated from its own environment variable.
claro_client = BaytClient(api_key=os.environ.get("BAYT_API_KEY"))
openai_client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))

# Pull the managed prompt down from Claro.
prompt = claro_client.get_prompt("@workspace/customer-support:v1")

# The prompt's generator text becomes the system message for the chat call.
messages = [
    {"role": "system", "content": prompt.generator},
    {"role": "user", "content": "How do I reset my password?"},
]
response = openai_client.chat.completions.create(model="gpt-4", messages=messages)

print(response.choices[0].message.content)
When your team updates the prompt in Claro, your next API call automatically gets the latest version. No code changes needed.

Example 3: Use Prompt with Anthropic

Use a Claro prompt with Claude:
import os
from baytos.claro import BaytClient
from anthropic import Anthropic

# Set up the Claro and Anthropic clients from environment variables.
claro_client = BaytClient(api_key=os.environ.get("BAYT_API_KEY"))
anthropic_client = Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))

# Fetch the managed prompt.
prompt = claro_client.get_prompt("@workspace/code-reviewer:v1")

# The generator text becomes Claude's system prompt; the single user
# turn carries the code to review.
user_turn = {
    "role": "user",
    "content": "Review this code: def add(a, b): return a + b",
}
response = anthropic_client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    system=prompt.generator,
    messages=[user_turn],
)

print(response.content[0].text)

Example 4: Access Prompt Properties

Explore different properties of a prompt:
import os
from baytos.claro import BaytClient

client = BaytClient(api_key=os.environ.get("BAYT_API_KEY"))
prompt = client.get_prompt("@workspace/my-prompt:v1")

# Identity and versioning metadata.
print(f"Title: {prompt.title}")
print(f"Package: {prompt.package_name}")
print(f"Version: {prompt.version}")
print(f"Category: {prompt.category}")

# Which optional sections does this prompt carry?
print(f"\nHas system prompt: {prompt.has_system_prompt()}")
print(f"Has critique prompt: {prompt.has_critique_prompt()}")

# Optional system prompt section.
if prompt.system:
    print(f"\nSystem prompt:\n{prompt.system}")

# The generator section is the prompt's main body; always present here.
print(f"\nGenerator prompt:\n{prompt.generator}")

# Optional critique prompt section.
if prompt.critique:
    print(f"\nCritique prompt:\n{prompt.critique}")

# Attached context: file and URL resources bundled with the prompt.
print(f"\nHas context: {prompt.has_context()}")
if prompt.has_context():
    files, urls = prompt.get_file_contexts(), prompt.get_url_contexts()
    print(f"Files: {len(files)}")
    print(f"URLs: {len(urls)}")

Example 5: Dictionary-Style Access

Access prompt fields using dictionary syntax:
import os
from baytos.claro import BaytClient

client = BaytClient(api_key=os.environ.get("BAYT_API_KEY"))
prompt = client.get_prompt("@workspace/my-prompt:v1")

# Prompts support mapping-style access alongside attribute access.
print(f"Title: {prompt['title']}")
print(f"Category: {prompt.get('category', 'Unknown')}")

# Membership testing works like a dict's.
if 'description' in prompt:
    print(f"Description: {prompt['description']}")

# Enumerate every field the prompt exposes.
field_list = ', '.join(prompt.keys())
print(f"\nAvailable fields: {field_list}")

# A plain dict copy, handy for JSON serialization.
prompt_dict = prompt.to_dict()
print(f"\nAs JSON: {prompt_dict}")

Example 6: Working with Versions

Fetch different versions of the same prompt:
import os
from baytos.claro import BaytClient

client = BaytClient(api_key=os.environ.get("BAYT_API_KEY"))

# :v0 always refers to the unpublished draft.
draft = client.get_prompt("@workspace/my-prompt:v0")
print(f"Draft version: {draft.version}")
print(f"Draft content preview: {draft.generator[:100]}...")

# A specific published version is pinned by its number.
v1 = client.get_prompt("@workspace/my-prompt:v1")
print(f"\nPublished v1: {v1.version}")
print(f"v1 content preview: {v1.generator[:100]}...")

# :latest resolves to the newest published version.
latest = client.get_prompt("@workspace/my-prompt:latest")
print(f"\nLatest version: {latest.version}")
  • :v0 is always the draft version
  • :v1, :v2, etc. are published versions
  • :latest points to the most recent published version

Example 7: Browse Your Prompts

List all prompts in your workspace:
import os
from baytos.claro import BaytClient

client = BaytClient(api_key=os.environ.get("BAYT_API_KEY"))

# Request the first page of up to ten prompts.
result = client.list_prompts(limit=10)

print("Your Prompts:")
print("=" * 60)

# Numbered listing, starting from 1.
for i, prompt in enumerate(result['prompts'], 1):
    print(f"{i}. {prompt.title}")
    print(f"   Package: {prompt.package_name}")
    if prompt.description:
        print(f"   Description: {prompt.description}")
    print()

# The listing is paginated; hasMore signals additional pages exist.
if result['hasMore']:
    print(f"More prompts available. Total shown: {len(result['prompts'])}")

Example 8: Simple Error Handling

Handle common errors gracefully:
import os
from baytos.claro import BaytClient, BaytNotFoundError, BaytAuthError

client = BaytClient(api_key=os.environ.get("BAYT_API_KEY"))

# Catch the SDK's specific errors first, then fall back to a catch-all;
# the success message runs only when no exception was raised.
try:
    prompt = client.get_prompt("@workspace/my-prompt:v1")
except BaytNotFoundError:
    print("Prompt not found. Check the package name and version.")
except BaytAuthError:
    print("Authentication failed. Check your API key.")
except Exception as e:
    print(f"An unexpected error occurred: {e}")
else:
    print(f"Successfully loaded: {prompt.title}")

Example 9: Use with Multiple Providers

Create a helper function to use prompts with any LLM provider:
import os
from baytos.claro import BaytClient
from openai import OpenAI
from anthropic import Anthropic

def get_llm_response(prompt_package, user_message, provider="openai"):
    """Fetch a Claro prompt and run it against the chosen LLM provider.

    Args:
        prompt_package: Claro package reference, e.g. "@workspace/name:v1".
        user_message: The end-user message to send to the model.
        provider: Either "openai" or "anthropic".

    Returns:
        The model's text response.

    Raises:
        ValueError: If *provider* is not recognized.
    """
    # The prompt itself always comes from Claro, regardless of provider.
    claro_client = BaytClient(api_key=os.environ.get("BAYT_API_KEY"))
    prompt = claro_client.get_prompt(prompt_package)

    if provider == "openai":
        client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
        completion = client.chat.completions.create(
            model="gpt-4",
            messages=[
                {"role": "system", "content": prompt.generator},
                {"role": "user", "content": user_message},
            ],
        )
        return completion.choices[0].message.content

    if provider == "anthropic":
        client = Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))
        reply = client.messages.create(
            model="claude-3-5-sonnet-20241022",
            max_tokens=1024,
            system=prompt.generator,
            messages=[{"role": "user", "content": user_message}],
        )
        return reply.content[0].text

    raise ValueError(f"Unsupported provider: {provider}")

# Swap providers by changing a single argument.
response = get_llm_response(
    "@workspace/customer-support:v1",
    "How do I reset my password?",
    provider="openai",
)
print(response)

Example 10: Check Prompt Properties Before Use

Validate a prompt has the content you need:
import os
from baytos.claro import BaytClient

client = BaytClient(api_key=os.getenv("BAYT_API_KEY"))
prompt = client.get_prompt("@workspace/my-prompt:v1")

# A prompt with no generator content is unusable, so bail out early.
# NOTE: raise SystemExit instead of calling exit() — the exit() helper is
# injected by the `site` module for interactive sessions and is not
# guaranteed to exist when scripts run with site disabled or frozen.
if not prompt.generator:
    print("Warning: Prompt has no generator content")
    raise SystemExit(1)

# Report which optional sections are present.
if prompt.has_system_prompt():
    print("This prompt includes a system prompt")

if prompt.has_critique_prompt():
    print("This prompt includes a critique prompt")

# Context items (files/URLs) bundled with the prompt.
if prompt.has_context():
    print(f"This prompt has {len(prompt.context)} context items")

# Validation passed — safe to use the prompt.
print(f"Using prompt: {prompt.title}")
print(prompt.generator)

Next Steps

Need Help?

If you get stuck: