Skip to main content

Python SDK Quickstart

Get started with the Claro Python SDK in just a few lines of code.

Prerequisites

  • Python 3.8 or higher
  • Claro SDK installed (pip install baytos-claro)
  • API key from Claro settings

Installation

Install the SDK using pip:
pip install baytos-claro

Basic Usage

import os
from baytos.claro import BaytClient

# Initialize the client; the API key is read from the environment
# so secrets never appear in source code
client = BaytClient(api_key=os.getenv("BAYT_API_KEY"))

# Fetch a prompt by its package name (@workspace/name:version)
prompt = client.get_prompt("@workspace/my-prompt:v1")

# Access prompt content
print(prompt.title)
print(prompt.generator)  # The main prompt content

Understanding Package Names

Claro identifies prompts with versioned package names:
@workspace/prompt-name:version
  • @workspace - Your workspace identifier
  • prompt-name - The prompt’s name (lowercase, hyphenated)
  • version - The version (e.g., v1, v2, latest)
Find a prompt’s package name in the Claro dashboard under the prompt’s settings.

Working with Prompt Versions

# Pin an exact version
pinned = client.get_prompt("@workspace/customer-support:v1")

# Resolve whatever version is newest
newest = client.get_prompt("@workspace/customer-support:latest")

# Warn when the pinned copy has fallen behind the latest release
if newest.version != pinned.version:
    print(f"New version available: {newest.version}")

Using Prompts with LLMs

OpenAI Example

import os
from baytos.claro import BaytClient
from openai import OpenAI

# One client for prompt management, one for the LLM
prompts = BaytClient(api_key=os.getenv("BAYT_API_KEY"))
llm = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

# Pull the managed prompt from Claro
prompt = prompts.get_prompt("@workspace/assistant:v1")

# Build the conversation first, then send it to OpenAI;
# the Claro prompt body becomes the system message
messages = [
    {"role": "system", "content": prompt.generator},
    {"role": "user", "content": "Hello!"},
]
response = llm.chat.completions.create(model="gpt-4", messages=messages)

print(response.choices[0].message.content)

Anthropic Example

import os
from baytos.claro import BaytClient
import anthropic

# Initialize clients — one for Claro, one for Anthropic
claro_client = BaytClient(api_key=os.getenv("BAYT_API_KEY"))
anthropic_client = anthropic.Anthropic(api_key=os.getenv("ANTHROPIC_API_KEY"))

# Get prompt from Claro
prompt = claro_client.get_prompt("@workspace/assistant:v1")

# Use with Claude: the Claro prompt body is passed as the system prompt,
# which the Messages API takes as a top-level parameter (not a message)
response = anthropic_client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,  # max_tokens is a required parameter of the Messages API
    system=prompt.generator,
    messages=[
        {"role": "user", "content": "Hello!"}
    ]
)

print(response.content[0].text)

Listing Prompts

# List all prompts in your workspace (up to `limit` per page)
result = client.list_prompts(limit=20)

# result['prompts'] holds prompt objects — presumably the same shape
# returned by get_prompt (title, package_name, ...)
for prompt in result['prompts']:
    print(f"{prompt.title} - {prompt.package_name}")

# Pagination: a next_cursor is only present when more pages exist
if result.get('next_cursor'):
    next_page = client.list_prompts(limit=20, cursor=result['next_cursor'])

Error Handling

import os

from baytos.claro import BaytClient, BaytNotFoundError, BaytAuthError

client = BaytClient(api_key=os.getenv("BAYT_API_KEY"))

try:
    prompt = client.get_prompt("@workspace/my-prompt:v1")
    print(prompt.generator)
except BaytNotFoundError:
    # The package name did not resolve to a prompt
    print("Prompt not found - check the package name")
except BaytAuthError:
    # The API key is missing, invalid, or lacks access
    print("Authentication failed - check your API key")
except Exception as e:
    # Last-resort catch so one failed call cannot crash the app;
    # keep the specific handlers above for actionable messages
    print(f"Unexpected error: {e}")

Complete Example

Here’s a complete working example:
#!/usr/bin/env python3
"""
Complete Claro SDK example showing common patterns
"""

import os
from baytos.claro import BaytClient, BaytNotFoundError
from openai import OpenAI

def main():
    # One client per service
    prompt_store = BaytClient(api_key=os.getenv("BAYT_API_KEY"))
    llm = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

    try:
        # Resolve the newest published version of the prompt
        prompt = prompt_store.get_prompt("@workspace/customer-support:latest")
        print(f"Using prompt: {prompt.title} (v{prompt.version})")

        # Feed the managed prompt to OpenAI as the system message
        conversation = [
            {"role": "system", "content": prompt.generator},
            {"role": "user", "content": "How do I reset my password?"},
        ]
        response = llm.chat.completions.create(
            model="gpt-4",
            messages=conversation,
            temperature=0.7,
            max_tokens=500,
        )

        print("\nResponse:")
        print(response.choices[0].message.content)

    except BaytNotFoundError:
        print("Error: Prompt not found. Check the package name.")
    except Exception as e:
        print(f"Error: {e}")

if __name__ == "__main__":
    main()

Environment Variables

Create a .env file for local development:
.env
BAYT_API_KEY=your_claro_api_key
OPENAI_API_KEY=your_openai_api_key
ANTHROPIC_API_KEY=your_anthropic_api_key
Load it with python-dotenv:
from dotenv import load_dotenv
load_dotenv()
Always add .env to your .gitignore to avoid committing secrets.

Next Steps