PyPI Client Usage Guide

Install the Latest monsterapi Client

!pip install --upgrade monsterapi
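The leading ! runs the command from a notebook cell; if you are installing from a regular terminal instead, drop it:

pip install --upgrade monsterapi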

Use the code below to send requests with the PyPI client:

import json
from typing import List

from monsterapi.nextGenLLMClient import LLMClient, GenerateRequest

# Initialize the LLMClient with your API key
client = LLMClient(api_key="Enter your Monster API key here!")

# List of models to iterate over
models = [
    "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
    "microsoft/Phi-3-mini-4k-instruct",
    "mistralai/Mistral-7B-Instruct-v0.2",
    "meta-llama/Meta-Llama-3-8B-Instruct",
    "meta-llama/Meta-Llama-3.1-8B-Instruct"
]

# Function to create a GenerateRequest object for a given model
def create_request(model: str) -> GenerateRequest:
    # Build a chat-style request with a short conversation history
    return GenerateRequest(
        model=model,
        messages=[
            {"role": "user", "content": "What is your favourite condiment?"},
            {"role": "assistant", "content": "Well, I'm quite partial to a good squeeze of fresh lemon juice. It adds just the right amount of zesty flavour to whatever I'm cooking up in the kitchen!"},
            {"role": "user", "content": "Do you have mayonnaise recipes?"}
        ],
        max_tokens=128,
        n=1,
        temperature=1,
    )

# Iterate over each model, send the request, and print the response
for model in models:
    request = create_request(model)
    response = client.generate(request)

    # Print the response for the current model
    print(f"Response for model {model}:")
    print(json.dumps(response, indent=2))
    print("--------------------------------")