Function calling

Not Diamond supports function calling and takes the tools we want to use into account when making recommendations. See supported LLM models to check which models support function calling.

In the Python code below, we will use the @tool decorator from LangChain (which is installed by default with notdiamond[create]) to bind Python functions to the NotDiamond object so that we can execute them.

Our TypeScript code will simply return model recommendations, and we can then execute the functions however we choose.

pip install 'notdiamond[create]'
npm install notdiamond dotenv
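Both examples expect a Not Diamond API key to be available at runtime. The TypeScript snippet loads it from a local .env file via dotenv, and the Python client can typically read it from the environment as well; a minimal .env with a placeholder value might look like this:

NOTDIAMOND_API_KEY=your-notdiamond-api-key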
from notdiamond import NotDiamond
from langchain_core.tools import tool

# Defining our tools
@tool
def add(a: int, b: int) -> int:
    "Adds a and b."
    return a + b
  
@tool
def multiply(a: int, b: int) -> int:
    "Multiplies a and b."
    return a * b

# Define the LLMs we'd like to route between
llm_providers = ['openai/gpt-3.5-turbo', 'openai/gpt-4-turbo-2024-04-09', 'openai/gpt-4o-2024-05-13', 
                 'anthropic/claude-3-haiku-20240307', 'anthropic/claude-3-opus-20240229']

# Define the Not Diamond routing client and the models we'll bind our tools to
client = NotDiamond(llm_configs=llm_providers)

# Binding the add and multiply tools to the client
client.bind_tools([add, multiply]) 

# Create a list of messages
messages = [
    {"role": "system", "content": "You are a bot that helps with math calculations using the provided functions. For every question that is asked, call the correct function."},
    {"role": "user", "content": "What is 3288273827373 * 523283927371?"},
]

# Calling the client
result, session_id, provider = client.chat.completions.create(
    messages=messages
)

# Looping through the tool calls in the result
for tool_call in result.tool_calls:
    # Selecting the tool based on the name
    selected_tool = {"add": add, "multiply": multiply}[tool_call["name"].lower()]
    # Invoking the selected tool with the arguments
    tool_output = selected_tool.invoke(tool_call["args"])

print("ND session ID:", session_id)  # A unique ID of the invoke. Useful for personalizing ND through feedback
print("LLM called:", provider.model)  # The LLM routed to
print("Selected tool:", result.tool_calls[0]['name'])  # The selected tool
print("Arguments:", result.tool_calls[0]['args'])  # The corresponding arguments
print("Function output:", tool_output)  # The output of the selected function

import { NotDiamond } from 'notdiamond';
import dotenv from 'dotenv';
dotenv.config();

// Initialize the Not Diamond client
const notDiamond = new NotDiamond({
  apiKey: process.env.NOTDIAMOND_API_KEY,
});

// Define our tools
const tools = [
  {
    type: 'function',
    function: {
      name: 'add',
      description: 'Adds a and b.',
      parameters: {
        type: 'object',
        properties: {
          a: { type: 'integer' },
          b: { type: 'integer' },
        },
        required: ['a', 'b'],
      },
    },
  },
  {
    type: 'function',
    function: {
      name: 'multiply',
      description: 'Multiplies a and b.',
      parameters: {
        type: 'object',
        properties: {
          a: { type: 'integer' },
          b: { type: 'integer' },
        },
        required: ['a', 'b'],
      },
    },
  },
];

// Define the LLMs we'd like to route between
const llmProviders = [
  { provider: 'openai', model: 'gpt-3.5-turbo' },
  { provider: 'openai', model: 'gpt-4-turbo-2024-04-09' },
  { provider: 'openai', model: 'gpt-4o-2024-05-13' },
  { provider: 'anthropic', model: 'claude-3-haiku-20240307' },
  { provider: 'anthropic', model: 'claude-3-opus-20240229' },
];

// The best LLM is determined by Not Diamond and the LLM request is made client-side
const result = await notDiamond.modelSelect({
  messages: [
    { role: 'system', content: 'You are a bot that helps with math calculations using the provided functions. For every question that is asked, call the correct function.' },
    { role: 'user', content: 'What is 3288273827373 * 523283927371?' },
  ],
  llmProviders,
  tools,
});

if ('detail' in result) {
  console.error('Error:', result.detail);
}
else {
  console.log('Not Diamond session ID:', result.session_id);  // A unique ID of Not Diamond's recommendation
  console.log('LLM recommended:', result.providers);  // The recommended LLM(s) to call client-side
}

// Your custom call logic...
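
As one possible version of that custom call logic, the sketch below assumes the first entry in result.providers is the top recommendation, that its entries mirror the { provider, model } shape used above, and that the recommended model is an OpenAI one, so the request is re-sent with the official openai package and the same tools definition. The toolImplementations map is an illustrative helper, not part of the Not Diamond SDK.

import OpenAI from 'openai';

// Illustrative map from tool name to a local implementation
const toolImplementations: Record<string, (args: { a: number; b: number }) => number> = {
  add: ({ a, b }) => a + b,
  multiply: ({ a, b }) => a * b,
};

if (!('detail' in result)) {
  // Assumption: the first entry in result.providers is the top recommendation
  // and it is an OpenAI model; other providers would need their own SDK call.
  const best = result.providers[0];
  const openai = new OpenAI(); // reads OPENAI_API_KEY from the environment

  const completion = await openai.chat.completions.create({
    model: best.model,
    messages: [
      { role: 'system', content: 'You are a bot that helps with math calculations using the provided functions. For every question that is asked, call the correct function.' },
      { role: 'user', content: 'What is 3288273827373 * 523283927371?' },
    ],
    // The tool schema above already follows OpenAI's function-tool format
    tools: tools as unknown as OpenAI.Chat.Completions.ChatCompletionTool[],
  });

  // Execute whichever tool the model asked for
  for (const toolCall of completion.choices[0].message.tool_calls ?? []) {
    if (toolCall.type !== 'function') continue;
    const impl = toolImplementations[toolCall.function.name];
    if (!impl) continue;
    const output = impl(JSON.parse(toolCall.function.arguments));
    console.log(`Output of ${toolCall.function.name}:`, output);
  }
}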