mistralai-client

Mistral AI API client library for Rust (unofficial)

Mistral AI Rust Client


A Rust client for the Mistral AI API.

[!IMPORTANT]
In v0 versions, minor releases may introduce breaking changes.
See CHANGELOG.md for more information.



Supported APIs

  • Chat without streaming
  • Chat without streaming (async)
  • Chat with streaming
  • Embeddings
  • Embeddings (async)
  • List models
  • List models (async)
  • Function calling
  • Function calling (async)

Installation

You can install the library in your project with:

cargo add mistralai-client

Mistral API Key

You can get a Mistral API key at https://docs.mistral.ai/#api-access.

As an environment variable

Simply set the MISTRAL_API_KEY environment variable.

use mistralai_client::v1::client::Client;

fn main() {
    let client = Client::new(None, None, None, None).unwrap();
}
MISTRAL_API_KEY=your_api_key cargo run
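
If the variable may be missing, you can also read it yourself to fail fast with a clear message, then pass the key explicitly. A minimal sketch, assuming Client::new accepts the key as Some(String) (as in the next section):

use mistralai_client::v1::client::Client;

fn main() {
    // Fail early with a readable error if the variable is not set.
    let api_key = std::env::var("MISTRAL_API_KEY").expect("MISTRAL_API_KEY is not set");

    let client = Client::new(Some(api_key), None, None, None).unwrap();
}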

As a client argument

use mistralai_client::v1::client::Client;

fn main() {
    let api_key = "your_api_key";

    let client = Client::new(Some(api_key.to_string()), None, None, None).unwrap();
}
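
Client::new returns a Result, so instead of unwrap you can handle a missing or invalid key gracefully. A minimal sketch, assuming the error type implements Debug:

use mistralai_client::v1::client::Client;

fn main() {
    let client = match Client::new(Some("your_api_key".to_string()), None, None, None) {
        Ok(client) => client,
        Err(error) => {
            eprintln!("Failed to create the Mistral client: {:?}", error);
            std::process::exit(1);
        }
    };

    // `client` is ready to use from here on.
    let _ = client;
}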

Usage

Chat

use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
};

fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        ..Default::default()
    };

    let result = client.chat(model, messages, Some(options)).unwrap();
    println!("Assistant: {}", result.choices[0].message.content);
    // => "Assistant: Tower. The Eiffel Tower is a famous landmark in Paris, France."
}
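
The messages vector can carry a whole conversation rather than a single turn, since prior turns are replayed on every request. A minimal sketch, assuming ChatMessageRole also exposes a System variant (not shown above; adjust to the actual enum):

use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
};

fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    // A system instruction followed by the user's question.
    let messages = vec![
        ChatMessage {
            role: ChatMessageRole::System, // Assumed variant.
            content: "You answer in exactly one word.".to_string(),
            tool_calls: None,
        },
        ChatMessage {
            role: ChatMessageRole::User,
            content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
            tool_calls: None,
        },
    ];

    let result = client
        .chat(Model::OpenMistral7b, messages, Some(ChatParams::default()))
        .unwrap();
    println!("Assistant: {}", result.choices[0].message.content);
}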

Chat (async)

use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
};

#[tokio::main]
async fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        ..Default::default()
    };

    let result = client
        .chat_async(model, messages, Some(options))
        .await
        .unwrap();
    println!(
        "{:?}: {}",
        result.choices[0].message.role, result.choices[0].message.content
    );
    // => "Assistant: Tower. The Eiffel Tower is a famous landmark in Paris, France."
}
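
Since chat_async returns a future, independent requests can run concurrently. A minimal sketch with tokio::join!, reusing only the API shown above:

use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole},
    client::Client,
    constants::Model,
};

#[tokio::main]
async fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let make_messages = |prompt: &str| {
        vec![ChatMessage {
            role: ChatMessageRole::User,
            content: prompt.to_string(),
            tool_calls: None,
        }]
    };

    // Both requests are in flight at the same time.
    let (first, second) = tokio::join!(
        client.chat_async(Model::OpenMistral7b, make_messages("Name one ocean."), None),
        client.chat_async(Model::OpenMistral7b, make_messages("Name one planet."), None),
    );

    println!("First: {}", first.unwrap().choices[0].message.content);
    println!("Second: {}", second.unwrap().choices[0].message.content);
}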

Chat (async, with streaming)

use futures::stream::StreamExt;
use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
};
use std::io::{self, Write};

#[tokio::main]
async fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "Tell me a short happy story.".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        ..Default::default()
    };

    let stream_result = client
        .chat_stream(model, messages, Some(options))
        .await
        .unwrap();
    stream_result
        .for_each(|chunk_result| async {
            match chunk_result {
                Ok(chunks) => chunks.iter().for_each(|chunk| {
                    print!("{}", chunk.choices[0].delta.content);
                    io::stdout().flush().unwrap();
                    // => "Once upon a time, [...]"
                }),
                Err(error) => {
                    eprintln!("Error processing chunk: {:?}", error)
                }
            }
        })
        .await;
    print!("\n") // To persist the last chunk output.
}
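
If you also need the complete text after streaming, you can accumulate the deltas instead of only printing them. A minimal sketch built on the same chat_stream call, assuming delta.content is a String as printed above:

use futures::stream::StreamExt;
use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
};

#[tokio::main]
async fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "Tell me a short happy story.".to_string(),
        tool_calls: None,
    }];

    let stream_result = client
        .chat_stream(Model::OpenMistral7b, messages, Some(ChatParams::default()))
        .await
        .unwrap();

    // Fold every delta into one buffer; `fold` consumes the stream.
    let full_text = stream_result
        .fold(String::new(), |mut acc, chunk_result| async move {
            if let Ok(chunks) = chunk_result {
                for chunk in chunks {
                    acc.push_str(&chunk.choices[0].delta.content);
                }
            }
            acc
        })
        .await;

    println!("Full story ({} characters): {}", full_text.chars().count(), full_text);
}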

Chat (with function calling)

use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
    tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
};
use serde::Deserialize;
use std::any::Any;

#[derive(Debug, Deserialize)]
struct GetCityTemperatureArguments {
    city: String,
}

struct GetCityTemperatureFunction;
#[async_trait::async_trait]
impl Function for GetCityTemperatureFunction {
    async fn execute(&self, arguments: String) -> Box<dyn Any + Send> {
        // Deserialize arguments, perform the logic, and return the result
        let GetCityTemperatureArguments { city } = serde_json::from_str(&arguments).unwrap();

        let temperature = match city.as_str() {
            "Paris" => "20°C",
            _ => "Unknown city",
        };

        Box::new(temperature.to_string())
    }
}

fn main() {
    let tools = vec![Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        vec![ToolFunctionParameter::new(
            "city".to_string(),
            "The name of the city.".to_string(),
            ToolFunctionParameterType::String,
        )],
    )];

    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let mut client = Client::new(None, None, None, None).unwrap();
    client.register_function(
        "get_city_temperature".to_string(),
        Box::new(GetCityTemperatureFunction),
    );

    let model = Model::MistralSmallLatest;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "What's the temperature in Paris?".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        tool_choice: Some(ToolChoice::Auto),
        tools: Some(tools),
        ..Default::default()
    };

    client.chat(model, messages, Some(options)).unwrap();
    let temperature = client
        .get_last_function_call_result()
        .unwrap()
        .downcast::<String>()
        .unwrap();
    println!("The temperature in Paris is: {}.", temperature);
    // => "The temperature in Paris is: 20°C."
}

Chat (with function calling, async)

use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
    tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
};
use serde::Deserialize;
use std::any::Any;

#[derive(Debug, Deserialize)]
struct GetCityTemperatureArguments {
    city: String,
}

struct GetCityTemperatureFunction;
#[async_trait::async_trait]
impl Function for GetCityTemperatureFunction {
    async fn execute(&self, arguments: String) -> Box<dyn Any + Send> {
        // Deserialize arguments, perform the logic, and return the result
        let GetCityTemperatureArguments { city } = serde_json::from_str(&arguments).unwrap();

        let temperature = match city.as_str() {
            "Paris" => "20°C",
            _ => "Unknown city",
        };

        Box::new(temperature.to_string())
    }
}

#[tokio::main]
async fn main() {
    let tools = vec![Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        vec![ToolFunctionParameter::new(
            "city".to_string(),
            "The name of the city.".to_string(),
            ToolFunctionParameterType::String,
        )],
    )];

    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let mut client = Client::new(None, None, None, None).unwrap();
    client.register_function(
        "get_city_temperature".to_string(),
        Box::new(GetCityTemperatureFunction),
    );

    let model = Model::MistralSmallLatest;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "What's the temperature in Paris?".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        tool_choice: Some(ToolChoice::Auto),
        tools: Some(tools),
        ..Default::default()
    };

    client
        .chat_async(model, messages, Some(options))
        .await
        .unwrap();
    let temperature = client
        .get_last_function_call_result()
        .unwrap()
        .downcast::<String>()
        .unwrap();
    println!("The temperature in Paris is: {}.", temperature);
    // => "The temperature in Paris is: 20°C."
}

Embeddings

use mistralai_client::v1::{client::Client, constants::EmbedModel};

fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client: Client = Client::new(None, None, None, None).unwrap();

    let model = EmbedModel::MistralEmbed;
    let input = vec!["Embed this sentence.", "As well as this one."]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let options = None;

    let response = client.embeddings(model, input, options).unwrap();
    println!("First Embedding: {:?}", response.data[0]);
    // => "First Embedding: {...}"
}
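
A common next step is comparing the returned vectors, for example with cosine similarity. In the sketch below, the embedding: Vec<f32> field on each data entry is an assumption (neither the field name nor the element type is shown above), so check the crate docs before relying on it:

use mistralai_client::v1::{client::Client, constants::EmbedModel};

// Cosine similarity between two equal-length vectors.
fn cosine_similarity(a: &[f32], b: &[f32]) -> f32 {
    let dot: f32 = a.iter().zip(b).map(|(x, y)| x * y).sum();
    let norm_a = a.iter().map(|x| x * x).sum::<f32>().sqrt();
    let norm_b = b.iter().map(|x| x * x).sum::<f32>().sqrt();
    dot / (norm_a * norm_b)
}

fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let input = vec![
        "Embed this sentence.".to_string(),
        "As well as this one.".to_string(),
    ];
    let response = client
        .embeddings(EmbedModel::MistralEmbed, input, None)
        .unwrap();

    // `embedding` is an assumed field name; adapt it to the actual response type.
    let score = cosine_similarity(&response.data[0].embedding, &response.data[1].embedding);
    println!("Cosine similarity: {}", score);
}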

Embeddings (async)

use mistralai_client::v1::{client::Client, constants::EmbedModel};

#[tokio::main]
async fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client: Client = Client::new(None, None, None, None).unwrap();

    let model = EmbedModel::MistralEmbed;
    let input = vec!["Embed this sentence.", "As well as this one."]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let options = None;

    let response = client
        .embeddings_async(model, input, options)
        .await
        .unwrap();
    println!("First Embedding: {:?}", response.data[0]);
    // => "First Embedding: {...}"
}

List models

use mistralai_client::v1::client::Client;

fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let result = client.list_models().unwrap();
    println!("First Model ID: {:?}", result.data[0].id);
    // => "First Model ID: open-mistral-7b"
}

List models (async)

use mistralai_client::v1::client::Client;

#[tokio::main]
async fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let result = client.list_models_async().await.unwrap();
    println!("First Model ID: {:?}", result.data[0].id);
    // => "First Model ID: open-mistral-7b"
}
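
Since result.data is a list, you can iterate it to see every model available to your account. A minimal sketch based only on the id field shown above:

use mistralai_client::v1::client::Client;

fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    // Print every model ID returned by the API, one per line.
    let result = client.list_models().unwrap();
    for model in result.data {
        println!("{:?}", model.id);
    }
}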

Contributing

See CONTRIBUTING.md for details on how to contribute to this library.
