Rust SDK
Install and configure the HINOW Rust SDK to integrate over 100 AI models into Rust applications with async/await and strong type safety.
The HINOW Rust SDK provides full access to the HINOW REST API with async/await support, zero-cost abstractions, and comprehensive type safety.
> Note: For API resource documentation with code examples, see the API Reference. This page covers Rust SDK-specific features and configurations.
Installation
Add to your Cargo.toml:
toml
[dependencies]
hinow = "1.0"
tokio = { version = "1", features = ["full"] }
Requirements
- Rust 1.70+
- Tokio runtime
Configuration
Basic Setup
rust
use hinow::Hinow;
#[tokio::main]
async fn main() {
let client = Hinow::new("your-api-key");
}
Environment Variable
bash
export HINOW_API_KEY=your-api-key
rust
// API key loaded automatically from environment
let client = Hinow::from_env().unwrap();
Advanced Configuration
rust
use hinow::{Hinow, Config};
use std::time::Duration;
let client = Hinow::with_config(Config {
api_key: "your-api-key".to_string(),
base_url: "https://api.hinow.ai".to_string(),
timeout: Duration::from_secs(120),
max_retries: 3,
});
Basic Usage
Chat Completions
rust
use hinow::{Hinow, ChatCompletionRequest, Message};
#[tokio::main]
async fn main() -> Result<(), hinow::Error> {
let client = Hinow::from_env()?;
let response = client.chat().completions().create(ChatCompletionRequest {
model: "gpt-4o".to_string(),
messages: vec![
Message::system("You are a helpful assistant."),
Message::user("What is the capital of France?"),
],
temperature: Some(0.7),
max_tokens: Some(1024),
..Default::default()
}).await?;
println!("{}", response.choices[0].message.content);
Ok(())
}
Using Different Models
rust
// OpenAI GPT-4o
let response = client.chat().completions().create(ChatCompletionRequest {
model: "gpt-4o".to_string(),
messages: vec![Message::user("Explain machine learning")],
..Default::default()
}).await?;
// Anthropic Claude
let response = client.chat().completions().create(ChatCompletionRequest {
model: "claude-sonnet-4-20250514".to_string(),
messages: vec![Message::user("Explain machine learning")],
..Default::default()
}).await?;
// DeepSeek
let response = client.chat().completions().create(ChatCompletionRequest {
model: "deepseek-ai/deepseek-v3.2".to_string(),
messages: vec![Message::user("Explain machine learning")],
..Default::default()
}).await?;
Streaming
rust
use futures::StreamExt;
let mut stream = client.chat().completions().create_stream(ChatCompletionRequest {
model: "gpt-4o".to_string(),
messages: vec![Message::user("Write a story about a robot")],
..Default::default()
}).await?;
while let Some(chunk) = stream.next().await {
let chunk = chunk?;
if let Some(content) = &chunk.choices[0].delta.content {
print!("{}", content);
}
}
Function Calling (Tool Use)
rust
use hinow::{Tool, FunctionDefinition};
use serde_json::json;
let response = client.chat().completions().create(ChatCompletionRequest {
model: "gpt-4o".to_string(),
messages: vec![
Message::user("What is the weather in New York?"),
],
tools: Some(vec![
Tool {
r#type: "function".to_string(),
function: FunctionDefinition {
name: "get_weather".to_string(),
description: Some("Get the current weather for a location".to_string()),
parameters: json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City and state, e.g., New York, NY"
},
"unit": {
"type": "string",
"enum": ["celsius", "fahrenheit"]
}
},
"required": ["location"]
}),
},
},
]),
tool_choice: Some("auto".to_string()),
..Default::default()
}).await?;
if let Some(tool_calls) = &response.choices[0].message.tool_calls {
for tool_call in tool_calls {
println!("Function: {}", tool_call.function.name);
println!("Arguments: {}", tool_call.function.arguments);
}
}
Image Generation
rust
use hinow::ImageGenerateRequest;
let response = client.images().generate(ImageGenerateRequest {
model: "black-forest-labs/flux-1-schnell".to_string(),
prompt: "A programmer cat wearing glasses, cartoon style".to_string(),
size: Some("1024x1024".to_string()),
quality: Some("hd".to_string()),
..Default::default()
}).await?;
for image in &response.data {
println!("URL: {}", image.url);
}
Embeddings
rust
use hinow::EmbeddingRequest;
let response = client.embeddings().create(EmbeddingRequest {
model: "BAAI/bge-base-en-v1.5".to_string(),
input: "Machine learning is fascinating".to_string(),
}).await?;
let embedding = &response.data[0].embedding;
println!("Dimensions: {}", embedding.len());
Error Handling
rust
use hinow::{Hinow, Error};
let result = client.chat().completions().create(ChatCompletionRequest {
model: "nonexistent-model".to_string(),
messages: vec![Message::user("Hello")],
..Default::default()
}).await;
match result {
Ok(response) => println!("{}", response.choices[0].message.content),
Err(Error::Authentication) => println!("Invalid API key"),
Err(Error::InsufficientBalance) => println!("Insufficient balance"),
Err(Error::RateLimit { retry_after }) => {
println!("Rate limit reached. Retry after: {:?}", retry_after)
}
Err(Error::BadRequest(msg)) => println!("Invalid request: {}", msg),
Err(Error::Api { status, message }) => {
println!("API Error [{}]: {}", status, message)
}
Err(e) => println!("Error: {}", e),
}

