Go SDK
Install and configure the HINOW Go SDK to integrate over 100 AI models into Go applications with idiomatic error handling and context support.
The HINOW Go SDK provides full access to the HINOW REST API with strong typing, context cancellation, and streaming support.
> Note: For API resource documentation with code examples, see the API Reference. This page covers Go SDK-specific features and configurations.
Installation
bash
go get github.com/hinow-ai/sdk-go
Requirements
- Go 1.21+
Configuration
Basic Setup
go
package main
import (
"github.com/hinow-ai/sdk-go"
)
func main() {
client := hinow.NewClient("your-api-key")
}
Environment Variable
bash
export HINOW_API_KEY=your-api-key
go
// API key loaded automatically from environment
client := hinow.NewClient("")
Advanced Configuration
go
client := hinow.NewClient(
"your-api-key",
hinow.WithBaseURL("https://api.hinow.ai"),
hinow.WithTimeout(120*time.Second),
hinow.WithMaxRetries(3),
)
Basic Usage
Chat Completions
go
package main
import (
"context"
"fmt"
"github.com/hinow-ai/sdk-go"
)
func main() {
client := hinow.NewClient("")
response, err := client.Chat.Completions.Create(context.Background(), hinow.ChatCompletionRequest{
Model: "gpt-4o",
Messages: []hinow.Message{
{Role: "system", Content: "You are a helpful assistant."},
{Role: "user", Content: "What is the capital of France?"},
},
Temperature: 0.7,
MaxTokens: 1024,
})
if err != nil {
panic(err)
}
fmt.Println(response.Choices[0].Message.Content)
}
Using Different Models
go
// OpenAI GPT-4o
response, _ := client.Chat.Completions.Create(ctx, hinow.ChatCompletionRequest{
Model: "gpt-4o",
Messages: []hinow.Message{{Role: "user", Content: "Explain machine learning"}},
})
// Anthropic Claude
response, _ := client.Chat.Completions.Create(ctx, hinow.ChatCompletionRequest{
Model: "claude-sonnet-4-20250514",
Messages: []hinow.Message{{Role: "user", Content: "Explain machine learning"}},
})
// DeepSeek
response, _ := client.Chat.Completions.Create(ctx, hinow.ChatCompletionRequest{
Model: "deepseek-ai/deepseek-v3.2",
Messages: []hinow.Message{{Role: "user", Content: "Explain machine learning"}},
})
Streaming
go
stream, err := client.Chat.Completions.CreateStream(ctx, hinow.ChatCompletionRequest{
Model: "gpt-4o",
Messages: []hinow.Message{{Role: "user", Content: "Write a story about a robot"}},
})
if err != nil {
panic(err)
}
defer stream.Close()
for {
chunk, err := stream.Recv()
if err == io.EOF {
break
}
if err != nil {
panic(err)
}
fmt.Print(chunk.Choices[0].Delta.Content)
}
Function Calling (Tool Use)
go
response, err := client.Chat.Completions.Create(ctx, hinow.ChatCompletionRequest{
Model: "gpt-4o",
Messages: []hinow.Message{
{Role: "user", Content: "What is the weather in New York?"},
},
Tools: []hinow.Tool{
{
Type: "function",
Function: hinow.FunctionDefinition{
Name: "get_weather",
Description: "Get the current weather for a location",
Parameters: map[string]interface{}{
"type": "object",
"properties": map[string]interface{}{
"location": map[string]interface{}{
"type": "string",
"description": "City and state, e.g., New York, NY",
},
"unit": map[string]interface{}{
"type": "string",
"enum": []string{"celsius", "fahrenheit"},
},
},
"required": []string{"location"},
},
},
},
},
ToolChoice: "auto",
})
if response.Choices[0].Message.ToolCalls != nil {
for _, toolCall := range response.Choices[0].Message.ToolCalls {
fmt.Printf("Function: %s\n", toolCall.Function.Name)
fmt.Printf("Arguments: %s\n", toolCall.Function.Arguments)
}
}
Image Generation
go
response, err := client.Images.Generate(ctx, hinow.ImageGenerateRequest{
Model: "black-forest-labs/flux-1-schnell",
Prompt: "A programmer cat wearing glasses, cartoon style",
Size: "1024x1024",
Quality: "hd",
})
for _, image := range response.Data {
fmt.Printf("URL: %s\n", image.URL)
}
Embeddings
go
response, err := client.Embeddings.Create(ctx, hinow.EmbeddingRequest{
Model: "BAAI/bge-base-en-v1.5",
Input: "Machine learning is fascinating",
})
embedding := response.Data[0].Embedding
fmt.Printf("Dimensions: %d\n", len(embedding))
Error Handling
go
import "github.com/hinow-ai/sdk-go"
response, err := client.Chat.Completions.Create(ctx, hinow.ChatCompletionRequest{
Model: "nonexistent-model",
Messages: []hinow.Message{{Role: "user", Content: "Hello"}},
})
if err != nil {
switch e := err.(type) {
case *hinow.AuthenticationError:
fmt.Println("Invalid API key")
case *hinow.InsufficientBalanceError:
fmt.Println("Insufficient balance")
case *hinow.RateLimitError:
fmt.Printf("Rate limit reached. Retry after: %d\n", e.RetryAfter)
case *hinow.BadRequestError:
fmt.Printf("Invalid request: %s\n", e.Message)
case *hinow.APIError:
fmt.Printf("API Error [%d]: %s\n", e.StatusCode, e.Message)
default:
fmt.Printf("Error: %v\n", err)
}
}
Context Cancellation
go
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
response, err := client.Chat.Completions.Create(ctx, hinow.ChatCompletionRequest{
Model: "gpt-4o",
Messages: []hinow.Message{{Role: "user", Content: "Hello!"}},
})

