Kotlin SDK
Install and configure the HINOW Kotlin SDK to integrate over 100 AI models into Kotlin and Android applications with coroutines and Flow streaming.
The HINOW Kotlin SDK provides full access to the HINOW REST API with Kotlin coroutines, Flow-based streaming, and Android support.
> Note: For API resource documentation with code examples, see the API Reference. This page covers Kotlin SDK-specific features and configurations.
Installation
First, add the JitPack repository to your build file:
Gradle (Kotlin DSL - settings.gradle.kts)
kotlin
dependencyResolutionManagement {
repositories {
maven { url = uri("https://jitpack.io") }
}
}
Then add the dependency:
Gradle (Kotlin DSL - build.gradle.kts)
kotlin
implementation("com.github.hinow-ai:sdk-kotlin:1.0.1")
Gradle (Groovy - settings.gradle)
groovy
dependencyResolutionManagement {
repositories {
maven { url 'https://jitpack.io' }
}
}
Gradle (Groovy - build.gradle)
groovy
implementation 'com.github.hinow-ai:sdk-kotlin:1.0.1'
Requirements
- Kotlin 1.9+
- Coroutines 1.7+
Configuration
Basic Setup
kotlin
import ai.hinow.Hinow
val client = Hinow("your-api-key")
Environment Variable
bash
export HINOW_API_KEY=your-api-key
kotlin
// API key loaded automatically from environment
val client = Hinow()
Advanced Configuration
kotlin
val client = Hinow(
apiKey = "your-api-key",
baseUrl = "https://api.hinow.ai",
timeout = 120.seconds,
maxRetries = 3
)
Basic Usage
Chat Completions
kotlin
import ai.hinow.Hinow
import ai.hinow.models.*
suspend fun main() {
val client = Hinow()
val response = client.chat.completions.create(
model = "gpt-4o",
messages = listOf(
Message(role = "system", content = "You are a helpful assistant."),
Message(role = "user", content = "What is the capital of France?")
),
temperature = 0.7,
maxTokens = 1024
)
println(response.choices.first().message.content)
}
Using Different Models
kotlin
// OpenAI GPT-4o
val gptResponse = client.chat.completions.create(
model = "gpt-4o",
messages = listOf(Message(role = "user", content = "Explain machine learning"))
)
// Anthropic Claude
val claudeResponse = client.chat.completions.create(
model = "claude-sonnet-4-20250514",
messages = listOf(Message(role = "user", content = "Explain machine learning"))
)
// Meta Llama
val llamaResponse = client.chat.completions.create(
model = "meta-llama/llama-3.3-70b-instruct",
messages = listOf(Message(role = "user", content = "Explain machine learning"))
)
Streaming with Flow
kotlin
import kotlinx.coroutines.flow.collect
client.chat.completions.createStream(
model = "gpt-4o",
messages = listOf(Message(role = "user", content = "Write a story about a robot"))
).collect { chunk ->
chunk.choices.firstOrNull()?.delta?.content?.let { content ->
print(content)
}
}
Function Calling (Tool Use)
kotlin
val response = client.chat.completions.create(
model = "gpt-4o",
messages = listOf(
Message(role = "user", content = "What is the weather in New York?")
),
tools = listOf(
Tool(
type = "function",
function = FunctionDefinition(
name = "get_weather",
description = "Get the current weather for a location",
parameters = mapOf(
"type" to "object",
"properties" to mapOf(
"location" to mapOf(
"type" to "string",
"description" to "City and state, e.g., New York, NY"
),
"unit" to mapOf(
"type" to "string",
"enum" to listOf("celsius", "fahrenheit")
)
),
"required" to listOf("location")
)
)
)
),
toolChoice = "auto"
)
response.choices.first().message.toolCalls?.forEach { toolCall ->
println("Function: ${toolCall.function.name}")
println("Arguments: ${toolCall.function.arguments}")
}
Image Generation
kotlin
val response = client.images.generate(
model = "black-forest-labs/flux-1-schnell",
prompt = "A programmer cat wearing glasses, cartoon style",
size = "1024x1024",
quality = "hd"
)
response.data.forEach { image ->
println("URL: ${image.url}")
}
Embeddings
kotlin
val response = client.embeddings.create(
model = "BAAI/bge-base-en-v1.5",
input = "Machine learning is fascinating"
)
val embedding = response.data.first().embedding
println("Dimensions: ${embedding.size}")
Error Handling
kotlin
import ai.hinow.exceptions.*
try {
val response = client.chat.completions.create(
model = "nonexistent-model",
messages = listOf(Message(role = "user", content = "Hello"))
)
} catch (e: AuthenticationException) {
println("Invalid API key")
} catch (e: InsufficientBalanceException) {
println("Insufficient balance")
} catch (e: RateLimitException) {
println("Rate limit reached. Retry after: ${e.retryAfter}")
} catch (e: BadRequestException) {
println("Invalid request: ${e.message}")
} catch (e: HinowException) {
println("API Error [${e.statusCode}]: ${e.message}")
}
Android Integration
kotlin
// In your ViewModel
class ChatViewModel : ViewModel() {
private val client = Hinow()
private val _response = MutableStateFlow<String?>(null)
val response: StateFlow<String?> = _response
fun sendMessage(message: String) {
viewModelScope.launch {
try {
val result = client.chat.completions.create(
model = "gpt-4o",
messages = listOf(Message(role = "user", content = message))
)
_response.value = result.choices.first().message.content
} catch (e: Exception) {
_response.value = "Error: ${e.message}"
}
}
}
}
// In your Composable
@Composable
fun ChatScreen(viewModel: ChatViewModel = viewModel()) {
val response by viewModel.response.collectAsState()
Column {
response?.let { Text(it) }
Button(onClick = { viewModel.sendMessage("Hello!") }) {
Text("Send")
}
}
}

