TypeScript Reference

All types are exported from the main package entry point.

import type {
  Backend,
  ChatMessage,
  StreamChunk,
  EndpointConfig,
  LocalLLMOptions,
  LocalLLMResult,
  StreamCompletionOptions,
  StreamCompletionResult,
  LocalModel,
  ModelListOptions,
  ModelListResult,
  OllamaOptions,
} from "use-local-llm";

Backend

type Backend = "ollama" | "lmstudio" | "llamacpp" | "openai-compatible";

Supported local LLM backend types.
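
As a sketch, the union can be used to key per-backend configuration. The defaultPorts table below is illustrative and not part of the package; the values are the conventional local ports for each server (OpenAI-compatible servers vary):

import type { Backend } from "use-local-llm";

// Illustrative, not part of the package: conventional local port per backend.
const defaultPorts: Record<Backend, number> = {
  ollama: 11434,
  lmstudio: 1234,
  llamacpp: 8080,
  "openai-compatible": 8080, // varies by server; 8080 is just a common choice
};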


ChatMessage

interface ChatMessage {
  role: "system" | "user" | "assistant";
  content: string;
}

A single message in a chat conversation.
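
For example, a conversation is just an array of these messages:

import type { ChatMessage } from "use-local-llm";

const conversation: ChatMessage[] = [
  { role: "system", content: "You are a concise assistant." },
  { role: "user", content: "Explain streaming in one sentence." },
];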


StreamChunk

interface StreamChunk {
  content: string;
  done: boolean;
  model?: string;
}

A parsed chunk from the streaming response.
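
As an illustrative sketch (not part of the package), chunks can be folded back into the full response text:

import type { StreamChunk } from "use-local-llm";

// Illustrative: concatenate the content of each parsed chunk.
function joinChunks(chunks: StreamChunk[]): string {
  return chunks.map((chunk) => chunk.content).join("");
}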


EndpointConfig

interface EndpointConfig {
  url: string;
  backend: Backend;
}

Configuration for a local LLM endpoint preset.
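
For example, a small preset table; the URLs are placeholders using each server's conventional local port:

import type { EndpointConfig } from "use-local-llm";

const presets: Record<string, EndpointConfig> = {
  ollama: { url: "http://localhost:11434", backend: "ollama" },
  lmstudio: { url: "http://localhost:1234", backend: "lmstudio" },
};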


LocalLLMOptions

interface LocalLLMOptions {
  endpoint: string;
  backend?: Backend;
  model: string;
  system?: string;
  temperature?: number;
  onToken?: (token: string) => void;
  onResponse?: (message: ChatMessage) => void;
  onError?: (error: Error) => void;
}

Options for useLocalLLM.
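
A sketch of a fully populated options object; the endpoint URL and model name are placeholders:

import type { LocalLLMOptions } from "use-local-llm";

const options: LocalLLMOptions = {
  endpoint: "http://localhost:11434", // placeholder: Ollama's conventional local address
  backend: "ollama",
  model: "llama3.2",                  // placeholder model name
  system: "You are a helpful assistant.",
  temperature: 0.7,
  onToken: (token) => console.debug(token),
  onError: (error) => console.error(error),
};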


LocalLLMResult

interface LocalLLMResult {
  messages: ChatMessage[];
  send: (content: string) => void;
  isStreaming: boolean;
  isLoading: boolean;
  abort: () => void;
  error: Error | null;
  clear: () => void;
}

Return value from useLocalLLM and useOllama.
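
A minimal component sketch, assuming the useLocalLLM hook is exported from the package root alongside the types; the endpoint and model are placeholders:

import { useLocalLLM } from "use-local-llm";

function ChatPanel() {
  const { messages, send, isStreaming, abort, clear, error } = useLocalLLM({
    endpoint: "http://localhost:11434", // placeholder
    model: "llama3.2",                  // placeholder
  });

  return (
    <div>
      {messages.map((message, i) => (
        <p key={i}>{message.role}: {message.content}</p>
      ))}
      {error && <p>{error.message}</p>}
      <button onClick={() => send("Hello!")}>Send</button>
      <button onClick={isStreaming ? abort : clear}>
        {isStreaming ? "Stop" : "Clear"}
      </button>
    </div>
  );
}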


StreamCompletionOptions

interface StreamCompletionOptions {
  endpoint: string;
  backend?: Backend;
  model: string;
  prompt: string;
  autoFetch?: boolean;
  temperature?: number;
  onToken?: (token: string) => void;
  onComplete?: (fullText: string) => void;
  onError?: (error: Error) => void;
}

Options for useStreamCompletion.
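
A sketch of an options object; the endpoint, model, and prompt are placeholders, and autoFetch is assumed here to control whether the request fires immediately rather than waiting for start():

import type { StreamCompletionOptions } from "use-local-llm";

const options: StreamCompletionOptions = {
  endpoint: "http://localhost:11434", // placeholder
  backend: "ollama",
  model: "llama3.2",                  // placeholder
  prompt: "Write a haiku about type safety.",
  autoFetch: false,                   // assumption: wait for start() instead of fetching on mount
  onComplete: (fullText) => console.log(fullText),
};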


StreamCompletionResult

interface StreamCompletionResult {
  text: string;
  tokens: string[];
  isStreaming: boolean;
  start: () => void;
  abort: () => void;
  error: Error | null;
}

Return value from useStreamCompletion.
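
A usage sketch, assuming the useStreamCompletion hook is exported from the package root; the endpoint, model, and prompt are placeholders:

import { useStreamCompletion } from "use-local-llm";

function HaikuButton() {
  const { text, isStreaming, start, abort, error } = useStreamCompletion({
    endpoint: "http://localhost:11434", // placeholder
    model: "llama3.2",                  // placeholder
    prompt: "Write a haiku about type safety.",
  });

  return (
    <div>
      <button onClick={isStreaming ? abort : start}>
        {isStreaming ? "Stop" : "Generate"}
      </button>
      {error ? <p>{error.message}</p> : <pre>{text}</pre>}
    </div>
  );
}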


LocalModel

interface LocalModel {
  name: string;
  size?: number;
  modifiedAt?: string;
  digest?: string;
}

A model descriptor returned from useModelList.
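
An illustrative formatter for display; this reference does not state the unit of size, so the sketch assumes bytes:

import type { LocalModel } from "use-local-llm";

// Illustrative, not part of the package. Assumes `size` is in bytes,
// which this reference does not specify.
function formatModel(model: LocalModel): string {
  const size = model.size ? `${(model.size / 1e9).toFixed(1)} GB` : "unknown size";
  return `${model.name} (${size})`;
}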


ModelListOptions

interface ModelListOptions {
  endpoint?: string;
  backend?: Backend;
}

Options for useModelList.
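
Both fields are optional; a sketch with explicit values (the URL is a placeholder):

import type { ModelListOptions } from "use-local-llm";

const options: ModelListOptions = {
  endpoint: "http://localhost:11434", // placeholder; may be omitted
  backend: "ollama",                  // may be omitted
};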


ModelListResult

interface ModelListResult {
  models: LocalModel[];
  isLoading: boolean;
  error: Error | null;
  refresh: () => void;
}

Return value from useModelList.
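
A rendering sketch, assuming the useModelList hook is exported from the package root; the endpoint is a placeholder:

import { useModelList } from "use-local-llm";

function ModelPicker() {
  const { models, isLoading, error, refresh } = useModelList({
    endpoint: "http://localhost:11434", // placeholder
    backend: "ollama",
  });

  if (isLoading) return <p>Loading models…</p>;
  if (error) return <p>{error.message}</p>;

  return (
    <div>
      <ul>
        {models.map((model) => (
          <li key={model.name}>{model.name}</li>
        ))}
      </ul>
      <button onClick={refresh}>Refresh</button>
    </div>
  );
}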


OllamaOptions

interface OllamaOptions {
  system?: string;
  temperature?: number;
  endpoint?: string;
  onToken?: (token: string) => void;
  onResponse?: (message: ChatMessage) => void;
  onError?: (error: Error) => void;
}

Options for useOllama.
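
Unlike LocalLLMOptions, there is no model field here, so this sketch only shows option values; the endpoint URL is a placeholder for Ollama's conventional local address:

import type { OllamaOptions } from "use-local-llm";

const options: OllamaOptions = {
  system: "You are a helpful assistant.",
  temperature: 0.7,
  endpoint: "http://localhost:11434", // placeholder
  onResponse: (message) => console.log(message.content),
  onError: (error) => console.error(error),
};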