src/llama_leap


Ollama API interface for Nim.

https://github.com/monofuel/llama_leap/blob/main/README.md
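
A minimal usage sketch, assuming a local Ollama server on the default port and that the placeholder model "llama3" has already been pulled:

  import llama_leap

  let api = newOllamaAPI()
  echo api.generate("llama3", "why is the sky blue?")
  api.close()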

Types

ChatMessage = ref object
  role*: string
  content*: Option[string]
  images*: Option[seq[string]]
  tool_calls*: seq[ToolCall]
ChatReq = ref object
  model*: string
  tools*: seq[Tool]
  messages*: seq[ChatMessage]
  format*: Option[string]
  options*: Option[ModelParameters]
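
A sketch of building a chat request by hand (the model name and message text are placeholders):

  import llama_leap, std/options

  let req = ChatReq(
    model: "llama3",
    messages: @[
      ChatMessage(role: "system", content: option("You are a helpful assistant.")),
      ChatMessage(role: "user", content: option("Hello!"))
    ]
  )

content is an Option because a message may carry images or tool_calls instead of text.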
ChatResp = ref object
  model*: string
  created_at*: string
  message*: ChatMessage
  done*: bool
  total_duration*: int
  load_duration*: int
  prompt_eval_count*: int
  prompt_eval_duration*: int
  eval_count*: int
  eval_duration*: int
CreateModelReq = ref object
  name*: string
  modelfile*: Option[string]
  stream*: bool
  path*: Option[string]
EmbeddingReq = ref object
  model*: string
  prompt*: string
  options*: Option[ModelParameters]
EmbeddingResp = ref object
  embedding*: seq[float64]
GenerateReq = ref object
  model*: string
  prompt*: string
  images*: Option[seq[string]]
  format*: Option[string]
  options*: Option[ModelParameters]
  system*: Option[string]
  context*: Option[seq[int]]
  raw*: Option[bool]
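
A sketch of a typed generate request with a placeholder model name (fields left as none should simply fall back to the server's defaults):

  import llama_leap, std/options

  let req = GenerateReq(
    model: "llama3",
    prompt: "why is the sky blue?",
    system: option("Answer in one sentence.")
  )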
GenerateResp = ref object
  model*: string
  created_at*: string
  response*: string
  done*: bool
  context*: seq[int]
  total_duration*: int
  load_duration*: int
  prompt_eval_count*: int
  prompt_eval_duration*: int
  eval_count*: int
  eval_duration*: int
ListResp = ref object
  models*: seq[OllamaModel]
ModelDetails = ref object
  format*: string
  family*: string
  families*: Option[seq[string]]
  parameter_size*: string
  quantization_level*: string
ModelParameters = ref object
  mirostat*: Option[int]
  mirostat_eta*: Option[float32]
  mirostat_tau*: Option[float32]
  num_ctx*: Option[int]
  num_gqa*: Option[int]
  num_gpu*: Option[int]
  num_thread*: Option[int]
  repeat_last_n*: Option[int]
  repeat_penalty*: Option[float32]
  temperature*: Option[float32]
  seed*: Option[int]
  stop*: Option[string]
  tfs_z*: Option[float32]
  num_predict*: Option[int]
  top_k*: Option[int]
  top_p*: Option[float32]
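
A sketch of overriding sampling options; note that the float32 fields take 'f32 literals:

  import llama_leap, std/options

  let params = ModelParameters(
    temperature: option(0.0'f32),
    seed: option(42),
    num_ctx: option(4096)
  )
  let req = GenerateReq(model: "llama3", prompt: "hello",
                        options: option(params))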
OllamaAPI = ref object
OllamaModel = ref object
  name*: string
  modified_at*: string
  size*: int
  digest*: string
  details*: ModelDetails
ShowModel = ref object
  modelfile*: string
  parameters*: string
  details*: ModelDetails
Tool = ref object
  function*: ToolFunction
ToolCall = ref object
  function*: ToolCallFunction
ToolCallFunction = ref object
  name*: string
  arguments*: JsonNode
ToolFunction = ref object
  name*: string
  description*: string
  parameters*: ToolFunctionParameters
ToolFunctionParameters = object
  properties*: JsonNode
  required*: seq[string]
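
A sketch of declaring a tool for the chat API; the function name and schema are illustrative, and properties holds a JSON Schema fragment built with std/json's %* macro:

  import llama_leap, std/json

  let weatherTool = Tool(
    function: ToolFunction(
      name: "get_current_weather",
      description: "Get the current weather for a city",
      parameters: ToolFunctionParameters(
        properties: %*{
          "city": {"type": "string", "description": "the city to look up"}
        },
        required: @["city"]
      )
    )
  )

Pass the tool via ChatReq.tools; when the model elects to call it, the reply's ChatMessage.tool_calls carries the function name and parsed arguments.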

Procs

proc chat(api: OllamaAPI; model: string; messages: seq[string]): string {.
    ...raises: [ZippyError, CatchableError, JsonError, ValueError],
    tags: [RootEffect], forbids: [].}
Simple interface for /api/chat, assuming an alternating user -> assistant message history.
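
For example (placeholder model name):

  let answer = api.chat("llama3", @["Hello! Please write a haiku."])
  echo answer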
proc chat(api: OllamaAPI; req: ChatReq): ChatResp {.
    ...raises: [ZippyError, CatchableError, JsonError, ValueError],
    tags: [RootEffect], forbids: [].}
Typed interface for /api/chat.
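
A sketch, given an api client and a placeholder model name:

  import std/options

  let resp = api.chat(ChatReq(
    model: "llama3",
    messages: @[ChatMessage(role: "user", content: option("Hello!"))]
  ))
  echo resp.message.content.get("")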
proc chat(api: OllamaAPI; req: JsonNode): JsonNode {.
    ...raises: [ZippyError, CatchableError, JsonError, ValueError],
    tags: [RootEffect], forbids: [].}
Direct JSON interface for /api/chat. Only use this if you need new API features the typed interface does not yet cover, or you otherwise know what you are doing.
proc close(api: OllamaAPI) {....raises: [], tags: [], forbids: [].}
proc createModel(api: OllamaAPI; name: string; modelfile: string = "";
                 path: string = "") {....raises: [ZippyError, CatchableError,
    JsonError, ValueError, KeyError], tags: [], forbids: [].}

Create a model from a Modelfile

(Recommended): set modelfile as the contents of your modelfile

(Alternative): set path to a server local path to a modelfile
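
For example, with an inline Modelfile (the model names are placeholders):

  api.createModel("mario", modelfile = "FROM llama3\nSYSTEM You are Mario from Super Mario Bros.")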

proc generate(api: OllamaAPI; model: string; prompt: string): string {.
    ...raises: [ZippyError, CatchableError, JsonError, ValueError], tags: [],
    forbids: [].}
Simple interface for /api/generate.
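
For example (placeholder model name):

  echo api.generate("llama3", "How are you today?")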
proc generate(api: OllamaAPI; req: GenerateReq): GenerateResp {.
    ...raises: [ZippyError, CatchableError, JsonError, ValueError], tags: [],
    forbids: [].}
Typed interface for /api/generate.
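
A short sketch with placeholder values:

  import std/options

  let resp = api.generate(GenerateReq(
    model: "llama3",
    prompt: "why is the sky blue?",
    system: option("Answer briefly.")
  ))
  echo resp.response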
proc generate(api: OllamaAPI; req: JsonNode): JsonNode {.
    ...raises: [ZippyError, CatchableError, JsonError, ValueError],
    tags: [RootEffect], forbids: [].}
Direct JSON interface for /api/generate. Only use this if you need new API features the typed interface does not yet cover, or you otherwise know what you are doing.
proc generateEmbeddings(api: OllamaAPI; model: string; prompt: string; options: Option[
    ModelParameters] = none(ModelParameters)): EmbeddingResp {.
    ...raises: [ZippyError, CatchableError, JsonError, ValueError, KeyError],
    tags: [], forbids: [].}
Get the embeddings for a prompt.
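
For example (placeholder model name; the length of the returned vector depends on the model):

  let resp = api.generateEmbeddings("llama3", "how are you today?")
  echo "embedding length: ", resp.embedding.len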
proc getVersion(api: OllamaAPI): string {.
    ...raises: [ZippyError, CatchableError, JsonError, ValueError, KeyError],
    tags: [], forbids: [].}
Get the current Ollama server version.
proc listModels(api: OllamaAPI): ListResp {.
    ...raises: [ZippyError, CatchableError, JsonError, ValueError], tags: [],
    forbids: [].}
List all models available on the server.
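
For example, to print each installed model:

  for m in api.listModels().models:
    echo m.name, " ", m.details.parameter_size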
proc loadModel(api: OllamaAPI; model: string): JsonNode {.discardable,
    ...raises: [ZippyError, CatchableError, JsonError, ValueError],
    tags: [RootEffect], forbids: [].}
Calling /api/generate without a prompt will load the model.
proc newOllamaAPI(baseUrl: string = "http://localhost:11434/api";
                  curlPoolSize: int = 4; curlTimeout: float32 = 10000.0): OllamaAPI {.
    ...raises: [Exception], tags: [], forbids: [].}
Initialize a new Ollama API client.
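
For example, to target a hypothetical remote host rather than the default localhost:

  let api = newOllamaAPI("http://10.0.0.5:11434/api")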
proc pullModel(api: OllamaAPI; name: string) {.
    ...raises: [ZippyError, CatchableError, JsonError, ValueError, KeyError],
    tags: [RootEffect], forbids: [].}
Ask the Ollama server to pull a model.
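
For example (placeholder model name):

  api.pullModel("llama3")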
proc showModel(api: OllamaAPI; name: string): ShowModel {.
    ...raises: [ZippyError, CatchableError, JsonError, ValueError],
    tags: [RootEffect], forbids: [].}
Get details for a specific model.
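
For example (placeholder model name):

  let info = api.showModel("llama3")
  echo info.details.family, " ", info.details.quantization_level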