Module: LLM

Defined in:
lib/llm.rb,
lib/llm/bot.rb,
lib/llm/error.rb,
lib/llm/buffer.rb,
lib/llm/message.rb,
lib/llm/version.rb,
lib/llm/response.rb,
lib/llm/event_handler.rb,
lib/llm/response/file.rb,
lib/llm/response/audio.rb,
lib/llm/response/image.rb,
lib/llm/providers/gemini.rb,
lib/llm/providers/ollama.rb,
lib/llm/providers/openai.rb,
lib/llm/response/respond.rb,
lib/llm/response/filelist.rb,
lib/llm/providers/deepseek.rb,
lib/llm/providers/llamacpp.rb,
lib/llm/providers/voyageai.rb,
lib/llm/response/embedding.rb,
lib/llm/response/modellist.rb,
lib/llm/providers/anthropic.rb,
lib/llm/response/completion.rb,
lib/llm/response/download_file.rb,
lib/llm/response/moderationlist.rb,
lib/llm/response/audio_translation.rb,
lib/llm/response/audio_transcription.rb

Defined Under Namespace

Classes: Anthropic, Bot, DeepSeek, Error, File, Function, Gemini, LlamaCpp, Message, Model, Object, Ollama, OpenAI, Provider, Response, ResponseError, VoyageAI

Constant Summary

UnauthorizedError =
  HTTPUnauthorized

  Class.new(ResponseError)

RateLimitError =
  HTTPTooManyRequests

  Class.new(ResponseError)

FormatError =
  When given an input object that is not understood

  Class.new(Error)

PromptError =
  When given a prompt object that is not understood

  Class.new(FormatError)

VERSION =
  "0.9.0"

Class Method Summary

Class Method Details

.File(path) ⇒ LLM::File

Parameters:

  • path (String)

    The path to a file

Returns:

  • (LLM::File)

# File 'lib/llm/file.rb', line 74

def LLM.File(path)
  case path
  when LLM::File, LLM::Response::File
    path
  else
    LLM::File.new(path)
  end
end
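
A usage sketch; the path below is a placeholder:

file = LLM.File("/path/to/document.pdf")
file.class # => LLM::File

An LLM::File or LLM::Response::File argument is returned unchanged, as the source above shows.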

.voyageai ⇒ VoyageAI

Returns a new instance of VoyageAI.

Parameters:

  • key (String, nil)

    The secret key for authentication

  • host (String)

    The host address of the LLM provider

  • port (Integer)

    The port number

  • timeout (Integer)

    The number of seconds to wait for a response

  • ssl (Boolean)

    Whether to use SSL for the connection

Returns:

  • (VoyageAI)

    a new instance of VoyageAI



# File 'lib/llm.rb', line 37

def voyageai(**)
  require_relative "llm/providers/voyageai" unless defined?(LLM::VoyageAI)
  LLM::VoyageAI.new(**)
end
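
A construction sketch; the environment variable name is an assumption, not a documented default:

llm = LLM.voyageai(key: ENV["VOYAGEAI_API_KEY"])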

.gemini ⇒ Gemini

Returns a new instance of Gemini.

Parameters:

  • key (String, nil)

    The secret key for authentication

  • host (String)

    The host address of the LLM provider

  • port (Integer)

    The port number

  • timeout (Integer)

    The number of seconds to wait for a response

  • ssl (Boolean)

    Whether to use SSL for the connection

Returns:

  • (Gemini)

    a new instance of Gemini



# File 'lib/llm.rb', line 45

def gemini(**)
  require_relative "llm/providers/gemini" unless defined?(LLM::Gemini)
  LLM::Gemini.new(**)
end
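
A construction sketch; the environment variable name and timeout value are placeholders:

llm = LLM.gemini(key: ENV["GEMINI_API_KEY"], timeout: 60)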

.ollama(key: nil) ⇒ Ollama

Returns a new instance of Ollama.

Parameters:

  • key (String, nil) (defaults to: nil)

    The secret key for authentication

  • host (String)

    The host address of the LLM provider

  • port (Integer)

    The port number

  • timeout (Integer)

    The number of seconds to wait for a response

  • ssl (Boolean)

    Whether to use SSL for the connection

Returns:

  • (Ollama)

    a new instance of Ollama



# File 'lib/llm.rb', line 53

def ollama(key: nil, **)
  require_relative "llm/providers/ollama" unless defined?(LLM::Ollama)
  LLM::Ollama.new(key:, **)
end
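
A construction sketch; Ollama typically runs locally without a key, and the host and port below assume a default local install rather than documented defaults of this method:

llm = LLM.ollama(key: nil, host: "localhost", port: 11434, ssl: false)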

.llamacpp(key: nil) ⇒ LLM::LlamaCpp

Parameters:

  • key (String, nil) (defaults to: nil)

    The secret key for authentication

Returns:

  • (LLM::LlamaCpp)

# File 'lib/llm.rb', line 61

def llamacpp(key: nil, **)
  require_relative "llm/providers/llamacpp" unless defined?(LLM::LlamaCpp)
  LLM::LlamaCpp.new(key:, **)
end
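
A construction sketch; a local llama.cpp server is assumed, so no key is passed:

llm = LLM.llamacpp(key: nil)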

.deepseek ⇒ LLM::DeepSeek

Parameters:

  • key (String, nil)

    The secret key for authentication

Returns:

  • (LLM::DeepSeek)

# File 'lib/llm.rb', line 69

def deepseek(**)
  require_relative "llm/providers/deepseek" unless defined?(LLM::DeepSeek)
  LLM::DeepSeek.new(**)
end
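
A construction sketch; the environment variable name is an assumption:

llm = LLM.deepseek(key: ENV["DEEPSEEK_API_KEY"])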

.openai ⇒ OpenAI

Returns a new instance of OpenAI.

Parameters:

  • key (String, nil)

    The secret key for authentication

Returns:

  • (OpenAI)

    a new instance of OpenAI



# File 'lib/llm.rb', line 77

def openai(**)
  require_relative "llm/providers/openai" unless defined?(LLM::OpenAI)
  LLM::OpenAI.new(**)
end
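
A construction sketch; the environment variable name is an assumption:

llm = LLM.openai(key: ENV["OPENAI_API_KEY"])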

.function(name, &b) ⇒ LLM::Function

Define a function

Examples:

LLM.function(:system) do |fn|
  fn.description "Run system command"
  fn.params do |schema|
    schema.object(command: schema.string.required)
  end
  fn.define do |params|
    system(params.command)
  end
end

Parameters:

  • name (Symbol)

    The name of the function

  • b (Proc)

    The block to define the function

Returns:

  • (LLM::Function)

# File 'lib/llm.rb', line 97

def function(name, &b)
  functions[name.to_s] = LLM::Function.new(name, &b)
end
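
A follow-up sketch: the return value is the same LLM::Function object that is stored in the registry (see .functions below), and the block interface mirrors the example above:

tool = LLM.function(:system) do |fn|
  fn.description "Run system command"
end
LLM.functions["system"].equal?(tool) # => true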

.functions ⇒ Hash<String,LLM::Function>

Returns all known functions

Returns:

  • (Hash<String,LLM::Function>)

    all known functions

# File 'lib/llm.rb', line 104

def functions
  @functions ||= {}
end
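
A small sketch, assuming no functions have been registered yet; keys are the string form of each function name:

LLM.function(:echo) { |fn| fn.description "Echo the input" }
LLM.functions.keys # => ["echo"]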

.anthropic ⇒ Anthropic

Returns a new instance of Anthropic.

Parameters:

  • key (String, nil)

    The secret key for authentication

  • host (String)

    The host address of the LLM provider

  • port (Integer)

    The port number

  • timeout (Integer)

    The number of seconds to wait for a response

  • ssl (Boolean)

    Whether to use SSL for the connection

Returns:

  • (Anthropic)

    a new instance of Anthropic



# File 'lib/llm.rb', line 28

def anthropic(**)
  require_relative "llm/providers/anthropic" unless defined?(LLM::Anthropic)
  require_relative "llm/providers/voyageai" unless defined?(LLM::VoyageAI)
  LLM::Anthropic.new(**)
end
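
A construction sketch; the environment variable name is an assumption. Note that this method also loads the VoyageAI provider, as the source above shows:

llm = LLM.anthropic(key: ENV["ANTHROPIC_API_KEY"])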