Module: LLM
Defined in:
lib/llm.rb,
lib/llm/chat.rb,
lib/llm/error.rb,
lib/llm/buffer.rb,
lib/llm/message.rb,
lib/llm/version.rb,
lib/llm/response.rb,
lib/llm/response/file.rb,
lib/llm/response/audio.rb,
lib/llm/response/image.rb,
lib/llm/providers/gemini.rb,
lib/llm/providers/ollama.rb,
lib/llm/providers/openai.rb,
lib/llm/response/respond.rb,
lib/llm/response/filelist.rb,
lib/llm/providers/llamacpp.rb,
lib/llm/providers/voyageai.rb,
lib/llm/response/embedding.rb,
lib/llm/response/modellist.rb,
lib/llm/providers/anthropic.rb,
lib/llm/response/completion.rb,
lib/llm/response/download_file.rb,
lib/llm/response/audio_translation.rb,
lib/llm/response/audio_transcription.rb
Defined Under Namespace
Classes: Anthropic, Chat, Error, File, Function, Gemini, LlamaCpp, Message, Model, Ollama, OpenAI, Provider, Response, VoyageAI
Constant Summary

VERSION = "0.7.2"
Class Method Summary

- .File(path) ⇒ LLM::File
- .voyageai ⇒ VoyageAI
  A new instance of VoyageAI.
- .gemini ⇒ Gemini
  A new instance of Gemini.
- .ollama(key: nil) ⇒ Ollama
  A new instance of Ollama.
- .llamacpp(key: nil) ⇒ LLM::LlamaCpp
- .openai ⇒ OpenAI
  A new instance of OpenAI.
- .function(name, &b) ⇒ LLM::Function
  Define a function.
- .functions ⇒ Hash<String,LLM::Function>
  Returns all known functions.
- .anthropic ⇒ Anthropic
  A new instance of Anthropic.
Class Method Details
.File(path) ⇒ LLM::File
# File 'lib/llm/file.rb', line 74

def LLM.File(path)
  case path
  when LLM::File, LLM::Response::File
    path
  else
    LLM::File.new(path)
  end
end
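For example (a hedged sketch; the file path is hypothetical), LLM.File coerces a plain path into an LLM::File, while existing LLM::File and LLM::Response::File objects pass through unchanged:

  require "llm"

  file = LLM.File("docs/handbook.pdf") # hypothetical path, wrapped in an LLM::File
  LLM.File(file).equal?(file)          # => true; already-wrapped files are returned as-is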
.voyageai ⇒ VoyageAI
Returns a new instance of VoyageAI.
# File 'lib/llm.rb', line 35

def voyageai(**)
  require_relative "llm/providers/voyageai" unless defined?(LLM::VoyageAI)
  LLM::VoyageAI.new(**)
end
.gemini ⇒ Gemini
Returns a new instance of Gemini.
# File 'lib/llm.rb', line 43

def gemini(**)
  require_relative "llm/providers/gemini" unless defined?(LLM::Gemini)
  LLM::Gemini.new(**)
end
.ollama(key: nil) ⇒ Ollama
Returns a new instance of Ollama.
# File 'lib/llm.rb', line 51

def ollama(key: nil, **)
  require_relative "llm/providers/ollama" unless defined?(LLM::Ollama)
  LLM::Ollama.new(key:, **)
end
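A minimal usage sketch: key defaults to nil, which suits a local Ollama server that needs no credential, and any remaining keyword arguments are forwarded to LLM::Ollama.new:

  require "llm"

  # key: defaults to nil; other keyword options pass through to LLM::Ollama.new
  llm = LLM.ollama(key: nil)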
.llamacpp(key: nil) ⇒ LLM::LlamaCpp
# File 'lib/llm.rb', line 59

def llamacpp(key: nil, **)
  require_relative "llm/providers/openai" unless defined?(LLM::OpenAI)
  require_relative "llm/providers/llamacpp" unless defined?(LLM::LlamaCpp)
  LLM::LlamaCpp.new(key:, **)
end
.openai ⇒ OpenAI
Returns a new instance of OpenAI.
# File 'lib/llm.rb', line 68

def openai(**)
  require_relative "llm/providers/openai" unless defined?(LLM::OpenAI)
  LLM::OpenAI.new(**)
end
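A usage sketch with the credential name assumed: the keyword arguments are forwarded verbatim to LLM::OpenAI.new, and key: mirrors the parameter shown for .ollama and .llamacpp above:

  require "llm"

  # keyword arguments go straight to LLM::OpenAI.new; key: is an assumed option name
  llm = LLM.openai(key: ENV["OPENAI_API_KEY"])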
.function(name, &b) ⇒ LLM::Function
Define a function
# File 'lib/llm.rb', line 88

def function(name, &b)
  functions[name.to_s] = LLM::Function.new(name, &b)
end
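A sketch of registering a function: the name is stringified and used as the registry key, and the block is handed to LLM::Function.new, whose configuration interface is not shown in this reference (the block body below is only a placeholder):

  require "llm"

  # The block is forwarded to LLM::Function.new; what it configures there
  # is defined by LLM::Function and assumed, not shown in this reference.
  LLM.function(:current_time) do |fn|
    # ...configure the function here...
  end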
.functions ⇒ Hash<String,LLM::Function>
Returns all known functions
# File 'lib/llm.rb', line 95

def functions
  @functions ||= {}
end
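Combined with .function above, the registry is a plain Hash keyed by the stringified name, so a previously defined function can be looked up later:

  # assumes the :current_time function was registered as sketched under .function
  LLM.functions.key?("current_time") # => true
  LLM.functions["current_time"]      # => the LLM::Function registered above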