Module: LLM
- Defined in:
- lib/llm.rb,
lib/llm/bot.rb,
lib/llm/agent.rb,
lib/llm/error.rb,
lib/llm/buffer.rb,
lib/llm/client.rb,
lib/llm/tracer.rb,
lib/llm/message.rb,
lib/llm/version.rb,
lib/llm/contract.rb,
lib/llm/response.rb,
lib/llm/tracer/null.rb,
lib/llm/eventhandler.rb,
lib/llm/json_adapter.rb,
lib/llm/providers/xai.rb,
lib/llm/providers/zai.rb,
lib/llm/tracer/logger.rb,
lib/llm/providers/google.rb,
lib/llm/providers/ollama.rb,
lib/llm/providers/openai.rb,
lib/llm/tracer/langsmith.rb,
lib/llm/tracer/telemetry.rb,
lib/llm/providers/deepseek.rb,
lib/llm/providers/llamacpp.rb,
lib/llm/providers/anthropic.rb
Defined Under Namespace
Modules: Client, Contract Classes: Agent, Anthropic, Buffer, Cost, DeepSeek, Error, File, Function, Google, JSONAdapter, LlamaCpp, Message, Model, Object, Ollama, OpenAI, Prompt, Provider, Registry, Response, Schema, ServerTool, Session, Tool, Tracer, Usage, XAI, ZAI
Constant Summary collapse
- Bot =
-
Backward-compatible alias
-
Session
- UnauthorizedError =
-
HTTPUnauthorized
-
Class.new(Error)
- RateLimitError =
-
HTTPTooManyRequests
-
Class.new(Error)
- ServerError =
-
HTTPServerError
-
Class.new(Error)
- FormatError =
-
When given an input object that is not understood
-
Class.new(Error)
- PromptError =
-
When given a prompt object that is not understood
-
Class.new(FormatError)
- InvalidRequestError =
-
When given an invalid request
-
Class.new(Error)
- ContextWindowError =
-
When the context window is exceeded
-
Class.new(InvalidRequestError)
- ToolLoopError =
-
When stuck in a tool call loop
-
Class.new(Error)
- NoSuchModelError =
-
When Registry can't map a model
-
Class.new(Error)
- NoSuchRegistryError =
-
When Registry can't map a registry
-
Class.new(Error)
- VERSION =
-
"4.8.0"
Class Method Summary collapse
- .File(obj) ⇒ LLM::File
-
.json ⇒ Class
Returns the JSON adapter used by the library.
-
.json=(adapter) ⇒
void
Sets the JSON adapter used by the library.
-
.anthropic ⇒
Anthropic
A new instance of Anthropic.
-
.google ⇒ Google
A new instance of Google.
-
.ollama(key: nil) ⇒
Ollama
A new instance of Ollama.
- .llamacpp(key: nil) ⇒ LLM::LlamaCpp
- .deepseek ⇒ LLM::DeepSeek
-
.openai ⇒ OpenAI
A new instance of OpenAI.
-
.xai ⇒ XAI
A new instance of XAI.
-
.zai ⇒ ZAI
A new instance of ZAI.
-
.function(key,
&b) ⇒ LLM::Function
Define a function.
-
.lock(name) ⇒ void
Provides a thread-safe lock.
- .registry_for(llm) ⇒ LLM::Object
Class Method Details
.File(obj) ⇒ LLM::File
82 83 84 85 86 87 88 89 90 91 |
# File 'lib/llm/file.rb', line 82 def LLM.File(obj) case obj when File obj.close unless obj.closed? LLM.File(obj.path) when LLM::File, LLM::Response then obj when String then LLM::File.new(obj) else raise TypeError, "don't know how to handle #{obj.class} objects" end end |
.json ⇒ Class
Returns the JSON adapter used by the library
58 59 60 |
# File 'lib/llm.rb', line 58 def json @json ||= JSONAdapter::JSON end |
.json=(adapter) ⇒ void
This should be set once from the main thread when your program starts. Defaults to LLM::JSONAdapter::JSON.
This method returns an undefined value.
Sets the JSON adapter used by the library
70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 |
# File 'lib/llm.rb', line 70 def json=(adapter) @json = case adapter.to_s when "JSON", "json" then JSONAdapter::JSON when "Oj", "oj" then JSONAdapter::Oj when "Yajl", "yajl" then JSONAdapter::Yajl else is_class = Class === adapter is_subclass = is_class && adapter.ancestors.include?(LLM::JSONAdapter) if is_subclass adapter else raise TypeError, "Adapter must be a subclass of LLM::JSONAdapter" end end end |
.anthropic ⇒ Anthropic
Returns a new instance of Anthropic.
89 90 91 92 |
# File 'lib/llm.rb', line 89 def anthropic(**) lock(:require) { require_relative "llm/providers/anthropic" unless defined?(LLM::Anthropic) } LLM::Anthropic.new(**) end |
.google ⇒ Google
Returns a new instance of Google.
97 98 99 100 |
# File 'lib/llm.rb', line 97 def google(**) lock(:require) { require_relative "llm/providers/google" unless defined?(LLM::Google) } LLM::Google.new(**) end |
.ollama(key: nil) ⇒ Ollama
Returns a new instance of Ollama.
105 106 107 108 |
# File 'lib/llm.rb', line 105 def ollama(key: nil, **) lock(:require) { require_relative "llm/providers/ollama" unless defined?(LLM::Ollama) } LLM::Ollama.new(key:, **) end |
.llamacpp(key: nil) ⇒ LLM::LlamaCpp
113 114 115 116 |
# File 'lib/llm.rb', line 113 def llamacpp(key: nil, **) lock(:require) { require_relative "llm/providers/llamacpp" unless defined?(LLM::LlamaCpp) } LLM::LlamaCpp.new(key:, **) end |
.deepseek ⇒ LLM::DeepSeek
121 122 123 124 |
# File 'lib/llm.rb', line 121 def deepseek(**) lock(:require) { require_relative "llm/providers/deepseek" unless defined?(LLM::DeepSeek) } LLM::DeepSeek.new(**) end |
.openai ⇒ OpenAI
Returns a new instance of OpenAI.
129 130 131 132 |
# File 'lib/llm.rb', line 129 def openai(**) lock(:require) { require_relative "llm/providers/openai" unless defined?(LLM::OpenAI) } LLM::OpenAI.new(**) end |
.xai ⇒ XAI
Returns a new instance of XAI.
138 139 140 141 |
# File 'lib/llm.rb', line 138 def xai(**) lock(:require) { require_relative "llm/providers/xai" unless defined?(LLM::XAI) } LLM::XAI.new(**) end |
.zai ⇒ ZAI
Returns a new instance of ZAI.
147 148 149 150 |
# File 'lib/llm.rb', line 147 def zai(**) lock(:require) { require_relative "llm/providers/zai" unless defined?(LLM::ZAI) } LLM::ZAI.new(**) end |
.function(key, &b) ⇒ LLM::Function
Define a function
167 168 169 |
# File 'lib/llm.rb', line 167 def function(key, &b) LLM::Function.new(key, &b) end |
.lock(name) ⇒ void
This method returns an undefined value.
Provides a thread-safe lock
176 |
# File 'lib/llm.rb', line 176 def lock(name, &) = @monitors[name].synchronize(&) |
.registry_for(llm) ⇒ LLM::Object
45 46 47 48 49 50 |
# File 'lib/llm.rb', line 45 def self.registry_for(llm) lock(:registry) do name = Symbol === llm ? llm : llm.name @registry[name] ||= Registry.for(name) end end |