Class: LLM::OpenAI::Responses
- Inherits:
-
Object
- Object
- LLM::OpenAI::Responses
- Defined in:
- lib/llm/providers/openai/responses.rb
Overview
The LLM::OpenAI::Responses class provides an interface for OpenAI’s response API.
Instance Method Summary collapse
-
#initialize(provider) ⇒ LLM::OpenAI::Responses
constructor
Returns a new Responses object.
-
#create(prompt, params = {}) ⇒ LLM::Response
Create a response.
-
#get(response, **params) ⇒ LLM::Response
Get a response.
-
#delete(response) ⇒ LLM::Object
Deletes a response.
Constructor Details
#initialize(provider) ⇒ LLM::OpenAI::Responses
Returns a new Responses object
25 26 27 |
# File 'lib/llm/providers/openai/responses.rb', line 25 def initialize(provider) @provider = provider end |
Instance Method Details
#create(prompt, params = {}) ⇒ LLM::Response
Create a response
38 39 40 41 42 43 44 45 46 47 48 49 |
# File 'lib/llm/providers/openai/responses.rb', line 38 def create(prompt, params = {}) params = {role: :user, model: @provider.default_model}.merge!(params) params = [params, format_schema(params), format_tools(params)].inject({}, &:merge!).compact role, stream = params.delete(:role), params.delete(:stream) params[:stream] = true if stream.respond_to?(:<<) || stream == true req = Net::HTTP::Post.new("/v1/responses", headers) messages = [*(params.delete(:input) || []), LLM::Message.new(role, prompt)] body = JSON.dump({input: [format(messages, :response)].flatten}.merge!(params)) set_body_stream(req, StringIO.new(body)) res = execute(request: req, stream:, stream_parser:) LLM::Response.new(res).extend(LLM::OpenAI::Response::Responds) end |
#get(response, **params) ⇒ LLM::Response
Get a response
57 58 59 60 61 62 63 |
# File 'lib/llm/providers/openai/responses.rb', line 57 def get(response, **params) response_id = response.respond_to?(:id) ? response.id : response query = URI.encode_www_form(params) req = Net::HTTP::Get.new("/v1/responses/#{response_id}?#{query}", headers) res = execute(request: req) LLM::Response.new(res).extend(LLM::OpenAI::Response::Responds) end |
#delete(response) ⇒ LLM::Object
Deletes a response
71 72 73 74 75 76 |
# File 'lib/llm/providers/openai/responses.rb', line 71 def delete(response) response_id = response.respond_to?(:id) ? response.id : response req = Net::HTTP::Delete.new("/v1/responses/#{response_id}", headers) res = execute(request: req) LLM::Response.new(res) end |