#ifndef FORESTHUB_PROVIDER_REMOTE_OPENAI_HPP
#define FORESTHUB_PROVIDER_REMOTE_OPENAI_HPP
std::shared_ptr<core::HttpClient> http_;
std::string base_url_;
std::vector<core::ModelID> supported_models_;
core::HttpClient::Headers cached_headers_;
Extended LLM interface with health checks and model discovery.
Definition provider.hpp:32
bool SupportsModel(const core::ModelID &model) const override
Check if this provider handles the given model.
core::ProviderID ProviderId() const override
Returns "openai" as the provider identifier.
std::string Health() const override
Check that the OpenAI API is reachable.
OpenAIProvider(const config::ProviderConfig &cfg, std::shared_ptr<core::HttpClient> http_client)
Construct an OpenAI provider from configuration.
std::shared_ptr<core::ChatResponse> Chat(const core::ChatRequest &req) override
Send a chat request to the OpenAI Responses API.
Client and provider configuration types.
Abstract HTTP client interface and response type.
std::string ModelID
Unique identifier for an LLM model.
Definition model.hpp:23
std::string ProviderID
Unique identifier for an LLM provider.
Definition model.hpp:20
Remote provider implementations (ForestHub, OpenAI, Gemini, Anthropic).
LLM provider implementations.
Top-level namespace for the ForestHub SDK.
LLMClient and Provider interfaces for chat operations.
Shared configuration for any remote LLM provider.
Definition config.hpp:24
Chat completion request sent to an LLM provider.
Definition types.hpp:39