#ifndef FORESTHUB_PROVIDER_REMOTE_FORESTHUB_HPP
#define FORESTHUB_PROVIDER_REMOTE_FORESTHUB_HPP
std::shared_ptr<foresthub::core::HttpClient> http_;
std::string base_url_;
std::vector<core::ModelID> supported_models_;
core::HttpClient::Headers cached_headers_;
Extended LLM interface with health checks and model discovery.
Definition provider.hpp:32
std::shared_ptr< core::ChatResponse > Chat(const core::ChatRequest &req) override
Send a chat completion request to the backend.
bool SupportsModel(const core::ModelID &model) const override
Check if this provider lists the model as supported.
ForestHubProvider(const config::ProviderConfig &cfg, std::shared_ptr< foresthub::core::HttpClient > http_client)
Construct a ForestHub provider from configuration.
std::string Health() const override
Check that the remote provider is reachable and responding.
core::ProviderID ProviderId() const override
Returns "forest-hub" as the provider identifier.
Client and provider configuration types.
Abstract HTTP client interface and response type.
std::string ModelID
Unique identifier for an LLM model.
Definition model.hpp:23
std::string ProviderID
Unique identifier for an LLM provider.
Definition model.hpp:20
Remote provider implementations (ForestHub, OpenAI, Gemini, Anthropic).
LLM provider implementations.
Top-level namespace for the ForestHub SDK.
LLMClient and Provider interfaces for chat operations.
Shared configuration for any remote LLM provider.
Definition config.hpp:24
Chat completion request sent to an LLM provider.
Definition types.hpp:39