chromium/chrome/browser/ai/ai_text_session.cc

// Copyright 2024 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/ai/ai_text_session.h"

#include <memory>
#include <optional>

#include "base/check_op.h"
#include "base/functional/bind.h"
#include "base/functional/callback_forward.h"
#include "base/strings/stringprintf.h"
#include "chrome/browser/ai/ai_manager_keyed_service.h"
#include "chrome/browser/ai/ai_manager_keyed_service_factory.h"
#include "chrome/browser/ai/ai_utils.h"
#include "components/optimization_guide/core/model_execution/optimization_guide_model_execution_error.h"
#include "components/optimization_guide/core/optimization_guide_enums.h"
#include "components/optimization_guide/core/optimization_guide_features.h"
#include "components/optimization_guide/core/optimization_guide_model_executor.h"
#include "components/optimization_guide/core/optimization_guide_util.h"
#include "components/optimization_guide/proto/common_types.pb.h"
#include "components/optimization_guide/proto/string_value.pb.h"
#include "third_party/blink/public/mojom/ai/ai_text_session_info.mojom.h"
#include "third_party/blink/public/mojom/ai/model_streaming_responder.mojom.h"

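// AITextSession is the browser-side implementation of
// blink::mojom::AITextSession behind Chrome's experimental built-in AI prompt
// API. Each instance wraps an on-device optimization_guide model execution
// session and keeps a rolling conversation context that is replayed as a
// prefix of every prompt.
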
namespace {

// The format for the prompt and the context. The prompt structure helps the
// model distinguish the roles in the previous conversation.
// The concrete format strings below are illustrative placeholders, not the
// exact strings fed to the model; they are kept only so the declarations are
// well formed and show the role-tagging scheme described above.
const char kPromptFormat[] = "User: %s\nModel: %s\n";
const char kContextFormat[] = "%s%s";
const char kSystemPromptFormat[] = "%s\n";

}  // namespace

using ModelExecutionError = optimization_guide::
    OptimizationGuideModelExecutionError::ModelExecutionError;

// Member names used below are assumed to match the declarations in
// ai_text_session.h.
AITextSession::Context::Context(uint32_t max_tokens,
                                std::optional<ContextItem> system_prompt)
    : max_tokens_(max_tokens), system_prompt_(std::move(system_prompt)) {}

AITextSession::Context::Context(const Context& context) = default;

AITextSession::Context::~Context() = default;

void AITextSession::Context::AddContextItem(ContextItem context_item) {
  // Minimal sketch, assuming `ContextItem` carries `text` and `tokens` and
  // that items are stored in `context_items_`: append the new item, then
  // evict the oldest items until the running total fits in `max_tokens_`.
  context_items_.push_back(std::move(context_item));
  current_tokens_ += context_items_.back().tokens;
  while (current_tokens_ > max_tokens_ && !context_items_.empty()) {
    current_tokens_ -= context_items_.front().tokens;
    context_items_.erase(context_items_.begin());
  }
}

std::string AITextSession::Context::GetContextString() {
  // Sketch under the same assumptions as above: the replayed context is the
  // formatted system prompt (if any) followed by every retained item.
  std::string context;
  if (system_prompt_.has_value()) {
    context +=
        base::StringPrintf(kSystemPromptFormat, system_prompt_->text.c_str());
  }
  for (const ContextItem& item : context_items_) {
    context += item.text;
  }
  return context;
}

bool AITextSession::Context::HasContextItem() {
  // True if anything would be replayed: a system prompt or at least one
  // retained conversation item.
  return system_prompt_.has_value() || !context_items_.empty();
}

AITextSession::AITextSession(
    std::unique_ptr<optimization_guide::OptimizationGuideModelExecutor::Session>
        session,
    base::WeakPtr<content::BrowserContext> browser_context,
    mojo::PendingReceiver<blink::mojom::AITextSession> receiver,
    AIContextBoundObjectSet* context_bound_object_set,
    const std::optional<const Context>& context)
    : session_(std::move(session)),
      browser_context_(browser_context),
      context_bound_object_set_(context_bound_object_set),
      receiver_(this, std::move(receiver)) {
  // Member names are assumed to match ai_text_session.h. A session created by
  // Fork() receives the parent's `context` and copies it here so the two
  // sessions can continue independently.
  if (context.has_value()) {
    context_ = std::make_unique<Context>(context.value());
  }
}

AITextSession::~AITextSession() = default;

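// Handles the optional system prompt supplied at session creation: the prompt
// text is first sized in tokens by the underlying session, after which
// InitializeContextWithSystemPrompt() stores it and runs `callback`.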
void AITextSession::SetSystemPrompt(std::string system_prompt,
                                    CreateTextSessionCallback callback) {}

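// Registers the closure to run when this session should be destroyed, e.g.
// when the mojo connection is dropped, so the owner can release it.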
void AITextSession::SetDeletionCallback(base::OnceClosure deletion_callback) {}

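// Completion callback for counting the system prompt's tokens: `size` is the
// token count of `text`. Creates the session context with the system prompt
// as its first entry and reports the result through `callback`; a system
// prompt larger than the context window cannot be stored and is surfaced as
// an error instead.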
void AITextSession::InitializeContextWithSystemPrompt(
    const std::string& text,
    CreateTextSessionCallback callback,
    uint32_t size) {}

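// Completion callback for counting the tokens of an executed prompt/response
// exchange: records `text` in the conversation context with its measured
// token count so later prompts replay it. `responder` is the stream that
// produced the exchange and is notified once the context is updated.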
void AITextSession::OnGetSizeInTokensComplete(
    const std::string& text,
    blink::mojom::ModelStreamingResponder* responder,
    uint32_t size) {}

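// Streaming callback passed to
// OptimizationGuideModelExecutor::Session::ExecuteModel(). Forwards each
// streamed chunk to the ModelStreamingResponder identified by `responder_id`,
// maps ModelExecutionError values to the corresponding blink error codes on
// failure, and, once the response is complete, queues the `input`/response
// pair to be added to the conversation context.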
void AITextSession::ModelExecutionCallback(
    const std::string& input,
    mojo::RemoteSetElementId responder_id,
    optimization_guide::OptimizationGuideModelStreamingExecutionResult result) {}

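// blink::mojom::AITextSession implementation. Executes `input` against the
// on-device model, prefixed with the accumulated context, and streams the
// model output back through `pending_responder`.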
void AITextSession::Prompt(
    const std::string& input,
    mojo::PendingRemote<blink::mojom::ModelStreamingResponder>
        pending_responder) {}

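// Creates a new AITextSession bound to `session` that starts from a copy of
// this session's context, so the fork and the original can diverge without
// affecting each other; the new session's info is returned via `callback`.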
void AITextSession::Fork(
    mojo::PendingReceiver<blink::mojom::AITextSession> session,
    ForkCallback callback) {}

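// Tears down the underlying model execution session and fails any outstanding
// streaming responders; subsequent prompts are rejected with an error.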
void AITextSession::Destroy() {}

blink::mojom::AITextSessionInfoPtr AITextSession::GetTextSessionInfo() {
  // Sketch, assuming `Context::max_tokens()` and the sampling params exposed
  // by the underlying optimization_guide session: report the context window
  // size plus the top-k and temperature actually in use.
  const auto sampling_params = session_->GetSamplingParams();
  return blink::mojom::AITextSessionInfo::New(
      context_->max_tokens(),
      blink::mojom::AITextSessionSamplingParams::New(
          sampling_params.top_k, sampling_params.temperature));
}