// Copyright 2024 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

syntax = "proto3";

package optimization_guide.proto;

import "components/optimization_guide/proto/features/common_quality_data.proto";

option optimize_for = LITE_RUNTIME;
option java_package = "org.chromium.components.optimization_guide.features.proto";
option java_outer_classname = "ModelPrototypingProto";

// DO NOT EDIT THIS FILE DIRECTLY!
//
// This file is generated in g3 and then synced to Chrome. Instead, please
// refer to http://go/chrome-intelligence-feature-protos (Google-internal link),
// and then changes will be synced with Chrome automatically.

message ModelPrototypingLoggingData {
  ModelPrototypingRequest request = 1;
  ModelPrototypingResponse response = 2;
}

// Next ID: 4
message ModelPrototypingRequest {
  ModelingInputs modeling_inputs = 1;

  // The series of prompts to send to the model(s). The calls are run in
  // series, and each response can be referenced by later calls, allowing the
  // output of one query to be piped into the input of the next.
  repeated PrototypingPrompt prototyping_prompts = 2;

  // The responses from previous calls to the model, which can be referenced
  // in later prompts. They are accessed with golang text/template syntax,
  // e.g. {{index .GetModelResponses 0}} for the first response.
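  //
  // Illustrative only: with two entries in |prototyping_prompts|, the second
  // prompt's user_input_template could be something like
  //   "Make this draft more formal: {{index .GetModelResponses 0}}"
  // so that the first model's output is piped into the second call. The
  // template text is hypothetical; only the {{index .GetModelResponses N}}
  // accessor is documented above.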
  repeated string model_responses = 3;

  // Next ID: 6
  // Defines a single prompt to be sent to the model.
  message PrototypingPrompt {
    // Prompt variables that can be used in the rest of the prompt. These are
    // in addition to any prompt variables defined in the prompt template in
    // the config for the model sequence. Prompt variables are golang
    // text/template variable definitions that the templates below can
    // reference. For example, a prompt variable could be something like:
    //   {{ $funVar := "1" }}
    // This defines a variable that can be used in the prompt as {{$funVar}};
    // its value is "1".
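    //
    // A slightly fuller sketch (hypothetical names and values): a
    // prompt_variables string such as
    //   {{ $tone := "formal" }}{{ $maxWords := "100" }}
    // makes {{$tone}} and {{$maxWords}} available to the templates below, so
    // a system_instructions_template could contain
    //   "Write in a {{$tone}} tone using at most {{$maxWords}} words."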
    string prompt_variables = 1;

    // The prompt is composed by inserting the following role templates into
    // the prompt template in the order they are defined (see the illustrative
    // example at the end of this file).
    // The system role generally carries the instructions for the model to
    // follow.
    string system_instructions_template = 2;

    // The context role carries information about the user interaction, such
    // as page state.
    string context_area_template = 3;

    // The user role carries information from the user, such as input they
    // typed.
    string user_input_template = 4;

    // Information about the model to use.
    ModelInformation model_information = 5;

    message ModelInformation {
      ModelEnum model_enum = 1;

      enum ModelEnum {
        MODEL_UNSPECIFIED = 0;
        // Returns the filled templates without running an LLM.
        MODEL_RETURN_FILLED_TEMPLATES = 1;
        // The compose s-dense model.
        MODEL_COMPOSE = 2;
      }
    }
  }

  // All the information collected from the browser along with the user input
  // (for features like Compose).
  message BrowserCollectedInformation {
    // The page context of the page the model is acting on.
    PageContext page_context = 1;

    // The inner text of the page the model is acting on (excluding
    // cross-origin frames).
    string inner_text = 2;

    // The offset of the focused element into the |inner_text|.
    uint64 inner_text_offset = 3;

    // Custom text that a prototyper can inject into prompts. If the
    // browser-collected information is not sufficient, an early-stage
    // prototype can build a string in Chrome/colab to be used in the prompt.
    // This allows separation of the prompt definition from call-specific
    // data.
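    //
    // Hypothetical example value, e.g. a string pre-built in colab:
    //   custom_data: "page_summary: <summary string built offline>"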
    repeated string custom_data = 4;
  }

  // Next ID: 3
  // Data specific to the feature.
  message ModelingInputs {
    BrowserCollectedInformation browser_collected_information = 1;
    string user_input = 2;
  }
}

message ModelPrototypingResponse {
  // The series of prompts sent to the model corresponding to the
  // |prototyping_prompts| in the request.
  repeated string model_prompts = 1;

  // The responses from the model corresponding to |model_prompts|.
  repeated string model_responses = 2;
}
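
// Illustrative example (not part of the schema; all values are hypothetical):
// a minimal ModelPrototypingRequest in text format that asks for the filled
// templates back without running an LLM. Field names come from the messages
// above; the template strings and page contents are made up, and the
// templates avoid any accessors not documented in this file.
//
//   modeling_inputs {
//     browser_collected_information {
//       inner_text: "Example page text"
//       inner_text_offset: 0
//     }
//     user_input: "Help me write a thank-you note"
//   }
//   prototyping_prompts {
//     prompt_variables: "{{ $tone := \"formal\" }}"
//     system_instructions_template: "Write in a {{$tone}} tone."
//     context_area_template: "Page state collected by the browser."
//     user_input_template: "Draft a short thank-you note."
//     model_information { model_enum: MODEL_RETURN_FILLED_TEMPLATES }
//   }
//
// A corresponding ModelPrototypingResponse would carry one |model_prompts|
// entry (and one |model_responses| entry) per prototyping_prompt, as
// described above.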