Add rich logging of LLM interactions

Instead of treating LLM logging as simply writing lines of text
sequentially, emit a stream of typed data objects that carry
rich information about each interaction: timestamps, message
types, tool calls, and so on.

These items are collected by a new custom webview in the VS Code
extension (the "Continue Console"), allowing a user or developer
to browse the individual interactions and view them in detail.
In-progress interactions are now streamed live instead of being
held back until completion to avoid concurrency issues.

This webview could also be exposed for IntelliJ, or file logging
could be reimplemented on top of the new framework.
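
As an illustration, here is a minimal, hypothetical sketch of a
consumer subscribing to the new typed stream, based on the LLMLogger
and LLMInteractionItem types added below (import paths are the ones
used from the VS Code extension side). The Continue Console does the
real wiring of this kind in ContinueConsoleWebviewViewProvider.

import { LLMInteractionItem } from "core";
import { LLMLogger } from "core/llm/logger";

const logger = new LLMLogger();

// Every item carries an interactionId and a timestamp, so a consumer can
// group items per interaction instead of parsing free-form text lines.
logger.onLogItem((item: LLMInteractionItem) => {
  switch (item.kind) {
    case "startChat":
    case "startComplete":
    case "startFim":
      console.log(`[${item.interactionId}] started (${item.kind})`);
      break;
    case "chunk":
    case "message":
      // Streamed live while the interaction is still in progress.
      break;
    case "success":
    case "cancel":
    case "error":
      console.log(
        `[${item.interactionId}] ended (${item.kind}): ` +
          `${item.promptTokens} prompt / ${item.generatedTokens} generated tokens`,
      );
      break;
  }
});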
Owen W. Taylor 2025-02-11 23:14:36 -05:00
parent caf7288d36
commit 3553dfaae4
40 changed files with 1727 additions and 169 deletions

View File

@ -33,9 +33,7 @@ program.action(async () => {
const ide = new IpcIde(messenger);
const promptLogsPath = getPromptLogsPath();
new Core(messenger, ide, async (text) => {
fs.appendFileSync(promptLogsPath, text + "\n\n");
});
new Core(messenger, ide);
console.log("[binary] Core started");
} catch (e) {

View File

@ -12,6 +12,7 @@ import {
IDE,
IdeSettings,
ILLM,
ILLMLogger,
} from "../index.js";
import Ollama from "../llm/llms/Ollama.js";
import { GlobalContext } from "../util/GlobalContext.js";
@ -48,12 +49,11 @@ export class ConfigHandler {
constructor(
private readonly ide: IDE,
private ideSettingsPromise: Promise<IdeSettings>,
private readonly writeLog: (text: string) => Promise<void>,
private llmLogger: ILLMLogger,
sessionInfoPromise: Promise<ControlPlaneSessionInfo | undefined>,
) {
this.ide = ide;
this.ideSettingsPromise = ideSettingsPromise;
this.writeLog = writeLog;
this.controlPlaneClient = new ControlPlaneClient(
sessionInfoPromise,
ideSettingsPromise,
@ -65,7 +65,7 @@ export class ConfigHandler {
ide,
ideSettingsPromise,
this.controlPlaneClient,
writeLog,
this.llmLogger,
),
this.ide,
);
@ -184,7 +184,7 @@ export class ConfigHandler {
this.controlPlaneClient,
this.ide,
this.ideSettingsPromise,
this.writeLog,
this.llmLogger,
assistant.rawYaml,
orgScopeId,
);
@ -229,7 +229,7 @@ export class ConfigHandler {
this.controlPlaneClient,
this.ide,
this.ideSettingsPromise,
this.writeLog,
this.llmLogger,
this.reloadConfig.bind(this),
);
@ -292,7 +292,7 @@ export class ConfigHandler {
this.ide,
this.ideSettingsPromise,
this.controlPlaneClient,
this.writeLog,
this.llmLogger,
assistant,
);
});

View File

@ -27,6 +27,7 @@ import {
IdeSettings,
IdeType,
ILLM,
ILLMLogger,
LLMOptions,
ModelDescription,
RerankerDescription,
@ -222,7 +223,7 @@ async function intermediateToFinalConfig(
ideSettings: IdeSettings,
ideInfo: IdeInfo,
uniqueId: string,
writeLog: (log: string) => Promise<void>,
llmLogger: ILLMLogger,
workOsAccessToken: string | undefined,
loadPromptFiles: boolean = true,
allowFreeTrial: boolean = true,
@ -238,7 +239,7 @@ async function intermediateToFinalConfig(
ide.readFile.bind(ide),
uniqueId,
ideSettings,
writeLog,
llmLogger,
config.completionOptions,
config.systemMessage,
);
@ -260,7 +261,7 @@ async function intermediateToFinalConfig(
ide.readFile.bind(ide),
uniqueId,
ideSettings,
writeLog,
llmLogger,
copyOf(config.completionOptions),
config.systemMessage,
);
@ -280,7 +281,7 @@ async function intermediateToFinalConfig(
} else {
const llm = new CustomLLMClass({
...desc,
options: { ...desc.options, writeLog } as any,
options: { ...desc.options, logger: llmLogger } as any,
});
if (llm.model === "AUTODETECT") {
try {
@ -289,7 +290,11 @@ async function intermediateToFinalConfig(
(modelName) =>
new CustomLLMClass({
...desc,
options: { ...desc.options, model: modelName, writeLog },
options: {
...desc.options,
model: modelName,
logger: llmLogger,
},
}),
);
@ -343,7 +348,7 @@ async function intermediateToFinalConfig(
ide.readFile.bind(ide),
uniqueId,
ideSettings,
writeLog,
llmLogger,
config.completionOptions,
config.systemMessage,
);
@ -831,7 +836,7 @@ async function loadContinueConfigFromJson(
ideSettings: IdeSettings,
ideInfo: IdeInfo,
uniqueId: string,
writeLog: (log: string) => Promise<void>,
llmLogger: ILLMLogger,
workOsAccessToken: string | undefined,
overrideConfigJson: SerializedContinueConfig | undefined,
): Promise<ConfigResult<ContinueConfig>> {
@ -931,7 +936,7 @@ async function loadContinueConfigFromJson(
ideSettings,
ideInfo,
uniqueId,
writeLog,
llmLogger,
workOsAccessToken,
);
return {

View File

@ -7,6 +7,7 @@ import {
ContinueConfig,
IDE,
IdeSettings,
ILLMLogger,
SerializedContinueConfig,
} from "../../index.js";
import { ProfileDescription } from "../ProfileLifecycleManager.js";
@ -27,7 +28,7 @@ export default class ControlPlaneProfileLoader implements IProfileLoader {
private readonly controlPlaneClient: ControlPlaneClient,
private readonly ide: IDE,
private ideSettingsPromise: Promise<IdeSettings>,
private writeLog: (message: string) => Promise<void>,
private llmLogger: ILLMLogger,
private readonly onReload: () => void,
) {
this.description = {
@ -65,7 +66,7 @@ export default class ControlPlaneProfileLoader implements IProfileLoader {
this.ide,
this.ideSettingsPromise,
this.controlPlaneClient,
this.writeLog,
this.llmLogger,
serializedConfig,
undefined,
undefined,

View File

@ -1,7 +1,7 @@
import { ConfigResult, parseConfigYaml } from "@continuedev/config-yaml";
import { ControlPlaneClient } from "../../control-plane/client.js";
import { ContinueConfig, IDE, IdeSettings } from "../../index.js";
import { ContinueConfig, IDE, IdeSettings, ILLMLogger } from "../../index.js";
import { ProfileDescription } from "../ProfileLifecycleManager.js";
import { getPrimaryConfigFilePath } from "../../util/paths.js";
@ -17,7 +17,7 @@ export default class LocalProfileLoader implements IProfileLoader {
private ide: IDE,
private ideSettingsPromise: Promise<IdeSettings>,
private controlPlaneClient: ControlPlaneClient,
private writeLog: (message: string) => Promise<void>,
private llmLogger: ILLMLogger,
private overrideAssistantFile?:
| { path: string; content: string }
| undefined,
@ -59,7 +59,7 @@ export default class LocalProfileLoader implements IProfileLoader {
this.ide,
this.ideSettingsPromise,
this.controlPlaneClient,
this.writeLog,
this.llmLogger,
undefined,
undefined,
undefined,

View File

@ -2,7 +2,7 @@ import { AssistantUnrolled, ConfigResult } from "@continuedev/config-yaml";
import { ControlPlaneClient } from "../../control-plane/client.js";
import { getControlPlaneEnv } from "../../control-plane/env.js";
import { ContinueConfig, IDE, IdeSettings } from "../../index.js";
import { ContinueConfig, IDE, IdeSettings, ILLMLogger } from "../../index.js";
import { ProfileDescription } from "../ProfileLifecycleManager.js";
import doLoadConfig from "./doLoadConfig.js";
@ -30,7 +30,7 @@ export default class PlatformProfileLoader implements IProfileLoader {
private readonly controlPlaneClient: ControlPlaneClient,
private readonly ide: IDE,
private ideSettingsPromise: Promise<IdeSettings>,
private writeLog: (message: string) => Promise<void>,
private llmLogger: ILLMLogger,
readonly description: ProfileDescription,
private readonly orgScopeId: string | null,
) {}
@ -44,7 +44,7 @@ export default class PlatformProfileLoader implements IProfileLoader {
controlPlaneClient: ControlPlaneClient,
ide: IDE,
ideSettingsPromise: Promise<IdeSettings>,
writeLog: (message: string) => Promise<void>,
llmLogger: ILLMLogger,
rawYaml: string,
orgScopeId: string | null,
): Promise<PlatformProfileLoader> {
@ -74,7 +74,7 @@ export default class PlatformProfileLoader implements IProfileLoader {
controlPlaneClient,
ide,
ideSettingsPromise,
writeLog,
llmLogger,
description,
orgScopeId,
);
@ -93,7 +93,7 @@ export default class PlatformProfileLoader implements IProfileLoader {
this.ide,
this.ideSettingsPromise,
this.controlPlaneClient,
this.writeLog,
this.llmLogger,
undefined,
this.configResult.config,
{

View File

@ -12,6 +12,7 @@ import {
ContinueRcJson,
IDE,
IdeSettings,
ILLMLogger,
SerializedContinueConfig,
Tool,
} from "../../";
@ -39,7 +40,7 @@ export default async function doLoadConfig(
ide: IDE,
ideSettingsPromise: Promise<IdeSettings>,
controlPlaneClient: ControlPlaneClient,
writeLog: (message: string) => Promise<void>,
llmLogger: ILLMLogger,
overrideConfigJson: SerializedContinueConfig | undefined,
overrideConfigYaml: AssistantUnrolled | undefined,
platformConfigMetadata: PlatformConfigMetadata | undefined,
@ -74,7 +75,7 @@ export default async function doLoadConfig(
ideSettings,
ideInfo,
uniqueId,
writeLog,
llmLogger,
workOsAccessToken,
overrideConfigYaml,
platformConfigMetadata,
@ -92,7 +93,7 @@ export default async function doLoadConfig(
ideSettings,
ideInfo,
uniqueId,
writeLog,
llmLogger,
workOsAccessToken,
overrideConfigJson,
);

View File

@ -90,7 +90,7 @@ declare global {
requestOptions?: RequestOptions;
promptTemplates?: Record<string, PromptTemplate>;
templateMessages?: (messages: ChatMessage[]) => string;
writeLog?: (str: string) => Promise<void>;
llmLogger?: ILLMLogger;
llmRequestHook?: (model: string, prompt: string) => any;
apiKey?: string;
apiBase?: string;
@ -433,6 +433,94 @@ declare global {
export type ToastType = "info" | "error" | "warning";
export interface LLMInteractionBase {
interactionId: string;
timestamp: number;
}
export interface LLMInteractionStartChat extends LLMInteractionBase {
kind: "startChat";
messages: ChatMessage[];
options: CompletionOptions;
}
export interface LLMInteractionStartComplete extends LLMInteractionBase {
kind: "startComplete";
prompt: string;
options: CompletionOptions;
}
export interface LLMInteractionStartFim extends LLMInteractionBase {
kind: "startFim";
prefix: string;
suffix: string;
options: CompletionOptions;
}
export interface LLMInteractionChunk extends LLMInteractionBase {
kind: "chunk";
chunk: string;
}
export interface LLMInteractionMessage extends LLMInteractionBase {
kind: "message";
message: ChatMessage;
}
export interface LLMInteractionEnd extends LLMInteractionBase {
promptTokens: number;
generatedTokens: number;
thinkingTokens: number;
}
export interface LLMInteractionSuccess extends LLMInteractionEnd {
kind: "success";
}
export interface LLMInteractionCancel extends LLMInteractionEnd {
kind: "cancel";
}
export interface LLMInteractionError extends LLMInteractionEnd {
kind: "error";
name: string;
message: string;
}
export type LLMInteractionItem =
| LLMInteractionStartChat
| LLMInteractionStartComplete
| LLMInteractionStartFim
| LLMInteractionChunk
| LLMInteractionMessage
| LLMInteractionSuccess
| LLMInteractionCancel
| LLMInteractionError;
// When we log a LLM interaction, we want to add the interactionId and timestamp
// in the logger code, so we need a type that omits these members from *each*
// member of the union. This can be done by using the distributive behavior of
// conditional types in Typescript.
//
// www.typescriptlang.org/docs/handbook/2/conditional-types.html#distributive-conditional-types
// https://stackoverflow.com/questions/57103834/typescript-omit-a-property-from-all-interfaces-in-a-union-but-keep-the-union-s
type DistributiveOmit<T, K extends PropertyKey> = T extends unknown
? Omit<T, K>
: never;
export type LLMInteractionItemDetails = DistributiveOmit<
LLMInteractionItem,
"interactionId" | "timestamp"
>;
export interface ILLMInteractionLog {
logItem(item: LLMInteractionItemDetails): void;
}
export interface ILLMLogger {
createInteractionLog(): ILLMInteractionLog;
}
export interface LLMOptions {
model: string;
@ -446,7 +534,7 @@ declare global {
template?: TemplateType;
promptTemplates?: Record<string, PromptTemplate>;
templateMessages?: (messages: ChatMessage[]) => string;
writeLog?: (str: string) => Promise<void>;
logger?: ILLMLogger;
llmRequestHook?: (model: string, prompt: string) => any;
apiKey?: string;
aiGatewaySlug?: string;

View File

@ -20,6 +20,7 @@ import {
IDE,
IdeInfo,
IdeSettings,
ILLMLogger,
} from "../..";
import { slashFromCustomCommand } from "../../commands";
import { AllRerankers } from "../../context/allRerankers";
@ -116,7 +117,7 @@ async function configYamlToContinueConfig(
ideSettings: IdeSettings,
ideInfo: IdeInfo,
uniqueId: string,
writeLog: (log: string) => Promise<void>,
llmLogger: ILLMLogger,
workOsAccessToken: string | undefined,
platformConfigMetadata: PlatformConfigMetadata | undefined,
allowFreeTrial: boolean = true,
@ -216,7 +217,7 @@ async function configYamlToContinueConfig(
ide,
uniqueId,
ideSettings,
writeLog,
llmLogger,
platformConfigMetadata,
config: continueConfig,
});
@ -412,7 +413,7 @@ export async function loadContinueConfigFromYaml(
ideSettings: IdeSettings,
ideInfo: IdeInfo,
uniqueId: string,
writeLog: (log: string) => Promise<void>,
llmLogger: ILLMLogger,
workOsAccessToken: string | undefined,
overrideConfigYaml: AssistantUnrolled | undefined,
platformConfigMetadata: PlatformConfigMetadata | undefined,
@ -451,7 +452,7 @@ export async function loadContinueConfigFromYaml(
ideSettings,
ideInfo,
uniqueId,
writeLog,
llmLogger,
workOsAccessToken,
platformConfigMetadata,
);

View File

@ -1,6 +1,12 @@
import { ModelConfig } from "@continuedev/config-yaml";
import { ContinueConfig, IDE, IdeSettings, LLMOptions } from "../..";
import {
ContinueConfig,
IDE,
IdeSettings,
ILLMLogger,
LLMOptions,
} from "../..";
import { BaseLLM } from "../../llm";
import { LLMClasses } from "../../llm/llms";
import { PlatformConfigMetadata } from "../profile/PlatformProfileLoader";
@ -25,14 +31,14 @@ async function modelConfigToBaseLLM({
model,
uniqueId,
ideSettings,
writeLog,
llmLogger,
platformConfigMetadata,
config,
}: {
model: ModelConfig;
uniqueId: string;
ideSettings: IdeSettings;
writeLog: (log: string) => Promise<void>;
llmLogger: ILLMLogger;
platformConfigMetadata: PlatformConfigMetadata | undefined;
config: ContinueConfig;
}): Promise<BaseLLM | undefined> {
@ -54,7 +60,7 @@ async function modelConfigToBaseLLM({
model.defaultCompletionOptions?.maxTokens ??
cls.defaultOptions?.completionOptions?.maxTokens,
},
writeLog,
logger: llmLogger,
uniqueId,
title: model.name,
systemMessage: config.systemMessage,
@ -141,7 +147,7 @@ async function autodetectModels({
ide,
uniqueId,
ideSettings,
writeLog,
llmLogger,
platformConfigMetadata,
config,
}: {
@ -150,7 +156,7 @@ async function autodetectModels({
ide: IDE;
uniqueId: string;
ideSettings: IdeSettings;
writeLog: (log: string) => Promise<void>;
llmLogger: ILLMLogger;
platformConfigMetadata: PlatformConfigMetadata | undefined;
config: ContinueConfig;
}): Promise<BaseLLM[]> {
@ -171,7 +177,7 @@ async function autodetectModels({
},
uniqueId,
ideSettings,
writeLog,
llmLogger,
platformConfigMetadata,
config,
});
@ -189,7 +195,7 @@ export async function llmsFromModelConfig({
ide,
uniqueId,
ideSettings,
writeLog,
llmLogger,
platformConfigMetadata,
config,
}: {
@ -197,7 +203,7 @@ export async function llmsFromModelConfig({
ide: IDE;
uniqueId: string;
ideSettings: IdeSettings;
writeLog: (log: string) => Promise<void>;
llmLogger: ILLMLogger;
platformConfigMetadata: PlatformConfigMetadata | undefined;
config: ContinueConfig;
}): Promise<BaseLLM[]> {
@ -205,7 +211,7 @@ export async function llmsFromModelConfig({
model,
uniqueId,
ideSettings,
writeLog,
llmLogger,
platformConfigMetadata,
config,
});
@ -220,7 +226,7 @@ export async function llmsFromModelConfig({
ide,
uniqueId,
ideSettings,
writeLog,
llmLogger,
platformConfigMetadata,
config,
});

View File

@ -7,6 +7,7 @@ import {
IContextProvider,
} from "../..";
import { ConfigHandler } from "../../config/ConfigHandler";
import { LLMLogger } from "../../llm/logger";
import { TEST_DIR } from "../../test/testDir";
import FileSystemIde from "../../util/filesystem";
@ -26,10 +27,11 @@ async function getContextProviderExtras(
): Promise<ContextProviderExtras> {
const ide = new FileSystemIde(TEST_DIR);
const ideSettingsPromise = ide.getIdeSettings();
const llmLogger = new LLMLogger();
const configHandler = new ConfigHandler(
ide,
ideSettingsPromise,
async (text) => {},
llmLogger,
Promise.resolve(undefined),
);
const { config } = await configHandler.loadConfig();

View File

@ -46,6 +46,7 @@ import { isLocalAssistantFile } from "./config/loadLocalAssistants";
import { MCPManagerSingleton } from "./context/mcp";
import { shouldIgnore } from "./indexing/shouldIgnore";
import { walkDirCache } from "./indexing/walkDir";
import { LLMLogger } from "./llm/logger";
import { llmStreamChat } from "./llm/streamChat";
import type { FromCoreProtocol, ToCoreProtocol } from "./protocol";
import type { IMessenger, Message } from "./protocol/messenger";
@ -58,6 +59,7 @@ export class Core {
codebaseIndexingState: IndexingProgressUpdate;
private docsService: DocsService;
private globalContext = new GlobalContext();
llmLogger = new LLMLogger();
private readonly indexingPauseToken = new PauseToken(
this.globalContext.get("indexingPaused") === true,
@ -85,7 +87,6 @@ export class Core {
constructor(
private readonly messenger: IMessenger<ToCoreProtocol, FromCoreProtocol>,
private readonly ide: IDE,
private readonly onWrite: (text: string) => Promise<void> = async () => {},
) {
// Ensure .continue directory is created
migrateV1DevDataFiles();
@ -106,7 +107,7 @@ export class Core {
this.configHandler = new ConfigHandler(
this.ide,
ideSettingsPromise,
this.onWrite,
this.llmLogger,
sessionInfoPromise,
);

core/index.d.ts (vendored, 92 lines changed)
View File

@ -90,7 +90,7 @@ export interface ILLM extends LLMOptions {
requestOptions?: RequestOptions;
promptTemplates?: Record<string, PromptTemplate>;
templateMessages?: (messages: ChatMessage[]) => string;
writeLog?: (str: string) => Promise<void>;
llmLogger?: ILLMLogger;
llmRequestHook?: (model: string, prompt: string) => any;
apiKey?: string;
apiBase?: string;
@ -483,6 +483,94 @@ export interface LLMFullCompletionOptions extends BaseCompletionOptions {
export type ToastType = "info" | "error" | "warning";
export interface LLMInteractionBase {
interactionId: string;
timestamp: number;
}
export interface LLMInteractionStartChat extends LLMInteractionBase {
kind: "startChat";
messages: ChatMessage[];
options: CompletionOptions;
}
export interface LLMInteractionStartComplete extends LLMInteractionBase {
kind: "startComplete";
prompt: string;
options: CompletionOptions;
}
export interface LLMInteractionStartFim extends LLMInteractionBase {
kind: "startFim";
prefix: string;
suffix: string;
options: CompletionOptions;
}
export interface LLMInteractionChunk extends LLMInteractionBase {
kind: "chunk";
chunk: string;
}
export interface LLMInteractionMessage extends LLMInteractionBase {
kind: "message";
message: ChatMessage;
}
export interface LLMInteractionEnd extends LLMInteractionBase {
promptTokens: number;
generatedTokens: number;
thinkingTokens: number;
}
export interface LLMInteractionSuccess extends LLMInteractionEnd {
kind: "success";
}
export interface LLMInteractionCancel extends LLMInteractionEnd {
kind: "cancel";
}
export interface LLMInteractionError extends LLMInteractionEnd {
kind: "error";
name: string;
message: string;
}
export type LLMInteractionItem =
| LLMInteractionStartChat
| LLMInteractionStartComplete
| LLMInteractionStartFim
| LLMInteractionChunk
| LLMInteractionMessage
| LLMInteractionSuccess
| LLMInteractionCancel
| LLMInteractionError;
// When we log a LLM interaction, we want to add the interactionId and timestamp
// in the logger code, so we need a type that omits these members from *each*
// member of the union. This can be done by using the distributive behavior of
// conditional types in Typescript.
//
// www.typescriptlang.org/docs/handbook/2/conditional-types.html#distributive-conditional-types
// https://stackoverflow.com/questions/57103834/typescript-omit-a-property-from-all-interfaces-in-a-union-but-keep-the-union-s
type DistributiveOmit<T, K extends PropertyKey> = T extends unknown
? Omit<T, K>
: never;
export type LLMInteractionItemDetails = DistributiveOmit<
LLMInteractionItem,
"interactionId" | "timestamp"
>;
export interface ILLMInteractionLog {
logItem(item: LLMInteractionItemDetails): void;
}
export interface ILLMLogger {
createInteractionLog(): ILLMInteractionLog;
}
export interface LLMOptions {
model: string;
@ -496,7 +584,7 @@ export interface LLMOptions {
template?: TemplateType;
promptTemplates?: Record<string, PromptTemplate>;
templateMessages?: (messages: ChatMessage[]) => string;
writeLog?: (str: string) => Promise<void>;
logger?: ILLMLogger;
llmRequestHook?: (model: string, prompt: string) => any;
rules?: Rule[];
apiKey?: string;

View File

@ -3,6 +3,7 @@
// */
// import { ConfigHandler } from "../../config/ConfigHandler.js";
// import { SiteIndexingConfig } from "../../index.js";
// import { LLMLogger } from "../../llm/logger.js";
// import FileSystemIde from "../../util/filesystem.js";
// import { editConfigJson } from "../../util/paths.js";
@ -48,7 +49,7 @@
// configHandler = new ConfigHandler(
// ide,
// ideSettingsPromise,
// async () => {},
// new LLMLogger(),
// Promise.resolve(undefined),
// );

View File

@ -1,6 +1,7 @@
import { ChatMessage, LLMOptions } from "..";
import { BaseLLM } from ".";
import { LLMLogger } from "./logger";
class DummyLLM extends BaseLLM {
static providerName = "openai";
@ -30,7 +31,7 @@ describe("BaseLLM", () => {
const templatMessagesFunction = (messages: ChatMessage[]) => {
return messages[0]?.content.toString() ?? "";
};
const writeLogFunction = async () => {};
const llmLogger = new LLMLogger();
const options: LLMOptions = {
model: "gpt-3.5-turbo",
uniqueId: "testId",
@ -43,7 +44,7 @@ describe("BaseLLM", () => {
requestOptions: {},
promptTemplates: {},
templateMessages: templatMessagesFunction,
writeLog: writeLogFunction,
logger: llmLogger,
llmRequestHook: () => {},
apiKey: "testApiKey",
aiGatewaySlug: "testSlug",
@ -67,7 +68,7 @@ describe("BaseLLM", () => {
expect(instance.requestOptions).toEqual({});
expect(instance.promptTemplates).toEqual({});
expect(instance.templateMessages).toEqual(templatMessagesFunction);
expect(instance.writeLog).toBe(writeLogFunction);
expect(instance.logger).toBe(llmLogger);
expect(instance.apiKey).toBe("testApiKey");
expect(instance.aiGatewaySlug).toBe("testSlug");
expect(instance.apiBase).toBe("https://api.example.com/");

View File

@ -16,6 +16,8 @@ import {
Chunk,
CompletionOptions,
ILLM,
ILLMInteractionLog,
ILLMLogger,
LLMFullCompletionOptions,
LLMOptions,
ModelCapability,
@ -72,6 +74,8 @@ export function isModelInstaller(provider: any): provider is ModelInstaller {
return provider && typeof provider.installModel === "function";
}
type InteractionStatus = "in_progress" | "success" | "error" | "cancelled";
export abstract class BaseLLM implements ILLM {
static providerName: string;
static defaultOptions: Partial<LLMOptions> | undefined = undefined;
@ -129,7 +133,7 @@ export abstract class BaseLLM implements ILLM {
template?: TemplateType;
promptTemplates?: Record<string, PromptTemplate>;
templateMessages?: (messages: ChatMessage[]) => string;
writeLog?: (str: string) => Promise<void>;
logger?: ILLMLogger;
llmRequestHook?: (model: string, prompt: string) => any;
apiKey?: string;
@ -220,7 +224,7 @@ export abstract class BaseLLM implements ILLM {
options.template,
) ??
undefined;
this.writeLog = options.writeLog;
this.logger = options.logger;
this.llmRequestHook = options.llmRequestHook;
this.apiKey = options.apiKey;
@ -345,13 +349,17 @@ export abstract class BaseLLM implements ILLM {
);
}
private _logTokensGenerated(
private _logEnd(
model: string,
prompt: string,
completion: string,
) {
thinking: string | undefined,
interaction: ILLMInteractionLog | undefined,
error?: any,
): InteractionStatus {
let promptTokens = this.countTokens(prompt);
let generatedTokens = this.countTokens(completion);
let thinkingTokens = thinking ? this.countTokens(thinking) : 0;
void Telemetry.capture(
"tokens_generated",
@ -380,6 +388,37 @@ export abstract class BaseLLM implements ILLM {
generatedTokens: generatedTokens,
},
});
if (error !== undefined) {
if (error === "cancel" || error.name === "AbortError") {
interaction?.logItem({
kind: "cancel",
promptTokens,
generatedTokens,
thinkingTokens,
});
return "cancelled";
} else {
console.log(error);
interaction?.logItem({
kind: "error",
name: error.name,
message: error.message,
promptTokens,
generatedTokens,
thinkingTokens,
});
return "error";
}
} else {
interaction?.logItem({
kind: "success",
promptTokens,
generatedTokens,
thinkingTokens,
});
return "success";
}
}
fetch(url: RequestInfo | URL, init?: RequestInit): Promise<Response> {
@ -535,52 +574,89 @@ export abstract class BaseLLM implements ILLM {
): AsyncGenerator<string> {
const { completionOptions, logEnabled } =
this._parseCompletionOptions(options);
const interaction = logEnabled
? this.logger?.createInteractionLog()
: undefined;
let status: InteractionStatus = "in_progress";
const fimLog = `Prefix: ${prefix}\nSuffix: ${suffix}`;
if (logEnabled) {
if (this.writeLog) {
await this.writeLog(
this._compilePromptForLog(fimLog, completionOptions),
);
}
interaction?.logItem({
kind: "startFim",
prefix,
suffix,
options: completionOptions,
});
if (this.llmRequestHook) {
this.llmRequestHook(completionOptions.model, fimLog);
}
}
let completion = "";
if (this.shouldUseOpenAIAdapter("streamFim") && this.openaiAdapter) {
const stream = this.openaiAdapter.fimStream(
toFimBody(prefix, suffix, completionOptions),
signal,
);
for await (const chunk of stream) {
const result = fromChatCompletionChunk(chunk);
if (result) {
const content = renderChatMessage(result);
completion += content;
yield content;
try {
if (this.shouldUseOpenAIAdapter("streamFim") && this.openaiAdapter) {
const stream = this.openaiAdapter.fimStream(
toFimBody(prefix, suffix, completionOptions),
signal,
);
for await (const chunk of stream) {
const result = fromChatCompletionChunk(chunk);
if (result) {
const content = renderChatMessage(result);
interaction?.logItem({
kind: "chunk",
chunk: content,
});
completion += content;
yield content;
}
}
} else {
for await (const chunk of this._streamFim(
prefix,
suffix,
signal,
completionOptions,
)) {
interaction?.logItem({
kind: "chunk",
chunk,
});
completion += chunk;
yield chunk;
}
}
} else {
for await (const chunk of this._streamFim(
prefix,
suffix,
signal,
completionOptions,
)) {
completion += chunk;
yield chunk;
status = this._logEnd(
completionOptions.model,
fimLog,
completion,
undefined,
interaction,
);
} catch (e) {
status = this._logEnd(
completionOptions.model,
fimLog,
completion,
undefined,
interaction,
e,
);
throw e;
} finally {
if (status === "in_progress") {
this._logEnd(
completionOptions.model,
fimLog,
completion,
undefined,
interaction,
"cancel",
);
}
}
this._logTokensGenerated(completionOptions.model, fimLog, completion);
if (logEnabled && this.writeLog) {
await this.writeLog(`Completion:\n${completion}\n\n`);
}
return {
prompt: fimLog,
completion,
@ -595,6 +671,10 @@ export abstract class BaseLLM implements ILLM {
) {
const { completionOptions, logEnabled, raw } =
this._parseCompletionOptions(options);
const interaction = logEnabled
? this.logger?.createInteractionLog()
: undefined;
let status: InteractionStatus = "in_progress";
let prompt = pruneRawPromptFromTop(
completionOptions.model,
@ -608,11 +688,11 @@ export abstract class BaseLLM implements ILLM {
}
if (logEnabled) {
if (this.writeLog) {
await this.writeLog(
this._compilePromptForLog(prompt, completionOptions),
);
}
interaction?.logItem({
kind: "startComplete",
prompt,
options: completionOptions,
});
if (this.llmRequestHook) {
this.llmRequestHook(completionOptions.model, prompt);
}
@ -640,6 +720,10 @@ export abstract class BaseLLM implements ILLM {
)) {
const content = chunk.choices[0]?.text ?? "";
completion += content;
interaction?.logItem({
kind: "chunk",
chunk: content,
});
yield content;
}
}
@ -650,14 +734,40 @@ export abstract class BaseLLM implements ILLM {
completionOptions,
)) {
completion += chunk;
interaction?.logItem({
kind: "chunk",
chunk,
});
yield chunk;
}
}
status = this._logEnd(
completionOptions.model,
prompt,
completion,
undefined,
interaction,
);
} catch (e) {
status = this._logEnd(
completionOptions.model,
prompt,
completion,
undefined,
interaction,
e,
);
throw e;
} finally {
this._logTokensGenerated(completionOptions.model, prompt, completion);
if (logEnabled && this.writeLog) {
await this.writeLog(`Completion:\n${completion}\n\n`);
if (status === "in_progress") {
this._logEnd(
completionOptions.model,
prompt,
completion,
undefined,
interaction,
"cancel",
);
}
}
@ -676,6 +786,10 @@ export abstract class BaseLLM implements ILLM {
) {
const { completionOptions, logEnabled, raw } =
this._parseCompletionOptions(options);
const interaction = logEnabled
? this.logger?.createInteractionLog()
: undefined;
let status: InteractionStatus = "in_progress";
let prompt = pruneRawPromptFromTop(
completionOptions.model,
@ -689,34 +803,65 @@ export abstract class BaseLLM implements ILLM {
}
if (logEnabled) {
if (this.writeLog) {
await this.writeLog(
this._compilePromptForLog(prompt, completionOptions),
);
}
interaction?.logItem({
kind: "startComplete",
prompt: prompt,
options: completionOptions,
});
if (this.llmRequestHook) {
this.llmRequestHook(completionOptions.model, prompt);
}
}
let completion: string;
if (this.shouldUseOpenAIAdapter("complete") && this.openaiAdapter) {
const result = await this.openaiAdapter.completionNonStream(
{
...toCompleteBody(prompt, completionOptions),
stream: false,
},
signal,
let completion: string = "";
try {
if (this.shouldUseOpenAIAdapter("complete") && this.openaiAdapter) {
const result = await this.openaiAdapter.completionNonStream(
{
...toCompleteBody(prompt, completionOptions),
stream: false,
},
signal,
);
completion = result.choices[0].text;
} else {
completion = await this._complete(prompt, signal, completionOptions);
}
interaction?.logItem({
kind: "chunk",
chunk: completion,
});
status = this._logEnd(
completionOptions.model,
prompt,
completion,
undefined,
interaction,
);
completion = result.choices[0].text;
} else {
completion = await this._complete(prompt, signal, completionOptions);
}
this._logTokensGenerated(completionOptions.model, prompt, completion);
if (logEnabled && this.writeLog) {
await this.writeLog(`Completion:\n${completion}\n\n`);
} catch (e) {
status = this._logEnd(
completionOptions.model,
prompt,
completion,
undefined,
interaction,
e,
);
throw e;
} finally {
if (status === "in_progress") {
this._logEnd(
completionOptions.model,
prompt,
completion,
undefined,
interaction,
"cancel",
);
}
}
return completion;
@ -761,6 +906,10 @@ export abstract class BaseLLM implements ILLM {
): AsyncGenerator<ChatMessage, PromptLog> {
let { completionOptions, logEnabled } =
this._parseCompletionOptions(options);
const interaction = logEnabled
? this.logger?.createInteractionLog()
: undefined;
let status: InteractionStatus = "in_progress";
completionOptions = this._modifyCompletionOptions(completionOptions);
@ -770,11 +919,11 @@ export abstract class BaseLLM implements ILLM {
? this.templateMessages(messages)
: this._formatChatMessages(messages);
if (logEnabled) {
if (this.writeLog) {
await this.writeLog(
this._compilePromptForLog(prompt, completionOptions),
);
}
interaction?.logItem({
kind: "startChat",
messages,
options: completionOptions,
});
if (this.llmRequestHook) {
this.llmRequestHook(completionOptions.model, prompt);
}
@ -791,6 +940,10 @@ export abstract class BaseLLM implements ILLM {
completionOptions,
)) {
completion += chunk;
interaction?.logItem({
kind: "chunk",
chunk: chunk,
});
yield { role: "assistant", content: chunk };
}
} else {
@ -820,6 +973,10 @@ export abstract class BaseLLM implements ILLM {
const result = fromChatCompletionChunk(chunk);
if (result) {
completion += result.content;
interaction?.logItem({
kind: "message",
message: result,
});
yield result;
}
}
@ -832,38 +989,56 @@ export abstract class BaseLLM implements ILLM {
)) {
if (chunk.role === "assistant") {
completion += chunk.content;
yield chunk;
} else if (chunk.role === "thinking") {
thinking += chunk.content;
}
if (chunk.role === "thinking") {
thinking += chunk.content;
yield chunk;
}
interaction?.logItem({
kind: "message",
message: chunk,
});
yield chunk;
}
}
}
} catch (error) {
console.log(error);
throw error;
}
this._logTokensGenerated(completionOptions.model, prompt, completion);
if (logEnabled && this.writeLog) {
if (thinking) {
await this.writeLog(`Thinking:\n${thinking}\n\n`);
status = this._logEnd(
completionOptions.model,
prompt,
completion,
thinking,
interaction,
);
} catch (e) {
status = this._logEnd(
completionOptions.model,
prompt,
completion,
thinking,
interaction,
e,
);
throw e;
} finally {
if (status === "in_progress") {
this._logEnd(
completionOptions.model,
prompt,
completion,
undefined,
interaction,
"cancel",
);
}
/*
TODO: According to: https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking
During tool use, you must pass thinking and redacted_thinking blocks back to the API,
and you must include the complete unmodified block back to the API. This is critical
for maintaining the model's reasoning flow and conversation integrity.
On the other hand, adding thinking and redacted_thinking blocks are ignored on subsequent
requests when not using tools, so it's the simplest option to always add to history.
*/
await this.writeLog(`Completion:\n${completion}\n\n`);
}
/*
TODO: According to: https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking
During tool use, you must pass thinking and redacted_thinking blocks back to the API,
and you must include the complete unmodified block back to the API. This is critical
for maintaining the model's reasoning flow and conversation integrity.
On the other hand, adding thinking and redacted_thinking blocks are ignored on subsequent
requests when not using tools, so it's the simplest option to always add to history.
*/
return {
modelTitle: this.title ?? completionOptions.model,

View File

@ -2,6 +2,7 @@ import {
BaseCompletionOptions,
IdeSettings,
ILLM,
ILLMLogger,
LLMOptions,
ModelDescription,
} from "../..";
@ -115,7 +116,7 @@ export async function llmFromDescription(
readFile: (filepath: string) => Promise<string>,
uniqueId: string,
ideSettings: IdeSettings,
writeLog: (log: string) => Promise<void>,
llmLogger: ILLMLogger,
completionOptions?: BaseCompletionOptions,
systemMessage?: string,
): Promise<BaseLLM | undefined> {
@ -145,7 +146,7 @@ export async function llmFromDescription(
cls.defaultOptions?.completionOptions?.maxTokens,
},
systemMessage,
writeLog,
logger: llmLogger,
uniqueId,
};

core/llm/logger.ts (new file, 43 lines)
View File

@ -0,0 +1,43 @@
import {
ILLMLogger,
ILLMInteractionLog,
LLMInteractionItem,
LLMInteractionItemDetails,
} from "..";
type LLMLogItemFunction = (item: LLMInteractionItem) => void;
export class LLMLogger implements ILLMLogger {
private nextId = 0;
public createInteractionLog(): LLMInteractionLog {
return new LLMInteractionLog(this, (this.nextId++).toString());
}
private logItemListeners: LLMLogItemFunction[] = [];
onLogItem(listener: LLMLogItemFunction) {
this.logItemListeners.push(listener);
}
public _logItem(item: LLMInteractionItem) {
for (const listener of this.logItemListeners) {
listener(item);
}
}
}
export class LLMInteractionLog implements ILLMInteractionLog {
constructor(
private logger: LLMLogger,
public interactionId: string,
) {}
logItem(item: LLMInteractionItemDetails) {
this.logger._logItem({
...item,
interactionId: this.interactionId,
timestamp: Date.now(),
});
}
}

View File

@ -1,6 +1,7 @@
import { ConfigHandler } from "../config/ConfigHandler";
import { ControlPlaneClient } from "../control-plane/client";
import Mock from "../llm/llms/Mock";
import { LLMLogger } from "../llm/logger";
import FileSystemIde from "../util/filesystem";
import { TEST_DIR } from "./testDir";
@ -17,7 +18,7 @@ export const testControlPlaneClient = new ControlPlaneClient(
export const testConfigHandler = new ConfigHandler(
testIde,
ideSettingsPromise,
async (text) => {},
new LLMLogger(),
Promise.resolve(undefined),
);

View File

@ -127,6 +127,11 @@
"default": false,
"markdownDescription": "Pause Continue's codebase index on start."
},
"continue.enableConsole": {
"type": "boolean",
"default": false,
"markdownDescription": "Enable a console to log and explore model inputs and outputs. It can be found in the bottom panel."
},
"continue.remoteConfigServerUrl": {
"type": "string",
"default": null,
@ -252,6 +257,13 @@
"title": "View History",
"group": "Continue"
},
{
"command": "continue.clearConsole",
"category": "Continue",
"title": "Clear Console",
"icon": "$(clear-all)",
"group": "Continue"
},
{
"command": "continue.navigateTo",
"category": "Continue",
@ -505,6 +517,11 @@
"command": "continue.openConfigPage",
"group": "navigation@4",
"when": "view == continue.continueGUIView"
},
{
"command": "continue.clearConsole",
"group": "navigation@1",
"when": "view == continue.continueConsoleView"
}
],
"editor/title": [
@ -533,6 +550,13 @@
"title": "Continue",
"icon": "media/sidebar-icon.png"
}
],
"panel": [
{
"id": "continueConsole",
"title": "Continue Console",
"icon": "$(window)"
}
]
},
"views": {
@ -544,6 +568,16 @@
"icon": "media/sidebar-icon.png",
"visibility": "visible"
}
],
"continueConsole": [
{
"type": "webview",
"id": "continue.continueConsoleView",
"name": "Continue Console",
"icon": "$(window)",
"visibility": "visible",
"when": "config.continue.enableConsole"
}
]
},
"jsonValidation": [

View File

@ -0,0 +1,172 @@
import { LLMInteractionItem } from "core";
import { EXTENSION_NAME } from "core/control-plane/env";
import { LLMLogger } from "core/llm/logger";
import * as vscode from "vscode";
import { getExtensionUri, getNonce } from "./util/vscode";
interface FromConsoleView {
type: "start" | "stop";
uuid: string;
}
export class ContinueConsoleWebviewViewProvider
implements vscode.WebviewViewProvider
{
public static readonly viewType = "continue.continueConsoleView";
resolveWebviewView(
webviewView: vscode.WebviewView,
_context: vscode.WebviewViewResolveContext,
_token: vscode.CancellationToken,
): void | Thenable<void> {
this._webviewView = webviewView;
this._webview = webviewView.webview;
this._webviewView.onDidDispose(() => {
this._webviewView = undefined;
this._webview = undefined;
});
webviewView.webview.html = this.getSidebarContent(
this.extensionContext,
webviewView,
);
this._webview.onDidReceiveMessage((message: FromConsoleView) => {
if (message.type === "start") {
this._currentUuid = message.uuid;
this._webview?.postMessage({
type: "init",
uuid: this._currentUuid,
items: this._items,
});
}
});
this._webviewView.onDidDispose(() => {
this._webview = undefined;
this._webviewView = undefined;
this._currentUuid = undefined;
});
}
private _webview?: vscode.Webview;
private _webviewView?: vscode.WebviewView;
private _currentUuid?: string;
private _items: LLMInteractionItem[] = [];
private _saveLog;
constructor(
private readonly windowId: string,
private readonly extensionContext: vscode.ExtensionContext,
private readonly llmLogger: LLMLogger,
) {
const config = vscode.workspace.getConfiguration(EXTENSION_NAME);
this._saveLog = config.get<boolean>("enableConsole");
vscode.workspace.onDidChangeConfiguration((e) => {
if (e.affectsConfiguration(`${EXTENSION_NAME}.enableConsole`)) {
const config = vscode.workspace.getConfiguration(EXTENSION_NAME);
this._saveLog = config.get<boolean>("enableConsole");
if (!this._saveLog) {
this.clearLog();
}
}
});
llmLogger.onLogItem((item) => {
if (!this._saveLog) {
return;
}
this._items.push(item);
if (this._currentUuid) {
this._webview?.postMessage({
type: "item",
uuid: this._currentUuid,
item,
});
}
});
}
clearLog() {
this._items = [];
if (this._currentUuid) {
this._webview?.postMessage({
type: "clear",
uuid: this._currentUuid,
});
}
}
private getSidebarContent(
context: vscode.ExtensionContext | undefined,
panel: vscode.WebviewPanel | vscode.WebviewView,
page: string | undefined = undefined,
): string {
const extensionUri = getExtensionUri();
let scriptUri: string;
let styleMainUri: string;
const inDevelopmentMode =
context?.extensionMode === vscode.ExtensionMode.Development;
if (!inDevelopmentMode) {
scriptUri = panel.webview
.asWebviewUri(
vscode.Uri.joinPath(extensionUri, "gui/assets/indexConsole.js"),
)
.toString();
styleMainUri = panel.webview
.asWebviewUri(
vscode.Uri.joinPath(extensionUri, "gui/assets/indexConsole.css"),
)
.toString();
} else {
scriptUri = "http://localhost:5173/src/console.tsx";
styleMainUri = "http://localhost:5173/src/indexConsole.css";
}
panel.webview.options = {
enableScripts: true,
localResourceRoots: [
vscode.Uri.joinPath(extensionUri, "gui"),
vscode.Uri.joinPath(extensionUri, "assets"),
],
enableCommandUris: true,
portMapping: [
{
webviewPort: 65433,
extensionHostPort: 65433,
},
],
};
const nonce = getNonce();
return `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<script>const vscode = acquireVsCodeApi();</script>
<link href="${styleMainUri}" rel="stylesheet">
<title>Continue</title>
</head>
<body>
<div id="root"></div>
${
inDevelopmentMode
? `<script type="module">
import RefreshRuntime from "http://localhost:5173/@react-refresh"
RefreshRuntime.injectIntoGlobalHook(window)
window.$RefreshReg$ = () => {}
window.$RefreshSig$ = () => (type) => type
window.__vite_plugin_react_preamble_installed__ = true
</script>`
: ""
}
<script type="module" nonce="${nonce}" src="${scriptUri}"></script>
</body>
</html>`;
}
}

View File

@ -27,6 +27,7 @@ import {
setupStatusBar,
StatusBarStatus,
} from "./autocomplete/statusBar";
import { ContinueConsoleWebviewViewProvider } from "./ContinueConsoleWebviewViewProvider";
import { ContinueGUIWebviewViewProvider } from "./ContinueGUIWebviewViewProvider";
import { VerticalDiffManager } from "./diff/vertical/manager";
@ -318,6 +319,7 @@ const getCommandsMap: (
ide: VsCodeIde,
extensionContext: vscode.ExtensionContext,
sidebar: ContinueGUIWebviewViewProvider,
consoleView: ContinueConsoleWebviewViewProvider,
configHandler: ConfigHandler,
verticalDiffManager: VerticalDiffManager,
continueServerClientPromise: Promise<ContinueServerClient>,
@ -329,6 +331,7 @@ const getCommandsMap: (
ide,
extensionContext,
sidebar,
consoleView,
configHandler,
verticalDiffManager,
continueServerClientPromise,
@ -672,6 +675,9 @@ const getCommandsMap: (
"If there are any grammar or spelling mistakes in this writing, fix them. Do not make other large changes to the writing.",
);
},
"continue.clearConsole": async () => {
consoleView.clearLog();
},
"continue.viewLogs": async () => {
captureCommandTelemetry("viewLogs");
vscode.commands.executeCommand("workbench.action.toggleDevTools");
@ -1088,6 +1094,7 @@ export function registerAllCommands(
ide: VsCodeIde,
extensionContext: vscode.ExtensionContext,
sidebar: ContinueGUIWebviewViewProvider,
consoleView: ContinueConsoleWebviewViewProvider,
configHandler: ConfigHandler,
verticalDiffManager: VerticalDiffManager,
continueServerClientPromise: Promise<ContinueServerClient>,
@ -1103,6 +1110,7 @@ export function registerAllCommands(
ide,
extensionContext,
sidebar,
consoleView,
configHandler,
verticalDiffManager,
continueServerClientPromise,

View File

@ -21,6 +21,7 @@ import {
StatusBarStatus,
} from "../autocomplete/statusBar";
import { registerAllCommands } from "../commands";
import { ContinueConsoleWebviewViewProvider } from "../ContinueConsoleWebviewViewProvider";
import { ContinueGUIWebviewViewProvider } from "../ContinueGUIWebviewViewProvider";
import { VerticalDiffManager } from "../diff/vertical/manager";
import { registerAllCodeLensProviders } from "../lang-server/codeLens";
@ -48,6 +49,7 @@ export class VsCodeExtension {
private configHandler: ConfigHandler;
private extensionContext: vscode.ExtensionContext;
private ide: VsCodeIde;
private consoleView: ContinueConsoleWebviewViewProvider;
private sidebar: ContinueGUIWebviewViewProvider;
private windowId: string;
private editDecorationManager: EditDecorationManager;
@ -106,10 +108,6 @@ export class VsCodeExtension {
);
resolveWebviewProtocol(this.sidebar.webviewProtocol);
// Config Handler with output channel
const outputChannel = vscode.window.createOutputChannel(
"Continue - LLM Prompt/Completion",
);
const inProcessMessenger = new InProcessMessenger<
ToCoreProtocol,
FromCoreProtocol
@ -125,19 +123,12 @@ export class VsCodeExtension {
this.editDecorationManager,
);
this.core = new Core(inProcessMessenger, this.ide, async (log: string) => {
outputChannel.appendLine(
"==========================================================================",
);
outputChannel.appendLine(
"==========================================================================",
);
outputChannel.append(log);
});
this.core = new Core(inProcessMessenger, this.ide);
this.configHandler = this.core.configHandler;
resolveConfigHandler?.(this.configHandler);
this.configHandler.loadConfig();
this.verticalDiffManager = new VerticalDiffManager(
this.configHandler,
this.sidebar.webviewProtocol,
@ -244,12 +235,27 @@ export class VsCodeExtension {
this.fileSearch,
);
// LLM Log view
this.consoleView = new ContinueConsoleWebviewViewProvider(
this.windowId,
this.extensionContext,
this.core.llmLogger,
);
context.subscriptions.push(
vscode.window.registerWebviewViewProvider(
"continue.continueConsoleView",
this.consoleView,
),
);
// Commands
registerAllCommands(
context,
this.ide,
context,
this.sidebar,
this.consoleView,
this.configHandler,
this.verticalDiffManager,
this.core.continueServerClientPromise,

gui/indexConsole.html (new file, 13 lines)
View File

@ -0,0 +1,13 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/play_button.png" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Continue</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/console.tsx"></script>
</body>
</html>

View File

@ -0,0 +1,116 @@
import { useEffect, useRef } from "react";
import { LLMInteraction } from "../../hooks/useLLMLog";
import useLLMSummary from "../../hooks/useLLMSummary";
import End from "./End";
import ResultGroup from "./ResultGroup";
import Start from "./Start";
import StatusIcon from "./StatusIcon";
export interface DetailsProps {
interaction: LLMInteraction;
}
function renderCell(children: React.ReactNode) {
return (
<div className="border-0 border-r-2 border-solid border-[color:var(--vscode-panel-border)] pl-2 pr-2 text-sm">
{children}
</div>
);
}
/**
* A cell of statistics at the top of the details view
*/
function Cell({
label,
value,
format,
}: {
label: string;
value: any;
format?: (value: any) => string;
}) {
return renderCell(
value != undefined ? `${label}: ${format ? format(value) : value}` : "",
);
}
/**
* A cell of statistics at the top of the details view with custom content
*/
function CustomCell({ children }: { children: React.ReactNode }) {
return renderCell(children);
}
function formatSeconds(milliseconds: number) {
return (milliseconds / 1000).toFixed(2) + "s";
}
export default function Details({ interaction }: DetailsProps) {
const scrollTop = useRef<HTMLDivElement>(null);
const lastResult = useRef<any>(null);
const summary = useLLMSummary(interaction);
useEffect(() => {
if (interaction.end) {
return;
}
const last = interaction.results[interaction.results.length - 1];
if (last != lastResult.current) {
lastResult.current = last;
const lastChild = scrollTop.current?.lastChild;
if (lastChild) {
(lastChild as HTMLElement).scrollIntoView({
behavior: "auto",
block: "end",
});
}
}
});
return (
<div className="m-0 flex min-w-0 flex-1 shrink grow flex-col">
<div className="shrink-0 text-base">
<div className="columns-3 gap-0 border-0 border-b-2 border-solid border-[color:var(--vscode-panel-border)] p-0">
<Cell label="Type" value={summary.type}></Cell>
<CustomCell>
Result: <StatusIcon interaction={interaction}></StatusIcon>
{summary.result}
</CustomCell>
</div>
<div className="columns-3 gap-0 border-0 border-b-2 border-solid border-[color:var(--vscode-panel-border)] p-0">
<Cell label="Prompt Tokens" value={summary.promptTokens}></Cell>
<Cell label="Generated Tokens" value={summary.generatedTokens}></Cell>
<Cell label="ThinkingTokens" value={summary.thinkingTokens}></Cell>
</div>
<div className="columns-3 gap-0 border-0 border-b-2 border-solid border-[color:var(--vscode-panel-border)] p-0">
<Cell
label="Total Time"
value={summary.totalTime}
format={formatSeconds}
></Cell>
<Cell
label="To First Token"
value={summary.toFirstToken}
format={formatSeconds}
></Cell>
<Cell
label="Tokens/s"
value={summary.tokensPerSecond}
format={(v: number) => v.toFixed(1)}
></Cell>
</div>
</div>
<div ref={scrollTop} className="grow overflow-auto">
{interaction.start ? <Start item={interaction.start}></Start> : ""}
<div className="whitespace-pre-wrap p-2">
{interaction.results.map((group, i) => {
return <ResultGroup key={i} group={group}></ResultGroup>;
})}
</div>
{interaction.end ? <End item={interaction.end}></End> : ""}
</div>
</div>
);
}

View File

@ -0,0 +1,37 @@
import {
LLMInteractionCancel,
LLMInteractionError,
LLMInteractionSuccess,
} from "core";
import Expander from "./Expander";
import Message from "./Message";
export interface EndProps {
item: LLMInteractionSuccess | LLMInteractionError | LLMInteractionCancel;
}
export default function End({ item }: EndProps) {
// <div className="border-0 border-b-2 border-solid border-[color:var(--vscode-panel-border)]">
switch (item.kind) {
case "success":
return <></>;
case "error":
return (
<div>
<span className="text-[color:var(--vscode-statusBarItem-errorForeground) m-0.5 inline-block rounded-sm bg-[color:var(--vscode-statusBarItem-errorBackground)] p-0.5">
Error
</span>
{item.message}
</div>
);
break;
case "cancel":
return (
<div>
<span className="text-[color:var(--vscode-statusBarItem-warningForeground) m-0.5 inline-block rounded-sm bg-[color:var(--vscode-statusBarItem-warningBackground)] p-0.5">
Cancelled
</span>
</div>
);
}
}

View File

@ -0,0 +1,25 @@
import { ChevronDownIcon, ChevronRightIcon } from "@heroicons/react/24/outline";
import { ReactNode, useState } from "react";
export interface ExpanderProps {
label: string;
children: ReactNode;
}
export default function Expander({ label, children }: ExpanderProps) {
const [expanded, setExpanded] = useState(false);
return (
<div>
<div className="text-base" onClick={() => setExpanded(!expanded)}>
{expanded ? (
<ChevronDownIcon className="h-[16px] w-[16px]" />
) : (
<ChevronRightIcon className="h-[16px] w-[16px]" />
)}{" "}
<span className="align-top text-sm font-bold">{label}</span>
</div>
{expanded && <div>{children}</div>}
</div>
);
}

View File

@ -0,0 +1,30 @@
import { useState } from "react";
import useLLMLog from "../../hooks/useLLMLog";
import Details from "./Details";
import List from "./List";
export default function Layout() {
const llmLog = useLLMLog();
const [selectedId, setSelectedId] = useState<string | undefined>(undefined);
const interaction = selectedId
? llmLog.interactions.get(selectedId)
: undefined;
return llmLog.loading ? (
<div>Loading...</div>
) : (
<div className="flex h-full w-full">
<List
llmLog={llmLog}
onClickInteraction={(interactionId) => {
setSelectedId(interactionId);
}}
></List>
{interaction && (
<Details key="{selectedId}" interaction={interaction}></Details>
)}
</div>
);
}

View File

@ -0,0 +1,53 @@
import { useEffect, useRef, useState } from "react";
import { LLMLog } from "../../hooks/useLLMLog";
import ListItem from "./ListItem";
export interface ListProps {
llmLog: LLMLog;
onClickInteraction: (interactionId: string) => void;
}
export default function List({ llmLog, onClickInteraction }: ListProps) {
const topRef = useRef<HTMLUListElement>(null);
const [selectedId, setSelectedId] = useState<string | undefined>(undefined);
const lastSize = useRef(0);
useEffect(() => {
if (llmLog.order.length != lastSize.current) {
setSelectedId(llmLog.order[llmLog.order.length - 1]);
onClickInteraction(llmLog.order[llmLog.order.length - 1]);
lastSize.current = llmLog.order.length;
const lastChild = topRef.current?.lastChild;
if (lastChild) {
(lastChild as HTMLElement).scrollIntoView({
behavior: "smooth",
block: "end",
});
}
}
});
return (
<ul
tabIndex={1}
ref={topRef}
className="group m-0 w-[150px] flex-none list-none overflow-auto border-0 border-r-2 border-solid border-[color:var(--vscode-panel-border)] p-0"
>
{llmLog.order.map((id) => (
<ListItem
key={id}
interactionId={id}
interaction={llmLog.interactions.get(id)!}
onClickInteraction={(interactionId) => {
topRef.current?.focus();
setSelectedId(interactionId);
onClickInteraction(interactionId);
}}
selected={id == selectedId}
></ListItem>
))}
</ul>
);
}

View File

@ -0,0 +1,55 @@
import { LLMInteraction } from "../../hooks/useLLMLog";
import useLLMSummary from "../../hooks/useLLMSummary";
import StatusIcon from "./StatusIcon";
export interface ListItemProps {
interactionId: string;
interaction: LLMInteraction;
selected: boolean;
onClickInteraction: (interactionId: string) => void;
}
function formatTimestamp(timestamp: number) {
const date = new Date(timestamp);
const hours = date.getHours().toString().padStart(2, "0");
const minutes = date.getMinutes().toString().padStart(2, "0");
const seconds = date.getSeconds();
const milliseconds = date.getMilliseconds();
// Format seconds with one decimal place
const secondsFormatted = `${seconds}.${Math.floor(milliseconds / 100)}`;
return `${hours}:${minutes}:${secondsFormatted.padStart(4, "0")}`;
}
export default function ListItem({
interactionId,
interaction,
onClickInteraction,
selected,
}: ListItemProps) {
const summary = useLLMSummary(interaction);
return (
<li
className={
"w-full cursor-pointer pb-[3px] pl-[4px] pr-[4px] pt-[3px] " +
(selected
? "bg-[color:var(--vscode-list-inactiveSelectionBackground)]" +
" text-[color:var(--vscode-list-inctiveSelectionForeground)]" +
" group-focus-within:bg-[color:var(--vscode-list-activeSelectionBackground)]" +
" group-focus-within:text-[color:var(--vscode-list-activeSelectionForeground)]"
: "hover:bg-[color:var(--vscode-list-inactiveSelectionBackground)]")
}
key={interactionId}
onClick={() => onClickInteraction(interactionId)}
>
<StatusIcon interaction={interaction}></StatusIcon>{" "}
<span className="inline-block w-[70px]">
{interaction.start ? formatTimestamp(interaction.start.timestamp) : ""}
</span>
<span className="inline-block">{summary.type}</span>
</li>
);
}

View File

@ -0,0 +1,106 @@
import {
AssistantChatMessage,
ChatMessage,
ThinkingChatMessage,
UserChatMessage,
} from "core";
import { memo } from "react";
export interface MessageProps {
message: ChatMessage;
}
function renderMessageText(text: string) {
return <span className="whitespace-pre-wrap">{text}</span>;
}
export function renderMessageRole(role: ChatMessage["role"]) {
return (
<div>
<span className="bg-[color:var(--vscode-list-inactiveSelectionBackground)] text-xs">
{role}
</span>
</div>
);
}
function renderMessageContent(
message: AssistantChatMessage | UserChatMessage | ThinkingChatMessage,
) {
if (typeof message.content == "string") {
return renderMessageText(message.content);
} else {
return message.content.map((part) => {
if (part.type == "text") {
return renderMessageText(part.text);
} else {
return <div>Image: {part.imageUrl.url}</div>;
}
});
}
}
export function renderMessage(message: ChatMessage, includeRole: boolean) {
switch (message.role) {
case "assistant":
return (
<>
{includeRole ? renderMessageRole(message.role) : ""}
{message.toolCalls
? message.toolCalls.map((toolCall) => (
<pre>Tool call: {JSON.stringify(toolCall, undefined, 2)}</pre>
))
: ""}
{renderMessageContent(message)}
</>
);
break;
case "thinking":
return (
<>
{includeRole ? renderMessageRole(message.role) : ""}
{message.toolCalls
? message.toolCalls.map((toolCall) => (
<pre>Tool call: {JSON.stringify(toolCall, undefined, 2)}</pre>
))
: ""}
{renderMessageContent(message)}
{message.redactedThinking && (
<pre>Redacted Thinking: {message.redactedThinking}</pre>
)}
{message.signature && <pre>Signature: {message.signature}</pre>}
</>
);
break;
case "user":
return (
<>
{includeRole ? renderMessageRole(message.role) : ""}
{renderMessageContent(message)}
</>
);
break;
case "system":
return (
<>
{includeRole ? renderMessageRole(message.role) : ""}
{renderMessageText(message.content)}
</>
);
case "tool":
return (
<>
{includeRole ? renderMessageRole(message.role) : ""}
<pre>Tool Call ID: {message.toolCallId}</pre>
{renderMessageText(message.content)}
</>
);
break;
}
}
const Message = memo(function Message({ message }: MessageProps) {
return renderMessage(message, true);
});
export default Message;

View File

@ -0,0 +1,37 @@
import { memo } from "react";
import { LLMResult } from "../../hooks/useLLMLog";
import { renderMessage } from "./Message";
interface ResultProps {
result: LLMResult;
prevResult: LLMResult | undefined;
}
const Result = memo(function Result({ result, prevResult }: ResultProps) {
switch (result.kind) {
case "chunk":
return <span>{result.chunk}</span>;
break;
case "message":
switch (result.message.role) {
case "assistant":
case "thinking":
const includeRole = !(
prevResult?.kind === "message" &&
prevResult.message.role === result.message.role
);
return renderMessage(result.message, includeRole);
default:
// We only expect AssistantChatMessages and ThinkingChatMessages in the
// reply from the LLM, but render anything else visibly if it does occur.
return (
<div className="border-[color:var(--vscode-panel-border) border-2 border-solid p-1">
{renderMessage(result.message, true)}
</div>
);
}
}
});
export default Result;

View File

@ -0,0 +1,19 @@
import { memo } from "react";
import { LLMResult } from "../../hooks/useLLMLog";
import Result from "./Result";
interface ResultGroupProps {
group: LLMResult[];
}
const ResultGroup = memo(function ResultGroup({ group }: ResultGroupProps) {
return (
<>
{group.map((result, i) => (
<Result key={i} result={result} prevResult={group[i - 1]}></Result>
))}
</>
);
});
export default ResultGroup;

View File

@ -0,0 +1,73 @@
import {
LLMInteractionStartChat,
LLMInteractionStartComplete,
LLMInteractionStartFim,
} from "core";
import Expander from "./Expander";
import Message from "./Message";
export interface StartProps {
item:
| LLMInteractionStartChat
| LLMInteractionStartComplete
| LLMInteractionStartFim;
}
export default function Start({ item }: StartProps) {
return (
<div className="border-0 border-b-2 border-solid border-[color:var(--vscode-panel-border)] p-1">
{(() => {
switch (item.kind) {
case "startChat":
return (
<>
<Expander label="Prompt">
<div className="p-1">
{item.messages.map((message, i) => (
<Message key={i} message={message}></Message>
))}
</div>
</Expander>
<Expander label="Options">
<pre className="m-0">
{JSON.stringify(item.options, undefined, 2)}
</pre>
</Expander>
</>
);
case "startComplete":
return (
<>
<Expander label="Prompt">
<pre className="m-0">{item.prompt}</pre>
</Expander>
<Expander label="Options">
<pre className="m-0">
{JSON.stringify(item.options, undefined, 2)}
</pre>
</Expander>
</>
);
case "startFim":
return (
<>
<Expander label="Prefix">
<pre className="m-0">{item.prefix}</pre>
</Expander>
<Expander label="Suffix">
<pre className="m-0">{item.suffix}</pre>
</Expander>
<Expander label="Options">
<pre className="m-0">
{JSON.stringify(item.options, undefined, 2)}
</pre>
</Expander>
</>
);
}
})()}
</div>
);
}
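Roughly, the startChat items this component renders look like the sketch below; the values are illustrative and the field list is not necessarily exhaustive for LLMInteractionStartChat:

// Illustration only; the real type comes from core.
const exampleStart = {
  kind: "startChat",
  interactionId: "8f2d", // illustrative id
  timestamp: 1739300000000, // epoch milliseconds
  messages: [{ role: "user", content: "Hello" }],
  options: { model: "gpt-4o" }, // abbreviated completion options
};
// Rendered as: <Start item={exampleStart as LLMInteractionStartChat} />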

View File

@ -0,0 +1,34 @@
import {
CheckIcon,
EllipsisHorizontalIcon,
StopCircleIcon,
XCircleIcon,
} from "@heroicons/react/24/outline";
import { LLMInteraction } from "../../hooks/useLLMLog";
export interface StatusIconProps {
interaction: LLMInteraction;
}
export default function StatusIcon({ interaction }: StatusIconProps) {
if (interaction.end) {
switch (interaction.end.kind) {
case "success":
return (
<CheckIcon className="relative top-[2px] -mt-[2px] h-[16px] w-[16px] pr-[2px] text-[color:var(--vscode-charts-green)]" />
);
case "cancel":
return (
<StopCircleIcon className="relative top-[2px] -mt-[2px] h-[16px] w-[16px] pr-[2px] text-[color:var(--vscode-list-warningForeground)]" />
);
case "error":
return (
<XCircleIcon className="relative top-[2px] -mt-[2px] h-[16px] w-[16px] pr-[2px] text-[color:var(--vscode-list-errorForeground)]" />
);
}
} else {
return (
<EllipsisHorizontalIcon className="relative top-[2px] -mt-[2px] h-[16px] w-[16px] pr-[2px]" />
);
}
}

12
gui/src/console.tsx Normal file
View File

@ -0,0 +1,12 @@
import React from "react";
import ReactDOM from "react-dom/client";
import Layout from "./components/console/Layout";
import "./indexConsole.css";
(async () => {
ReactDOM.createRoot(document.getElementById("root") as HTMLElement).render(
<React.StrictMode>
<Layout></Layout>
</React.StrictMode>,
);
})();

225
gui/src/hooks/useLLMLog.ts Normal file
View File

@ -0,0 +1,225 @@
import {
LLMInteractionCancel,
LLMInteractionChunk,
LLMInteractionError,
LLMInteractionItem,
LLMInteractionMessage,
LLMInteractionStartChat,
LLMInteractionStartComplete,
LLMInteractionStartFim,
LLMInteractionSuccess,
} from "core";
import { useEffect, useReducer } from "react";
declare const vscode: any;
export type LLMResult = LLMInteractionMessage | LLMInteractionChunk;
const MAX_GROUP_LENGTH = 32;
interface ToConsoleViewInit {
type: "init";
uuid: string;
items: LLMInteractionItem[];
}
interface ToConsoleViewItem {
type: "item";
uuid: string;
item: LLMInteractionItem;
}
interface ToConsoleViewClear {
type: "clear";
uuid: string;
}
type ToConsoleView = ToConsoleViewInit | ToConsoleViewItem | ToConsoleViewClear;
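// Example "item" message as posted from the extension host (values illustrative):
//   {
//     type: "item",
//     uuid: "<generation uuid echoed from the webview's start message>",
//     item: { kind: "chunk", interactionId: "8f2d", timestamp: 1739300000000, chunk: "Hel" },
//   }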
/**
* Represents the linear list of LLMInteractionItems for one interaction,
* transformed into a form that is more convenient for rendering.
*/
export interface LLMInteraction {
start?:
| LLMInteractionStartChat
| LLMInteractionStartComplete
| LLMInteractionStartFim;
// We use an array-of-arrays for efficiency when rendering a streamed result
// with lots and lots of separate tokens; instead of having a linear list
// of 1024 Result components, we have 32 ResultGroup components each rendering
// 32 Results.
//
// Inline content can split between one group and the next - we'll let the
// browser engine sort that out.
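// For example, 70 streamed chunks become groups of 32, 32 and 6; earlier
// groups keep their identity, so memoized ResultGroups for them don't
// re-render as new chunks are appended to the open last group.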
results: LLMResult[][];
end?: LLMInteractionSuccess | LLMInteractionError | LLMInteractionCancel;
}
export interface LLMLog {
loading: boolean;
interactions: Map<string, LLMInteraction>;
order: string[];
}
/**
* Appends a new result item (chunk or message) to an interaction.
* The interaction must already have been copied with
* copyInteractionForMutation().
*/
function appendItemToInteractionResult(
interaction: LLMInteraction,
item: LLMInteractionMessage | LLMInteractionChunk,
) {
let lastGroup = interaction.results[interaction.results.length - 1];
if (lastGroup == undefined || lastGroup.length == MAX_GROUP_LENGTH) {
interaction.results.push([item]);
} else {
lastGroup.push(item);
}
}
/**
* Makes a copy of an interaction that can be mutated without changing
* any objects/arrays that are part of the old state. We always make
* a copy of the last (open) result group, so that we can simply append
* to it; this is very slightly inefficient for the case where we're
* handling a single end item with no results, but makes things simpler.
*/
function copyInteractionForMutation(oldInteraction: LLMInteraction) {
const oldResults = oldInteraction.results;
const oldLastGroup = oldResults[oldResults.length - 1];
let newResults;
if (oldLastGroup == undefined || oldLastGroup.length == MAX_GROUP_LENGTH) {
newResults = [...oldResults];
} else {
newResults = oldResults.slice(0, -1);
newResults.push([...oldLastGroup]);
}
return { ...oldInteraction, results: newResults };
}
function appendItemsToLLMLog(
oldLog: LLMLog,
items: LLMInteractionItem[],
): LLMLog {
const oldInteractions = oldLog.interactions;
const newInteractions: Map<string, LLMInteraction> = new Map();
let order = oldLog.order;
let interactionsAdded = false;
// Add the new items to the log, making mutable copies of old
// LLMInteraction as necessary
for (const item of items) {
let interaction = newInteractions.get(item.interactionId);
if (interaction === undefined) {
const oldInteraction = oldInteractions.get(item.interactionId);
if (oldInteraction) {
interaction = copyInteractionForMutation(oldInteraction);
} else {
interaction = {
results: [],
};
if (interactionsAdded) {
order.push(item.interactionId);
} else {
order = [...order, item.interactionId];
interactionsAdded = true;
}
}
newInteractions.set(item.interactionId, interaction);
}
switch (item.kind) {
case "startChat":
case "startComplete":
case "startFim":
interaction.start = item;
break;
case "chunk":
case "message":
appendItemToInteractionResult(interaction, item);
break;
case "success":
case "error":
case "cancel":
interaction.end = item;
break;
}
}
// Copy over unchanged interactions
for (const interactionId of oldInteractions.keys()) {
if (!newInteractions.has(interactionId)) {
newInteractions.set(interactionId, oldInteractions.get(interactionId)!);
}
}
return {
loading: false,
interactions: newInteractions,
order,
};
}
/**
* Hook to accumulate log data structures based on messages passed
* from the core. Note that each call site will create an independent
* data structure, so this should only be used once, in a top-level
* component.
* @returns the current log data structure.
*/
export default function useLLMLog() {
const [llmLog, dispatchLlmLog] = useReducer(
(llmLog: LLMLog, message: ToConsoleView) => {
switch (message.type) {
case "init":
return appendItemsToLLMLog(llmLog, message.items);
case "item":
return appendItemsToLLMLog(llmLog, [message.item]);
case "clear":
return {
loading: false,
interactions: new Map(),
order: [],
};
}
},
{
loading: true,
interactions: new Map(),
order: [],
},
);
useEffect(function () {
// The uuid here marks the "generation" when the webview is
// reloaded, so we don't get confused if there are inflight
// messages from the previous generation. In particular, this
// avoids problems when React.StrictMode runs this effect
// twice - we don't want to process two "init" messages.
const uuid = crypto.randomUUID();
const onMessage = (event: MessageEvent<ToConsoleView>) => {
if (event.data.uuid !== uuid) {
return;
}
dispatchLlmLog(event.data);
};
window.addEventListener("message", onMessage);
vscode.postMessage({ type: "start", uuid });
return () => {
vscode.postMessage({ type: "stop", uuid });
window.removeEventListener("message", onMessage);
};
}, []);
return llmLog;
}
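A minimal sketch of a top-level consumer (independent of the actual console Layout component in this commit, and using only the LLMLog fields defined above):

import useLLMLog from "./useLLMLog";

function ConsoleSketch() {
  const llmLog = useLLMLog();
  if (llmLog.loading) {
    return <div>Waiting for log data…</div>;
  }
  return (
    <ul>
      {llmLog.order.map((id) => (
        <li key={id}>
          {llmLog.interactions.get(id)?.start?.kind ?? "pending"}
        </li>
      ))}
    </ul>
  );
}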

View File

@ -0,0 +1,76 @@
import { useMemo } from "react";
import { LLMInteraction } from "./useLLMLog";
/**
* Hook returning memoized information about a single logged prompt/response
* query to the LLM.
*/
export default function useLLMSummary(interaction: LLMInteraction) {
return useMemo(() => {
if (interaction.start == undefined) {
return {
result: "",
type: "",
};
}
const type = interaction.start.kind.slice(5);
let result;
switch (interaction.end?.kind) {
case "cancel":
result = "Cancelled";
break;
case "error":
result = "Error";
break;
case "success":
result = "Success";
break;
case undefined:
result = "";
}
let totalTime,
toFirstToken,
tokensPerSecond,
promptTokens,
generatedTokens,
thinkingTokens;
if (interaction.results.length > 0) {
const firstGroup = interaction.results[0];
const firstItem = firstGroup ? firstGroup[0] : undefined;
toFirstToken = firstItem
? firstItem.timestamp - interaction.start.timestamp
: undefined;
}
if (interaction.end != undefined) {
totalTime = interaction.end.timestamp - interaction.start.timestamp;
promptTokens = interaction.end.promptTokens;
generatedTokens = interaction.end.generatedTokens;
thinkingTokens = interaction.end.thinkingTokens;
if (toFirstToken != undefined) {
tokensPerSecond =
(generatedTokens + thinkingTokens) /
((totalTime - toFirstToken) / 1000);
}
} else {
const lastGroup = interaction.results[interaction.results.length - 1];
const lastItem = lastGroup ? lastGroup[lastGroup.length - 1] : undefined;
totalTime = lastItem
? lastItem.timestamp - interaction.start.timestamp
: undefined;
}
return {
result,
type,
totalTime,
toFirstToken,
tokensPerSecond,
promptTokens,
generatedTokens,
thinkingTokens,
};
}, [interaction]);
}
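A sketch of formatting these fields for display, assuming (as the timestamp arithmetic above implies) that totalTime and toFirstToken are in milliseconds; the component name and import paths are hypothetical:

import { LLMInteraction } from "./useLLMLog";
import useLLMSummary from "./useLLMSummary";

function SummaryLine({ interaction }: { interaction: LLMInteraction }) {
  const summary = useLLMSummary(interaction);
  return (
    <span>
      {summary.type} {summary.result}
      {summary.totalTime != undefined &&
        ` · ${(summary.totalTime / 1000).toFixed(1)}s`}
      {summary.tokensPerSecond != undefined &&
        ` · ${summary.tokensPerSecond.toFixed(1)} tok/s`}
    </span>
  );
}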

9
gui/src/indexConsole.css Normal file
View File

@ -0,0 +1,9 @@
@tailwind base;
@tailwind components;
@tailwind utilities;
html,
body,
#root {
height: 100%;
}

View File

@ -1,4 +1,5 @@
import react from "@vitejs/plugin-react-swc";
import { resolve } from "path";
import tailwindcss from "tailwindcss";
import { defineConfig } from "vitest/config";
@ -9,6 +10,10 @@ export default defineConfig({
// Change the output .js filename to not include a hash
rollupOptions: {
// external: ["vscode-webview"],
input: {
index: resolve(__dirname, "index.html"),
indexConsole: resolve(__dirname, "indexConsole.html"),
},
output: {
entryFileNames: `assets/[name].js`,
chunkFileNames: `assets/[name].js`,