Merge pull request #4974 from continuedev/tomasz/if-rules

Tomasz/if rules
Nate Sesti 2025-04-03 20:51:27 -07:00 committed by GitHub
commit ae59e597da
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
22 changed files with 481 additions and 147 deletions

View File

@ -48,7 +48,7 @@
"@aws-sdk/client-sagemaker-runtime": "^3.777.0",
"@aws-sdk/credential-providers": "^3.778.0",
"@continuedev/config-types": "^1.0.13",
"@continuedev/config-yaml": "^1.0.67",
"@continuedev/config-yaml": "^1.0.71",
"@continuedev/fetch": "^1.0.4",
"@continuedev/llm-info": "^1.0.8",
"@continuedev/openai-adapters": "^1.0.18",
@ -67,6 +67,7 @@
"diff": "^7.0.0",
"dotenv": "^16.4.5",
"fastest-levenshtein": "^1.0.16",
"filtrex": "^3.1.0",
"follow-redirects": "^1.15.5",
"google-auth-library": "^9.14.2",
"handlebars": "^4.7.8",

View File

@ -88,7 +88,6 @@ export class RootPathContextService {
default:
// const type = node.type;
// console.log(getSyntaxTreeString(node));
// debugger;
query = await getQueryForFile(
filepath,
@ -165,7 +164,6 @@ export class RootPathContextService {
)) {
const key = RootPathContextService.keyFromNode(parentKey, astNode);
// const type = astNode.type;
// debugger;
const foundInCache = this.cache.get(key);
const newSnippets =

View File

@ -127,7 +127,7 @@ async function configYamlToContinueConfig(
models: [],
tools: [...allTools],
mcpServerStatuses: [],
systemMessage: config.rules?.join("\n"),
systemMessage: undefined,
experimental: {
modelContextProtocolServers: config.mcpServers?.map((mcpServer) => ({
transport: {
@ -211,15 +211,15 @@ async function configYamlToContinueConfig(
for (const model of config.models ?? []) {
model.roles = model.roles ?? modelsArrayRoles; // Default to all 4 chat-esque roles if not specified
try {
const llms = await llmsFromModelConfig(
const llms = await llmsFromModelConfig({
model,
ide,
uniqueId,
ideSettings,
writeLog,
platformConfigMetadata,
continueConfig.systemMessage,
);
config: continueConfig,
});
if (modelsArrayRoles.some((role) => model.roles?.includes(role))) {
continueConfig.models.push(...llms);

View File

@ -1,6 +1,6 @@
import { ModelConfig } from "@continuedev/config-yaml";
import { IDE, IdeSettings, LLMOptions } from "../..";
import { ContinueConfig, IDE, IdeSettings, LLMOptions } from "../..";
import { BaseLLM } from "../../llm";
import { LLMClasses } from "../../llm/llms";
import { PlatformConfigMetadata } from "../profile/PlatformProfileLoader";
@ -13,22 +13,29 @@ function getModelClass(
return LLMClasses.find((llm) => llm.providerName === model.provider);
}
function getContinueProxyModelName(
ownerSlug: string,
packageSlug: string,
model: ModelConfig,
): string {
return `${ownerSlug}/${packageSlug}/${model.provider}/${model.model}`;
}
// function getContinueProxyModelName(
// ownerSlug: string,
// packageSlug: string,
// model: ModelConfig,
// ): string {
// return `${ownerSlug}/${packageSlug}/${model.provider}/${model.model}`;
// }
async function modelConfigToBaseLLM(
model: ModelConfig,
uniqueId: string,
ideSettings: IdeSettings,
writeLog: (log: string) => Promise<void>,
platformConfigMetadata: PlatformConfigMetadata | undefined,
systemMessage: string | undefined,
): Promise<BaseLLM | undefined> {
async function modelConfigToBaseLLM({
model,
uniqueId,
ideSettings,
writeLog,
platformConfigMetadata,
config,
}: {
model: ModelConfig;
uniqueId: string;
ideSettings: IdeSettings;
writeLog: (log: string) => Promise<void>;
platformConfigMetadata: PlatformConfigMetadata | undefined;
config: ContinueConfig;
}): Promise<BaseLLM | undefined> {
const cls = getModelClass(model);
if (!cls) {
@ -50,7 +57,8 @@ async function modelConfigToBaseLLM(
writeLog,
uniqueId,
title: model.name,
systemMessage,
systemMessage: config.systemMessage,
rules: config.rules,
promptTemplates: model.promptTemplates,
capabilities: {
tools: model.capabilities?.includes("tool_use"),
@ -127,16 +135,25 @@ async function modelConfigToBaseLLM(
return llm;
}
async function autodetectModels(
llm: BaseLLM,
model: ModelConfig,
ide: IDE,
uniqueId: string,
ideSettings: IdeSettings,
writeLog: (log: string) => Promise<void>,
platformConfigMetadata: PlatformConfigMetadata | undefined,
systemMessage: string | undefined,
): Promise<BaseLLM[]> {
async function autodetectModels({
llm,
model,
ide,
uniqueId,
ideSettings,
writeLog,
platformConfigMetadata,
config,
}: {
llm: BaseLLM;
model: ModelConfig;
ide: IDE;
uniqueId: string;
ideSettings: IdeSettings;
writeLog: (log: string) => Promise<void>;
platformConfigMetadata: PlatformConfigMetadata | undefined;
config: ContinueConfig;
}): Promise<BaseLLM[]> {
try {
const modelNames = await llm.listModels();
const detectedModels = await Promise.all(
@ -146,8 +163,8 @@ async function autodetectModels(
return undefined;
}
return await modelConfigToBaseLLM(
{
return await modelConfigToBaseLLM({
model: {
...model,
model: modelName,
name: modelName,
@ -156,8 +173,8 @@ async function autodetectModels(
ideSettings,
writeLog,
platformConfigMetadata,
systemMessage,
);
config,
});
}),
);
return detectedModels.filter((x) => typeof x !== "undefined") as BaseLLM[];
@ -167,38 +184,46 @@ async function autodetectModels(
}
}
export async function llmsFromModelConfig(
model: ModelConfig,
ide: IDE,
uniqueId: string,
ideSettings: IdeSettings,
writeLog: (log: string) => Promise<void>,
platformConfigMetadata: PlatformConfigMetadata | undefined,
systemMessage: string | undefined,
): Promise<BaseLLM[]> {
const baseLlm = await modelConfigToBaseLLM(
export async function llmsFromModelConfig({
model,
ide,
uniqueId,
ideSettings,
writeLog,
platformConfigMetadata,
config,
}: {
model: ModelConfig;
ide: IDE;
uniqueId: string;
ideSettings: IdeSettings;
writeLog: (log: string) => Promise<void>;
platformConfigMetadata: PlatformConfigMetadata | undefined;
config: ContinueConfig;
}): Promise<BaseLLM[]> {
const baseLlm = await modelConfigToBaseLLM({
model,
uniqueId,
ideSettings,
writeLog,
platformConfigMetadata,
systemMessage,
);
config,
});
if (!baseLlm) {
return [];
}
if (model.model === AUTODETECT) {
const models = await autodetectModels(
baseLlm,
const models = await autodetectModels({
llm: baseLlm,
model,
ide,
uniqueId,
ideSettings,
writeLog,
platformConfigMetadata,
systemMessage,
);
config,
});
return models;
} else {
return [baseLlm];

7
core/index.d.ts vendored
View File

@ -1,4 +1,4 @@
import { DataDestination, ModelRole } from "@continuedev/config-yaml";
import { DataDestination, ModelRole, Rule } from "@continuedev/config-yaml";
import Parser from "web-tree-sitter";
import { GetGhTokenArgs } from "./protocol/ide";
declare global {
@ -497,6 +497,7 @@ export interface LLMOptions {
templateMessages?: (messages: ChatMessage[]) => string;
writeLog?: (str: string) => Promise<void>;
llmRequestHook?: (model: string, prompt: string) => any;
rules?: Rule[];
apiKey?: string;
// continueProperties
@ -1312,7 +1313,7 @@ export interface ContinueConfig {
docs?: SiteIndexingConfig[];
tools: Tool[];
mcpServerStatuses: MCPServerStatus[];
rules?: string[];
rules?: Rule[];
modelsByRole: Record<ModelRole, ILLM[]>;
selectedModelByRole: Record<ModelRole, ILLM | null>;
data?: DataDestination[];
@ -1335,7 +1336,7 @@ export interface BrowserSerializedContinueConfig {
docs?: SiteIndexingConfig[];
tools: Tool[];
mcpServerStatuses: MCPServerStatus[];
rules?: string[];
rules?: Rule[];
usePlatform: boolean;
tabAutocompleteOptions?: Partial<TabAutocompleteOptions>;
modelsByRole: Record<ModelRole, ModelDescription[]>;

View File

@ -1,3 +1,4 @@
// @ts-nocheck
// Generated by continue
import { ChatMessage, MessagePart } from "../index.js";
import {

View File

@ -1,7 +1,13 @@
import { Tiktoken, encodingForModel as _encodingForModel } from "js-tiktoken";
import { ChatMessage, MessageContent, MessagePart } from "../index.js";
import {
ChatMessage,
MessageContent,
MessagePart,
UserChatMessage,
} from "../index.js";
import { Rule } from "@continuedev/config-yaml";
import { renderChatMessage } from "../util/messageContent.js";
import {
AsyncEncoder,
@ -11,6 +17,8 @@ import {
import { autodetectTemplateType } from "./autodetect.js";
import { TOKEN_BUFFER_FOR_SAFETY } from "./constants.js";
import llamaTokenizer from "./llamaTokenizer.js";
import { isRuleActive } from "./rules/isRuleActive.js";
import { extractPathsFromCodeBlocks } from "./utils/extractPathsFromCodeBlocks.js";
interface Encoding {
encode: Tiktoken["encode"];
decode: Tiktoken["decode"];
@ -399,16 +407,127 @@ function chatMessageIsEmpty(message: ChatMessage): boolean {
}
}
function compileChatMessages(
modelName: string,
msgs: ChatMessage[] | undefined,
contextLength: number,
maxTokens: number,
supportsImages: boolean,
prompt: string | undefined = undefined,
functions: any[] | undefined = undefined,
systemMessage: string | undefined = undefined,
): ChatMessage[] {
function addSystemMessage({
messages,
systemMessage,
originalMessages,
}: {
messages: ChatMessage[];
systemMessage: string | undefined;
originalMessages: ChatMessage[] | undefined;
}): ChatMessage[] {
if (
!(systemMessage && systemMessage.trim() !== "") &&
originalMessages?.[0]?.role !== "system"
) {
return messages;
}
let content = "";
if (originalMessages?.[0]?.role === "system") {
content = renderChatMessage(originalMessages[0]);
}
if (systemMessage && systemMessage.trim() !== "") {
const shouldAddNewLines = content !== "";
if (shouldAddNewLines) {
content += "\n\n";
}
content += systemMessage;
}
const systemChatMsg: ChatMessage = {
role: "system",
content,
};
// Insert as second to last
messages.splice(-1, 0, systemChatMsg);
return messages;
}
function getMessageStringContent(message?: UserChatMessage): string {
if (!message) {
return "";
}
if (typeof message.content === "string") {
return message.content;
}
// Handle MessagePart array
return message.content
.map((part) => {
if (part.type === "text") {
return part.text;
}
return "";
})
.join("\n");
}
const getSystemMessage = ({
userMessage,
rules,
currentModel,
}: {
userMessage?: UserChatMessage;
rules: Rule[];
currentModel: string;
}) => {
const messageStringContent = getMessageStringContent(userMessage);
const filePathsFromMessage = extractPathsFromCodeBlocks(messageStringContent);
return rules
.filter((rule) => {
return isRuleActive({
rule,
activePaths: filePathsFromMessage,
currentModel,
});
})
.map((rule) => {
if (typeof rule === "string") {
return rule;
}
return rule.rule;
})
.join("\n");
};
function getLastUserMessage(
messages: ChatMessage[],
): UserChatMessage | undefined {
// Iterate backwards through messages to find the last user message
for (let i = messages.length - 1; i >= 0; i--) {
if (messages[i].role === "user") {
return messages[i] as UserChatMessage;
}
}
return undefined;
}
function compileChatMessages({
modelName,
msgs,
contextLength,
maxTokens,
supportsImages,
prompt,
functions,
systemMessage,
rules,
}: {
modelName: string;
msgs: ChatMessage[] | undefined;
contextLength: number;
maxTokens: number;
supportsImages: boolean;
prompt: string | undefined;
functions: any[] | undefined;
systemMessage: string | undefined;
rules: Rule[];
}): ChatMessage[] {
let msgsCopy = msgs
? msgs
.map((msg) => ({ ...msg }))
@ -425,29 +544,19 @@ function compileChatMessages(
msgsCopy.push(promptMsg);
}
if (
(systemMessage && systemMessage.trim() !== "") ||
msgs?.[0]?.role === "system"
) {
let content = "";
if (msgs?.[0]?.role === "system") {
content = renderChatMessage(msgs?.[0]);
}
if (systemMessage && systemMessage.trim() !== "") {
const shouldAddNewLines = content !== "";
if (shouldAddNewLines) {
content += "\n\n";
}
content += systemMessage;
}
const systemChatMsg: ChatMessage = {
role: "system",
content,
};
// Insert as second to last
// Later moved to top, but want second-priority to last user message
msgsCopy.splice(-1, 0, systemChatMsg);
}
const lastUserMessage = getLastUserMessage(msgsCopy);
msgsCopy = addSystemMessage({
messages: msgsCopy,
systemMessage:
systemMessage ??
getSystemMessage({
userMessage: lastUserMessage,
rules,
currentModel: modelName,
}),
originalMessages: msgs,
});
let functionTokens = 0;
if (functions) {

View File

@ -1,4 +1,4 @@
import { ModelRole } from "@continuedev/config-yaml";
import { ModelRole, Rule } from "@continuedev/config-yaml";
import { fetchwithRequestOptions } from "@continuedev/fetch";
import { findLlmInfo } from "@continuedev/llm-info";
import {
@ -143,6 +143,7 @@ export abstract class BaseLLM implements ILLM {
cacheBehavior?: CacheBehavior;
capabilities?: ModelCapability;
roles?: ModelRole[];
rules?: Rule[];
deployment?: string;
apiVersion?: string;
@ -242,6 +243,7 @@ export abstract class BaseLLM implements ILLM {
this.accountId = options.accountId;
this.capabilities = options.capabilities;
this.roles = options.roles;
this.rules = options.rules;
this.deployment = options.deployment;
this.apiVersion = options.apiVersion;
@ -286,21 +288,17 @@ export abstract class BaseLLM implements ILLM {
CONTEXT_LENGTH_FOR_MODEL[options.model] || DEFAULT_CONTEXT_LENGTH;
}
return compileChatMessages(
options.model,
messages,
return compileChatMessages({
modelName: options.model,
msgs: messages,
contextLength,
options.maxTokens ?? DEFAULT_MAX_TOKENS,
this.supportsImages(),
undefined,
maxTokens: options.maxTokens ?? DEFAULT_MAX_TOKENS,
supportsImages: this.supportsImages(),
prompt: undefined,
functions,
this.systemMessage,
);
}
private _getSystemMessage(): string | undefined {
// TODO: Merge with config system message
return this.systemMessage;
systemMessage: this.systemMessage,
rules: this.rules ?? [],
});
}
private _templatePromptLikeMessages(prompt: string): string {
@ -310,7 +308,7 @@ export abstract class BaseLLM implements ILLM {
const msgs: ChatMessage[] = [{ role: "user", content: prompt }];
const systemMessage = this._getSystemMessage();
const systemMessage = this.systemMessage;
if (systemMessage) {
msgs.unshift({ role: "system", content: systemMessage });
}

View File

@ -0,0 +1,59 @@
import { isRuleActive } from "./isRuleActive";
describe("isRuleActive", () => {
const rule = {
name: "My Rule",
rule: "Do no harm",
if: '${{ contains(current.model.model, "claude-3-7-sonnet") and glob("*.tsx") }}',
};
it("should return false when activePaths is empty", () => {
expect(
isRuleActive({
rule,
activePaths: [],
currentModel: "claude-3-7-sonnet",
}),
).toBe(false);
});
it("should return false when no matching paths exist", () => {
expect(
isRuleActive({
rule,
activePaths: ["test.py", "main.js"],
currentModel: "claude-3-7-sonnet",
}),
).toBe(false);
});
it("should return true when both conditions are met", () => {
expect(
isRuleActive({
rule,
activePaths: ["Component.tsx", "main.js"],
currentModel: "claude-3-7-sonnet",
}),
).toBe(true);
});
it("should return false when only glob matches but model doesn't", () => {
expect(
isRuleActive({
rule,
activePaths: ["Component.tsx"],
currentModel: "gpt-4",
}),
).toBe(false);
});
it("should return false when model matches but glob doesn't", () => {
expect(
isRuleActive({
rule,
activePaths: ["Component.ts"],
currentModel: "claude-3-7-sonnet",
}),
).toBe(false);
});
});

View File

@ -0,0 +1,68 @@
import { Rule } from "@continuedev/config-yaml";
import { compileExpression } from "filtrex";
import { minimatch } from "minimatch";
const TEMPLATE_VAR_REGEX = /^\$\{\{(\s*.*?\s*)\}\}$/;
const createGlobFunction = (activePaths: string[]) => {
return (pattern: string) => {
const result = activePaths.some((path) => minimatch(path, pattern));
return result;
};
};
const contains = (str: string, searchStr: string): boolean => {
return str.includes(searchStr);
};
const evaluateIf = ({
condition,
activePaths,
currentModel,
}: {
condition: string;
activePaths: string[];
currentModel: string;
}) => {
const expression = condition.match(TEMPLATE_VAR_REGEX)?.[1]?.trim();
if (!expression) {
return true;
}
try {
const evaluate = compileExpression(expression, {
extraFunctions: {
glob: createGlobFunction(activePaths),
contains,
},
constants: {
"current.model.model": currentModel,
},
});
return evaluate({});
} catch (error) {
console.error("Error evaluating rule condition:", error);
return false;
}
};
export const isRuleActive = ({
rule,
activePaths,
currentModel,
}: {
rule: Rule;
activePaths: string[];
currentModel: string;
}): boolean => {
if (typeof rule === "string") {
return true;
}
if (rule.if) {
return evaluateIf({ condition: rule.if, activePaths, currentModel });
}
return true;
};
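
For context, a minimal usage sketch of the new helper (the rule content below is illustrative; the `${{ ... }}` expression syntax follows the test file above):

```typescript
import { isRuleActive } from "./isRuleActive";

// Illustrative rule; `if` combines the filtrex helpers registered above.
const tsxOnlyRule = {
  name: "TSX style",
  rule: "Prefer function components",
  if: '${{ contains(current.model.model, "claude-3-7-sonnet") and glob("*.tsx") }}',
};

// Active: the model matches and a .tsx path was referenced in the conversation.
isRuleActive({
  rule: tsxOnlyRule,
  activePaths: ["Component.tsx"],
  currentModel: "claude-3-7-sonnet",
}); // true

// Plain string rules carry no condition and are always active.
isRuleActive({ rule: "Do no harm", activePaths: [], currentModel: "gpt-4" }); // true
```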

View File

@ -0,0 +1,22 @@
/**
* Extracts file paths from markdown code blocks
*/
export function extractPathsFromCodeBlocks(content: string): string[] {
// Match:
// 1. Starting ```
// 2. Optional language identifier
// 3. Required whitespace
// 4. File path (captured) that must contain a dot and extension
const codeBlockRegex = /```(?:[\w-+#]+)?\s+([^\s\n()]+\.[a-zA-Z0-9]+)/g;
const paths: string[] = [];
let match;
while ((match = codeBlockRegex.exec(content)) !== null) {
const path = match[1];
if (path && !path.startsWith("```")) {
paths.push(path);
}
}
return paths;
}
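
A small usage sketch (the chat message below is invented): only fences that name a file after the language tag contribute a path, matching the regex above.

```typescript
import { extractPathsFromCodeBlocks } from "./extractPathsFromCodeBlocks";

// Hypothetical chat message with one fenced block that names a file
// and one that does not.
const message = [
  "Here is the component:",
  "```tsx Component.tsx",
  "export const Component = () => null;",
  "```",
  "and a script without a path:",
  "```bash",
  "npm test",
  "```",
].join("\n");

extractPathsFromCodeBlocks(message); // => ["Component.tsx"]
```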

14
core/package-lock.json generated
View File

@ -13,7 +13,7 @@
"@aws-sdk/client-sagemaker-runtime": "^3.777.0",
"@aws-sdk/credential-providers": "^3.778.0",
"@continuedev/config-types": "^1.0.13",
"@continuedev/config-yaml": "^1.0.67",
"@continuedev/config-yaml": "^1.0.71",
"@continuedev/fetch": "^1.0.4",
"@continuedev/llm-info": "^1.0.8",
"@continuedev/openai-adapters": "^1.0.18",
@ -32,6 +32,7 @@
"diff": "^7.0.0",
"dotenv": "^16.4.5",
"fastest-levenshtein": "^1.0.16",
"filtrex": "^3.1.0",
"follow-redirects": "^1.15.5",
"google-auth-library": "^9.14.2",
"handlebars": "^4.7.8",
@ -2976,9 +2977,9 @@
}
},
"node_modules/@continuedev/config-yaml": {
"version": "1.0.67",
"resolved": "https://registry.npmjs.org/@continuedev/config-yaml/-/config-yaml-1.0.67.tgz",
"integrity": "sha512-RbJXSH2Z9XmaN1h9Dm0Qfru7UeAdCAEWMiEgJ9A2FGHysR9oxbQDqptk1Y61oDSpYeX+lWsxkz/SFIc2PjNnKg==",
"version": "1.0.71",
"resolved": "https://registry.npmjs.org/@continuedev/config-yaml/-/config-yaml-1.0.71.tgz",
"integrity": "sha512-c7KVaYfldaYHf2EqeqE9Oo1son33iTp6MAY+WRPJVs9XWmRWOA6Cn7YDf575FpwX6UsPCT/Eb0JJme8AC7UE6g==",
"dependencies": {
"@continuedev/config-types": "^1.0.14",
"yaml": "^2.6.1",
@ -8587,6 +8588,11 @@
"node": ">=8"
}
},
"node_modules/filtrex": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/filtrex/-/filtrex-3.1.0.tgz",
"integrity": "sha512-mHzZ2wUISETF1OaEcNRiGz1ljuIV8c/C9td9qyAZ+wTwigkAk5RO9YrCxQKk5H9v7joDRFIBik9U5RTK9eXZ/A=="
},
"node_modules/find-replace": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/find-replace/-/find-replace-3.0.0.tgz",

View File

@ -47,7 +47,7 @@
"@aws-sdk/client-sagemaker-runtime": "^3.777.0",
"@aws-sdk/credential-providers": "^3.778.0",
"@continuedev/config-types": "^1.0.13",
"@continuedev/config-yaml": "^1.0.67",
"@continuedev/config-yaml": "^1.0.71",
"@continuedev/fetch": "^1.0.4",
"@continuedev/llm-info": "^1.0.8",
"@continuedev/openai-adapters": "^1.0.18",
@ -66,6 +66,7 @@
"diff": "^7.0.0",
"dotenv": "^16.4.5",
"fastest-levenshtein": "^1.0.16",
"filtrex": "^3.1.0",
"follow-redirects": "^1.15.5",
"google-auth-library": "^9.14.2",
"handlebars": "^4.7.8",

View File

@ -106,7 +106,7 @@
"@aws-sdk/client-sagemaker-runtime": "^3.777.0",
"@aws-sdk/credential-providers": "^3.778.0",
"@continuedev/config-types": "^1.0.13",
"@continuedev/config-yaml": "^1.0.67",
"@continuedev/config-yaml": "^1.0.71",
"@continuedev/fetch": "^1.0.4",
"@continuedev/llm-info": "^1.0.8",
"@continuedev/openai-adapters": "^1.0.18",
@ -125,6 +125,7 @@
"diff": "^7.0.0",
"dotenv": "^16.4.5",
"fastest-levenshtein": "^1.0.16",
"filtrex": "^3.1.0",
"follow-redirects": "^1.15.5",
"google-auth-library": "^9.14.2",
"handlebars": "^4.7.8",

View File

@ -9,6 +9,7 @@
"strict": true,
"esModuleInterop": true,
"resolveJsonModule": true,
"skipLibCheck": true,
"types": ["mocha"] // This solves conflict between jest and mocha: https://github.com/cypress-io/cypress/issues/7435#issuecomment-631695007
},
"include": ["src/**/*", "../../core/**/*.ts", "../../core/**/*.d.ts"],

11
gui/package-lock.json generated
View File

@ -7,7 +7,7 @@
"name": "gui",
"license": "Apache-2.0",
"dependencies": {
"@continuedev/config-yaml": "^1.0.67",
"@continuedev/config-yaml": "^1.0.71",
"@headlessui/react": "^2.2.0",
"@heroicons/react": "^2.0.18",
"@reduxjs/toolkit": "^2.3.0",
@ -111,7 +111,7 @@
"@aws-sdk/client-sagemaker-runtime": "^3.777.0",
"@aws-sdk/credential-providers": "^3.778.0",
"@continuedev/config-types": "^1.0.13",
"@continuedev/config-yaml": "^1.0.67",
"@continuedev/config-yaml": "^1.0.71",
"@continuedev/fetch": "^1.0.4",
"@continuedev/llm-info": "^1.0.8",
"@continuedev/openai-adapters": "^1.0.18",
@ -130,6 +130,7 @@
"diff": "^7.0.0",
"dotenv": "^16.4.5",
"fastest-levenshtein": "^1.0.16",
"filtrex": "^3.1.0",
"follow-redirects": "^1.15.5",
"google-auth-library": "^9.14.2",
"handlebars": "^4.7.8",
@ -554,9 +555,9 @@
}
},
"node_modules/@continuedev/config-yaml": {
"version": "1.0.67",
"resolved": "https://registry.npmjs.org/@continuedev/config-yaml/-/config-yaml-1.0.67.tgz",
"integrity": "sha512-RbJXSH2Z9XmaN1h9Dm0Qfru7UeAdCAEWMiEgJ9A2FGHysR9oxbQDqptk1Y61oDSpYeX+lWsxkz/SFIc2PjNnKg==",
"version": "1.0.71",
"resolved": "https://registry.npmjs.org/@continuedev/config-yaml/-/config-yaml-1.0.71.tgz",
"integrity": "sha512-c7KVaYfldaYHf2EqeqE9Oo1son33iTp6MAY+WRPJVs9XWmRWOA6Cn7YDf575FpwX6UsPCT/Eb0JJme8AC7UE6g==",
"dependencies": {
"@continuedev/config-types": "^1.0.14",
"yaml": "^2.6.1",

View File

@ -15,7 +15,7 @@
"test:watch": "vitest"
},
"dependencies": {
"@continuedev/config-yaml": "^1.0.67",
"@continuedev/config-yaml": "^1.0.71",
"@headlessui/react": "^2.2.0",
"@heroicons/react": "^2.0.18",
"@reduxjs/toolkit": "^2.3.0",

View File

@ -1,4 +1,4 @@
import { parseConfigYaml } from "@continuedev/config-yaml";
import { parseConfigYaml, Rule } from "@continuedev/config-yaml";
import { ArrowsPointingOutIcon, PencilIcon } from "@heroicons/react/24/outline";
import { useContext, useMemo } from "react";
import { useSelector } from "react-redux";
@ -17,7 +17,7 @@ import { ExploreBlocksButton } from "./ExploreBlocksButton";
interface RuleCardProps {
index: number;
rule: string;
rule: Rule;
onClick: () => void;
title: string;
}
@ -31,12 +31,32 @@ const RuleCard: React.FC<RuleCardProps> = ({ index, rule, onClick, title }) => {
setDialogMessage(
<div className="p-4 text-center">
<h3>{title}</h3>
<pre className="max-w-full overflow-x-scroll">{rule}</pre>
<pre className="max-w-full overflow-x-scroll">
{/** TODO: Render the rule in a more readable way */}
{JSON.stringify(rule)}
</pre>
</div>,
),
);
}
const renderRuleContent = () => {
if (typeof rule === "string") {
return <div>{rule}</div>;
}
if (rule.rule) {
return (
<div>
<div>{rule.rule}</div>
{rule.if && <div className="text-gray-500">if: {rule.if}</div>}
</div>
);
}
return <div>{JSON.stringify(rule)}</div>;
};
return (
<div
style={{
@ -62,7 +82,7 @@ const RuleCard: React.FC<RuleCardProps> = ({ index, rule, onClick, title }) => {
}}
className="line-clamp-3 text-gray-400"
>
{rule}
{renderRuleContent()}
</div>
</div>
<div className="flex items-center gap-1">
@ -137,27 +157,25 @@ export function RulesSection() {
);
}
if (!rule.ruleFromYaml?.uses) {
return null;
if ("uses" in rule.ruleFromYaml) {
const ruleSlug = rule.ruleFromYaml?.uses;
return (
<RuleCard
key={index}
index={index}
rule={rule.unrolledRule}
onClick={() => openUrl(`${ruleSlug}/new-version`)}
title={ruleSlug}
/>
);
}
const ruleSlug = rule.ruleFromYaml?.uses;
return (
<RuleCard
key={index}
index={index}
rule={rule.unrolledRule}
onClick={() => openUrl(`${ruleSlug}/new-version`)}
title={ruleSlug}
/>
);
// TODO: Handle rules as object with 'if'
return null;
})}
</div>
<ExploreBlocksButton blockType="rules" />
</div>
);
}
function useTypedDispatch() {
throw new Error("Function not implemented.");
}

View File

@ -1,6 +1,6 @@
{
"name": "@continuedev/config-yaml",
"version": "1.0.67",
"version": "1.0.71",
"description": "",
"main": "dist/index.js",
"types": "dist/index.d.ts",

View File

@ -15,3 +15,14 @@ This happens on the server, unless using local mode.
## Client rendering
The unrolled config.yaml is then rendered on the client. This is done by replacing all user secret template variables with their values and replacing all other secrets with secret locations.
## Publishing
Make sure you are logged into the npm registry with `npm login`.
Then bump the version in `package.json` and run:
```bash
npm run build
npm publish --access public
```

View File

@ -17,6 +17,7 @@ import {
blockSchema,
ConfigYaml,
configYamlSchema,
Rule,
} from "../schemas/index.js";
import { useProxyForUnrenderedSecrets } from "./clientRender.js";
@ -62,7 +63,7 @@ export function parseBlock(configYaml: string): Block {
}
}
const TEMPLATE_VAR_REGEX = /\${{[\s]*([^}\s]+)[\s]*}}/g;
export const TEMPLATE_VAR_REGEX = /\${{[\s]*([^}\s]+)[\s]*}}/g;
export function getTemplateVariables(templatedYaml: string): string[] {
const variables = new Set<string>();
@ -313,13 +314,13 @@ export async function unrollBlocks(
}
}
// Rules are a bit different because they're just strings, so handle separately
// Rules are a bit different because they can be strings, so handle separately
if (assistant.rules) {
const rules: string[] = [];
const rules: Rule[] = [];
for (const rule of assistant.rules) {
if (typeof rule === "string") {
if (typeof rule === "string" || !("uses" in rule)) {
rules.push(rule);
} else {
} else if ("uses" in rule) {
const blockConfigYaml = await resolveBlock(
decodeFullSlug(rule.uses),
rule.with,

View File

@ -28,6 +28,16 @@ const docSchema = z.object({
faviconUrl: z.string().optional(),
});
const ruleObjectSchema = z.object({
name: z.string(),
rule: z.string(),
if: z.string().optional(),
});
const ruleSchema = z.union([z.string(), ruleObjectSchema]);
export type Rule = z.infer<typeof ruleSchema>;
export const blockItemWrapperSchema = <T extends z.AnyZodObject>(schema: T) =>
z.object({
uses: z.string(),
@ -63,7 +73,7 @@ export const configYamlSchema = baseConfigYamlSchema.extend({
rules: z
.array(
z.union([
z.string(),
ruleSchema,
z.object({
uses: z.string(),
with: z.record(z.string()).optional(),
@ -82,7 +92,7 @@ export const assistantUnrolledSchema = baseConfigYamlSchema.extend({
context: z.array(contextSchema).optional(),
data: z.array(dataSchema).optional(),
mcpServers: z.array(mcpServerSchema).optional(),
rules: z.array(z.string()).optional(),
rules: z.array(ruleSchema).optional(),
prompts: z.array(promptSchema).optional(),
docs: z.array(docSchema).optional(),
});
@ -95,7 +105,9 @@ export const blockSchema = baseConfigYamlSchema.and(
z.object({ context: z.array(contextSchema).length(1) }),
z.object({ data: z.array(dataSchema).length(1) }),
z.object({ mcpServers: z.array(mcpServerSchema).length(1) }),
z.object({ rules: z.array(z.string()).length(1) }),
z.object({
rules: z.array(ruleSchema).length(1),
}),
z.object({ prompts: z.array(promptSchema).length(1) }),
z.object({ docs: z.array(docSchema).length(1) }),
]),
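
For reference, a minimal sketch of the two shapes the updated `ruleSchema` accepts (the rule text is made up), using the same zod union declared above:

```typescript
import { z } from "zod";

// Re-declared here for illustration; mirrors the ruleSchema added above.
const ruleObjectSchema = z.object({
  name: z.string(),
  rule: z.string(),
  if: z.string().optional(),
});
const ruleSchema = z.union([z.string(), ruleObjectSchema]);

// Both forms are valid rules after this change:
ruleSchema.parse("Always write tests first");
ruleSchema.parse({
  name: "TSX only",
  rule: "Use functional React components",
  if: '${{ glob("*.tsx") }}',
});
```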