🎨 format all files with .prettierrc

parent d90e7dce92
commit 2eb647656b
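The .prettierrc referenced in the commit message is not part of this diff, so its exact contents are unknown. As a rough sketch only, a configuration consistent with the changes below (double quotes, semicolons, trailing commas in multi-line calls and literals, lines wrapped near 80 characters) could look like the following; every option value here is an assumption inferred from the diff, not copied from the repository:

{
  "printWidth": 80,
  "semi": true,
  "singleQuote": false,
  "trailingComma": "all"
}

With a config along these lines, running npx prettier --write . over the repository would produce the kind of trailing-comma and line-wrapping changes shown in the hunks that follow.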
@@ -4,7 +4,7 @@ import { getParserForFile } from "../util/treeSitter";
export async function getAst(
filepath: string,
fileContents: string
fileContents: string,
): Promise<Parser.Tree | undefined> {
const parser = await getParserForFile(filepath);
@@ -18,7 +18,7 @@ export async function getAst(
export async function getTreePathAtCursor(
ast: Parser.Tree,
cursorIndex: number
cursorIndex: number,
): Promise<Parser.SyntaxNode[] | undefined> {
const path = [ast.rootNode];
while (path[path.length - 1].childCount > 0) {
@@ -40,7 +40,7 @@ export async function getTreePathAtCursor(
}
export async function getScopeAroundRange(
range: RangeInFileWithContents
range: RangeInFileWithContents,
): Promise<RangeInFileWithContents | undefined> {
const ast = await getAst(range.filepath, range.contents);
if (!ast) {
@@ -51,10 +51,10 @@ export async function getScopeAroundRange(
const lines = range.contents.split("\n");
const startIndex =
lines.slice(0, s.line).join("\n").length +
(lines[s.line]?.slice(s.character).length ?? 0);
(lines[s.line]?.slice(s.character).length ?? 0);
const endIndex =
lines.slice(0, e.line).join("\n").length +
(lines[e.line]?.slice(0, e.character).length ?? 0);
(lines[e.line]?.slice(0, e.character).length ?? 0);
let node = ast.rootNode;
while (node.childCount > 0) {
@@ -79,11 +79,11 @@ export async function getScopeAroundRange(
start: {
line: node.startPosition.row,
character: node.startPosition.column,
},
},
end: {
line: node.endPosition.row,
character: node.endPosition.column,
}
},
},
}
};
}

@@ -32,14 +32,14 @@ export class AutocompleteLruCache {
async get(key: string): Promise<string | undefined> {
const result = await this.db.get(
"SELECT value FROM cache WHERE key = ?",
key
key,
);
if (result) {
await this.db.run(
"UPDATE cache SET timestamp = ? WHERE key = ?",
Date.now(),
key
key,
);
return result.value;
}
@@ -50,7 +50,7 @@ export class AutocompleteLruCache {
async put(key: string, value: string) {
const result = await this.db.get(
"SELECT key FROM cache WHERE key = ?",
key
key,
);
if (result) {
@@ -58,14 +58,14 @@ export class AutocompleteLruCache {
"UPDATE cache SET value = ?, timestamp = ? WHERE key = ?",
value,
Date.now(),
key
key,
);
} else {
const count = await this.db.get("SELECT COUNT(*) as count FROM cache");
if (count.count >= AutocompleteLruCache.capacity) {
await this.db.run(
"DELETE FROM cache WHERE key = (SELECT key FROM cache ORDER BY timestamp ASC LIMIT 1)"
"DELETE FROM cache WHERE key = (SELECT key FROM cache ORDER BY timestamp ASC LIMIT 1)",
);
}
@@ -73,7 +73,7 @@ export class AutocompleteLruCache {
"INSERT INTO cache (key, value, timestamp) VALUES (?, ?, ?)",
key,
value,
Date.now()
Date.now(),
);
}
}

@@ -1,6 +1,6 @@
export async function* onlyWhitespaceAfterEndOfLine(
stream: AsyncGenerator<string>,
endOfLine: string[]
endOfLine: string[],
): AsyncGenerator<string> {
let pending = "";
for await (let chunk of stream) {

@@ -10,11 +10,16 @@ import { getBasename } from "../util";
import { getAst, getTreePathAtCursor } from "./ast";
import { AutocompleteLanguageInfo, LANGUAGES, Typescript } from "./languages";
import { AutocompleteSnippet, fillPromptWithSnippets, rankSnippets, removeRangeFromSnippets } from "./ranking";
import {
AutocompleteSnippet,
fillPromptWithSnippets,
rankSnippets,
removeRangeFromSnippets,
} from "./ranking";
import { slidingWindowMatcher } from "./slidingWindow";
export function languageForFilepath(
filepath: string
filepath: string,
): AutocompleteLanguageInfo {
return LANGUAGES[filepath.split(".").slice(-1)[0]] || Typescript;
}
@@ -22,12 +27,15 @@ export function languageForFilepath(
function formatExternalSnippet(
filepath: string,
snippet: string,
language: AutocompleteLanguageInfo
language: AutocompleteLanguageInfo,
) {
const comment = language.comment;
const lines = [
comment + " Path: " + getBasename(filepath),
...snippet.trim().split("\n").map((line) => comment + " " + line),
...snippet
.trim()
.split("\n")
.map((line) => comment + " " + line),
comment,
];
return lines.join("\n");
@@ -37,7 +45,7 @@ const BLOCK_TYPES = ["body", "statement_block"];
function shouldCompleteMultilineAst(
treePath: Parser.SyntaxNode[],
cursorLine: number
cursorLine: number,
): boolean {
// If at the base of the file, do multiline
if (treePath.length === 1) {
@@ -65,7 +73,7 @@ function shouldCompleteMultilineAst(
async function shouldCompleteMultiline(
filepath: string,
fullPrefix: string,
fullSuffix: string
fullSuffix: string,
): Promise<boolean> {
// Use AST to determine whether to complete multiline
let treePath: Parser.SyntaxNode[] | undefined;
@@ -99,7 +107,7 @@ export async function constructAutocompletePrompt(
recentlyEditedRanges: RangeInFileWithContents[],
recentlyEditedDocuments: RangeInFileWithContents[],
modelName: string,
extraSnippets: AutocompleteSnippet[]
extraSnippets: AutocompleteSnippet[],
): Promise<{
prefix: string;
suffix: string;
@@ -111,78 +119,88 @@ export async function constructAutocompletePrompt(
const windowAroundCursor =
fullPrefix.slice(
-options.slidingWindowSize * options.slidingWindowPrefixPercentage
-options.slidingWindowSize * options.slidingWindowPrefixPercentage,
) +
fullSuffix.slice(
options.slidingWindowSize * (1 - options.slidingWindowPrefixPercentage)
options.slidingWindowSize * (1 - options.slidingWindowPrefixPercentage),
);
const slidingWindowMatches = await slidingWindowMatcher(
recentlyEditedDocuments,
windowAroundCursor,
3,
options.slidingWindowSize
options.slidingWindowSize,
);
snippets.push(...slidingWindowMatches);
const recentlyEdited = (await Promise.all(
recentlyEditedRanges
.map(async (r) => {
const recentlyEdited = (
await Promise.all(
recentlyEditedRanges.map(async (r) => {
return r;
// return await getScopeAroundRange(r);
})
)).filter((s) => !!s);
}),
)
).filter((s) => !!s);
snippets.push(...(recentlyEdited as any));
// Rank / order the snippets
const scoredSnippets = rankSnippets(snippets, windowAroundCursor)
const scoredSnippets = rankSnippets(snippets, windowAroundCursor);
// Fill maxSnippetTokens with snippets
const maxSnippetTokens = options.maxPromptTokens * options.maxSnippetPercentage;
const maxSnippetTokens =
options.maxPromptTokens * options.maxSnippetPercentage;
// Construct basic prefix
const maxPrefixTokens =
options.maxPromptTokens * options.prefixPercentage
const maxPrefixTokens = options.maxPromptTokens * options.prefixPercentage;
let prefix = pruneLinesFromTop(fullPrefix, maxPrefixTokens, modelName);
// Construct suffix
const maxSuffixTokens = Math.min(
options.maxPromptTokens - countTokens(prefix, modelName),
options.maxSuffixPercentage * options.maxPromptTokens
options.maxSuffixPercentage * options.maxPromptTokens,
);
let suffix = pruneLinesFromBottom(fullSuffix, maxSuffixTokens, modelName);
// Remove prefix range from snippets
const prefixLines = prefix.split('\n').length;
const suffixLines = suffix.split('\n').length;
const prefixLines = prefix.split("\n").length;
const suffixLines = suffix.split("\n").length;
const buffer = 8;
const prefixSuffixRangeWithBuffer = {
start: {
line: cursorLine - prefixLines - buffer,
character: 0
character: 0,
},
end: {
line: cursorLine + suffixLines+ buffer,
character: 0
}
}
let finalSnippets = removeRangeFromSnippets(scoredSnippets, filepath.split("://").slice(-1)[0], prefixSuffixRangeWithBuffer);
line: cursorLine + suffixLines + buffer,
character: 0,
},
};
let finalSnippets = removeRangeFromSnippets(
scoredSnippets,
filepath.split("://").slice(-1)[0],
prefixSuffixRangeWithBuffer,
);
// Filter snippets for those with best scores (must be above threshold)
finalSnippets = finalSnippets.filter((snippet) => snippet.score >= options.recentlyEditedSimilarityThreshold)
finalSnippets = fillPromptWithSnippets(scoredSnippets, maxSnippetTokens, modelName);
finalSnippets = finalSnippets.filter(
(snippet) => snippet.score >= options.recentlyEditedSimilarityThreshold,
);
finalSnippets = fillPromptWithSnippets(
scoredSnippets,
maxSnippetTokens,
modelName,
);
// Format snippets as comments and prepend to prefix
const formattedSnippets = finalSnippets
.map((snippet) =>
formatExternalSnippet(snippet.filepath, snippet.contents, language)
)
.join("\n");
.map((snippet) =>
formatExternalSnippet(snippet.filepath, snippet.contents, language),
)
.join("\n");
if (formattedSnippets.length > 0) {
prefix = formattedSnippets + "\n\n" + prefix;
}
return {
prefix,
suffix,
@@ -190,7 +208,7 @@ export async function constructAutocompletePrompt(
completeMultiline: await shouldCompleteMultiline(
filepath,
fullPrefix,
fullSuffix
fullSuffix,
),
};
}

@@ -13,5 +13,5 @@ export const DEFAULT_AUTOCOMPLETE_OPTS: TabAutocompleteOptions = {
maxSnippetPercentage: 0.6,
recentlyEditedSimilarityThreshold: 0.3,
useCache: true,
onlyMyCode: true
onlyMyCode: true,
};

@@ -43,7 +43,7 @@ export function jaccardSimilarity(a: string, b: string): number {
*/
export function rankSnippets(
ranges: RangeInFileWithContents[],
windowAroundCursor: string
windowAroundCursor: string,
): AutocompleteSnippet[] {
const snippets = ranges.map((snippet) => ({
score: jaccardSimilarity(snippet.contents, windowAroundCursor),
@@ -57,7 +57,7 @@ export function rankSnippets(
* Deduplicate code snippets by merging overlapping ranges into a single range.
*/
export function deduplicateSnippets(
snippets: AutocompleteSnippet[]
snippets: AutocompleteSnippet[],
): AutocompleteSnippet[] {
// Group by file
const fileGroups: { [key: string]: AutocompleteSnippet[] } = {};
@@ -77,14 +77,14 @@ export function deduplicateSnippets(
}
function mergeSnippetsByRange(
snippets: AutocompleteSnippet[]
snippets: AutocompleteSnippet[],
): AutocompleteSnippet[] {
if (snippets.length === 0) {
return snippets;
}
const sorted = snippets.sort(
(a, b) => a.range.start.line - b.range.start.line
(a, b) => a.range.start.line - b.range.start.line,
);
const merged: AutocompleteSnippet[] = [];
@@ -106,23 +106,26 @@ function mergeSnippetsByRange(
function mergeOverlappingRangeContents(
first: RangeInFileWithContents,
second: RangeInFileWithContents
second: RangeInFileWithContents,
): string {
const firstLines = first.contents.split("\n");
const numOverlapping = first.range.end.line - second.range.start.line;
return firstLines.slice(-numOverlapping).join("\n") + "\n" + second.contents;
}
/**
* Fill the allowed space with snippets
*/
export function fillPromptWithSnippets(snippets: AutocompleteSnippet[], maxSnippetTokens: number, modelName: string): AutocompleteSnippet[] {
export function fillPromptWithSnippets(
snippets: AutocompleteSnippet[],
maxSnippetTokens: number,
modelName: string,
): AutocompleteSnippet[] {
let tokensRemaining = maxSnippetTokens;
const keptSnippets: AutocompleteSnippet[] = []
const keptSnippets: AutocompleteSnippet[] = [];
for (let i = 0; i < snippets.length; i++) {
const snippet = snippets[i];
const tokenCount = countTokens(snippet.contents, modelName)
const tokenCount = countTokens(snippet.contents, modelName);
if (tokensRemaining - tokenCount >= 0) {
tokensRemaining -= tokenCount;
keptSnippets.push(snippet);
@@ -132,24 +135,24 @@ export function fillPromptWithSnippets(snippets: AutocompleteSnippet[], maxSnipp
}
return keptSnippets;
};
}
function rangeIntersectionByLines(a: Range, b: Range): Range | null {
const startLine = Math.max(a.start.line, b.start.line);
const endLine = Math.min(a.end.line, b.end.line);
if (startLine >= endLine) {
if (startLine >= endLine) {
return null;
} else {
return {
start: {
line: startLine,
character: 0
character: 0,
},
end: {
line: endLine,
character: 0
}
}
character: 0,
},
};
}
}
@@ -157,37 +160,60 @@ function rangeIntersectionByLines(a: Range, b: Range): Range | null {
* Remove one range from another range, which may lead to returning two disjoint ranges
*/
function rangeDifferenceByLines(orig: Range, remove: Range): Range[] {
if (orig.start.line >= remove.start.line && orig.end.line <= remove.end.line) {
if (
orig.start.line >= remove.start.line &&
orig.end.line <= remove.end.line
) {
// / | | /
return [];
} else if (orig.start.line <= remove.start.line && orig.end.line >= remove.end.line) {
} else if (
orig.start.line <= remove.start.line &&
orig.end.line >= remove.end.line
) {
// | / / |
// Splits the range
return [{
start: orig.start,
end: remove.start
}, {
start: remove.end,
end: orig.end
}]
} else if (orig.start.line >= remove.start.line && orig.end.line >= remove.end.line) {
return [
{
start: orig.start,
end: remove.start,
},
{
start: remove.end,
end: orig.end,
},
];
} else if (
orig.start.line >= remove.start.line &&
orig.end.line >= remove.end.line
) {
// \ | / |
return [{
start: remove.end,
end: orig.end
}]
} else if (orig.start.line <= remove.start.line && orig.end.line <= remove.end.line) {
return [
{
start: remove.end,
end: orig.end,
},
];
} else if (
orig.start.line <= remove.start.line &&
orig.end.line <= remove.end.line
) {
// | / | /
return [{
start: orig.start,
end: remove.start
}]
return [
{
start: orig.start,
end: remove.start,
},
];
} else {
return [orig]
return [orig];
}
}
export function removeRangeFromSnippets(snippets: AutocompleteSnippet[], filepath: string, range: Range): AutocompleteSnippet[] {
export function removeRangeFromSnippets(
snippets: AutocompleteSnippet[],
filepath: string,
range: Range,
): AutocompleteSnippet[] {
const finalSnippets: AutocompleteSnippet[] = [];
for (let snippet of snippets) {
if (snippet.filepath !== filepath) {
@@ -199,9 +225,14 @@ export function removeRangeFromSnippets(snippets: AutocompleteSnippet[], filepat
if (!intersection) {
finalSnippets.push(snippet);
} else {
finalSnippets.push(...rangeDifferenceByLines(snippet.range, intersection).map(range => ({...snippet, range})));
finalSnippets.push(
...rangeDifferenceByLines(snippet.range, intersection).map((range) => ({
...snippet,
range,
})),
);
}
}
return finalSnippets;
}
}

@@ -5,7 +5,7 @@ import { IndexTag } from "../indexing/types";
export async function fullTextRetrieve(
prefix: string,
suffix: string,
indexTag: IndexTag
indexTag: IndexTag,
): Promise<Chunk[]> {
const index = new FullTextSearchCodebaseIndex();
const searchStrings = prefix.split("\n").slice(-3);
@@ -16,7 +16,7 @@ export async function fullTextRetrieve(
searchString,
3,
undefined,
undefined
undefined,
);
results.push(...chunks);
});

@@ -3,7 +3,7 @@ import { AutocompleteSnippet, jaccardSimilarity } from "./ranking";
function* slidingWindow(
content: string,
windowSize: number
windowSize: number,
): Generator<string> {
const lines = content.split("\n");
@@ -35,7 +35,7 @@ export async function slidingWindowMatcher(
recentDocuments: RangeInFileWithContents[],
windowAroundCursor: string,
topN: number,
windowSize: number
windowSize: number,
): Promise<AutocompleteSnippet[]> {
// Sorted lowest similarity to highest
const topMatches: AutocompleteSnippet[] = [];

@@ -71,13 +71,13 @@ export class GeneratorReuseManager {
private static _createListenableGenerator(
gen: AsyncGenerator<string>,
prefix: string
prefix: string,
) {
GeneratorReuseManager.currentGenerator?.cancel();
const listenableGen = new ListenableGenerator(gen);
listenableGen.listen(
(chunk) => (GeneratorReuseManager.pendingCompletion += chunk ?? "")
(chunk) => (GeneratorReuseManager.pendingCompletion += chunk ?? ""),
);
GeneratorReuseManager.pendingGeneratorPrefix = prefix;
@@ -87,7 +87,7 @@ export class GeneratorReuseManager {
static async *getGenerator(
prefix: string,
newGenerator: () => AsyncGenerator<string>
newGenerator: () => AsyncGenerator<string>,
): AsyncGenerator<string> {
// Check if current can be reused
if (

@@ -3,7 +3,7 @@ import { stripImages } from "../llm/countTokens";
import SlashCommands from "./slash";
export function slashFromCustomCommand(
customCommand: CustomCommand
customCommand: CustomCommand,
): SlashCommand {
return {
name: customCommand.name,
@@ -30,7 +30,7 @@ export function slashFromCustomCommand(
}
export function slashCommandFromDescription(
desc: SlashCommandDescription
desc: SlashCommandDescription,
): SlashCommand | undefined {
const cmd = SlashCommands.find((cmd) => cmd.name === desc.name);
if (!cmd) {

@@ -4,7 +4,7 @@ import { removeQuotesAndEscapes } from "../../util";
const PROMPT = (
input: string,
title: string
title: string,
) => `You will be asked to generate the body of a GitHub issue given a user request. You should follow these rules:
- Be descriptive but do not make up details
- If the the user request includes any code snippets that are relevant, reference them in code blocks
@@ -31,7 +31,7 @@ const DraftIssueCommand: SlashCommand = {
}
let title = await llm.complete(
`Generate a title for the GitHub issue requested in this user input: '${input}'. Use no more than 20 words and output nothing other than the title. Do not surround it with quotes. The title is: `,
{ maxTokens: 20 }
{ maxTokens: 20 },
);
title = removeQuotesAndEscapes(title.trim()) + "\n\n";
@@ -49,7 +49,7 @@ const DraftIssueCommand: SlashCommand = {
}
const url = `${params.repositoryUrl}/issues/new?title=${encodeURIComponent(
title
title,
)}&body=${encodeURIComponent(body)}`;
yield `\n\n[Link to draft of issue](${url})`;
},

@@ -54,7 +54,7 @@ export async function getPromptParts(
fullFileContents: string,
model: ILLM,
input: string,
tokenLimit: number | undefined
tokenLimit: number | undefined,
) {
let maxTokens = Math.floor(model.contextLength / 2);
@@ -119,7 +119,7 @@ export async function getPromptParts(
fileSuffix = lastLine + fileSuffix;
rif.contents = rif.contents.substring(
0,
rif.contents.length - lastLine.length
rif.contents.length - lastLine.length,
);
lines = rif.contents.split(/\r?\n/);
lastLine = lines[lines.length - 1] || null;
@@ -141,7 +141,7 @@ function compilePrompt(
filePrefix: string,
contents: string,
fileSuffix: string,
input: string
input: string,
): string {
if (contents.trim() == "") {
// Separate prompt for insertion at the cursor, the other tends to cause it to repeat whole file
@@ -216,11 +216,11 @@ const EditSlashCommand: SlashCommand = {
run: async function* ({ ide, llm, input, history, contextItems, params }) {
let contextItemToEdit = contextItems.find(
(item: ContextItemWithId) =>
item.editing && item.id.providerTitle === "code"
item.editing && item.id.providerTitle === "code",
);
if (!contextItemToEdit) {
contextItemToEdit = contextItems.find(
(item: ContextItemWithId) => item.id.providerTitle === "code"
(item: ContextItemWithId) => item.id.providerTitle === "code",
);
}
@@ -236,11 +236,11 @@ const EditSlashCommand: SlashCommand = {
if (part.text && part.text.startsWith("/edit")) {
part.text = part.text.replace("/edit", "").trimStart();
}
})
});
}
let userInput = stripImages(content).replace(
`\`\`\`${contextItemToEdit.name}\n${contextItemToEdit.content}\n\`\`\`\n`,
""
"",
);
const rif: RangeInFileWithContents =
@@ -254,7 +254,7 @@ const EditSlashCommand: SlashCommand = {
fullFileContents,
llm,
userInput,
params?.tokenLimit
params?.tokenLimit,
);
const [dedentedContents, commonWhitespace] =
dedentAndGetCommonWhitespace(contents);
@@ -264,7 +264,7 @@ const EditSlashCommand: SlashCommand = {
let fullFileContentsLines = fullFileContents.split("\n");
let fullPrefixLines = fullFileContentsLines.slice(
0,
Math.max(0, rif.range.start.line - 1)
Math.max(0, rif.range.start.line - 1),
);
let fullSuffixLines = fullFileContentsLines.slice(rif.range.end.line);
@@ -426,7 +426,7 @@ const EditSlashCommand: SlashCommand = {
// Make sure they are sorted by index
indicesOfLastMatchedLines = indicesOfLastMatchedLines.sort(
(a, b) => a[0] - b[0]
(a, b) => a[0] - b[0],
);
currentBlockLines.push(line);
@@ -457,7 +457,7 @@ const EditSlashCommand: SlashCommand = {
fileSuffix: fileSuffix,
systemMessage: llm.systemMessage || "",
// "contextItems": (await sdk.getContextItemChatMessages()).map(x => x.content || "").join("\n\n"),
}
},
);
if (typeof rendered === "string") {
messages = [
@@ -481,7 +481,7 @@ const EditSlashCommand: SlashCommand = {
lineStream = filterEnglishLinesAtEnd(filterCodeBlockLines(lineStream));
generator = streamWithNewLines(
fixCodeLlamaFirstLineIndentation(lineStream)
fixCodeLlamaFirstLineIndentation(lineStream),
);
} else {
async function* gen() {
@@ -490,7 +490,7 @@ const EditSlashCommand: SlashCommand = {
maxTokens: Math.min(
maxTokens,
Math.floor(llm.contextLength / 2),
4096
4096,
),
})) {
yield stripImages(chunk.content);
@@ -567,7 +567,7 @@ const EditSlashCommand: SlashCommand = {
unfinishedLine?.startsWith("<")
? commonWhitespace
: commonWhitespace + unfinishedLine,
])
]),
);
}

@@ -3,7 +3,7 @@ import { pruneStringFromBottom, stripImages } from "../../llm/countTokens";
const SERVER_URL = "https://proxy-server-l6vsfbzhba-uw.a.run.app";
const PROMPT = (
input: string
input: string,
) => `The above sources are excerpts from related StackOverflow questions. Use them to help answer the below question from our user. Provide links to the sources in markdown whenever possible:
${input}
@@ -82,7 +82,7 @@ const StackOverflowSlashCommand: SlashCommand = {
sources[sources.length - 1] = pruneStringFromBottom(
llm.model,
contextLength - (totalTokens - newTokens),
sources[sources.length - 1]
sources[sources.length - 1],
);
shouldBreak = true;
}

@@ -10,7 +10,7 @@ export interface RangeInFileWithContents {
}
export function contextItemToRangeInFileWithContents(
item: ContextItemWithId
item: ContextItemWithId,
): RangeInFileWithContents {
const lines = item.name.split("(")[1].split(")")[0].split("-");

@@ -63,7 +63,7 @@ export const defaultConfig: SerializedContinueConfig = {
title: "Starcoder 3b",
provider: "ollama",
model: "starcoder-3b",
}
},
};
export const defaultConfigJetBrains: SerializedContinueConfig = {

@@ -23,7 +23,7 @@ export class ConfigHandler {
ide: IDE,
remoteConfigServerUrl: URL | undefined,
writeLog: (text: string) => void,
onConfigUpdate: () => void
onConfigUpdate: () => void,
) {
this.ide = ide;
this.remoteConfigServerUrl = remoteConfigServerUrl;
@@ -40,7 +40,7 @@ export class ConfigHandler {
this.savedConfig = undefined;
this.savedBrowserConfig = undefined;
this.loadConfig();
this.onConfigUpdate()
this.onConfigUpdate();
}
async getSerializedConfig(): Promise<BrowserSerializedContinueConfig> {
@@ -69,7 +69,7 @@ export class ConfigHandler {
this.ide.readFile,
workspaceConfigs,
this.remoteConfigServerUrl,
ideInfo.ideType
ideInfo.ideType,
);
this.savedConfig.allowAnonymousTelemetry =
this.savedConfig.allowAnonymousTelemetry &&
@@ -78,7 +78,7 @@ export class ConfigHandler {
// Setup telemetry only after (and if) we know it is enabled
await Telemetry.setup(
this.savedConfig.allowAnonymousTelemetry ?? true,
await this.ide.getUniqueId()
await this.ide.getUniqueId(),
);
return this.savedConfig;
@@ -98,7 +98,7 @@ export class ConfigHandler {
: llm.requestOptions?.caBundlePath;
if (customCerts) {
ca.push(
...customCerts.map((customCert) => fs.readFileSync(customCert, "utf8"))
...customCerts.map((customCert) => fs.readFileSync(customCert, "utf8")),
);
}
@@ -174,7 +174,7 @@ export class ConfigHandler {
}
}
throw new Error(
`HTTP ${resp.status} ${resp.statusText} from ${resp.url}\n\n${text}`
`HTTP ${resp.status} ${resp.statusText} from ${resp.url}\n\n${text}`,
);
}

@@ -51,7 +51,7 @@ function resolveSerializedConfig(filepath: string): SerializedContinueConfig {
config.env.forEach((envVar) => {
content = content.replaceAll(
new RegExp(`"${envVar}"`, "g"),
`"${env[envVar]}"`
`"${env[envVar]}"`,
);
});
}
@@ -60,16 +60,16 @@ function resolveSerializedConfig(filepath: string): SerializedContinueConfig {
}
const configMergeKeys = {
"models": (a: any, b: any) => a.title === b.title,
"contextProviders": (a: any, b: any) => a.name === b.name,
"slashCommands": (a: any, b: any) => a.name === b.name,
"customCommands": (a: any, b: any) => a.name === b.name,
}
models: (a: any, b: any) => a.title === b.title,
contextProviders: (a: any, b: any) => a.name === b.name,
slashCommands: (a: any, b: any) => a.name === b.name,
customCommands: (a: any, b: any) => a.name === b.name,
};
function loadSerializedConfig(
workspaceConfigs: ContinueRcJson[],
remoteConfigServerUrl: URL | undefined,
ideType: IdeType
ideType: IdeType,
): SerializedContinueConfig {
const configPath = getConfigJsonPath(ideType);
let config = resolveSerializedConfig(configPath);
@@ -133,20 +133,25 @@ function loadSerializedConfig(
if (remoteConfigServerUrl) {
const remoteConfigJson = resolveSerializedConfig(
getConfigJsonPathForRemote(remoteConfigServerUrl)
getConfigJsonPathForRemote(remoteConfigServerUrl),
);
config = mergeJson(config, remoteConfigJson, "merge", configMergeKeys);
}
for (const workspaceConfig of workspaceConfigs) {
config = mergeJson(config, workspaceConfig, workspaceConfig.mergeBehavior, configMergeKeys);
config = mergeJson(
config,
workspaceConfig,
workspaceConfig.mergeBehavior,
configMergeKeys,
);
}
return config;
}
function serializedToIntermediateConfig(
initial: SerializedContinueConfig
initial: SerializedContinueConfig,
): Config {
const slashCommands: SlashCommand[] = [];
for (const command of initial.slashCommands || []) {
@@ -169,13 +174,13 @@ function serializedToIntermediateConfig(
}
function isModelDescription(
llm: ModelDescription | CustomLLM
llm: ModelDescription | CustomLLM,
): llm is ModelDescription {
return (llm as ModelDescription).title !== undefined;
}
function isContextProviderWithParams(
contextProvider: CustomContextProvider | ContextProviderWithParams
contextProvider: CustomContextProvider | ContextProviderWithParams,
): contextProvider is ContextProviderWithParams {
return (contextProvider as ContextProviderWithParams).name !== undefined;
}
@@ -183,7 +188,7 @@ function isContextProviderWithParams(
/** Only difference between intermediate and final configs is the `models` array */
async function intermediateToFinalConfig(
config: Config,
readFile: (filepath: string) => Promise<string>
readFile: (filepath: string) => Promise<string>,
): Promise<ContinueConfig> {
const models: BaseLLM[] = [];
for (const desc of config.models) {
@@ -192,7 +197,7 @@ async function intermediateToFinalConfig(
desc,
readFile,
config.completionOptions,
config.systemMessage
config.systemMessage,
);
if (!llm) continue;
@@ -209,14 +214,14 @@ async function intermediateToFinalConfig(
},
readFile,
config.completionOptions,
config.systemMessage
config.systemMessage,
);
})
}),
);
models.push(
...(detectedModels.filter(
(x) => typeof x !== "undefined"
) as BaseLLM[])
(x) => typeof x !== "undefined",
) as BaseLLM[]),
);
} catch (e) {
console.warn("Error listing models: ", e);
@@ -234,7 +239,7 @@ async function intermediateToFinalConfig(
new CustomLLMClass({
...desc,
options: { ...desc.options, model: modelName },
})
}),
);
models.push(...models);
@@ -254,7 +259,7 @@ async function intermediateToFinalConfig(
config.tabAutocompleteModel,
readFile,
config.completionOptions,
config.systemMessage
config.systemMessage,
);
} else {
autocompleteLlm = new CustomLLMClass(config.tabAutocompleteModel);
@@ -298,7 +303,7 @@ async function intermediateToFinalConfig(
}
function finalToBrowserConfig(
final: ContinueConfig
final: ContinueConfig,
): BrowserSerializedContinueConfig {
return {
allowAnonymousTelemetry: final.allowAnonymousTelemetry,
@@ -375,10 +380,10 @@ async function buildConfigTs() {
`/esbuild${
getTarget().startsWith("win32") ? ".exe" : ""
} ${escapeSpacesInPath(
getConfigTsPath()
getConfigTsPath(),
)} --bundle --outfile=${escapeSpacesInPath(
getConfigJsPath()
)} --platform=node --format=cjs --sourcemap --external:fetch --external:fs --external:path --external:os --external:child_process`
getConfigJsPath(),
)} --platform=node --format=cjs --sourcemap --external:fetch --external:fs --external:path --external:os --external:child_process`,
);
} else {
// Dynamic import esbuild so potentially disastrous errors can be caught
@@ -396,7 +401,7 @@ async function buildConfigTs() {
}
} catch (e) {
console.log(
"Build error. Please check your ~/.continue/config.ts file: " + e
"Build error. Please check your ~/.continue/config.ts file: " + e,
);
return undefined;
}
@@ -411,12 +416,12 @@ async function loadFullConfigNode(
readFile: (filepath: string) => Promise<string>,
workspaceConfigs: ContinueRcJson[],
remoteConfigServerUrl: URL | undefined,
ideType: IdeType
ideType: IdeType,
): Promise<ContinueConfig> {
let serialized = loadSerializedConfig(
workspaceConfigs,
remoteConfigServerUrl,
ideType
ideType,
);
let intermediate = serializedToIntermediateConfig(serialized);
@@ -440,7 +445,7 @@ async function loadFullConfigNode(
if (remoteConfigServerUrl) {
try {
const configJsPathForRemote = getConfigJsPathForRemote(
remoteConfigServerUrl
remoteConfigServerUrl,
);
const module = await require(configJsPathForRemote);
delete require.cache[require.resolve(configJsPathForRemote)];
@@ -462,5 +467,5 @@ export {
intermediateToFinalConfig,
loadFullConfigNode,
serializedToIntermediateConfig,
type BrowserSerializedContinueConfig
type BrowserSerializedContinueConfig,
};

@@ -11,7 +11,7 @@ export function addModel(model: ModelDescription) {
(key, value) => {
return value === null ? undefined : value;
},
2
2,
);
writeFileSync(getConfigJsonPath(), newConfigString);
return newConfigString;

@@ -22,11 +22,11 @@ export abstract class BaseContextProvider implements IContextProvider {
// Maybe just include the chat message in here. Should never have to go back to the context provider once you have the information.
abstract getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]>;
async loadSubmenuItems(
args: LoadSubmenuItemsArgs
args: LoadSubmenuItemsArgs,
): Promise<ContextSubmenuItem[]> {
return [];
}

@@ -20,7 +20,7 @@ class CodeHighlightsContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const ide = extras.ide;
const openFiles = await ide.getOpenFiles();
@@ -32,7 +32,7 @@ class CodeHighlightsContextProvider extends BaseContextProvider {
absPath: filepath,
content: `${await ide.readFile(filepath)}`,
};
})
}),
);
// const contextSizer = {
// fits(content: string): boolean {
@@ -58,7 +58,7 @@ class CodeHighlightsContextProvider extends BaseContextProvider {
// description: "Code highlights from open files",
// },
// ];
return []
return [];
}
async load(): Promise<void> {}

@@ -18,7 +18,7 @@ class CodeOutlineContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const ide = extras.ide;
const openFiles = await ide.getOpenFiles();
@@ -30,7 +30,7 @@ class CodeOutlineContextProvider extends BaseContextProvider {
absPath: filepath,
content: `${await ide.readFile(filepath)}`,
};
})
}),
);
// const outlines = await getOutlines(
// allFiles
@@ -49,7 +49,7 @@ class CodeOutlineContextProvider extends BaseContextProvider {
// description: "Definition lines only (from open files)",
// },
// ];
return []
return [];
}
async load(): Promise<void> {}

@@ -17,7 +17,7 @@ class CodebaseContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
return retrieveContextItemsFromEmbeddings(extras, this.options, undefined);
}

@@ -25,7 +25,7 @@ class CustomContextProviderClass implements IContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
return await this.custom.getContextItems(query, extras);
}

@@ -3,8 +3,8 @@ import {
ContextItem,
ContextProviderDescription,
ContextProviderExtras,
LoadSubmenuItemsArgs,
ContextSubmenuItem,
LoadSubmenuItemsArgs,
} from "../..";
class DatabaseContextProvider extends BaseContextProvider {
@@ -17,52 +17,58 @@ class DatabaseContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const contextItems: ContextItem[] = [];
const connections = this.options?.connections;
if (connections === null) { return contextItems; }
if (connections === null) {
return contextItems;
}
let [connectionName, table] = query.split('.');
let [connectionName, table] = query.split(".");
const getDatabaseAdapter = await require("dbinfoz");
for (const connection of connections) {
if(connection.name == connectionName) {
const adapter = getDatabaseAdapter(connection.connection_type, connection.connection);
const tablesAndSchemas = await adapter.getAllTablesAndSchemas(connection.connection.database);
if (connection.name == connectionName) {
const adapter = getDatabaseAdapter(
connection.connection_type,
connection.connection,
);
const tablesAndSchemas = await adapter.getAllTablesAndSchemas(
connection.connection.database,
);
if(table === 'all') {
if (table === "all") {
let prompt = `Schema for all tables on ${connection.connection_type} is `;
prompt += JSON.stringify(tablesAndSchemas);
let contextItem = {
name: `${connectionName}-all-tables-schemas`,
description: `Schema for all tables.`,
content: prompt
}
content: prompt,
};
contextItems.push(contextItem);
} else {
const tables = Object.keys(tablesAndSchemas);
tables.forEach(tableName => {
if(table === tableName) {
tables.forEach((tableName) => {
if (table === tableName) {
let prompt = `Schema for ${tableName} on ${connection.connection_type} is `;
prompt += JSON.stringify(tablesAndSchemas[tableName]);
let contextItem = {
name: `${connectionName}-${tableName}-schema`,
description: `${tableName} Schema`,
content: prompt
}
content: prompt,
};
contextItems.push(contextItem);
}
});
}
}
}
@@ -71,35 +77,42 @@ class DatabaseContextProvider extends BaseContextProvider {
}
async loadSubmenuItems(
args: LoadSubmenuItemsArgs
args: LoadSubmenuItemsArgs,
): Promise<ContextSubmenuItem[]> {
const contextItems: ContextSubmenuItem[] = [];
const connections = this.options?.connections;
if (connections === null) { return contextItems; }
if (connections === null) {
return contextItems;
}
const getDatabaseAdapter = await require("dbinfoz");
for (const connection of connections) {
let adapter = getDatabaseAdapter(connection.connection_type, connection.connection);
const tablesAndSchemas = await adapter.getAllTablesAndSchemas(connection.connection.database);
let adapter = getDatabaseAdapter(
connection.connection_type,
connection.connection,
);
const tablesAndSchemas = await adapter.getAllTablesAndSchemas(
connection.connection.database,
);
const tables = Object.keys(tablesAndSchemas);
let contextItem = {
id: `${connection.name}.all`,
title: `${connection.name} all table schemas`,
description: ``
}
id: `${connection.name}.all`,
title: `${connection.name} all table schemas`,
description: ``,
};
contextItems.push(contextItem);
tables.forEach(tableName => {
tables.forEach((tableName) => {
let contextItem = {
id: `${connection.name}.${tableName}`,
title: `${connection.name}.${tableName} schema`,
description: ``
}
description: ``,
};
contextItems.push(contextItem);
});
}
@@ -109,4 +122,3 @@ class DatabaseContextProvider extends BaseContextProvider {
}
export default DatabaseContextProvider;

@@ -15,7 +15,7 @@ class DiffContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const diff = await extras.ide.getDiff();
return [

@@ -18,7 +18,7 @@ class DocsContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const { retrieveDocs } = await import("../../indexing/docs/db");
@@ -28,7 +28,7 @@ class DocsContextProvider extends BaseContextProvider {
const chunks = await retrieveDocs(
query,
vector,
this.options?.nRetrieve || 15
this.options?.nRetrieve || 15,
);
console.log(chunks);
@@ -59,7 +59,7 @@ class DocsContextProvider extends BaseContextProvider {
}
async loadSubmenuItems(
args: LoadSubmenuItemsArgs
args: LoadSubmenuItemsArgs,
): Promise<ContextSubmenuItem[]> {
const { listDocs } = await import("../../indexing/docs/db");
const docs = await listDocs();

@@ -18,7 +18,7 @@ class FileContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
// Assume the query is a filepath
query = query.trim();
@@ -33,13 +33,13 @@ class FileContextProvider extends BaseContextProvider {
}
async loadSubmenuItems(
args: LoadSubmenuItemsArgs
args: LoadSubmenuItemsArgs,
): Promise<ContextSubmenuItem[]> {
const workspaceDirs = await args.ide.getWorkspaceDirs();
const results = await Promise.all(
workspaceDirs.map((dir) => {
return args.ide.listWorkspaceContents(dir);
})
}),
);
const files = results.flat();
return files.map((file) => {

@@ -44,7 +44,7 @@ class FileTreeContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const workspaceDirs = await extras.ide.getWorkspaceDirs();
let trees = [];

@@ -18,13 +18,13 @@ class FolderContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const { retrieveContextItemsFromEmbeddings } = await import("../retrieval");
return retrieveContextItemsFromEmbeddings(extras, this.options, query);
}
async loadSubmenuItems(
args: LoadSubmenuItemsArgs
args: LoadSubmenuItemsArgs,
): Promise<ContextSubmenuItem[]> {
const folders = await args.ide.listFolders();
return folders.map((folder) => {

@@ -17,7 +17,7 @@ class GitHubIssuesContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const issueId = query;
const { Octokit } = await import("@octokit/rest");
@@ -58,7 +58,7 @@ class GitHubIssuesContextProvider extends BaseContextProvider {
}
async loadSubmenuItems(
args: LoadSubmenuItemsArgs
args: LoadSubmenuItemsArgs,
): Promise<ContextSubmenuItem[]> {
const { Octokit } = await import("@octokit/rest");
@@ -83,7 +83,7 @@ class GitHubIssuesContextProvider extends BaseContextProvider {
repo: repo.repo,
issue_number: issue.number,
}),
}))
})),
);
}

@@ -22,7 +22,7 @@ class GoogleContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const url = "https://google.serper.dev/search";

@@ -26,7 +26,7 @@ class HttpContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const response = await fetch(this.options.url, {
method: "POST",

@@ -59,7 +59,7 @@ class JiraIssuesContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const issueId = query;
@@ -89,7 +89,7 @@ class JiraIssuesContextProvider extends BaseContextProvider {
const commentText = convert(comment.body).result;
return `### ${comment.author.displayName} on ${comment.created}\n\n${commentText}`;
})
}),
);
}
@@ -105,7 +105,7 @@ class JiraIssuesContextProvider extends BaseContextProvider {
}
async loadSubmenuItems(
args: LoadSubmenuItemsArgs
args: LoadSubmenuItemsArgs,
): Promise<ContextSubmenuItem[]> {
const api = await this.createApi();

@@ -16,7 +16,7 @@ class OpenFilesContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const ide = extras.ide;
const openFiles = this.options?.onlyPinned
@@ -27,11 +27,11 @@ class OpenFilesContextProvider extends BaseContextProvider {
return {
description: filepath,
content: `\`\`\`${getBasename(filepath)}\n${await ide.readFile(
filepath
filepath,
)}\n\`\`\``,
name: (filepath.split("/").pop() || "").split("\\").pop() || "",
};
})
}),
);
}
}

@@ -3,8 +3,8 @@ import {
ContextItem,
ContextProviderDescription,
ContextProviderExtras,
LoadSubmenuItemsArgs,
ContextSubmenuItem,
LoadSubmenuItemsArgs,
} from "../..";
class PostgresContextProvider extends BaseContextProvider {
@@ -51,13 +51,13 @@ FROM information_schema.tables`;
}
const { rows: tablesInfo } = await pool.query(tablesInfoQuery);
return tablesInfo.map(
(tableInfo: any) => `${tableInfo.table_schema}.${tableInfo.table_name}`
(tableInfo: any) => `${tableInfo.table_schema}.${tableInfo.table_name}`,
);
}
async getContextItems(
query: string = "",
_: ContextProviderExtras = {} as ContextProviderExtras
_: ContextProviderExtras = {} as ContextProviderExtras,
): Promise<ContextItem[]> {
const pool = await this.getPool();
@@ -75,7 +75,7 @@ FROM information_schema.tables`;
// Get the table schema
if (!tableName.includes(".")) {
throw new Error(
`Table name must be in format schema.table_name, got ${tableName}`
`Table name must be in format schema.table_name, got ${tableName}`,
);
}
var schemaQuery = `
@@ -115,7 +115,7 @@ LIMIT ${sampleRows}`);
}
async loadSubmenuItems(
_: LoadSubmenuItemsArgs
_: LoadSubmenuItemsArgs,
): Promise<ContextSubmenuItem[]> {
const pool = await this.getPool();

@@ -16,7 +16,7 @@ class ProblemsContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const ide = extras.ide;
const problems = await ide.getProblems();
@@ -28,18 +28,18 @@ class ProblemsContextProvider extends BaseContextProvider {
const rangeContent = lines
.slice(
Math.max(0, problem.range.start.line - 2),
problem.range.end.line + 2
problem.range.end.line + 2,
)
.join("\n");
return {
description: "Problems in current file",
content: `\`\`\`${getBasename(
problem.filepath
problem.filepath,
)}\n${rangeContent}\n\`\`\`\n${problem.message}\n\n`,
name: `Warning in ${getBasename(problem.filepath)}`,
};
})
}),
);
return items.length === 0

@@ -15,7 +15,7 @@ class SearchContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const results = await extras.ide.getSearchResults(query);
return [

@@ -15,7 +15,7 @@ class TerminalContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
const content = await extras.ide.getTerminalContents();
return [

@@ -15,7 +15,7 @@ class URLContextProvider extends BaseContextProvider {
async getContextItems(
query: string,
extras: ContextProviderExtras
extras: ContextProviderExtras,
): Promise<ContextItem[]> {
let url = query.trim();
if (!url.startsWith("http")) {

@@ -39,7 +39,7 @@ const Providers: (typeof BaseContextProvider)[] = [
];
export function contextProviderClassFromName(
name: ContextProviderName
name: ContextProviderName,
): typeof BaseContextProvider | undefined {
const cls = Providers.find((cls) => cls.description.title === name);

@@ -11,7 +11,7 @@ import { LineStream, matchLine } from "./util";
*/
export async function* streamDiff(
oldLines: string[],
newLines: LineStream
newLines: LineStream,
): AsyncGenerator<DiffLine> {
const mutatedOldLines = [...oldLines]; // be careful
let seenIndentationMistake = false;
@@ -21,7 +21,7 @@ export async function* streamDiff(
const [matchIndex, isPerfectMatch, newLine] = matchLine(
newLineResult.value,
oldLines,
seenIndentationMistake
seenIndentationMistake,
);
if (!seenIndentationMistake && newLineResult.value !== newLine) {
seenIndentationMistake = true;

@@ -28,7 +28,7 @@ function linesMatch(lineA: string, lineB: string): boolean {
export function matchLine(
newLine: string,
oldLines: string[],
permissiveAboutIndentation: boolean = false
permissiveAboutIndentation: boolean = false,
): [number, boolean, string] {
// Only match empty lines if it's the next one:
if (newLine.trim() === "" && oldLines[0]?.trim() === "") {
@@ -57,7 +57,7 @@ export function matchLine(
* Convert a stream of arbitrary chunks to a stream of lines
*/
export async function* streamLines(
streamCompletion: AsyncGenerator<string>
streamCompletion: AsyncGenerator<string>,
): LineStream {
let buffer = "";
for await (const chunk of streamCompletion) {

@@ -31,7 +31,7 @@ export class FullTextSearchCodebaseIndex implements CodebaseIndex {
async *update(
tag: IndexTag,
results: RefreshIndexResults,
markComplete: MarkCompleteCallback
markComplete: MarkCompleteCallback,
): AsyncGenerator<IndexingProgressUpdate, any, unknown> {
const db = await SqliteDb.get();
await this._createTables(db);
@@ -42,17 +42,17 @@ export class FullTextSearchCodebaseIndex implements CodebaseIndex {
// Insert chunks
const chunks = await db.all(
`SELECT * FROM chunks WHERE path = ? AND cacheKey = ?`,
[item.path, item.cacheKey]
[item.path, item.cacheKey],
);
for (let chunk of chunks) {
const { lastID } = await db.run(
`INSERT INTO fts (path, content) VALUES (?, ?)`,
[item.path, chunk.content]
[item.path, chunk.content],
);
await db.run(
`INSERT INTO fts_metadata (id, path, cacheKey, chunkId) VALUES (?, ?, ?, ?)`,
[lastID, item.path, item.cacheKey, chunk.id]
[lastID, item.path, item.cacheKey, chunk.id],
);
}
@@ -77,7 +77,7 @@ export class FullTextSearchCodebaseIndex implements CodebaseIndex {
for (const item of results.del) {
const { lastID } = await db.run(
`DELETE FROM fts_metadata WHERE path = ? AND cacheKey = ?`,
[item.path, item.cacheKey]
[item.path, item.cacheKey],
);
await db.run(`DELETE FROM fts WHERE rowid = ?`, [lastID]);
@@ -90,7 +90,7 @@ export class FullTextSearchCodebaseIndex implements CodebaseIndex {
text: string,
n: number,
directory: string | undefined,
filterPaths: string[] | undefined
filterPaths: string[] | undefined,
): Promise<Chunk[]> {
const db = await SqliteDb.get();
const tagStrings = tags.map(tagToString);
@@ -101,7 +101,7 @@ export class FullTextSearchCodebaseIndex implements CodebaseIndex {
JOIN chunk_tags ON fts_metadata.chunkId = chunk_tags.chunkId
WHERE fts MATCH '${text.replace(
/\?/g,
""
"",
)}' AND chunk_tags.tag IN (${tagStrings.map(() => "?").join(",")})
${
filterPaths
@@ -119,7 +119,7 @@ export class FullTextSearchCodebaseIndex implements CodebaseIndex {
const chunks = await db.all(
`SELECT * FROM chunks WHERE id IN (${results.map(() => "?").join(",")})`,
results.map((result) => result.chunkId)
results.map((result) => result.chunkId),
);
return chunks.map((chunk) => {

@ -35,7 +35,7 @@ export class LanceDbIndex implements CodebaseIndex {
|
|||
|
||||
constructor(
|
||||
embeddingsProvider: EmbeddingsProvider,
|
||||
readFile: (filepath: string) => Promise<string>
|
||||
readFile: (filepath: string) => Promise<string>,
|
||||
) {
|
||||
this.embeddingsProvider = embeddingsProvider;
|
||||
this.readFile = readFile;
|
||||
|
@ -61,7 +61,7 @@ export class LanceDbIndex implements CodebaseIndex {
|
|||
}
|
||||
|
||||
private async *computeChunks(
|
||||
items: PathAndCacheKey[]
|
||||
items: PathAndCacheKey[],
|
||||
): AsyncGenerator<
|
||||
| [
|
||||
number,
|
||||
|
@ -72,7 +72,7 @@ export class LanceDbIndex implements CodebaseIndex {
|
|||
| PathAndCacheKey
|
||||
> {
|
||||
const contents = await Promise.all(
|
||||
items.map(({ path }) => this.readFile(path))
|
||||
items.map(({ path }) => this.readFile(path)),
|
||||
);
|
||||
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
|
@ -84,7 +84,7 @@ export class LanceDbIndex implements CodebaseIndex {
|
|||
items[i].path,
|
||||
content,
|
||||
LanceDbIndex.MAX_CHUNK_SIZE,
|
||||
items[i].cacheKey
|
||||
items[i].cacheKey,
|
||||
)) {
|
||||
chunks.push(chunk);
|
||||
}
|
||||
|
@ -96,7 +96,7 @@ export class LanceDbIndex implements CodebaseIndex {
|
|||
|
||||
// Calculate embeddings
|
||||
const embeddings = await this.embeddingsProvider.embed(
|
||||
chunks.map((c) => c.content)
|
||||
chunks.map((c) => c.content),
|
||||
);
|
||||
|
||||
// Create row format
|
||||
|
@ -130,8 +130,8 @@ export class LanceDbIndex implements CodebaseIndex {
|
|||
results: RefreshIndexResults,
|
||||
markComplete: (
|
||||
items: PathAndCacheKey[],
|
||||
resultType: IndexResultType
|
||||
) => void
|
||||
resultType: IndexResultType,
|
||||
) => void,
|
||||
): AsyncGenerator<IndexingProgressUpdate> {
|
||||
const lancedb = await import("vectordb");
|
||||
const tableName = this.tableNameForTag(tag);
|
||||
|
@ -160,7 +160,7 @@ export class LanceDbIndex implements CodebaseIndex {
|
|||
JSON.stringify(row.vector),
|
||||
data.startLine,
|
||||
data.endLine,
|
||||
data.contents
|
||||
data.contents,
|
||||
);
|
||||
|
||||
yield { progress, desc };
|
||||
|
@ -193,7 +193,7 @@ export class LanceDbIndex implements CodebaseIndex {
|
|||
const stmt = await sqlite.prepare(
|
||||
"SELECT * FROM lance_db_cache WHERE cacheKey = ? AND path = ?",
|
||||
cacheKey,
|
||||
path
|
||||
path,
|
||||
);
|
||||
const cachedItems = await stmt.all();
|
||||
|
||||
|
@ -230,7 +230,7 @@ export class LanceDbIndex implements CodebaseIndex {
|
|||
await sqlite.run(
|
||||
"DELETE FROM lance_db_cache WHERE cacheKey = ? AND path = ?",
|
||||
cacheKey,
|
||||
path
|
||||
path,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -243,7 +243,7 @@ export class LanceDbIndex implements CodebaseIndex {
|
|||
n: number,
|
||||
directory: string | undefined,
|
||||
vector: number[],
|
||||
db: any /// lancedb.Connection
|
||||
db: any, /// lancedb.Connection
|
||||
): Promise<LanceDbRow[]> {
|
||||
const tableName = this.tableNameForTag(tag);
|
||||
const tableNames = await db.tableNames();
|
||||
|
@ -267,7 +267,7 @@ export class LanceDbIndex implements CodebaseIndex {
|
|||
tags: IndexTag[],
|
||||
text: string,
|
||||
n: number,
|
||||
directory: string | undefined
|
||||
directory: string | undefined,
|
||||
): Promise<Chunk[]> {
|
||||
const lancedb = await import("vectordb");
|
||||
if (!lancedb.connect) {
|
||||
|
@ -290,7 +290,7 @@ export class LanceDbIndex implements CodebaseIndex {
|
|||
const data = await sqliteDb.all(
|
||||
`SELECT * FROM lance_db_cache WHERE uuid in (${allResults
|
||||
.map((r) => `'${r.uuid}'`)
|
||||
.join(",")})`
|
||||
.join(",")})`,
|
||||
);
|
||||
|
||||
return data.map((d) => {
|
||||
|
|
|
@ -42,7 +42,7 @@ export class ChunkCodebaseIndex implements CodebaseIndex {
|
|||
async *update(
|
||||
tag: IndexTag,
|
||||
results: RefreshIndexResults,
|
||||
markComplete: MarkCompleteCallback
|
||||
markComplete: MarkCompleteCallback,
|
||||
): AsyncGenerator<IndexingProgressUpdate, any, unknown> {
|
||||
const db = await SqliteDb.get();
|
||||
await this._createTables(db);
|
||||
|
@ -50,7 +50,7 @@ export class ChunkCodebaseIndex implements CodebaseIndex {
|
|||
|
||||
// Compute chunks for new files
|
||||
const contents = await Promise.all(
|
||||
results.compute.map(({ path }) => this.readFile(path))
|
||||
results.compute.map(({ path }) => this.readFile(path)),
|
||||
);
|
||||
for (let i = 0; i < results.compute.length; i++) {
|
||||
const item = results.compute[i];
|
||||
|
@ -60,7 +60,7 @@ export class ChunkCodebaseIndex implements CodebaseIndex {
|
|||
item.path,
|
||||
contents[i],
|
||||
MAX_CHUNK_SIZE,
|
||||
item.cacheKey
|
||||
item.cacheKey,
|
||||
)) {
|
||||
const { lastID } = await db.run(
|
||||
`INSERT INTO chunks (cacheKey, path, idx, startLine, endLine, content) VALUES (?, ?, ?, ?, ?, ?)`,
|
||||
|
@ -71,7 +71,7 @@ export class ChunkCodebaseIndex implements CodebaseIndex {
|
|||
chunk.startLine,
|
||||
chunk.endLine,
|
||||
chunk.content,
|
||||
]
|
||||
],
|
||||
);
|
||||
|
||||
await db.run(`INSERT INTO chunk_tags (chunkId, tag) VALUES (?, ?)`, [
|
||||
|
@ -91,7 +91,7 @@ export class ChunkCodebaseIndex implements CodebaseIndex {
|
|||
for (const item of results.addTag) {
|
||||
const chunksWithPath = await db.all(
|
||||
`SELECT * FROM chunks WHERE cacheKey = ?`,
|
||||
[item.cacheKey]
|
||||
[item.cacheKey],
|
||||
);
|
||||
|
||||
for (const chunk of chunksWithPath) {
|
||||
|
|
|
@@ -3,7 +3,7 @@ import { countTokens } from "../../llm/countTokens";

export function* basicChunker(
contents: string,
maxChunkSize: number
maxChunkSize: number,
): Generator<ChunkWithoutID> {
let chunkContent = "";
let chunkTokens = 0;
|
|
|
@ -8,7 +8,7 @@ import { codeChunker } from "./code";
|
|||
async function* chunkDocumentWithoutId(
|
||||
filepath: string,
|
||||
contents: string,
|
||||
maxChunkSize: number
|
||||
maxChunkSize: number,
|
||||
): AsyncGenerator<ChunkWithoutID> {
|
||||
if (contents.trim() === "") {
|
||||
return;
|
||||
|
@ -35,19 +35,19 @@ export async function* chunkDocument(
|
|||
filepath: string,
|
||||
contents: string,
|
||||
maxChunkSize: number,
|
||||
digest: string
|
||||
digest: string,
|
||||
): AsyncGenerator<Chunk> {
|
||||
let index = 0;
|
||||
for await (let chunkWithoutId of chunkDocumentWithoutId(
|
||||
filepath,
|
||||
contents,
|
||||
maxChunkSize
|
||||
maxChunkSize,
|
||||
)) {
|
||||
if (countTokens(chunkWithoutId.content) > MAX_CHUNK_SIZE) {
|
||||
console.warn(
|
||||
`Chunk with more than ${maxChunkSize} tokens constructed: `,
|
||||
filepath,
|
||||
countTokens(chunkWithoutId.content)
|
||||
countTokens(chunkWithoutId.content),
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@ function collapsedReplacement(node: SyntaxNode): string {
|
|||
|
||||
function firstChild(
|
||||
node: SyntaxNode,
|
||||
grammarName: string | string[]
|
||||
grammarName: string | string[],
|
||||
): SyntaxNode | null {
|
||||
if (Array.isArray(grammarName)) {
|
||||
return (
|
||||
|
@ -30,7 +30,7 @@ function collapseChildren(
|
|||
blockTypes: string[],
|
||||
collapseTypes: string[],
|
||||
collapseBlockTypes: string[],
|
||||
maxChunkSize: number
|
||||
maxChunkSize: number,
|
||||
): string {
|
||||
code = code.slice(0, node.endIndex);
|
||||
const block = firstChild(node, blockTypes);
|
||||
|
@ -38,7 +38,7 @@ function collapseChildren(
|
|||
|
||||
if (block) {
|
||||
const childrenToCollapse = block.children.filter((child) =>
|
||||
collapseTypes.includes(child.type)
|
||||
collapseTypes.includes(child.type),
|
||||
);
|
||||
for (const child of childrenToCollapse.reverse()) {
|
||||
const grandChild = firstChild(child, collapseBlockTypes);
|
||||
|
@ -99,7 +99,7 @@ function collapseChildren(
|
|||
function constructClassDefinitionChunk(
|
||||
node: SyntaxNode,
|
||||
code: string,
|
||||
maxChunkSize: number
|
||||
maxChunkSize: number,
|
||||
): string {
|
||||
return collapseChildren(
|
||||
node,
|
||||
|
@ -107,14 +107,14 @@ function constructClassDefinitionChunk(
|
|||
["block", "class_body", "declaration_list"],
|
||||
["method_definition", "function_definition", "function_item"],
|
||||
["block", "statement_block"],
|
||||
maxChunkSize
|
||||
maxChunkSize,
|
||||
);
|
||||
}
|
||||
|
||||
function constructFunctionDefinitionChunk(
|
||||
node: SyntaxNode,
|
||||
code: string,
|
||||
maxChunkSize: number
|
||||
maxChunkSize: number,
|
||||
): string {
|
||||
const bodyNode = node.children[node.children.length - 1];
|
||||
const funcText =
|
||||
|
@ -144,7 +144,7 @@ const collapsedNodeConstructors: {
|
|||
[key: string]: (
|
||||
node: SyntaxNode,
|
||||
code: string,
|
||||
maxChunkSize: number
|
||||
maxChunkSize: number,
|
||||
) => string;
|
||||
} = {
|
||||
// Classes, structs, etc
|
||||
|
@ -161,7 +161,7 @@ function* getSmartCollapsedChunks(
|
|||
node: SyntaxNode,
|
||||
code: string,
|
||||
maxChunkSize: number,
|
||||
root = true
|
||||
root = true,
|
||||
): Generator<ChunkWithoutID> {
|
||||
// Keep entire text if not over size
|
||||
if (
|
||||
|
@ -194,7 +194,7 @@ function* getSmartCollapsedChunks(
|
|||
export async function* codeChunker(
|
||||
filepath: string,
|
||||
contents: string,
|
||||
maxChunkSize: number
|
||||
maxChunkSize: number,
|
||||
): AsyncGenerator<ChunkWithoutID> {
|
||||
if (contents.trim().length === 0) {
|
||||
return;
|
||||
|
|
|
@@ -55,7 +55,7 @@ function findHeader(lines: string[]): string | undefined {
export async function* markdownChunker(
content: string,
maxChunkSize: number,
hLevel: number
hLevel: number,
): AsyncGenerator<ChunkWithoutID> {
if (countTokens(content) <= maxChunkSize) {
const header = findHeader(content.split("\n"));
|
@ -123,7 +123,7 @@ export async function* markdownChunker(
|
|||
for await (const chunk of markdownChunker(
|
||||
section.content,
|
||||
maxChunkSize - (section.header ? countTokens(section.header) : 0),
|
||||
hLevel + 1
|
||||
hLevel + 1,
|
||||
)) {
|
||||
yield {
|
||||
content: section.header + "\n" + chunk.content,
|
||||
|
|
|
@ -45,7 +45,7 @@ function shouldFilterPath(pathname: string, baseUrl: URL): boolean {
|
|||
async function* crawlLinks(
|
||||
path: string,
|
||||
baseUrl: URL,
|
||||
visited: Set<string>
|
||||
visited: Set<string>,
|
||||
): AsyncGenerator<number> {
|
||||
if (visited.has(path) || shouldFilterPath(path, baseUrl)) {
|
||||
return;
|
||||
|
@ -78,7 +78,7 @@ async function* crawlLinks(
|
|||
children.map(async (child) => {
|
||||
for await (const _ of crawlLinks(child, baseUrl, visited)) {
|
||||
}
|
||||
})
|
||||
}),
|
||||
);
|
||||
yield visited.size;
|
||||
}
|
||||
|
@ -105,12 +105,12 @@ async function crawlGithubRepo(baseUrl: URL) {
|
|||
"X-GitHub-Api-Version": "2022-11-28",
|
||||
},
|
||||
recursive: "true",
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
const paths = tree.data.tree
|
||||
.filter(
|
||||
(file) => file.type === "blob" && file.path?.endsWith(".md")
|
||||
(file) => file.type === "blob" && file.path?.endsWith(".md"),
|
||||
// ||
|
||||
// file.path?.endsWith(".rst") ||
|
||||
// file.path?.split("/").includes("documentation") ||
|
||||
|
@ -125,7 +125,7 @@ async function crawlGithubRepo(baseUrl: URL) {
|
|||
}
|
||||
|
||||
export async function* crawlSubpages(
|
||||
baseUrl: URL
|
||||
baseUrl: URL,
|
||||
): AsyncGenerator<number, string[]> {
|
||||
// Special case for GitHub repos
|
||||
if (baseUrl.hostname === "github.com") {
|
||||
|
@ -158,7 +158,7 @@ export async function* crawlSubpages(
|
|||
for await (const count of crawlLinks(
|
||||
realBaseUrl.pathname || "/",
|
||||
realBaseUrl,
|
||||
visited
|
||||
visited,
|
||||
)) {
|
||||
yield count;
|
||||
}
|
||||
|
|
|
@ -28,7 +28,7 @@ async function createDocsTable(db: Database<sqlite3.Database>) {
|
|||
export async function retrieveDocs(
|
||||
baseUrl: string,
|
||||
vector: number[],
|
||||
nRetrieve: number
|
||||
nRetrieve: number,
|
||||
): Promise<Chunk[]> {
|
||||
const lancedb = await import("vectordb");
|
||||
const lance = await lancedb.connect(getLanceDbPath());
|
||||
|
@ -56,7 +56,7 @@ export async function addDocs(
|
|||
title: string,
|
||||
baseUrl: URL,
|
||||
chunks: Chunk[],
|
||||
embeddings: number[][]
|
||||
embeddings: number[][],
|
||||
) {
|
||||
const data: LanceDbDocsRow[] = chunks.map((chunk, i) => ({
|
||||
title: chunk.otherMetadata?.title || title,
|
||||
|
@ -87,7 +87,7 @@ export async function addDocs(
|
|||
await db.run(
|
||||
`INSERT INTO docs (title, baseUrl) VALUES (?, ?)`,
|
||||
title,
|
||||
baseUrl.toString()
|
||||
baseUrl.toString(),
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ import { convertURLToMarkdown } from "./urlToMarkdown";
|
|||
export async function* indexDocs(
|
||||
title: string,
|
||||
baseUrl: URL,
|
||||
embeddingsProvider: EmbeddingsProvider
|
||||
embeddingsProvider: EmbeddingsProvider,
|
||||
): AsyncGenerator<IndexingProgressUpdate> {
|
||||
const existingDocs = await listDocs();
|
||||
if (existingDocs.find((doc) => doc.baseUrl === baseUrl.toString())) {
|
||||
|
@ -50,7 +50,7 @@ export async function* indexDocs(
|
|||
const embeddings: number[][] = [];
|
||||
|
||||
let markdownForSubpaths = await Promise.all(
|
||||
subpaths.map((subpath) => convertURLToMarkdown(new URL(subpath, baseUrl)))
|
||||
subpaths.map((subpath) => convertURLToMarkdown(new URL(subpath, baseUrl))),
|
||||
);
|
||||
|
||||
// Filter out undefineds
|
||||
|
@ -79,7 +79,7 @@ export async function* indexDocs(
|
|||
}
|
||||
|
||||
const subpathEmbeddings = await embeddingsProvider.embed(
|
||||
markdownChunks.map((chunk) => chunk.content)
|
||||
markdownChunks.map((chunk) => chunk.content),
|
||||
);
|
||||
|
||||
markdownChunks.forEach((chunk, index) => {
|
||||
|
|
|
@ -16,7 +16,7 @@ const nhm = new NodeHtmlMarkdown(
|
|||
},
|
||||
},
|
||||
},
|
||||
undefined
|
||||
undefined,
|
||||
);
|
||||
|
||||
const STRIP_BEFORE = ["\n# "];
|
||||
|
@ -39,7 +39,7 @@ async function retrieveGitHubBlob(url: URL): Promise<string | undefined> {
|
|||
|
||||
const content = Buffer.from(
|
||||
(response.data as any).content || "",
|
||||
"base64"
|
||||
"base64",
|
||||
).toString();
|
||||
|
||||
const fileExtension = url.pathname.split(".").slice(-1)[0];
|
||||
|
@ -51,7 +51,7 @@ async function retrieveGitHubBlob(url: URL): Promise<string | undefined> {
|
|||
}
|
||||
|
||||
export async function convertURLToMarkdown(
|
||||
url: URL
|
||||
url: URL,
|
||||
): Promise<string | undefined> {
|
||||
try {
|
||||
if (url.hostname === "github.com") {
|
||||
|
|
|
@ -32,7 +32,7 @@ class OpenAIEmbeddingsProvider extends BaseEmbeddingsProvider {
|
|||
});
|
||||
const data = await resp.json();
|
||||
return data.data[0].embedding;
|
||||
})
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -58,7 +58,7 @@ class TransformersJsEmbeddingsProvider extends BaseEmbeddingsProvider {
|
|||
) {
|
||||
let chunkGroup = chunks.slice(
|
||||
i,
|
||||
i + TransformersJsEmbeddingsProvider.MaxGroupSize
|
||||
i + TransformersJsEmbeddingsProvider.MaxGroupSize,
|
||||
);
|
||||
let output = await extractor(chunkGroup, {
|
||||
pooling: "mean",
|
||||
|
|
|
@ -43,7 +43,7 @@ parentPort.on("message", async (chunks) => {
|
|||
) {
|
||||
let chunkGroup = chunks.slice(
|
||||
i,
|
||||
i + TransformersJsEmbeddingsProvider.MaxGroupSize
|
||||
i + TransformersJsEmbeddingsProvider.MaxGroupSize,
|
||||
);
|
||||
let output = await extractor(chunkGroup, {
|
||||
pooling: "mean",
|
||||
|
|
|
@@ -36,7 +36,7 @@ export class CodebaseIndexer {
new ChunkCodebaseIndex(this.ide.readFile.bind(this.ide)), // Chunking must come first
new LanceDbIndex(
config.embeddingsProvider,
this.ide.readFile.bind(this.ide)
this.ide.readFile.bind(this.ide),
),
new FullTextSearchCodebaseIndex(),
];
@@ -45,7 +45,7 @@ export class CodebaseIndexer {
}

async *refresh(
workspaceDirs: string[]
workspaceDirs: string[],
): AsyncGenerator<IndexingProgressUpdate> {
const config = await this.configHandler.loadConfig();
if (config.disableIndexing) {
@@ -57,8 +57,7 @@ export class CodebaseIndexer {
let completedDirs = 0;

yield {
progress:
0,
progress: 0,
desc: "Starting indexing...",
};

@@ -77,13 +76,13 @@ export class CodebaseIndexer {
const [results, markComplete] = await getComputeDeleteAddRemove(
tag,
{ ...stats },
(filepath) => this.ide.readFile(filepath)
(filepath) => this.ide.readFile(filepath),
);

for await (let { progress, desc } of codebaseIndex.update(
tag,
results,
markComplete
markComplete,
)) {
// Handle pausing in this loop because it's the only one really taking time
while (this.pauseToken.paused) {

@ -33,7 +33,7 @@ export class SqliteDb {
|
|||
path STRING NOT NULL,
|
||||
cacheKey STRING NOT NULL,
|
||||
lastUpdated INTEGER NOT NULL
|
||||
)`
|
||||
)`,
|
||||
);
|
||||
|
||||
await db.exec(
|
||||
|
@ -43,7 +43,7 @@ export class SqliteDb {
|
|||
dir STRING NOT NULL,
|
||||
branch STRING NOT NULL,
|
||||
artifactId STRING NOT NULL
|
||||
)`
|
||||
)`,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -67,7 +67,7 @@ export class SqliteDb {
|
|||
}
|
||||
|
||||
async function getSavedItemsForTag(
|
||||
tag: IndexTag
|
||||
tag: IndexTag,
|
||||
): Promise<{ path: string; cacheKey: string; lastUpdated: number }[]> {
|
||||
const db = await SqliteDb.get();
|
||||
const stmt = await db.prepare(
|
||||
|
@ -75,7 +75,7 @@ async function getSavedItemsForTag(
|
|||
WHERE dir = ? AND branch = ? AND artifactId = ?`,
|
||||
tag.directory,
|
||||
tag.branch,
|
||||
tag.artifactId
|
||||
tag.artifactId,
|
||||
);
|
||||
const rows = await stmt.all();
|
||||
return rows;
|
||||
|
@ -96,7 +96,7 @@ enum AddRemoveResultType {
|
|||
async function getAddRemoveForTag(
|
||||
tag: IndexTag,
|
||||
currentFiles: LastModifiedMap,
|
||||
readFile: (path: string) => Promise<string>
|
||||
readFile: (path: string) => Promise<string>,
|
||||
): Promise<[PathAndCacheKey[], PathAndCacheKey[], MarkCompleteCallback]> {
|
||||
const newLastUpdatedTimestamp = Date.now();
|
||||
|
||||
|
@ -137,8 +137,8 @@ async function getAddRemoveForTag(
|
|||
Object.keys(currentFiles).map(async (path) => {
|
||||
const fileContents = await readFile(path);
|
||||
return { path, cacheKey: calculateHash(fileContents) };
|
||||
})
|
||||
))
|
||||
}),
|
||||
)),
|
||||
);
|
||||
|
||||
// Create the markComplete callback function
|
||||
|
@ -159,7 +159,7 @@ async function getAddRemoveForTag(
|
|||
newLastUpdatedTimestamp,
|
||||
tag.directory,
|
||||
tag.branch,
|
||||
tag.artifactId
|
||||
tag.artifactId,
|
||||
);
|
||||
break;
|
||||
case AddRemoveResultType.Remove:
|
||||
|
@ -175,7 +175,7 @@ async function getAddRemoveForTag(
|
|||
path,
|
||||
tag.directory,
|
||||
tag.branch,
|
||||
tag.artifactId
|
||||
tag.artifactId,
|
||||
);
|
||||
break;
|
||||
case AddRemoveResultType.UpdateNewVersion:
|
||||
|
@ -194,7 +194,7 @@ async function getAddRemoveForTag(
|
|||
path,
|
||||
tag.directory,
|
||||
tag.branch,
|
||||
tag.artifactId
|
||||
tag.artifactId,
|
||||
);
|
||||
break;
|
||||
case AddRemoveResultType.UpdateOldVersion:
|
||||
|
@ -235,11 +235,11 @@ async function getAddRemoveForTag(
|
|||
*/
|
||||
async function getTagsFromGlobalCache(
|
||||
cacheKey: string,
|
||||
artifactId: string
|
||||
artifactId: string,
|
||||
): Promise<IndexTag[]> {
|
||||
const db = await SqliteDb.get();
|
||||
const stmt = await db.prepare(
|
||||
`SELECT dir, branch, artifactId FROM global_cache WHERE cacheKey = ? AND artifactId = ?`
|
||||
`SELECT dir, branch, artifactId FROM global_cache WHERE cacheKey = ? AND artifactId = ?`,
|
||||
);
|
||||
const rows = await stmt.all(cacheKey, artifactId);
|
||||
return rows;
|
||||
|
@ -254,12 +254,12 @@ function calculateHash(fileContents: string): string {
|
|||
export async function getComputeDeleteAddRemove(
|
||||
tag: IndexTag,
|
||||
currentFiles: LastModifiedMap,
|
||||
readFile: (path: string) => Promise<string>
|
||||
readFile: (path: string) => Promise<string>,
|
||||
): Promise<[RefreshIndexResults, MarkCompleteCallback]> {
|
||||
const [add, remove, markComplete] = await getAddRemoveForTag(
|
||||
tag,
|
||||
currentFiles,
|
||||
readFile
|
||||
readFile,
|
||||
);
|
||||
|
||||
const compute: PathAndCacheKey[] = [];
|
||||
|
@ -333,7 +333,7 @@ export class GlobalCacheCodeBaseIndex implements CodebaseIndex {
|
|||
async *update(
|
||||
tag: IndexTag,
|
||||
results: RefreshIndexResults,
|
||||
_: MarkCompleteCallback
|
||||
_: MarkCompleteCallback,
|
||||
): AsyncGenerator<IndexingProgressUpdate> {
|
||||
const add = [...results.compute, ...results.addTag];
|
||||
const remove = [...results.del, ...results.removeTag];
|
||||
|
@ -350,26 +350,26 @@ export class GlobalCacheCodeBaseIndex implements CodebaseIndex {
|
|||
|
||||
private async computeOrAddTag(
|
||||
cacheKey: string,
|
||||
tag: IndexTag
|
||||
tag: IndexTag,
|
||||
): Promise<void> {
|
||||
await this.db.run(
|
||||
"INSERT INTO global_cache (cacheKey, dir, branch, artifactId) VALUES (?, ?, ?, ?)",
|
||||
cacheKey,
|
||||
tag.directory,
|
||||
tag.branch,
|
||||
tag.artifactId
|
||||
tag.artifactId,
|
||||
);
|
||||
}
|
||||
private async deleteOrRemoveTag(
|
||||
cacheKey: string,
|
||||
tag: IndexTag
|
||||
tag: IndexTag,
|
||||
): Promise<void> {
|
||||
await this.db.run(
|
||||
"DELETE FROM global_cache WHERE cacheKey = ? AND dir = ? AND branch = ? AND artifactId = ?",
|
||||
cacheKey,
|
||||
tag.directory,
|
||||
tag.branch,
|
||||
tag.artifactId
|
||||
tag.artifactId,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -9,7 +9,7 @@ export enum IndexResultType {

export type MarkCompleteCallback = (
items: PathAndCacheKey[],
resultType: IndexResultType
resultType: IndexResultType,
) => void;

export interface CodebaseIndex {
@@ -17,7 +17,7 @@ export interface CodebaseIndex {
update(
tag: IndexTag,
results: RefreshIndexResults,
markComplete: MarkCompleteCallback
markComplete: MarkCompleteCallback,
): AsyncGenerator<IndexingProgressUpdate>;
}

@@ -81,7 +81,7 @@ const PARALLEL_PROVIDERS: ModelProvider[] = [

function llmCanGenerateInParallel(
provider: ModelProvider,
model: string
model: string,
): boolean {
if (provider === "openai") {
return model.includes("gpt");
|
@ -168,7 +168,7 @@ function autodetectTemplateType(model: string): TemplateType | undefined {
|
|||
function autodetectTemplateFunction(
|
||||
model: string,
|
||||
provider: ModelProvider,
|
||||
explicitTemplate: TemplateType | undefined = undefined
|
||||
explicitTemplate: TemplateType | undefined = undefined,
|
||||
) {
|
||||
if (
|
||||
explicitTemplate === undefined &&
|
||||
|
@ -205,7 +205,7 @@ function autodetectTemplateFunction(
|
|||
|
||||
function autodetectPromptTemplates(
|
||||
model: string,
|
||||
explicitTemplate: TemplateType | undefined = undefined
|
||||
explicitTemplate: TemplateType | undefined = undefined,
|
||||
) {
|
||||
const templateType = explicitTemplate || autodetectTemplateType(model);
|
||||
const templates: Record<string, any> = {};
|
||||
|
|
|
@@ -37,7 +37,7 @@ function countImageTokens(content: MessagePart): number {
function countTokens(
content: MessageContent,
// defaults to llama2 because the tokenizer tends to produce more tokens
modelName: string = "llama2"
modelName: string = "llama2",
): number {
const encoding = encodingForModel(modelName);
if (Array.isArray(content)) {
|
@ -80,7 +80,7 @@ export function stripImages(content: MessageContent): string {
|
|||
|
||||
function countChatMessageTokens(
|
||||
modelName: string,
|
||||
chatMessage: ChatMessage
|
||||
chatMessage: ChatMessage,
|
||||
): number {
|
||||
// Doing simpler, safer version of what is here:
|
||||
// https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
|
||||
|
@ -92,7 +92,7 @@ function countChatMessageTokens(
|
|||
function pruneLinesFromTop(
|
||||
prompt: string,
|
||||
maxTokens: number,
|
||||
modelName: string
|
||||
modelName: string,
|
||||
): string {
|
||||
let totalTokens = countTokens(prompt, modelName);
|
||||
const lines = prompt.split("\n");
|
||||
|
@ -106,7 +106,7 @@ function pruneLinesFromTop(
|
|||
function pruneLinesFromBottom(
|
||||
prompt: string,
|
||||
maxTokens: number,
|
||||
modelName: string
|
||||
modelName: string,
|
||||
): string {
|
||||
let totalTokens = countTokens(prompt, modelName);
|
||||
const lines = prompt.split("\n");
|
||||
|
@ -120,7 +120,7 @@ function pruneLinesFromBottom(
|
|||
function pruneStringFromBottom(
|
||||
modelName: string,
|
||||
maxTokens: number,
|
||||
prompt: string
|
||||
prompt: string,
|
||||
): string {
|
||||
const encoding = encodingForModel(modelName);
|
||||
|
||||
|
@ -135,7 +135,7 @@ function pruneStringFromBottom(
|
|||
function pruneStringFromTop(
|
||||
modelName: string,
|
||||
maxTokens: number,
|
||||
prompt: string
|
||||
prompt: string,
|
||||
): string {
|
||||
const encoding = encodingForModel(modelName);
|
||||
|
||||
|
@ -151,7 +151,7 @@ function pruneRawPromptFromTop(
|
|||
modelName: string,
|
||||
contextLength: number,
|
||||
prompt: string,
|
||||
tokensForCompletion: number
|
||||
tokensForCompletion: number,
|
||||
): string {
|
||||
const maxTokens =
|
||||
contextLength - tokensForCompletion - TOKEN_BUFFER_FOR_SAFETY;
|
||||
|
@ -162,7 +162,7 @@ function pruneRawPromptFromBottom(
|
|||
modelName: string,
|
||||
contextLength: number,
|
||||
prompt: string,
|
||||
tokensForCompletion: number
|
||||
tokensForCompletion: number,
|
||||
): string {
|
||||
const maxTokens =
|
||||
contextLength - tokensForCompletion - TOKEN_BUFFER_FOR_SAFETY;
|
||||
|
@ -181,7 +181,7 @@ function pruneChatHistory(
|
|||
modelName: string,
|
||||
chatHistory: ChatMessage[],
|
||||
contextLength: number,
|
||||
tokensForCompletion: number
|
||||
tokensForCompletion: number,
|
||||
): ChatMessage[] {
|
||||
let totalTokens =
|
||||
tokensForCompletion +
|
||||
|
@ -195,11 +195,11 @@ function pruneChatHistory(
|
|||
|
||||
const longerThanOneThird = longestMessages.filter(
|
||||
(message: ChatMessage) =>
|
||||
countTokens(message.content, modelName) > contextLength / 3
|
||||
countTokens(message.content, modelName) > contextLength / 3,
|
||||
);
|
||||
const distanceFromThird = longerThanOneThird.map(
|
||||
(message: ChatMessage) =>
|
||||
countTokens(message.content, modelName) - contextLength / 3
|
||||
countTokens(message.content, modelName) - contextLength / 3,
|
||||
);
|
||||
|
||||
for (let i = 0; i < longerThanOneThird.length; i++) {
|
||||
|
@ -267,7 +267,7 @@ function pruneChatHistory(
|
|||
modelName,
|
||||
contextLength,
|
||||
stripImages(message.content),
|
||||
tokensForCompletion
|
||||
tokensForCompletion,
|
||||
);
|
||||
totalTokens = contextLength;
|
||||
}
|
||||
|
@ -283,7 +283,7 @@ function compileChatMessages(
|
|||
supportsImages: boolean,
|
||||
prompt: string | undefined = undefined,
|
||||
functions: any[] | undefined = undefined,
|
||||
systemMessage: string | undefined = undefined
|
||||
systemMessage: string | undefined = undefined,
|
||||
): ChatMessage[] {
|
||||
const msgsCopy = msgs ? msgs.map((msg) => ({ ...msg })) : [];
|
||||
|
||||
|
@ -314,7 +314,7 @@ function compileChatMessages(
|
|||
|
||||
if (maxTokens + functionTokens + TOKEN_BUFFER_FOR_SAFETY >= contextLength) {
|
||||
throw new Error(
|
||||
`maxTokens (${maxTokens}) is too close to contextLength (${contextLength}), which doesn't leave room for response. Try increasing the contextLength parameter of the model in your config.json.`
|
||||
`maxTokens (${maxTokens}) is too close to contextLength (${contextLength}), which doesn't leave room for response. Try increasing the contextLength parameter of the model in your config.json.`,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -332,7 +332,7 @@ function compileChatMessages(
|
|||
modelName,
|
||||
msgsCopy,
|
||||
contextLength,
|
||||
functionTokens + maxTokens + TOKEN_BUFFER_FOR_SAFETY
|
||||
functionTokens + maxTokens + TOKEN_BUFFER_FOR_SAFETY,
|
||||
);
|
||||
|
||||
if (
|
||||
|
|
|
@@ -36,7 +36,7 @@ class Anthropic extends BaseLLM {

protected async _complete(
prompt: string,
options: CompletionOptions
options: CompletionOptions,
): Promise<string> {
const response = await this.fetch(this.apiBase + "/complete", {
method: "POST",
@@ -57,7 +57,7 @@ class Anthropic extends BaseLLM {

protected async *_streamComplete(
prompt: string,
options: CompletionOptions
options: CompletionOptions,
): AsyncGenerator<string> {
const response = await this.fetch(this.apiBase + "/complete", {
method: "POST",

@ -8,7 +8,7 @@ class Bedrock extends BaseLLM {
|
|||
|
||||
protected async *_streamComplete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<string> {
|
||||
const response = await this.fetch(
|
||||
`${this.apiBase}/model/${options.model}/invoke-with-response-stream`,
|
||||
|
@ -22,7 +22,7 @@ class Bedrock extends BaseLLM {
|
|||
body: JSON.stringify({
|
||||
inputText: prompt,
|
||||
}),
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
for await (const value of streamSse(response)) {
|
||||
|
|
|
@ -5,13 +5,13 @@ class CustomLLMClass extends BaseLLM {
|
|||
private customStreamCompletion?: (
|
||||
prompt: string,
|
||||
options: CompletionOptions,
|
||||
fetch: (input: RequestInfo | URL, init?: RequestInit) => Promise<Response>
|
||||
fetch: (input: RequestInfo | URL, init?: RequestInit) => Promise<Response>,
|
||||
) => AsyncGenerator<string>;
|
||||
|
||||
private customStreamChat?: (
|
||||
messages: ChatMessage[],
|
||||
options: CompletionOptions,
|
||||
fetch: (input: RequestInfo | URL, init?: RequestInit) => Promise<Response>
|
||||
fetch: (input: RequestInfo | URL, init?: RequestInit) => Promise<Response>,
|
||||
) => AsyncGenerator<string>;
|
||||
|
||||
constructor(custom: CustomLLM) {
|
||||
|
@ -22,13 +22,13 @@ class CustomLLMClass extends BaseLLM {
|
|||
|
||||
protected async *_streamChat(
|
||||
messages: ChatMessage[],
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<ChatMessage> {
|
||||
if (this.customStreamChat) {
|
||||
for await (const content of this.customStreamChat(
|
||||
messages,
|
||||
options,
|
||||
(...args) => this.fetch(...args)
|
||||
(...args) => this.fetch(...args),
|
||||
)) {
|
||||
yield { role: "assistant", content };
|
||||
}
|
||||
|
@ -41,13 +41,13 @@ class CustomLLMClass extends BaseLLM {
|
|||
|
||||
protected async *_streamComplete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<string> {
|
||||
if (this.customStreamCompletion) {
|
||||
for await (const content of this.customStreamCompletion(
|
||||
prompt,
|
||||
options,
|
||||
(...args) => this.fetch(...args)
|
||||
(...args) => this.fetch(...args),
|
||||
)) {
|
||||
yield content;
|
||||
}
|
||||
|
@ -55,13 +55,13 @@ class CustomLLMClass extends BaseLLM {
|
|||
for await (const content of this.customStreamChat(
|
||||
[{ role: "user", content: prompt }],
|
||||
options,
|
||||
(...args) => this.fetch(...args)
|
||||
(...args) => this.fetch(...args),
|
||||
)) {
|
||||
yield content;
|
||||
}
|
||||
} else {
|
||||
throw new Error(
|
||||
"Either streamCompletion or streamChat must be defined in a custom LLM in config.ts"
|
||||
"Either streamCompletion or streamChat must be defined in a custom LLM in config.ts",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -120,7 +120,7 @@ class Flowise extends BaseLLM {
|
|||
|
||||
protected async *_streamComplete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<string> {
|
||||
const message: ChatMessage = { role: "user", content: prompt };
|
||||
for await (const chunk of this._streamChat([message], options)) {
|
||||
|
@ -130,7 +130,7 @@ class Flowise extends BaseLLM {
|
|||
|
||||
protected async *_streamChat(
|
||||
messages: ChatMessage[],
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<ChatMessage> {
|
||||
const requestBody = this._getRequestBody(messages, options);
|
||||
const { socket, socketInfo } = await this._initializeSocket();
|
||||
|
@ -156,7 +156,7 @@ class Flowise extends BaseLLM {
|
|||
|
||||
protected _getRequestBody(
|
||||
messages: ChatMessage[],
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): any {
|
||||
const lastMessage = messages[messages.length - 1];
|
||||
const history = messages
|
||||
|
@ -200,7 +200,7 @@ class Flowise extends BaseLLM {
|
|||
socketInfo.internal.hasNextTokenPromiseResolve =
|
||||
hasNextTokenResolve;
|
||||
socketInfo.internal.hasNextTokenPromiseReject = hasNextTokenReject;
|
||||
}
|
||||
},
|
||||
);
|
||||
};
|
||||
const resetTimeout = () => {
|
||||
|
|
|
@ -29,7 +29,7 @@ class FreeTrial extends BaseLLM {
|
|||
|
||||
protected async *_streamComplete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<string> {
|
||||
const args = this._convertArgs(this.collectArgs(options));
|
||||
|
||||
|
@ -67,7 +67,7 @@ class FreeTrial extends BaseLLM {
|
|||
|
||||
protected async *_streamChat(
|
||||
messages: ChatMessage[],
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<ChatMessage> {
|
||||
const args = this._convertArgs(this.collectArgs(options));
|
||||
|
||||
|
|
|
@@ -17,11 +17,11 @@ class Gemini extends BaseLLM {

protected async *_streamComplete(
prompt: string,
options: CompletionOptions
options: CompletionOptions,
): AsyncGenerator<string> {
for await (const chunk of this._streamChat(
[{ role: "user", content: prompt }],
options
options,
)) {
yield stripImages(chunk.content);
}
@@ -29,7 +29,7 @@ class Gemini extends BaseLLM {

protected async *_streamChat(
messages: ChatMessage[],
options: CompletionOptions
options: CompletionOptions,
): AsyncGenerator<ChatMessage> {
const apiUrl = `https://${this.region}-aiplatform.googleapis.com/v1/projects/${this.projectId}/locations/${this.region}/publishers/google/models/gemini-pro:streamGenerateContent`;
const body = {
@@ -60,12 +60,13 @@ class Gemini extends BaseLLM {
if (data[0]?.error) {
throw new Error(data[0].error.message);
}
let combinedText = '';
let combinedText = "";
for (const entry of data || []) {
combinedText += entry?.candidates?.[0]?.content?.parts?.[0]?.text || '';
combinedText += entry?.candidates?.[0]?.content?.parts?.[0]?.text || "";
}

yield { role: "assistant", content: combinedText.trim() }; }
yield { role: "assistant", content: combinedText.trim() };
}
}

export default Gemini;

@ -19,11 +19,11 @@ class GooglePalm extends BaseLLM {
|
|||
|
||||
protected async *_streamComplete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<string> {
|
||||
for await (const message of this._streamChat(
|
||||
[{ content: prompt, role: "user" }],
|
||||
options
|
||||
options,
|
||||
)) {
|
||||
yield stripImages(message.content);
|
||||
}
|
||||
|
@ -45,20 +45,20 @@ class GooglePalm extends BaseLLM {
|
|||
|
||||
protected async *_streamChat(
|
||||
messages: ChatMessage[],
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<ChatMessage> {
|
||||
let convertedMsgs = this.removeSystemMessage(messages);
|
||||
if (options.model.includes("gemini")) {
|
||||
for await (const message of this.streamChatGemini(
|
||||
convertedMsgs,
|
||||
options
|
||||
options,
|
||||
)) {
|
||||
yield message;
|
||||
}
|
||||
} else {
|
||||
for await (const message of this.streamChatBison(
|
||||
convertedMsgs,
|
||||
options
|
||||
options,
|
||||
)) {
|
||||
yield message;
|
||||
}
|
||||
|
@ -80,7 +80,7 @@ class GooglePalm extends BaseLLM {
|
|||
|
||||
private async *streamChatGemini(
|
||||
messages: ChatMessage[],
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<ChatMessage> {
|
||||
const apiURL = `${this.apiBase}/v1/models/${options.model}:streamGenerateContent?key=${this.apiKey}`;
|
||||
const body = {
|
||||
|
@ -150,7 +150,7 @@ class GooglePalm extends BaseLLM {
|
|||
}
|
||||
private async *streamChatBison(
|
||||
messages: ChatMessage[],
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<ChatMessage> {
|
||||
const msgList = [];
|
||||
for (const message of messages) {
|
||||
|
|
|
@ -16,11 +16,11 @@ class HuggingFaceInferenceAPI extends BaseLLM {
|
|||
|
||||
protected async *_streamComplete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<string> {
|
||||
if (!this.apiBase) {
|
||||
throw new Error(
|
||||
"No API base URL provided. Please add the `apiBase` field in your config.json."
|
||||
"No API base URL provided. Please add the `apiBase` field in your config.json.",
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -17,7 +17,7 @@ class HuggingFaceTGI extends BaseLLM {
|
|||
if (response.status !== 200) {
|
||||
console.warn(
|
||||
"Error calling Hugging Face TGI /info endpoint: ",
|
||||
await response.text()
|
||||
await response.text(),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
@ -44,7 +44,7 @@ class HuggingFaceTGI extends BaseLLM {
|
|||
|
||||
protected async *_streamComplete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<string> {
|
||||
const args = this._convertArgs(options, prompt);
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ class LlamaCpp extends BaseLLM {
|
|||
|
||||
protected async *_streamComplete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<string> {
|
||||
const headers = {
|
||||
"Content-Type": "application/json",
|
||||
|
|
|
@ -30,7 +30,7 @@ class Ollama extends BaseLLM {
|
|||
if (response.status !== 200) {
|
||||
console.warn(
|
||||
"Error calling Ollama /api/show endpoint: ",
|
||||
await response.text()
|
||||
await response.text(),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
@ -110,7 +110,7 @@ class Ollama extends BaseLLM {
|
|||
|
||||
private _convertArgs(
|
||||
options: CompletionOptions,
|
||||
prompt: string | ChatMessage[]
|
||||
prompt: string | ChatMessage[],
|
||||
) {
|
||||
const finalOptions: any = {
|
||||
model: this._getModel(),
|
||||
|
@ -139,7 +139,7 @@ class Ollama extends BaseLLM {
|
|||
|
||||
protected async *_streamComplete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<string> {
|
||||
const response = await this.fetch(`${this.apiBase}/api/generate`, {
|
||||
method: "POST",
|
||||
|
@ -178,7 +178,7 @@ class Ollama extends BaseLLM {
|
|||
|
||||
protected async *_streamChat(
|
||||
messages: ChatMessage[],
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<ChatMessage> {
|
||||
const response = await this.fetch(`${this.apiBase}/api/chat`, {
|
||||
method: "POST",
|
||||
|
|
|
@ -17,7 +17,7 @@ class OpenAIFreeTrial extends BaseLLM {
|
|||
|
||||
protected async _complete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): Promise<string> {
|
||||
const args = this.collectArgs(options);
|
||||
|
||||
|
@ -35,7 +35,7 @@ class OpenAIFreeTrial extends BaseLLM {
|
|||
|
||||
protected async *_streamComplete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<string> {
|
||||
const args = this.collectArgs(options);
|
||||
|
||||
|
@ -55,7 +55,7 @@ class OpenAIFreeTrial extends BaseLLM {
|
|||
|
||||
protected async *_streamChat(
|
||||
messages: ChatMessage[],
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<ChatMessage> {
|
||||
const args = this.collectArgs(options);
|
||||
|
||||
|
|
|
@ -34,7 +34,7 @@ class Replicate extends BaseLLM {
|
|||
|
||||
private _convertArgs(
|
||||
options: CompletionOptions,
|
||||
prompt: string
|
||||
prompt: string,
|
||||
): [`${string}/${string}:${string}`, { input: any }] {
|
||||
return [
|
||||
Replicate.MODEL_IDS[options.model] || (options.model as any),
|
||||
|
@ -51,7 +51,7 @@ class Replicate extends BaseLLM {
|
|||
|
||||
protected async _complete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): Promise<string> {
|
||||
const [model, args] = this._convertArgs(options, prompt);
|
||||
const response = await this._replicate.run(model, args);
|
||||
|
@ -61,7 +61,7 @@ class Replicate extends BaseLLM {
|
|||
|
||||
protected async *_streamComplete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<string> {
|
||||
const [model, args] = this._convertArgs(options, prompt);
|
||||
for await (const event of this._replicate.stream(model, args)) {
|
||||
|
|
|
@ -39,7 +39,7 @@ class Together extends OpenAI {
|
|||
|
||||
protected async *_streamComplete(
|
||||
prompt: string,
|
||||
options: CompletionOptions
|
||||
options: CompletionOptions,
|
||||
): AsyncGenerator<string> {
|
||||
for await (const chunk of this._legacystreamComplete(prompt, options)) {
|
||||
yield chunk;
|
||||
|
|
|
@ -38,7 +38,7 @@ function convertToLetter(num: number): string {
|
|||
}
|
||||
|
||||
const getHandlebarsVars = (
|
||||
value: string
|
||||
value: string,
|
||||
): [string, { [key: string]: string }] => {
|
||||
const ast = Handlebars.parse(value);
|
||||
|
||||
|
@ -50,7 +50,7 @@ const getHandlebarsVars = (
|
|||
keysToFilepath[letter] = (ast.body[i] as any).path.original;
|
||||
value = value.replace(
|
||||
new RegExp("{{\\s*" + (ast.body[i] as any).path.original + "\\s*}}"),
|
||||
`{{${letter}}}`
|
||||
`{{${letter}}}`,
|
||||
);
|
||||
keyIndex++;
|
||||
}
|
||||
|
@ -60,7 +60,7 @@ const getHandlebarsVars = (
|
|||
|
||||
async function renderTemplatedString(
|
||||
template: string,
|
||||
readFile: (filepath: string) => Promise<string>
|
||||
readFile: (filepath: string) => Promise<string>,
|
||||
): Promise<string> {
|
||||
const [newTemplate, vars] = getHandlebarsVars(template);
|
||||
template = newTemplate;
|
||||
|
@ -100,7 +100,7 @@ export async function llmFromDescription(
|
|||
desc: ModelDescription,
|
||||
readFile: (filepath: string) => Promise<string>,
|
||||
completionOptions?: BaseCompletionOptions,
|
||||
systemMessage?: string
|
||||
systemMessage?: string,
|
||||
): Promise<BaseLLM | undefined> {
|
||||
const cls = LLMs.find((llm) => llm.providerName === desc.provider);
|
||||
|
||||
|
@ -136,7 +136,7 @@ export async function llmFromDescription(
|
|||
|
||||
export function llmFromProviderAndOptions(
|
||||
providerName: string,
|
||||
llmOptions: LLMOptions
|
||||
llmOptions: LLMOptions,
|
||||
): ILLM {
|
||||
const cls = LLMs.find((llm) => llm.providerName === providerName);
|
||||
|
||||
|
|
|
@@ -1,5 +1,5 @@
export async function* streamResponse(
response: Response
response: Response,
): AsyncGenerator<string> {
if (response.status !== 200) {
throw new Error(await response.text());

@ -36,7 +36,7 @@ class FileSystemIde implements IDE {
|
|||
showLines(
|
||||
filepath: string,
|
||||
startLine: number,
|
||||
endLine: number
|
||||
endLine: number,
|
||||
): Promise<void> {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
@ -101,7 +101,7 @@ class FileSystemIde implements IDE {
|
|||
showDiff(
|
||||
filepath: string,
|
||||
newContents: string,
|
||||
stepIndex: number
|
||||
stepIndex: number,
|
||||
): Promise<void> {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
|
|
@ -32,12 +32,12 @@ class HistoryManager {
|
|||
sessionsList = JSON.parse(sessionsListRaw);
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`It looks like there is a JSON formatting error in your sessions.json file (${sessionsListFile}). Please fix this before creating a new session.`
|
||||
`It looks like there is a JSON formatting error in your sessions.json file (${sessionsListFile}). Please fix this before creating a new session.`,
|
||||
);
|
||||
}
|
||||
|
||||
sessionsList = sessionsList.filter(
|
||||
(session) => session.sessionId !== sessionId
|
||||
(session) => session.sessionId !== sessionId,
|
||||
);
|
||||
|
||||
fs.writeFileSync(sessionsListFile, JSON.stringify(sessionsList));
|
||||
|
@ -50,7 +50,7 @@ class HistoryManager {
|
|||
throw new Error(`Session file ${sessionFile} does not exist`);
|
||||
}
|
||||
const session: PersistedSessionInfo = JSON.parse(
|
||||
fs.readFileSync(sessionFile, "utf8")
|
||||
fs.readFileSync(sessionFile, "utf8"),
|
||||
);
|
||||
session.sessionId = sessionId;
|
||||
return session;
|
||||
|
@ -69,7 +69,7 @@ class HistoryManager {
|
|||
// Save the main session json file
|
||||
fs.writeFileSync(
|
||||
getSessionFilePath(session.sessionId),
|
||||
JSON.stringify(session)
|
||||
JSON.stringify(session),
|
||||
);
|
||||
|
||||
// Read and update the sessions list
|
||||
|
@ -114,11 +114,11 @@ class HistoryManager {
|
|||
} catch (error) {
|
||||
if (error instanceof SyntaxError) {
|
||||
throw new Error(
|
||||
`It looks like there is a JSON formatting error in your sessions.json file (${sessionsListFilePath}). Please fix this before creating a new session.`
|
||||
`It looks like there is a JSON formatting error in your sessions.json file (${sessionsListFilePath}). Please fix this before creating a new session.`,
|
||||
);
|
||||
} else {
|
||||
throw new Error(
|
||||
`It looks like there is a validation error in your sessions.json file (${sessionsListFilePath}). Please fix this before creating a new session. Error: ${error}`
|
||||
`It looks like there is a validation error in your sessions.json file (${sessionsListFilePath}). Please fix this before creating a new session. Error: ${error}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -96,13 +96,13 @@ type PromptTemplate =
| string
| ((
history: ChatMessage[],
otherData: Record<string, string>
otherData: Record<string, string>,
) => string | ChatMessage[]);

export function renderPromptTemplate(
template: PromptTemplate,
history: ChatMessage[],
otherData: Record<string, string>
otherData: Record<string, string>,
): string | ChatMessage[] {
if (typeof template === "string") {
let data: any = {

@ -2,7 +2,7 @@ import { ContinueRcJson, IDE, IdeInfo, Problem, Range } from "..";
|
|||
|
||||
export class MessageIde implements IDE {
|
||||
constructor(
|
||||
private readonly request: (messageType: string, data: any) => Promise<any>
|
||||
private readonly request: (messageType: string, data: any) => Promise<any>,
|
||||
) {}
|
||||
getIdeInfo(): Promise<IdeInfo> {
|
||||
return this.request("getIdeInfo", undefined);
|
||||
|
@ -45,7 +45,7 @@ export class MessageIde implements IDE {
|
|||
async showLines(
|
||||
filepath: string,
|
||||
startLine: number,
|
||||
endLine: number
|
||||
endLine: number,
|
||||
): Promise<void> {
|
||||
return await this.request("showLines", { filepath, startLine, endLine });
|
||||
}
|
||||
|
@ -90,7 +90,7 @@ export class MessageIde implements IDE {
|
|||
async showDiff(
|
||||
filepath: string,
|
||||
newContents: string,
|
||||
stepIndex: number
|
||||
stepIndex: number,
|
||||
): Promise<void> {
|
||||
await this.request("showDiff", { filepath, newContents, stepIndex });
|
||||
}
|
||||
|
|
|
@ -62,7 +62,7 @@ export function getConfigTsPath(): string {
|
|||
p,
|
||||
`export function modifyConfig(config: Config): Config {
|
||||
return config;
|
||||
}`
|
||||
}`,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -83,7 +83,7 @@ export function getConfigTsPath(): string {
|
|||
version: "1.0.0",
|
||||
description: "My Continue Configuration",
|
||||
main: "config.js",
|
||||
})
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -123,8 +123,8 @@ export function getTsConfigPath(): string {
|
|||
include: ["./config.ts"],
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
2,
|
||||
),
|
||||
);
|
||||
}
|
||||
return tsConfigPath;
|
||||
|
@ -143,7 +143,7 @@ export function getDevDataFilePath(fileName: string): string {
|
|||
}
|
||||
|
||||
export function editConfigJson(
|
||||
callback: (config: SerializedContinueConfig) => SerializedContinueConfig
|
||||
callback: (config: SerializedContinueConfig) => SerializedContinueConfig,
|
||||
) {
|
||||
const config = fs.readFileSync(getConfigJsonPath(), "utf8");
|
||||
let configJson = JSON.parse(config);
|
||||
|
@ -196,7 +196,7 @@ export function getRemoteConfigsFolderPath(): string {
|
|||
export function getPathToRemoteConfig(remoteConfigServerUrl: URL): string {
|
||||
const dir = path.join(
|
||||
getRemoteConfigsFolderPath(),
|
||||
remoteConfigServerUrl.hostname
|
||||
remoteConfigServerUrl.hostname,
|
||||
);
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir);
|
||||
|
|
|
@ -26,7 +26,7 @@ export class Telemetry {
|
|||
"phc_JS6XFROuNbhJtVCEdTSYk6gl5ArRrTNMpCcguAXlSPs",
|
||||
{
|
||||
host: "https://app.posthog.com",
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
} catch (e) {
|
||||
|
|
|
@@ -1,5 +1,5 @@
import * as path from "path";
const Parser = require("web-tree-sitter")
const Parser = require("web-tree-sitter");

export const supportedLanguages: { [key: string]: string } = {
cpp: "cpp",
|
@ -72,21 +72,21 @@ export async function getParserForFile(filepath: string) {
|
|||
await Parser.init();
|
||||
const parser = new Parser();
|
||||
const extension = path.extname(filepath).slice(1);
|
||||
|
||||
|
||||
if (!supportedLanguages[extension]) {
|
||||
console.warn(
|
||||
"Unable to load language for file",
|
||||
extension,
|
||||
"from path: ",
|
||||
filepath
|
||||
filepath,
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
|
||||
const wasmPath = path.join(
|
||||
__dirname,
|
||||
"tree-sitter-wasms",
|
||||
`tree-sitter-${supportedLanguages[extension]}.wasm`
|
||||
`tree-sitter-${supportedLanguages[extension]}.wasm`,
|
||||
);
|
||||
const language = await Parser.Language.load(wasmPath);
|
||||
parser.setLanguage(language);
|
||||
|
@ -95,4 +95,4 @@ export async function getParserForFile(filepath: string) {
|
|||
console.error("Unable to load language for file", filepath, e);
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -16,7 +16,7 @@ function constructPrompt(
|
|||
codeToEdit: string,
|
||||
llm: ILLM,
|
||||
userInput: string,
|
||||
language: string | undefined
|
||||
language: string | undefined,
|
||||
): string {
|
||||
const template = llm.promptTemplates?.edit ?? gptEditPrompt;
|
||||
const rendered = renderPromptTemplate(template, [], {
|
||||
|
@ -31,7 +31,7 @@ function constructPrompt(
|
|||
|
||||
async function* addIndentation(
|
||||
diffLineGenerator: AsyncGenerator<DiffLine>,
|
||||
indentation: string
|
||||
indentation: string,
|
||||
): AsyncGenerator<DiffLine> {
|
||||
for await (const diffLine of diffLineGenerator) {
|
||||
yield {
|
||||
|
@ -49,7 +49,7 @@ export async function* streamDiffLines(
|
|||
oldCode: string,
|
||||
llm: ILLM,
|
||||
input: string,
|
||||
language: string | undefined
|
||||
language: string | undefined,
|
||||
): AsyncGenerator<DiffLine> {
|
||||
// Strip common indentation for the LLM, then add back after generation
|
||||
const [withoutIndentation, commonIndentation] =
|
||||
|
|
|
@ -12,7 +12,7 @@ import { setupInlineTips } from "./inlineTips";
|
|||
export async function showTutorial() {
|
||||
const tutorialPath = path.join(
|
||||
getExtensionUri().fsPath,
|
||||
"continue_tutorial.py"
|
||||
"continue_tutorial.py",
|
||||
);
|
||||
// Ensure keyboard shortcuts match OS
|
||||
if (process.platform !== "darwin") {
|
||||
|
@ -22,7 +22,7 @@ export async function showTutorial() {
|
|||
}
|
||||
|
||||
const doc = await vscode.workspace.openTextDocument(
|
||||
vscode.Uri.file(tutorialPath)
|
||||
vscode.Uri.file(tutorialPath),
|
||||
);
|
||||
await vscode.window.showTextDocument(doc);
|
||||
}
|
||||
|
@ -35,7 +35,7 @@ async function openTutorialFirstTime(context: vscode.ExtensionContext) {
|
|||
}
|
||||
|
||||
function showRefactorMigrationMessage(
|
||||
extensionContext: vscode.ExtensionContext
|
||||
extensionContext: vscode.ExtensionContext,
|
||||
) {
|
||||
// Only if the vscode setting continue.manuallyRunningSserver is true
|
||||
const manuallyRunningServer =
|
||||
|
@ -45,21 +45,21 @@ function showRefactorMigrationMessage(
|
|||
if (
|
||||
manuallyRunningServer &&
|
||||
extensionContext?.globalState.get<boolean>(
|
||||
"continue.showRefactorMigrationMessage"
|
||||
"continue.showRefactorMigrationMessage",
|
||||
) !== false
|
||||
) {
|
||||
vscode.window
|
||||
.showInformationMessage(
|
||||
"The Continue server protocol was recently updated in a way that requires the latest server version to work properly. Since you are manually running the server, please be sure to upgrade with `pip install --upgrade continuedev`.",
|
||||
"Got it",
|
||||
"Don't show again"
|
||||
"Don't show again",
|
||||
)
|
||||
.then((selection) => {
|
||||
if (selection === "Don't show again") {
|
||||
// Get the global state
|
||||
extensionContext?.globalState.update(
|
||||
"continue.showRefactorMigrationMessage",
|
||||
false
|
||||
false,
|
||||
);
|
||||
}
|
||||
});
|
||||
|
@ -82,8 +82,8 @@ export async function activateExtension(context: vscode.ExtensionContext) {
|
|||
vscode.commands.executeCommand(
|
||||
"markdown.showPreview",
|
||||
vscode.Uri.file(
|
||||
path.join(getExtensionUri().fsPath, "media", "welcome.md")
|
||||
)
|
||||
path.join(getExtensionUri().fsPath, "media", "welcome.md"),
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
|
|
|
@@ -31,7 +31,7 @@ function handleSelectionChange(e: vscode.TextEditorSelectionChangeEvent) {
  const line = Math.max(0, selection.start.line - 1);

  const hoverMarkdown = new vscode.MarkdownString(
    `Use ${getMetaKeyLabel()} L to select code, or ${getMetaKeyLabel()} I to edit highlighted code. Click [here](command:continue.hideInlineTip) if you don't want to see these inline suggestions.`
    `Use ${getMetaKeyLabel()} L to select code, or ${getMetaKeyLabel()} I to edit highlighted code. Click [here](command:continue.hideInlineTip) if you don't want to see these inline suggestions.`,
  );
  hoverMarkdown.isTrusted = true;
  hoverMarkdown.supportHtml = true;

@@ -39,7 +39,7 @@ function handleSelectionChange(e: vscode.TextEditorSelectionChangeEvent) {
    {
      range: new vscode.Range(
        new vscode.Position(line, Number.MAX_VALUE),
        new vscode.Position(line, Number.MAX_VALUE)
        new vscode.Position(line, Number.MAX_VALUE),
      ),
      hoverMessage: [hoverMarkdown],
    },

@@ -54,7 +54,7 @@ const emptyFileTooltipDecoration = vscode.window.createTextEditorDecorationType(
      margin: "2em 0 0 0",
      fontStyle: "italic",
    },
  }
  },
);

let selectionChangeDebounceTimer: NodeJS.Timeout | undefined;

@@ -68,7 +68,7 @@ export function setupInlineTips(context: vscode.ExtensionContext) {
      selectionChangeDebounceTimer = setTimeout(() => {
        handleSelectionChange(e);
      }, 200);
    })
    }),
  );

  context.subscriptions.push(

@@ -85,12 +85,12 @@ export function setupInlineTips(context: vscode.ExtensionContext) {
          {
            range: new vscode.Range(
              new vscode.Position(0, Number.MAX_VALUE),
              new vscode.Position(0, Number.MAX_VALUE)
              new vscode.Position(0, Number.MAX_VALUE),
            ),
          },
        ]);
      }
    })
    }),
  );

  context.subscriptions.push(

@@ -104,7 +104,7 @@ export function setupInlineTips(context: vscode.ExtensionContext) {
          {
            range: new vscode.Range(
              new vscode.Position(0, Number.MAX_VALUE),
              new vscode.Position(0, Number.MAX_VALUE)
              new vscode.Position(0, Number.MAX_VALUE),
            ),
          },
        ]);

@@ -114,6 +114,6 @@ export function setupInlineTips(context: vscode.ExtensionContext) {
        editor.setDecorations(emptyFileTooltipDecoration, []);
      });
    }
    })
    }),
  );
}
@@ -3,16 +3,15 @@
 */

import * as path from "path";
import { workspace, ExtensionContext, extensions } from "vscode";
import { ExtensionContext, extensions, workspace } from "vscode";

import {
  DefinitionParams,
  LanguageClient,
  LanguageClientOptions,
  ServerOptions,
  State,
  StateChangeEvent,
  TransportKind,
  State,
} from "vscode-languageclient/node";
import { getExtensionUri } from "../util/vscode";

@@ -54,7 +53,7 @@ function startPythonLanguageServer(context: ExtensionContext): LanguageClient {
  let extensionPath = getExtensionUri().fsPath;
  const command = `cd ${path.join(
    extensionPath,
    "scripts"
    "scripts",
  )} && source env/bin/activate.fish && python -m pyls`;
  const serverOptions: ServerOptions = {
    command: command,

@@ -109,7 +108,7 @@ async function startPylance(context: ExtensionContext) {
    "languageServerExample",
    "Language Server Example",
    serverOptions,
    clientOptions
    clientOptions,
  );
  return client;
}
@@ -20,13 +20,17 @@ export class ContinueCompletionProvider

  public static errorsShown: Set<string> = new Set();

  constructor(private readonly configHandler: ConfigHandler, private readonly ide: IDE, private readonly tabAutocompleteModel: TabAutocompleteModel) {}
  constructor(
    private readonly configHandler: ConfigHandler,
    private readonly ide: IDE,
    private readonly tabAutocompleteModel: TabAutocompleteModel,
  ) {}

  public async provideInlineCompletionItems(
    document: vscode.TextDocument,
    position: vscode.Position,
    context: vscode.InlineCompletionContext,
    token: vscode.CancellationToken
    token: vscode.CancellationToken,
    //@ts-ignore
  ): ProviderResult<InlineCompletionItem[] | InlineCompletionList> {
    // Debounce

@@ -44,7 +48,7 @@ export class ContinueCompletionProvider
    const lastUUID = await new Promise((resolve) =>
      setTimeout(() => {
        resolve(ContinueCompletionProvider.lastUUID);
      }, options.debounceDelay)
      }, options.debounceDelay),
    );
    if (uuid !== lastUUID) {
      return [];

@@ -71,7 +75,7 @@ export class ContinueCompletionProvider
      token,
      options,
      this.tabAutocompleteModel,
      this.ide
      this.ide,
    );
    const completion = outcome?.completion;

@@ -84,7 +88,7 @@ export class ContinueCompletionProvider
        if (!outcome.cacheHit) {
          (await ContinueCompletionProvider.autocompleteCache).put(
            outcome.prompt,
            completion
            completion,
          );
        }
      }, 100);

@@ -103,7 +107,7 @@ export class ContinueCompletionProvider
            title: "Log Autocomplete Outcome",
            command: "continue.logAutocompleteOutcome",
            arguments: [outcome, logRejectionTimeout],
          }
          },
        ),
      ];
    } catch (e: any) {
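The debounce in `provideInlineCompletionItems` above stamps each request with a UUID, waits `options.debounceDelay`, and bails out if a newer request has replaced it in the meantime. A stripped-down sketch of that pattern (the names here are illustrative, not the extension's own):

```typescript
// Illustrative sketch of the UUID-based debounce seen above; not the extension's exact code.
import { randomUUID } from "crypto";

let lastRequestId: string | undefined;

async function debounced<T>(
  work: () => Promise<T>,
  delayMs: number,
): Promise<T | undefined> {
  const requestId = randomUUID();
  lastRequestId = requestId;
  // Wait out the debounce window, then check whether a newer request arrived.
  await new Promise((resolve) => setTimeout(resolve, delayMs));
  if (lastRequestId !== requestId) {
    return undefined; // superseded by a newer keystroke
  }
  return work();
}
```

Comparing IDs after the delay avoids cancelling in-flight promises: stale requests simply return nothing.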
@@ -44,7 +44,7 @@ export async function getTabCompletion(
  token: vscode.CancellationToken,
  options: TabAutocompleteOptions,
  tabAutocompleteModel: TabAutocompleteModel,
  ide: IDE
  ide: IDE,
): Promise<AutocompleteOutcome | undefined> {
  const startTime = Date.now();

@@ -59,7 +59,7 @@ export async function getTabCompletion(

  try {
    // Model
    const llm = await tabAutocompleteModel.get()
    const llm = await tabAutocompleteModel.get();
    if (llm instanceof OpenAI) {
      llm.useLegacyCompletionsEndpoint = true;
    }

@@ -67,31 +67,36 @@ export async function getTabCompletion(

    // Prompt
    const fullPrefix = document.getText(
      new vscode.Range(new vscode.Position(0, 0), pos)
      new vscode.Range(new vscode.Position(0, 0), pos),
    );
    const fullSuffix = document.getText(
      new vscode.Range(
        pos,
        new vscode.Position(document.lineCount, Number.MAX_SAFE_INTEGER)
      )
        new vscode.Position(document.lineCount, Number.MAX_SAFE_INTEGER),
      ),
    );
    const lineBelowCursor = document.lineAt(
      Math.min(pos.line + 1, document.lineCount - 1)
      Math.min(pos.line + 1, document.lineCount - 1),
    ).text;
    const clipboardText = await vscode.env.clipboard.readText();

    let extrasSnippets = (await Promise.race([
      getDefinitionsFromLsp(document.uri.fsPath, fullPrefix + fullSuffix, fullPrefix.length, ide),
      getDefinitionsFromLsp(
        document.uri.fsPath,
        fullPrefix + fullSuffix,
        fullPrefix.length,
        ide,
      ),
      new Promise((resolve) => {
        setTimeout(() => resolve([]), 100);
      })
    ])) as AutocompleteSnippet[]
      }),
    ])) as AutocompleteSnippet[];

    const workspaceDirs = await ide.getWorkspaceDirs();
    if (options.onlyMyCode) {
      extrasSnippets = extrasSnippets.filter((snippet) => {
        return workspaceDirs.some((dir) => snippet.filepath.startsWith(dir));
      })
      });
    }

    const { prefix, suffix, completeMultiline } =

@@ -106,7 +111,7 @@ export async function getTabCompletion(
      await recentlyEditedTracker.getRecentlyEditedRanges(),
      await recentlyEditedTracker.getRecentlyEditedDocuments(),
      llm.model,
      extrasSnippets
      extrasSnippets,
    );

    const { template, completionOptions } = options.template

@@ -120,7 +125,9 @@ export async function getTabCompletion(
    let completion = "";

    const cache = await autocompleteCache;
    const cachedCompletion = options.useCache ? await cache.get(prompt) : undefined;
    const cachedCompletion = options.useCache
      ? await cache.get(prompt)
      : undefined;
    let cacheHit = false;
    if (cachedCompletion) {
      // Cache

@@ -148,7 +155,7 @@ export async function getTabCompletion(
          temperature: 0,
          raw: true,
          stop,
        })
        }),
      );

      // LLM

@@ -165,9 +172,11 @@ export async function getTabCompletion(
      };
      const gen2 = onlyWhitespaceAfterEndOfLine(
        generatorWithCancellation(),
        lang.endOfLine
        lang.endOfLine,
      );
      const lineGenerator = streamWithNewLines(
        avoidPathLine(stopAtLines(streamLines(gen2)), lang.comment),
      );
      const lineGenerator = streamWithNewLines(avoidPathLine(stopAtLines(streamLines(gen2)), lang.comment));
      const finalGenerator = stopAtSimilarLine(lineGenerator, lineBelowCursor);
      for await (const update of finalGenerator) {
        completion += update;

@@ -204,8 +213,8 @@ export async function getTabCompletion(
      if (val === "Documentation") {
        vscode.env.openExternal(
          vscode.Uri.parse(
            "https://continue.dev/docs/walkthroughs/tab-autocomplete"
          )
            "https://continue.dev/docs/walkthroughs/tab-autocomplete",
          ),
        );
      }
    });
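The post-processing of the streamed completion above is a chain of async-generator transformers (`streamLines`, `stopAtLines`, `avoidPathLine`, `streamWithNewLines`, `stopAtSimilarLine`), each wrapping the previous stage. A minimal sketch of that composition style, with simplified stand-in transformers rather than the real ones:

```typescript
// Minimal sketch of chaining async-generator transformers, as in the
// lineGenerator pipeline above. These transformers are simplified stand-ins.
async function* splitIntoLines(
  chunks: AsyncGenerator<string>,
): AsyncGenerator<string> {
  let buffer = "";
  for await (const chunk of chunks) {
    buffer += chunk;
    let newlineIndex: number;
    while ((newlineIndex = buffer.indexOf("\n")) !== -1) {
      yield buffer.slice(0, newlineIndex);
      buffer = buffer.slice(newlineIndex + 1);
    }
  }
  if (buffer.length > 0) {
    yield buffer;
  }
}

async function* stopAtMarker(
  lines: AsyncGenerator<string>,
  marker: string,
): AsyncGenerator<string> {
  for await (const line of lines) {
    if (line.trim() === marker) {
      return; // stop streaming once the marker line appears
    }
    yield line;
  }
}

// Usage: each stage wraps the one before it, mirroring
// streamWithNewLines(avoidPathLine(stopAtLines(streamLines(gen2)), ...)).
// const lines = stopAtMarker(splitIntoLines(llmChunks), "<END>");
```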
@@ -17,7 +17,7 @@ export function stopStatusBarLoading() {

export function setupStatusBar(
  enabled: boolean | undefined,
  loading?: boolean
  loading?: boolean,
) {
  if (loading !== false) {
    clearTimeout(statusBarFalseTimeout);

@@ -25,7 +25,7 @@ export function setupStatusBar(
  }

  const statusBarItem = vscode.window.createStatusBarItem(
    vscode.StatusBarAlignment.Right
    vscode.StatusBarAlignment.Right,
  );
  statusBarItem.text = loading
    ? "$(loading~spin) Continue"
@@ -15,7 +15,7 @@ export class ContinueGUIWebviewViewProvider
  resolveWebviewView(
    webviewView: vscode.WebviewView,
    _context: vscode.WebviewViewResolveContext,
    _token: vscode.CancellationToken
    _token: vscode.CancellationToken,
  ): void | Thenable<void> {
    this._webview = webviewView.webview;
    webviewView.webview.html = this.getSidebarContent(

@@ -23,7 +23,7 @@ export class ContinueGUIWebviewViewProvider
      webviewView,
      this.ide,
      this.configHandler,
      this.verticalDiffManager
      this.verticalDiffManager,
    );
  }

@@ -45,12 +45,12 @@ export class ContinueGUIWebviewViewProvider
    private readonly ide: IDE,
    private readonly windowId: string,
    private readonly extensionContext: vscode.ExtensionContext,
    private readonly verticalDiffManager: VerticalPerLineDiffManager
    private readonly verticalDiffManager: VerticalPerLineDiffManager,
  ) {
    this.webviewProtocol = new VsCodeWebviewProtocol(
      ide,
      configHandler,
      verticalDiffManager
      verticalDiffManager,
    );
  }

@@ -62,7 +62,7 @@ export class ContinueGUIWebviewViewProvider
    verticalDiffManager: VerticalPerLineDiffManager,
    page: string | undefined = undefined,
    edits: FileEdit[] | undefined = undefined,
    isFullScreen: boolean = false
    isFullScreen: boolean = false,
  ): string {
    let extensionUri = getExtensionUri();
    let scriptUri: string;

@@ -148,8 +148,8 @@ export class ContinueGUIWebviewViewProvider
        <script>window.colorThemeName = "dark-plus"</script>
        <script>window.workspacePaths = ${JSON.stringify(
          vscode.workspace.workspaceFolders?.map(
            (folder) => folder.uri.fsPath
          ) || []
            (folder) => folder.uri.fsPath,
          ) || [],
        )}</script>
        <script>window.isFullScreen = ${isFullScreen}</script>
@@ -1,11 +1,11 @@
import * as vscode from "vscode";
import * as path from "path";
import * as vscode from "vscode";
import { uriFromFilePath } from "./util/vscode";

export function showAnswerInTextEditor(
  filename: string,
  range: vscode.Range,
  answer: string
  answer: string,
) {
  vscode.workspace.openTextDocument(uriFromFilePath(filename)).then((doc) => {
    const editor = vscode.window.activeTextEditor;

@@ -17,7 +17,7 @@ export function showAnswerInTextEditor(
    vscode.window.showTextDocument(doc).then((new_editor) => {
      new_editor.revealRange(
        new vscode.Range(range.end, range.end),
        vscode.TextEditorRevealType.InCenter
        vscode.TextEditorRevealType.InCenter,
      );

      let decorationType = vscode.window.createTextEditorDecorationType({

@@ -67,7 +67,7 @@ class DecorationManager {

  private rerenderDecorations(
    editorUri: string,
    decorationType: vscode.TextEditorDecorationType
    decorationType: vscode.TextEditorDecorationType,
  ) {
    const editor = vscode.window.activeTextEditor;
    if (!editor) {

@@ -144,7 +144,7 @@ export const decorationManager = new DecorationManager();
function constructBaseKey(
  editor: vscode.TextEditor,
  lineno: number,
  decorationType?: vscode.TextEditorDecorationType
  decorationType?: vscode.TextEditorDecorationType,
): DecorationKey {
  return {
    editorUri: editor.document.uri.toString(),

@@ -159,14 +159,14 @@ function constructBaseKey(
const gutterSpinnerDecorationType =
  vscode.window.createTextEditorDecorationType({
    gutterIconPath: vscode.Uri.file(
      path.join(__dirname, "..", "media", "spinner.gif")
      path.join(__dirname, "..", "media", "spinner.gif"),
    ),
    gutterIconSize: "contain",
  });

export function showGutterSpinner(
  editor: vscode.TextEditor,
  lineno: number
  lineno: number,
): DecorationKey {
  const key = constructBaseKey(editor, lineno, gutterSpinnerDecorationType);
  decorationManager.addDecoration(key);

@@ -177,7 +177,7 @@ export function showGutterSpinner(
export function showLintMessage(
  editor: vscode.TextEditor,
  lineno: number,
  msg: string
  msg: string,
): DecorationKey {
  const key = constructBaseKey(editor, lineno);
  key.decorationType = vscode.window.createTextEditorDecorationType({

@@ -186,7 +186,7 @@ export function showLintMessage(
      color: "rgb(255, 0, 0, 0.6)",
    },
    gutterIconPath: vscode.Uri.file(
      path.join(__dirname, "..", "media", "error.png")
      path.join(__dirname, "..", "media", "error.png"),
    ),
    gutterIconSize: "contain",
  });

@@ -198,7 +198,7 @@ export function showLintMessage(
export function highlightCode(
  editor: vscode.TextEditor,
  range: vscode.Range,
  removeOnClick: boolean = true
  removeOnClick: boolean = true,
): DecorationKey {
  const decorationType = vscode.window.createTextEditorDecorationType({
    backgroundColor: "rgb(255, 255, 0, 0.1)",
@@ -78,7 +78,7 @@ export class DiffManager {
      vscode.workspace.fs.createDirectory(uriFromFilePath(this.remoteTmpDir));
      return path.join(
        this.remoteTmpDir,
        this.escapeFilepath(originalFilepath)
        this.escapeFilepath(originalFilepath),
      );
    }
    return path.join(DIFF_DIRECTORY, this.escapeFilepath(originalFilepath));

@@ -86,7 +86,7 @@ export class DiffManager {

  private async openDiffEditor(
    originalFilepath: string,
    newFilepath: string
    newFilepath: string,
  ): Promise<vscode.TextEditor | undefined> {
    // If the file doesn't yet exist or the basename is a single digit number (vscode terminal), don't open the diff editor
    try {

@@ -116,21 +116,21 @@ export class DiffManager {

    if (
      this.extensionContext.globalState.get<boolean>(
        "continue.showDiffInfoMessage"
        "continue.showDiffInfoMessage",
      ) !== false
    ) {
      vscode.window
        .showInformationMessage(
          `Accept (${getMetaKeyLabel()}⇧↩) or reject (${getMetaKeyLabel()}⇧⌫) at the top of the file.`,
          "Got it",
          "Don't show again"
          "Don't show again",
        )
        .then((selection) => {
          if (selection === "Don't show again") {
            // Get the global state
            this.extensionContext.globalState.update(
              "continue.showDiffInfoMessage",
              false
              false,
            );
          }
        });

@@ -153,7 +153,7 @@ export class DiffManager {
  async writeDiff(
    originalFilepath: string,
    newContent: string,
    step_index: number
    step_index: number,
  ): Promise<string> {
    await this.setupDirectory();

@@ -181,7 +181,7 @@ export class DiffManager {
    if (diffInfo && !diffInfo?.editor) {
      diffInfo.editor = await this.openDiffEditor(
        originalFilepath,
        newFilepath
        newFilepath,
      );
      this.diffs.set(newFilepath, diffInfo);
    }

@@ -192,7 +192,7 @@ export class DiffManager {
      // Flashes too much on mac with it
      vscode.commands.executeCommand(
        "workbench.action.files.revert",
        uriFromFilePath(newFilepath)
        uriFromFilePath(newFilepath),
      );
    }

@@ -218,7 +218,7 @@ export class DiffManager {
      return activeEditorPath;
    }
    const visibleEditors = vscode.window.visibleTextEditors.map(
      (editor) => editor.document.uri.fsPath
      (editor) => editor.document.uri.fsPath,
    );
    for (const editorPath of visibleEditors) {
      if (path.dirname(editorPath) === DIFF_DIRECTORY) {

@@ -262,7 +262,7 @@ export class DiffManager {
      .then(async () => {
        await writeFile(
          uriFromFilePath(diffInfo.originalFilepath),
          await readFile(diffInfo.newFilepath)
          await readFile(diffInfo.newFilepath),
        );
        this.cleanUpDiff(diffInfo);
      });

@@ -278,7 +278,7 @@ export class DiffManager {
    if (!newFilepath) {
      console.log(
        "No newFilepath provided to reject the diff, diffs.size was",
        this.diffs.size
        this.diffs.size,
      );
      return;
    }

@@ -328,6 +328,6 @@ async function recordAcceptReject(accepted: boolean, diffInfo: DiffInfo) {
  // Write the updated suggestions back to the file
  await writeFile(
    vscode.Uri.file(suggestionsPath),
    JSON.stringify(suggestions, null, 4)
    JSON.stringify(suggestions, null, 4),
  );
}
@@ -30,7 +30,7 @@ type VerticalPerLineDiffEvent =

async function* streamVerticalPerLineDiff(
  lineGenerator: AsyncGenerator<string>,
  oldCode: string
  oldCode: string,
): AsyncGenerator<VerticalPerLineDiffEvent> {
  const remainingLines = oldCode.split("\n");
  let index = 0; // Index in terms of oldCode
@@ -18,7 +18,7 @@ type DiffEvent = LineUpdate | DiffBlock;
 * Returns a stream of characters, but with "undefined" inserted into the stream whenever there is a new line
 */
async function* streamWithLineBreaks(
  completionStream: AsyncGenerator<string>
  completionStream: AsyncGenerator<string>,
): AsyncGenerator<string | undefined> {
  for await (const chunk of completionStream) {
    if (chunk.includes("\n")) {

@@ -37,7 +37,7 @@ async function* streamWithLineBreaks(

async function* streamDiffEvents(
  completionStream: AsyncGenerator<string>,
  oldCode: string
  oldCode: string,
): AsyncGenerator<DiffEvent> {
  let remainingLines = oldCode.split("\n");
  let i = 0;
@@ -12,7 +12,7 @@ export const greenDecorationType = vscode.window.createTextEditorDecorationType(
    isWholeLine: true,
    backgroundColor: "rgba(0, 255, 0, 0.2)",
    rangeBehavior: vscode.DecorationRangeBehavior.ClosedClosed,
  }
  },
);

export const indexDecorationType = vscode.window.createTextEditorDecorationType(

@@ -20,7 +20,7 @@ export const indexDecorationType = vscode.window.createTextEditorDecorationType(
    isWholeLine: true,
    backgroundColor: "rgba(255, 255, 255, 0.2)",
    rangeBehavior: vscode.DecorationRangeBehavior.ClosedClosed,
  }
  },
);
export const belowIndexDecorationType =
  vscode.window.createTextEditorDecorationType({

@@ -35,7 +35,7 @@ export class DecorationTypeRangeManager {

  constructor(
    decorationType: vscode.TextEditorDecorationType,
    editor: vscode.TextEditor
    editor: vscode.TextEditor,
  ) {
    this.decorationType = decorationType;
    this.editor = editor;

@@ -48,7 +48,7 @@ export class DecorationTypeRangeManager {
    if (lastRange && lastRange.end.line === startIndex - 1) {
      this.ranges[this.ranges.length - 1] = lastRange.with(
        undefined,
        lastRange.end.translate(numLines)
        lastRange.end.translate(numLines),
      );
    } else {
      this.ranges.push(

@@ -56,8 +56,8 @@ export class DecorationTypeRangeManager {
          startIndex,
          0,
          startIndex + numLines - 1,
          Number.MAX_SAFE_INTEGER
        )
          Number.MAX_SAFE_INTEGER,
        ),
      );
    }

@@ -79,11 +79,11 @@ export class DecorationTypeRangeManager {

  private translateRange(
    range: vscode.Range,
    lineOffset: number
    lineOffset: number,
  ): vscode.Range {
    return new vscode.Range(
      range.start.translate(lineOffset),
      range.end.translate(lineOffset)
      range.end.translate(lineOffset),
    );
  }
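The decoration types above are ordinary `vscode.window.createTextEditorDecorationType` handles that `DecorationTypeRangeManager` applies to whole-line ranges with `editor.setDecorations`. A minimal, self-contained sketch of that pattern (simplified, outside the extension's own classes):

```typescript
// Minimal sketch of the decoration pattern managed by DecorationTypeRangeManager.
import * as vscode from "vscode";

const highlightType = vscode.window.createTextEditorDecorationType({
  isWholeLine: true,
  backgroundColor: "rgba(0, 255, 0, 0.2)",
});

// Calling setDecorations again with a new array replaces the previous ranges
// for this decoration type, which is how the manager re-renders as lines shift.
function highlightLines(
  editor: vscode.TextEditor,
  startLine: number,
  endLine: number,
) {
  const range = new vscode.Range(
    new vscode.Position(startLine, 0),
    new vscode.Position(endLine, Number.MAX_SAFE_INTEGER),
  );
  editor.setDecorations(highlightType, [range]);
}
```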
@@ -36,9 +36,9 @@ export class VerticalPerLineDiffHandler {
    >,
    private readonly clearForFilepath: (
      filepath: string | undefined,
      accept: boolean
      accept: boolean,
    ) => void,
    input?: string
    input?: string,
  ) {
    this.currentLineIndex = startLine;
    this.startLine = startLine;

@@ -48,11 +48,11 @@ export class VerticalPerLineDiffHandler {

    this.redDecorationManager = new DecorationTypeRangeManager(
      redDecorationType,
      this.editor
      this.editor,
    );
    this.greenDecorationManager = new DecorationTypeRangeManager(
      greenDecorationType,
      this.editor
      this.editor,
    );
  }

@@ -93,16 +93,16 @@ export class VerticalPerLineDiffHandler {
      // Insert the block of deleted lines
      await this.insertTextAboveLine(
        this.currentLineIndex - this.insertedInCurrentBlock,
        totalDeletedContent
        totalDeletedContent,
      );
      this.redDecorationManager.addLines(
        this.currentLineIndex - this.insertedInCurrentBlock,
        this.deletionBuffer.length
        this.deletionBuffer.length,
      );
      // Shift green decorations downward
      this.greenDecorationManager.shiftDownAfterLine(
        this.currentLineIndex - this.insertedInCurrentBlock,
        this.deletionBuffer.length
        this.deletionBuffer.length,
      );

      // Update line index, clear buffer

@@ -128,9 +128,9 @@ export class VerticalPerLineDiffHandler {
        editBuilder.insert(
          new vscode.Position(
            lineCount,
            this.editor.document.lineAt(lineCount - 1).text.length
            this.editor.document.lineAt(lineCount - 1).text.length,
          ),
          "\n" + text
          "\n" + text,
        );
      } else {
        editBuilder.insert(new vscode.Position(index, 0), text + "\n");

@@ -148,7 +148,7 @@ export class VerticalPerLineDiffHandler {
    const startLine = new vscode.Position(index, 0);
    await this.editor.edit((editBuilder) => {
      editBuilder.delete(
        new vscode.Range(startLine, startLine.translate(numLines))
        new vscode.Range(startLine, startLine.translate(numLines)),
      );
    });
  }

@@ -164,7 +164,7 @@ export class VerticalPerLineDiffHandler {
    this.editor.setDecorations(indexDecorationType, [
      new vscode.Range(
        start,
        new vscode.Position(start.line, Number.MAX_SAFE_INTEGER)
        new vscode.Position(start.line, Number.MAX_SAFE_INTEGER),
      ),
    ]);
    const end = new vscode.Position(this.endLine, 0);

@@ -183,7 +183,7 @@ export class VerticalPerLineDiffHandler {
    vscode.commands.executeCommand(
      "setContext",
      "continue.streamingDiff",
      false
      false,
    );
    const rangesToDelete = accept
      ? this.redDecorationManager.getRanges()

@@ -200,8 +200,8 @@ export class VerticalPerLineDiffHandler {
        editBuilder.delete(
          new vscode.Range(
            range.start,
            new vscode.Position(range.end.line + 1, 0)
          )
            new vscode.Position(range.end.line + 1, 0),
          ),
        );
      }
    });

@@ -265,7 +265,7 @@ export class VerticalPerLineDiffHandler {
    accept: boolean,
    startLine: number,
    numGreen: number,
    numRed: number
    numRed: number,
  ) {
    if (numGreen > 0) {
      // Delete the editor decoration
@@ -22,7 +22,7 @@ export class VerticalPerLineDiffManager {
    filepath: string,
    startLine: number,
    endLine: number,
    input: string
    input: string,
  ) {
    if (this.filepathToEditorMap.has(filepath)) {
      this.filepathToEditorMap.get(filepath)?.clear(false);

@@ -36,7 +36,7 @@ export class VerticalPerLineDiffManager {
      editor,
      this.editorToVerticalDiffCodeLens,
      this.clearForFilepath,
      input
      input,
    );
    this.filepathToEditorMap.set(filepath, handler);
    return handler;

@@ -48,7 +48,7 @@ export class VerticalPerLineDiffManager {
  getOrCreateVerticalPerLineDiffHandler(
    filepath: string,
    startLine: number,
    endLine: number
    endLine: number,
  ) {
    if (this.filepathToEditorMap.has(filepath)) {
      return this.filepathToEditorMap.get(filepath)!;

@@ -60,7 +60,7 @@ export class VerticalPerLineDiffManager {
      endLine,
      editor,
      this.editorToVerticalDiffCodeLens,
      this.clearForFilepath
      this.clearForFilepath,
    );
    this.filepathToEditorMap.set(filepath, handler);
    return handler;

@@ -93,7 +93,7 @@ export class VerticalPerLineDiffManager {
  acceptRejectVerticalDiffBlock(
    accept: boolean,
    filepath?: string,
    index?: number
    index?: number,
  ) {
    if (!filepath) {
      const activeEditor = vscode.window.activeTextEditor;

@@ -123,7 +123,7 @@ export class VerticalPerLineDiffManager {
      accept,
      block.start,
      block.numGreen,
      block.numRed
      block.numRed,
    );
  }

@@ -146,7 +146,7 @@ export class VerticalPerLineDiffManager {
      filepath,
      existingHandler?.range.start.line ?? startLine,
      existingHandler?.range.end.line ?? endLine,
      input
      input,
    );
    if (!diffHandler) {
      return;

@@ -156,7 +156,7 @@ export class VerticalPerLineDiffManager {
      existingHandler?.range ??
      new vscode.Range(
        editor.selection.start.with(undefined, 0),
        editor.selection.end.with(undefined, Number.MAX_SAFE_INTEGER)
        editor.selection.end.with(undefined, Number.MAX_SAFE_INTEGER),
      );
    const rangeContent = editor.document.getText(selectedRange);
    const llm = await this.configHandler.llmFromTitle();

@@ -164,19 +164,19 @@ export class VerticalPerLineDiffManager {
    // Unselect the range
    editor.selection = new vscode.Selection(
      editor.selection.active,
      editor.selection.active
      editor.selection.active,
    );

    vscode.commands.executeCommand(
      "setContext",
      "continue.streamingDiff",
      true
      true,
    );

    if (existingHandler?.input) {
      if (existingHandler.input.startsWith("Original request: ")) {
        existingHandler.input = existingHandler.input.substring(
          "Original request: ".length
          "Original request: ".length,
        );
      }
      input = `Original request: ${existingHandler.input}\nUpdated request: ${input}`;

@@ -187,14 +187,14 @@ export class VerticalPerLineDiffManager {
          rangeContent,
          llm,
          input,
          getMarkdownLanguageTagForFile(filepath)
        )
          getMarkdownLanguageTagForFile(filepath),
        ),
      );
    } finally {
      vscode.commands.executeCommand(
        "setContext",
        "continue.streamingDiff",
        false
        false,
      );
    }
  }
@@ -16,7 +16,7 @@ async function dynamicImportAndActivate(context: vscode.ExtensionContext) {
    .showInformationMessage(
      "Error activating the Continue extension.",
      "View Logs",
      "Retry"
      "Retry",
    )
    .then((selection) => {
      if (selection === "View Logs") {
Some files were not shown because too many files have changed in this diff.