fix Ollama onboarding (#1539)

* allow setting apiBase for Bedrock provider

* better linking

* new prompt file action

* strip <COMPLETION> from GPT-4 completion

* continue proxy FIM support

* warn user if potentially dangerous command is generated

* tab autocomplete docs

* docs: update PR template (#1531)

* chore: add docs to install script (#1533)

* fix(extensions): schema for db ctx provider (#1534)

* fix(extensions): schema for db ctx provider

* fix: invalid enum

* prettierignore

* docs: add docs and schema for "OS" provider (#1536)

* tests (#1501)

* 👷 CI for JetBrains

* default working dir

* changelog

* build binaries

* binary testing setup

* IdeSettings

* core binary testing

* run binary tests in ci

* remove unused targets

* needs build

* console.log bin contents

* fix ci

* fix win32 binary download

* test

* no linux arm64

* macos latest

* macos-12

* binary permissions

* upload logs

* fix

* upload full folder as binary artifact

* test

* test macos only

* set full execute permissions

* copy sqlite binary

* cd

* it worked!

* build again in test job

* debug

* remove timeout

* info

* log

* log2

* more logs

* catch

* fewer logs

* test all platforms

* test downloaded artifact

* needs build

* updates

* build

* false

* release

* add tag and upload binaries

* change tag name

* proper artifact upload

* jest updates

* generate a few unit tests with Continue

* fix imports related to IdeSettings

* run tsc on PRs

* remove shareSession command (unused)

* update release process

* update plugin version

* don't show ghost text when JetBrains completion is visible

* run JetBrains CI on main

* check ts in dev

* ignore .env

* 🚑 fix constant warnings when onboarding with Ollama

---------

Co-authored-by: Patrick Erichsen <patrick.a.erichsen@gmail.com>
Author: Nate Sesti, 2024-06-21 11:09:03 -07:00 (committed by GitHub)
parent 066e6f932b
commit ca884b8212
72 changed files with 6500 additions and 1128 deletions


@ -4,4 +4,5 @@
## Checklist
- [ ] The base branch of this PR is `preview`, rather than `main`
- [ ] The base branch of this PR is `dev`, rather than `main`
- [ ] The relevant docs, if any, have been updated or created

.github/workflows/jetbrains-build.yaml (new file, 448 lines)

@ -0,0 +1,448 @@
# GitHub Actions Workflow is created for testing and preparing the plugin release in the following steps:
# - Validate Gradle Wrapper.
# - Run 'test' and 'verifyPlugin' tasks.
# - Run Qodana inspections.
# - Run the 'buildPlugin' task and prepare artifact for further tests.
# - Run the 'runPluginVerifier' task.
# - Create a draft release.
#
# The workflow is triggered on push and pull_request events.
#
# GitHub Actions reference: https://help.github.com/en/actions
#
## JBIJPPTPL
name: Build
on:
# Trigger the workflow on pushes to only the 'main' branch (this avoids duplicate checks being run e.g., for dependabot pull requests)
push:
branches: [preview, main]
# Trigger the workflow on any pull request
# pull_request:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
defaults:
run:
working-directory: ./extensions/intellij
jobs:
# Prepare environment and build the plugin
build:
name: Build
runs-on: ubuntu-latest
outputs:
version: ${{ steps.properties.outputs.version }}
changelog: ${{ steps.properties.outputs.changelog }}
pluginVerifierHomeDir: ${{ steps.properties.outputs.pluginVerifierHomeDir }}
steps:
# Check out current repository
- name: Fetch Sources
uses: actions/checkout@v4
# Validate wrapper
- name: Gradle Wrapper Validation
uses: gradle/actions/wrapper-validation@v3
# Set up Java environment for the next steps
- name: Setup Java
uses: actions/setup-java@v4
with:
distribution: zulu
java-version: 17
# Setup Gradle
- name: Setup Gradle
uses: gradle/actions/setup-gradle@v3
with:
gradle-home-cache-cleanup: true
# Set environment variables
- name: Export Properties
id: properties
shell: bash
run: |
PROPERTIES="$(./gradlew properties --console=plain -q)"
VERSION="$(echo "$PROPERTIES" | grep "^version:" | cut -f2- -d ' ')"
# CHANGELOG="$(./gradlew getChangelog --unreleased --no-header --console=plain -q)"
CHANGELOG=""
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "pluginVerifierHomeDir=~/.pluginVerifier" >> $GITHUB_OUTPUT
echo "changelog<<EOF" >> $GITHUB_OUTPUT
echo "$CHANGELOG" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
./gradlew listProductsReleases # prepare list of IDEs for Plugin Verifier
# Setup Node.js
- name: Use Node.js from .nvmrc
uses: actions/setup-node@v4
with:
node-version-file: ".nvmrc"
# Cache node_modules
- name: Cache core node_modules
uses: actions/cache@v3
with:
path: core/node_modules
key: ${{ runner.os }}-node-${{ hashFiles('core/package-lock.json') }}
- name: Cache binary node_modules
uses: actions/cache@v3
with:
path: binary/node_modules
key: ${{ runner.os }}-node-${{ hashFiles('binary/package-lock.json') }}
# npm install core
- name: Install core node_modules
run: |
cd ../../core
npm ci
# npm install binary
- name: Install binary node_modules
run: |
cd ../../binary
npm ci
# Build binaries
- name: Build binary
run: |
cd ../../binary
npm run build
# Build plugin
- name: Build plugin
run: ./gradlew buildPlugin
# Prepare plugin archive content for creating artifact
- name: Prepare Plugin Artifact
id: artifact
shell: bash
run: |
cd ${{ github.workspace }}/extensions/intellij/build/distributions
FILENAME=`ls *.zip`
unzip "$FILENAME" -d content
echo "filename=${FILENAME:0:-4}" >> $GITHUB_OUTPUT
# Store already-built plugin as an artifact for downloading
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: ${{ steps.artifact.outputs.filename }}
path: ./extensions/intellij/build/distributions/content/*/*
# Upload binaries as artifacts
- name: Upload artifact (darwin-arm64)
uses: actions/upload-artifact@v4
with:
name: continue-binary-darwin-arm64
path: ./binary/bin/darwin-arm64/
- name: Upload artifact (darwin-x64)
uses: actions/upload-artifact@v4
with:
name: continue-binary-darwin-x64
path: ./binary/bin/darwin-x64/
- name: Upload artifact (win32-x64)
uses: actions/upload-artifact@v4
with:
name: continue-binary-win32-x64
path: ./binary/bin/win32-x64/
- name: Upload artifact (linux-arm64)
uses: actions/upload-artifact@v4
with:
name: continue-binary-linux-arm64
path: ./binary/bin/linux-arm64/
- name: Upload artifact (linux-x64)
uses: actions/upload-artifact@v4
with:
name: continue-binary-linux-x64
path: ./binary/bin/linux-x64/
test-binaries:
needs: build
strategy:
matrix:
include:
- os: windows-latest
platform: win32
arch: x64
npm_config_arch: x64
- os: ubuntu-latest
platform: linux
arch: x64
npm_config_arch: x64
# arm64 not actually supported by GitHub
# - os: ubuntu-latest
# platform: linux
# arch: arm64
# npm_config_arch: arm64
- os: macos-12
platform: darwin
arch: x64
npm_config_arch: x64
- os: macos-latest
platform: darwin
arch: arm64
npm_config_arch: arm64
runs-on: ${{ matrix.os }}
steps:
# 1. Check-out repository
- name: Check-out repository
uses: actions/checkout@v4
# 2. Install npm dependencies
- name: Use Node.js from .nvmrc
uses: actions/setup-node@v4
with:
node-version-file: ".nvmrc"
- name: Cache core node_modules
uses: actions/cache@v3
with:
path: core/node_modules
key: ${{ runner.os }}-node-${{ hashFiles('core/package-lock.json') }}
- name: Cache binary node_modules
uses: actions/cache@v3
with:
path: binary/node_modules
key: ${{ runner.os }}-node-${{ hashFiles('binary/package-lock.json') }}
- name: Install Core Dependencies
run: |
cd ../../core
npm ci
- name: Install Binary Dependencies
run: |
cd ../../binary
npm ci
# Download the binary artifact
- name: Download binary artifact
uses: actions/download-artifact@v4
with:
name: continue-binary-${{ matrix.platform }}-${{ matrix.arch }}
path: ./binary/bin/${{ matrix.platform }}-${{ matrix.arch }}/
# Set execute permissions for the binary (non-Windows)
- name: Set execute permissions
run: |
cd ../../binary/bin/${{ matrix.platform }}-${{ matrix.arch }}
chmod +x continue-binary
chmod +x node_sqlite3.node
chmod +x index.node
chmod +x esbuild
if: ${{ matrix.platform != 'win32' }}
- name: Copy node_sqlite3.node to necessary location
run: |
cd ../../binary
mkdir build
cp ./bin/${{ matrix.platform }}-${{ matrix.arch }}/node_sqlite3.node ./build/node_sqlite3.node
# Run tests for binary
- name: Run binary tests
run: |
cd ../../binary
npm run test
- name: Upload logs
if: ${{ always() }}
uses: actions/upload-artifact@v2
with:
name: core-logs-${{ matrix.platform }}-${{ matrix.arch }}
path: binary/.continue/logs/core.log
# Run tests and upload a code coverage report
test:
name: Test
needs: [build]
runs-on: ubuntu-latest
steps:
# Check out current repository
- name: Fetch Sources
uses: actions/checkout@v4
# Set up Java environment for the next steps
- name: Setup Java
uses: actions/setup-java@v4
with:
distribution: zulu
java-version: 17
# Setup Gradle
- name: Setup Gradle
uses: gradle/actions/setup-gradle@v3
with:
gradle-home-cache-cleanup: true
# Run tests
- name: Run Tests
run: ./gradlew check
# Collect Tests Result of failed tests
- name: Collect Tests Result
if: ${{ failure() }}
uses: actions/upload-artifact@v4
with:
name: tests-result
path: ${{ github.workspace }}/extensions/intellij/build/reports/tests
# Upload the Kover report to CodeCov
# - name: Upload Code Coverage Report
# uses: codecov/codecov-action@v4
# with:
# files: ${{ github.workspace }}/build/reports/kover/report.xml
# Run Qodana inspections and provide report
inspectCode:
if: false
name: Inspect code
needs: [build]
runs-on: ubuntu-latest
permissions:
contents: write
checks: write
pull-requests: write
steps:
# Free GitHub Actions Environment Disk Space
- name: Maximize Build Space
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
large-packages: false
# Check out current repository
- name: Fetch Sources
uses: actions/checkout@v4
# Set up Java environment for the next steps
- name: Setup Java
uses: actions/setup-java@v4
with:
distribution: zulu
java-version: 17
# Run Qodana inspections
- name: Qodana - Code Inspection
uses: JetBrains/qodana-action@v2024.1.5
with:
cache-default-branch-only: true
# Run plugin structure verification along with IntelliJ Plugin Verifier
verify:
name: Verify plugin
needs: [build]
runs-on: ubuntu-latest
steps:
# Free GitHub Actions Environment Disk Space
- name: Maximize Build Space
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
large-packages: false
# Check out current repository
- name: Fetch Sources
uses: actions/checkout@v4
# Set up Java environment for the next steps
- name: Setup Java
uses: actions/setup-java@v4
with:
distribution: zulu
java-version: 17
# Setup Gradle
- name: Setup Gradle
uses: gradle/actions/setup-gradle@v3
with:
gradle-home-cache-cleanup: true
# Cache Plugin Verifier IDEs
- name: Setup Plugin Verifier IDEs Cache
uses: actions/cache@v4
with:
path: ${{ needs.build.outputs.pluginVerifierHomeDir }}/ides
key: plugin-verifier-${{ hashFiles('build/listProductsReleases.txt') }}
# Run Verify Plugin task and IntelliJ Plugin Verifier tool
- name: Run Plugin Verification tasks
run: ./gradlew runPluginVerifier -Dplugin.verifier.home.dir=${{ needs.build.outputs.pluginVerifierHomeDir }}
# Collect Plugin Verifier Result
- name: Collect Plugin Verifier Result
if: ${{ always() }}
uses: actions/upload-artifact@v4
with:
name: pluginVerifier-result
path: ${{ github.workspace }}/build/reports/pluginVerifier
# Prepare a draft release for GitHub Releases page for the manual verification
# If accepted and published, release workflow would be triggered
releaseDraft:
name: Release draft
if: github.event_name != 'pull_request'
needs: [build, test, test-binaries, verify]
runs-on: ubuntu-latest
permissions:
contents: write
steps:
# Check out current repository
- name: Fetch Sources
uses: actions/checkout@v4
- name: Set up Git
run: |
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
- name: Tag the repository
id: tag
run: |
# See https://docs.github.com/en/get-started/using-git/dealing-with-special-characters-in-branch-and-tag-names
TAG=v${{ needs.build.outputs.version }}-jetbrains
echo "$TAG"
echo "tag=$TAG" >> $GITHUB_OUTPUT
git tag -a $TAG -m "Published version $TAG" ${GITHUB_SHA}
git push origin $TAG
# Remove old release drafts by using the curl request for the available releases with a draft flag
- name: Remove Old Release Drafts
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gh api repos/{owner}/{repo}/releases \
--jq '.[] | select(.draft == true) | .id' \
| xargs -I '{}' gh api -X DELETE repos/{owner}/{repo}/releases/{}
# Create a new release draft which is not publicly visible and requires manual acceptance
- name: Create Release Draft
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gh release create "${{ steps.tag.outputs.tag }}" \
--draft \
--title "v${{ needs.build.outputs.version }}-jetbrains" \
--notes "$(cat << 'EOM'
${{ needs.build.outputs.changelog }}
EOM
)"
# Download the Gradle build artifact
- name: Download Gradle build artifact
uses: actions/download-artifact@v4
with:
name: ${{ needs.build.outputs.filename }}
path: ./artifacts

.github/workflows/jetbrains-release.yaml (new file, 142 lines)

@ -0,0 +1,142 @@
# GitHub Actions Workflow created for handling the release process based on the draft release prepared with the Build workflow.
# Running the publishPlugin task requires all following secrets to be provided: PUBLISH_TOKEN, PRIVATE_KEY, PRIVATE_KEY_PASSWORD, CERTIFICATE_CHAIN.
# See https://plugins.jetbrains.com/docs/intellij/plugin-signing.html for more information.
name: Release
on:
release:
types: [prereleased, released]
defaults:
run:
working-directory: ./extensions/intellij
jobs:
# Prepare and publish the plugin to JetBrains Marketplace repository
release:
name: Publish Plugin
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
# Check out current repository
- name: Fetch Sources
uses: actions/checkout@v4
with:
ref: ${{ github.event.release.tag_name }}
# Set up Java environment for the next steps
- name: Setup Java
uses: actions/setup-java@v4
with:
distribution: zulu
java-version: 17
# Setup Gradle
- name: Setup Gradle
uses: gradle/actions/setup-gradle@v3
with:
gradle-home-cache-cleanup: true
# Set environment variables
- name: Export Properties
id: properties
shell: bash
run: |
CHANGELOG="$(cat << 'EOM' | sed -e 's/^[[:space:]]*$//g' -e '/./,$!d'
${{ github.event.release.body }}
EOM
)"
echo "changelog<<EOF" >> $GITHUB_OUTPUT
echo "$CHANGELOG" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
# Setup Node.js
- name: Use Node.js from .nvmrc
uses: actions/setup-node@v4
with:
node-version-file: ".nvmrc"
# Cache node_modules
- name: Cache core node_modules
uses: actions/cache@v3
with:
path: core/node_modules
key: ${{ runner.os }}-node-${{ hashFiles('core/package-lock.json') }}
- name: Cache binary node_modules
uses: actions/cache@v3
with:
path: binary/node_modules
key: ${{ runner.os }}-node-${{ hashFiles('binary/package-lock.json') }}
# npm install core
- name: Install core node_modules
run: |
cd ../../core
npm ci
# npm install binary
- name: Install binary node_modules
run: |
cd ../../binary
npm ci
# Build binaries
- name: Build binary
run: |
cd ../../binary
npm run build
# Update Unreleased section with the current release note
- name: Patch Changelog
if: ${{ steps.properties.outputs.changelog != '' }}
env:
CHANGELOG: ${{ steps.properties.outputs.changelog }}
run: |
./gradlew patchChangelog --release-note="$CHANGELOG"
# Publish the plugin to JetBrains Marketplace
- name: Publish Plugin
env:
PUBLISH_TOKEN: ${{ secrets.JETBRAINS_PUBLISH_TOKEN }}
CERTIFICATE_CHAIN: ${{ secrets.JETBRAINS_CERTIFICATE_CHAIN }}
PRIVATE_KEY: ${{ secrets.JETBRAINS_PRIVATE_KEY }}
PRIVATE_KEY_PASSWORD: ${{ secrets.JETBRAINS_PRIVATE_KEY_PASSWORD }}
run: ./gradlew publishPlugin
# Upload artifact as a release asset
- name: Upload Release Asset
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: gh release upload ${{ github.event.release.tag_name }} ./build/distributions/*
# Create a pull request
- name: Create Pull Request
if: ${{ steps.properties.outputs.changelog != '' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
VERSION="${{ github.event.release.tag_name }}"
BRANCH="changelog-update-$VERSION"
LABEL="release changelog"
git config user.email "action@github.com"
git config user.name "GitHub Action"
git checkout -b $BRANCH
git commit -am "Changelog update - $VERSION"
git push --set-upstream origin $BRANCH
gh label create "$LABEL" \
--description "Pull requests with release changelog update" \
--force \
|| true
gh pr create \
--title "Changelog update - \`$VERSION\`" \
--body "Current pull request contains patched \`CHANGELOG.md\` file for the \`$VERSION\` version." \
--label "$LABEL" \
--head $BRANCH


@ -90,6 +90,12 @@ jobs:
cd core
npm ci
# 2.25 Run core tests
- name: Run core tests
run: |
cd core
npm run test
# 2.5. Pre package
- name: Set var for environment info
shell: pwsh
@ -210,6 +216,7 @@ jobs:
# 3. Publish the extension to Open VSX Registry
- name: Publish (Open VSX Registry)
continue-on-error: true
run: |
cd extensions/vscode
npx ovsx publish --pre-release -p ${{ secrets.VSX_REGISTRY_TOKEN }} --packagePath ../../alpine-x64/*.vsix ../../darwin-arm64/*.vsix ../../darwin-x64/*.vsix ../../linux-arm64/*.vsix ../../linux-armhf/*.vsix ../../linux-x64/*.vsix ../../win32-x64/*.vsix ../../win32-arm64/*.vsix

.github/workflows/ts-check.yaml (new file, 90 lines)

@ -0,0 +1,90 @@
name: TypeScript Check
on:
pull_request:
branches:
- main
- preview
- dev
jobs:
tsc-check:
runs-on: ubuntu-latest
steps:
# 1. Check-out repository
- name: Check-out repository
uses: actions/checkout@v4
# 2. Install npm dependencies
- name: Use Node.js from .nvmrc
uses: actions/setup-node@v4
with:
node-version-file: ".nvmrc"
- name: Cache extension node_modules
uses: actions/cache@v3
with:
path: extensions/vscode/node_modules
key: ${{ runner.os }}-node-${{ hashFiles('extensions/vscode/package-lock.json') }}
- name: Cache core node_modules
uses: actions/cache@v3
with:
path: core/node_modules
key: ${{ runner.os }}-node-${{ hashFiles('core/package-lock.json') }}
- name: Cache gui node_modules
uses: actions/cache@v3
with:
path: gui/node_modules
key: ${{ runner.os }}-node-${{ hashFiles('gui/package-lock.json') }}
- name: Cache binary node_modules
uses: actions/cache@v3
with:
path: binary/node_modules
key: ${{ runner.os }}-node-${{ hashFiles('binary/package-lock.json') }}
- name: Install extension Dependencies
run: |
cd extensions/vscode
npm ci
env:
# https://github.com/microsoft/vscode-ripgrep/issues/9#issuecomment-643965333
GITHUB_TOKEN: ${{ secrets.CI_GITHUB_TOKEN }}
- name: Install gui Dependencies
run: |
cd gui
npm ci
- name: Install Core Dependencies
run: |
cd core
npm ci
- name: Install Binary Dependencies
run: |
cd binary
npm ci
- name: tsc core
run: |
cd core
npx tsc
- name: tsc extensions/vscode
run: |
cd extensions/vscode
npx tsc
- name: tsc binary
run: |
cd binary
npx tsc
- name: tsc gui
run: |
cd gui
npx tsc

.prettierignore (new file, 1 line)

@ -0,0 +1 @@
extensions/vscode/continue_rc_schema.json

.prompts/test.prompt (new file, 19 lines)

@ -0,0 +1,19 @@
temperature: 0.5
maxTokens: 4096
name: jest
description: Write Jest unit tests
---
<system>
You are an expert programmer
</system>
{{{ input }}}
Write unit tests for the above selected code, following each of these instructions:
- Use `jest`
- Properly set up and tear down
- Include important edge cases
- The tests should be complete and sophisticated
- Give the tests just as chat output, don't edit any file
- Don't explain how to set up `jest`
- Write a single code block, making sure to label with the language being used (e.g. "```typescript")
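The `{{{ input }}}` placeholder above is Handlebars-style triple-stash syntax (Handlebars is already a core dependency). A rough sketch of how such a prompt body could be rendered, with the user's selection substituted in; the rendering call and variable value below are illustrative, not taken from this commit:

import Handlebars from "handlebars";

// Everything after the "---" separator is the prompt body; the key/value lines
// above it (temperature, maxTokens, name, description) configure the request.
const body =
  "<system>\nYou are an expert programmer\n</system>\n{{{ input }}}\nWrite unit tests for the above selected code...";
const render = Handlebars.compile(body);
const prompt = render({
  // "input" is filled with the currently selected code (illustrative value)
  input: "export function add(a: number, b: number) { return a + b; }",
});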

.vscode/launch.json (3 lines changed)

@ -18,7 +18,8 @@
"internalConsoleOptions": "openOnSessionStart",
"cwd": "${workspaceFolder}/binary",
"env": {
"CONTINUE_DEVELOPMENT": "true"
"CONTINUE_DEVELOPMENT": "true",
"CONTINUE_GLOBAL_DIR": "${workspaceFolder}/binary/.continue"
}
},
{

.vscode/tasks.json (10 lines changed)

@ -154,6 +154,16 @@
"options": {
"cwd": "binary"
}
},
{
"label": "docs:start",
"type": "shell",
"command": "npm",
"args": ["run", "start"],
"problemMatcher": [],
"options": {
"cwd": "docs"
}
}
]
}

binary/jest.config.js (new file, 23 lines)

@ -0,0 +1,23 @@
module.exports = {
roots: ["<rootDir>/test"],
transform: {
"^.+\\.ts?$": [
"ts-jest",
{
useESM: true,
},
],
"^.+\\.js$": [
"babel-jest",
{
presets: [["@babel/preset-env", { targets: { node: "current" } }]],
},
],
},
moduleFileExtensions: ["ts", "tsx", "js", "jsx", "json", "node", ".d.ts"],
extensionsToTreatAsEsm: [".ts", ".d.ts"],
// Map ".js"-suffixed imports back to extensionless paths so ts-jest can resolve the TypeScript sources
moduleNameMapper: {
"^(.*)\\.js$": "$1",
},
};
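As a sanity check, a minimal test that this configuration would pick up from the binary/test root; the file name and assertion are hypothetical, not part of this commit:

// binary/test/smoke.test.ts (hypothetical)
import { v4 as uuidv4 } from "uuid";

describe("jest ESM setup", () => {
  it("compiles TypeScript tests with ts-jest and runs them", () => {
    // uuid v4 strings are always 36 characters long
    expect(uuidv4()).toHaveLength(36);
  });
});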

binary/package-lock.json (generated, 3494 lines changed; diff suppressed because it is too large)


@ -20,7 +20,7 @@
"outputPath": "bin"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"test": "jest",
"build": "node build.js",
"build:old": "ncc build src/index.ts -o out && pkg .",
"build:dev": "tsc",
@ -31,11 +31,14 @@
"devDependencies": {
"@biomejs/biome": "1.6.4",
"@types/follow-redirects": "^1.14.4",
"@types/jest": "^29.5.12",
"@types/uuid": "^9.0.8",
"@vercel/ncc": "^0.38.1",
"esbuild": "0.19.11",
"jest": "^29.7.0",
"pkg": "^5.8.1",
"rimraf": "^5.0.7",
"ts-jest": "^29.1.4",
"typescript": "^5.3.3"
},
"dependencies": {


@ -1,43 +1,22 @@
import { IProtocol } from "core/protocol";
import { IProtocol } from "core/protocol/index.js";
import { IMessenger, type Message } from "core/util/messenger";
import { ChildProcessWithoutNullStreams } from "node:child_process";
import * as fs from "node:fs";
import net from "node:net";
import { v4 as uuidv4 } from "uuid";
export class IpcMessenger<
class IPCMessengerBase<
ToProtocol extends IProtocol,
FromProtocol extends IProtocol,
> implements IMessenger<ToProtocol, FromProtocol>
{
_sendMsg(message: Message) {
throw new Error("Not implemented");
}
typeListeners = new Map<keyof ToProtocol, ((message: Message) => any)[]>();
idListeners = new Map<string, (message: Message) => any>();
constructor() {
process.stdin.on("data", (data) => {
this._handleData(data);
});
process.stdout.on("close", () => {
fs.writeFileSync("./error.log", `${new Date().toISOString()}\n`);
console.log("[info] Exiting Continue core...");
process.exit(1);
});
process.stdin.on("close", () => {
fs.writeFileSync("./error.log", `${new Date().toISOString()}\n`);
console.log("[info] Exiting Continue core...");
process.exit(1);
});
}
private _onErrorHandlers: ((error: Error) => void)[] = [];
onError(handler: (error: Error) => void) {
this._onErrorHandlers.push(handler);
}
mock(data: any) {
const d = JSON.stringify(data);
this._handleData(Buffer.from(d));
}
private _handleLine(line: string) {
try {
const msg: Message = JSON.parse(line);
@ -83,7 +62,7 @@ export class IpcMessenger<
}
private _unfinishedLine: string | undefined = undefined;
private _handleData(data: Buffer) {
protected _handleData(data: Buffer) {
const d = data.toString();
const lines = d.split(/\r\n/).filter((line) => line.trim() !== "");
if (lines.length === 0) {
@ -100,41 +79,10 @@ export class IpcMessenger<
lines.forEach((line) => this._handleLine(line));
}
send<T extends keyof FromProtocol>(
messageType: T,
data: FromProtocol[T][0],
messageId?: string,
): string {
messageId = messageId ?? uuidv4();
const msg: Message = {
messageType: messageType as string,
data,
messageId,
};
// process.send?.(data);
process.stdout?.write(JSON.stringify(msg) + "\r\n");
return messageId;
}
private _onErrorHandlers: ((error: Error) => void)[] = [];
on<T extends keyof ToProtocol>(
messageType: T,
handler: (message: Message<ToProtocol[T][0]>) => ToProtocol[T][1],
): void {
if (!this.typeListeners.has(messageType)) {
this.typeListeners.set(messageType, []);
}
this.typeListeners.get(messageType)?.push(handler);
}
invoke<T extends keyof ToProtocol>(
messageType: T,
data: ToProtocol[T][0],
): ToProtocol[T][1] {
return this.typeListeners.get(messageType)?.[0]?.({
messageId: uuidv4(),
messageType: messageType as string,
data,
});
onError(handler: (error: Error) => void) {
this._onErrorHandlers.push(handler);
}
request<T extends keyof FromProtocol>(
@ -151,4 +99,161 @@ export class IpcMessenger<
this.send(messageType, data, messageId);
});
}
mock(data: any) {
const d = JSON.stringify(data);
this._handleData(Buffer.from(d));
}
send<T extends keyof FromProtocol>(
messageType: T,
data: FromProtocol[T][0],
messageId?: string,
): string {
messageId = messageId ?? uuidv4();
const msg: Message = {
messageType: messageType as string,
data,
messageId,
};
this._sendMsg(msg);
return messageId;
}
invoke<T extends keyof ToProtocol>(
messageType: T,
data: ToProtocol[T][0],
): ToProtocol[T][1] {
return this.typeListeners.get(messageType)?.[0]?.({
messageId: uuidv4(),
messageType: messageType as string,
data,
});
}
on<T extends keyof ToProtocol>(
messageType: T,
handler: (
message: Message<ToProtocol[T][0]>,
) => Promise<ToProtocol[T][1]> | ToProtocol[T][1],
): void {
if (!this.typeListeners.has(messageType)) {
this.typeListeners.set(messageType, []);
}
this.typeListeners.get(messageType)?.push(handler);
}
}
export class IpcMessenger<
ToProtocol extends IProtocol,
FromProtocol extends IProtocol,
>
extends IPCMessengerBase<ToProtocol, FromProtocol>
implements IMessenger<ToProtocol, FromProtocol>
{
constructor() {
super();
console.log("Setup");
process.stdin.on("data", (data) => {
// console.log("[info] Received data: ", data.toString());
this._handleData(data);
});
process.stdout.on("close", () => {
fs.writeFileSync("./error.log", `${new Date().toISOString()}\n`);
console.log("[info] Exiting Continue core...");
process.exit(1);
});
process.stdin.on("close", () => {
fs.writeFileSync("./error.log", `${new Date().toISOString()}\n`);
console.log("[info] Exiting Continue core...");
process.exit(1);
});
}
_sendMsg(msg: Message) {
// console.log("[info] Sending message: ", msg);
const d = JSON.stringify(msg);
process.stdout?.write(d + "\r\n");
}
}
export class CoreBinaryMessenger<
ToProtocol extends IProtocol,
FromProtocol extends IProtocol,
>
extends IPCMessengerBase<ToProtocol, FromProtocol>
implements IMessenger<ToProtocol, FromProtocol>
{
private errorHandler: (error: Error) => void = () => {};
private messageHandlers: Map<
keyof ToProtocol,
(message: Message<any>) => Promise<any> | any
> = new Map();
constructor(private readonly subprocess: ChildProcessWithoutNullStreams) {
super();
console.log("Setup");
this.subprocess.stdout.on("data", (data) => {
console.log("[info] Received data from core:", data.toString() + "\n");
this._handleData(data);
});
this.subprocess.stdout.on("close", () => {
console.log("[info] Continue core exited");
});
this.subprocess.stdin.on("close", () => {
console.log("[info] Continue core exited");
});
}
_sendMsg(msg: Message) {
console.log("[info] Sending message to core:", msg);
const d = JSON.stringify(msg);
this.subprocess.stdin.write(d + "\r\n");
}
}
export class CoreBinaryTcpMessenger<
ToProtocol extends IProtocol,
FromProtocol extends IProtocol,
>
extends IPCMessengerBase<ToProtocol, FromProtocol>
implements IMessenger<ToProtocol, FromProtocol>
{
private port: number = 3000;
private socket: net.Socket | null = null;
typeListeners = new Map<keyof ToProtocol, ((message: Message) => any)[]>();
idListeners = new Map<string, (message: Message) => any>();
constructor() {
super();
const socket = net.createConnection(this.port, "localhost");
this.socket = socket;
socket.on("data", (data: Buffer) => {
// console.log("[info] Received data from core:", data.toString() + "\n");
this._handleData(data);
});
socket.on("end", () => {
console.log("Disconnected from server");
});
socket.on("error", (err: any) => {
console.error("Client error:", err);
});
}
close() {
this.socket?.end();
}
_sendMsg(msg: Message) {
if (this.socket) {
// console.log("[info] Sending message to core:", msg);
const d = JSON.stringify(msg);
this.socket.write(d + "\r\n");
} else {
console.error("Socket is not connected");
}
}
}
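All three messengers share the same line-delimited wire format: each message is a single JSON object terminated by "\r\n", and _handleData splits incoming buffers on that delimiter before dispatching to idListeners (responses) or typeListeners (requests). A small sketch of one frame as it would appear on the wire (the messageId is an arbitrary example):

const frame =
  JSON.stringify({
    messageType: "ping",
    messageId: "123e4567-e89b-12d3-a456-426614174000", // normally a fresh uuidv4()
    data: "ping",
  }) + "\r\n";
// IpcMessenger writes frames like this to stdout, CoreBinaryMessenger writes them
// to the subprocess's stdin, and CoreBinaryTcpMessenger writes them to the TCP socket.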


@ -18,6 +18,11 @@ export class TcpMessenger<
constructor() {
const server = net.createServer((socket) => {
this.socket = socket;
socket.on("connect", () => {
console.log("Connected to server");
});
socket.on("data", (data: Buffer) => {
this._handleData(data);
});


@ -35,6 +35,7 @@ program.action(async () => {
const core = new Core(messenger, ide, async (text) => {
fs.appendFileSync(promptLogsPath, text + "\n\n");
});
console.log("Core started");
} catch (e) {
fs.writeFileSync("./error.log", `${new Date().toISOString()} ${e}\n`);
console.log("Error: ", e);

binary/test/binary.test.ts (new file, 136 lines)

@ -0,0 +1,136 @@
import { FromIdeProtocol, ToIdeProtocol } from "core/protocol/index.js";
import FileSystemIde from "core/util/filesystem";
import { IMessenger } from "core/util/messenger";
import { ReverseMessageIde } from "core/util/reverseMessageIde";
import fs from "fs";
import { ChildProcessWithoutNullStreams, spawn } from "node:child_process";
import path from "path";
import {
CoreBinaryMessenger,
CoreBinaryTcpMessenger,
} from "../src/IpcMessenger";
// jest.setTimeout(100_000);
const USE_TCP = false;
function autodetectPlatformAndArch() {
const platform = {
aix: "linux",
darwin: "darwin",
freebsd: "linux",
linux: "linux",
openbsd: "linux",
sunos: "linux",
win32: "win32",
android: "linux",
cygwin: "win32",
netbsd: "linux",
haiku: "linux",
}[process.platform];
const arch = {
arm: "arm64",
arm64: "arm64",
ia32: "x64",
loong64: "arm64",
mips: "arm64",
mipsel: "arm64",
ppc: "x64",
ppc64: "x64",
riscv64: "arm64",
s390: "x64",
s390x: "x64",
x64: "x64",
}[process.arch];
return [platform, arch];
}
const CONTINUE_GLOBAL_DIR = path.join(__dirname, "..", ".continue");
describe("Test Suite", () => {
let messenger: IMessenger<ToIdeProtocol, FromIdeProtocol>;
let subprocess: ChildProcessWithoutNullStreams;
beforeAll(async () => {
const [platform, arch] = autodetectPlatformAndArch();
const binaryPath = path.join(
__dirname,
"..",
"bin",
`${platform}-${arch}`,
`continue-binary${platform === "win32" ? ".exe" : ""}`,
);
expect(fs.existsSync(binaryPath)).toBe(true);
if (USE_TCP) {
messenger = new CoreBinaryTcpMessenger<ToIdeProtocol, FromIdeProtocol>();
} else {
subprocess = spawn(binaryPath, {
env: { ...process.env, CONTINUE_GLOBAL_DIR },
});
messenger = new CoreBinaryMessenger<ToIdeProtocol, FromIdeProtocol>(
subprocess,
);
}
const ide = new FileSystemIde();
const reverseIde = new ReverseMessageIde(messenger.on.bind(messenger), ide);
// Wait for core to set itself up
await new Promise((resolve) => setTimeout(resolve, 1000));
});
afterAll(async () => {
// Wait for the subprocess to exit
if (USE_TCP) {
(
messenger as CoreBinaryTcpMessenger<ToIdeProtocol, FromIdeProtocol>
).close();
} else {
subprocess.kill();
await new Promise((resolve) => subprocess.on("close", resolve));
await new Promise((resolve) => setTimeout(resolve, 1000));
}
});
it("should respond to ping with pong", async () => {
const resp = await messenger.request("ping", "ping");
expect(resp).toBe("pong");
});
it("should create .continue directory at the specified location with expected files", async () => {
expect(fs.existsSync(CONTINUE_GLOBAL_DIR)).toBe(true);
// Many of the files are only created when trying to load the config
const config = await messenger.request(
"config/getBrowserSerialized",
undefined,
);
const expectedFiles = [
"config.json",
"config.ts",
"package.json",
"logs/core.log",
"index/autocompleteCache.sqlite",
"out/config.js",
"types/core/index.d.ts",
];
for (const file of expectedFiles) {
const filePath = path.join(CONTINUE_GLOBAL_DIR, file);
expect(fs.existsSync(filePath)).toBe(true);
}
});
it("should properly edit config", async () => {
const config = await messenger.request(
"config/getBrowserSerialized",
undefined,
);
expect(config).toHaveProperty("models");
expect(config).toHaveProperty("embeddingsProvider");
expect(config).toHaveProperty("contextProviders");
expect(config).toHaveProperty("slashCommands");
});
});


@ -13,7 +13,8 @@
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
"resolveJsonModule": true /* Enable importing .json files */
"resolveJsonModule": true /* Enable importing .json files */,
"types": ["jest"]
},
"include": ["src/**/*"]
"include": ["src/**/*", "test/**/*"]
}


@ -218,6 +218,8 @@ function isEnglishFirstLine(line: string) {
line.startsWith("sure thing") ||
line.startsWith("sure!") ||
line.startsWith("to fill") ||
line.startsWith("certainly") ||
line.startsWith("of course") ||
line.startsWith("the code should")
) {
return true;


@ -28,7 +28,13 @@ interface AutocompleteTemplate {
const stableCodeFimTemplate: AutocompleteTemplate = {
template: "<fim_prefix>{{{prefix}}}<fim_suffix>{{{suffix}}}<fim_middle>",
completionOptions: {
stop: ["<fim_prefix>", "<fim_suffix>", "<fim_middle>", "<|endoftext|>"],
stop: [
"<fim_prefix>",
"<fim_suffix>",
"<fim_middle>",
"<|endoftext|>",
"<file_sep>",
],
},
};
@ -277,7 +283,8 @@ export function getTemplateForModel(model: string): AutocompleteTemplate {
lowerCaseModel.includes("starchat") ||
lowerCaseModel.includes("octocoder") ||
lowerCaseModel.includes("stable") ||
lowerCaseModel.includes("codeqwen")
lowerCaseModel.includes("codeqwen") ||
lowerCaseModel.includes("qwen")
) {
return stableCodeFimTemplate;
}
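The triple-brace placeholders in these templates are Handlebars syntax. As a sketch (the rendering call below is illustrative, not the exact code path in this commit), the StableCode-style FIM prompt for a small prefix/suffix pair would look like this:

import Handlebars from "handlebars";

const render = Handlebars.compile(
  "<fim_prefix>{{{prefix}}}<fim_suffix>{{{suffix}}}<fim_middle>",
);
const prompt = render({
  prefix: "function add(a: number, b: number) {\n  return ",
  suffix: "\n}",
});
// => "<fim_prefix>function add(a: number, b: number) {\n  return <fim_suffix>\n}<fim_middle>"
// Generation is cut off at any of the stop tokens listed above, including the
// newly added "<file_sep>".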


@ -4,9 +4,9 @@ import {
ContinueRcJson,
IContextProvider,
IDE,
IdeSettings,
ILLM,
} from "../index.js";
import { IdeSettings } from "../protocol/ideWebview.js";
import { Telemetry } from "../util/posthog.js";
import { finalToBrowserConfig, loadFullConfigNode } from "./load.js";
@ -72,11 +72,12 @@ export class ConfigHandler {
const ideInfo = await this.ide.getIdeInfo();
const uniqueId = await this.ide.getUniqueId();
const ideSettings = await this.ideSettingsPromise;
const newConfig = await loadFullConfigNode(
this.ide,
workspaceConfigs,
await this.ideSettingsPromise,
ideSettings,
ideInfo.ideType,
uniqueId,
this.writeLog,


@ -21,6 +21,7 @@ import {
EmbeddingsProviderDescription,
IContextProvider,
IDE,
IdeSettings,
IdeType,
ModelDescription,
Reranker,
@ -34,7 +35,7 @@ import { BaseLLM } from "../llm/index.js";
import CustomLLMClass from "../llm/llms/CustomLLM.js";
import FreeTrial from "../llm/llms/FreeTrial.js";
import { llmFromDescription } from "../llm/llms/index.js";
import { IdeSettings } from "../protocol/ideWebview.js";
import { fetchwithRequestOptions } from "../util/fetchWithOptions.js";
import { copyOf } from "../util/index.js";
import mergeJson from "../util/merge.js";


@ -1,6 +1,3 @@
//os.platform()
//os.arch()
import os from "os";
import {
ContextItem,
@ -11,9 +8,9 @@ import { BaseContextProvider } from "../index.js";
class OSContextProvider extends BaseContextProvider {
static description: ContextProviderDescription = {
title: "OS",
displayTitle: "OS",
description: "OS and CPU Information.",
title: "os",
displayTitle: "Operating System",
description: "Operating system and CPU Information.",
type: "normal",
};
@ -25,9 +22,9 @@ class OSContextProvider extends BaseContextProvider {
const platform = os.platform();
return [
{
description: "Your OS and CPU",
description: "Your operating system and CPU",
content: `I am running ${platform} on ${cpu}.`,
name: "OS",
name: "os",
},
];
}


@ -196,6 +196,7 @@ export class Core {
this.ide,
(await this.config()).experimental?.promptPath,
);
this.configHandler.reloadConfig();
});
on("config/reload", (msg) => {
this.configHandler.reloadConfig();
@ -367,7 +368,13 @@ export class Core {
return model.listModels();
} else {
if (msg.data.title === "Ollama") {
return new Ollama({ model: "" }).listModels();
try {
const models = await new Ollama({ model: "" }).listModels();
return models;
} catch (e) {
console.warn(`Error listing Ollama models: ${e}`);
return undefined;
}
} else {
return undefined;
}

core/index.d.ts (7 lines changed)

@ -402,8 +402,15 @@ export enum FileType {
SymbolicLink = 64,
}
export interface IdeSettings {
remoteConfigServerUrl: string | undefined;
remoteConfigSyncPeriod: number;
userToken: string;
}
export interface IDE {
getIdeInfo(): Promise<IdeInfo>;
getIdeSettings(): Promise<IdeSettings>;
getDiff(): Promise<string>;
isTelemetryEnabled(): Promise<boolean>;
getUniqueId(): Promise<string>;
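For illustration, a value satisfying the relocated IdeSettings interface (field values are placeholders, not defaults from this commit):

const ideSettings: IdeSettings = {
  remoteConfigServerUrl: undefined, // no remote config server configured
  remoteConfigSyncPeriod: 60, // sync period (unit not specified here)
  userToken: "",
};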


@ -59,6 +59,7 @@ const DEFAULT_IGNORE_FILETYPES = [
"*.gcda",
"*.gcno",
"go.sum",
".env",
];
export const defaultIgnoreFile = ignore().add(DEFAULT_IGNORE_FILETYPES);
export const DEFAULT_IGNORE_DIRS = [


@ -1,23 +1,12 @@
export default {
roots: ["<rootDir>"],
transform: {
"^.+\\.ts?$": [
"ts-jest",
{
useESM: true,
},
],
"^.+\\.js$": [
"babel-jest",
{
presets: [["@babel/preset-env", { targets: { node: "current" } }]],
},
],
"\\.[jt]sx?$": ["ts-jest", { useESM: true }],
},
moduleFileExtensions: ["ts", "tsx", "js", "jsx", "json", "node"],
extensionsToTreatAsEsm: [".ts"],
// Remove or comment out the moduleNameMapper configuration
moduleNameMapper: {
"^(.*)\\.js$": "$1",
"(.+)\\.js": "$1",
},
extensionsToTreatAsEsm: [".ts"],
preset: "ts-jest/presets/default-esm",
testTimeout: 10000,
};


@ -293,17 +293,20 @@ ${prompt}`;
return resp;
} catch (e: any) {
console.warn(
`${e.message}\n\nCode: ${e.code}\nError number: ${e.errno}\nSyscall: ${e.erroredSysCall}\nType: ${e.type}\n\n${e.stack}`,
);
if (
e.code === "ECONNREFUSED" &&
e.message.includes("http://127.0.0.1:11434")
) {
throw new Error(
"Failed to connect to local Ollama instance. To start Ollama, first download it at https://ollama.ai.",
// Errors to ignore
if (!e.message.includes("/api/show")) {
console.warn(
`${e.message}\n\nCode: ${e.code}\nError number: ${e.errno}\nSyscall: ${e.erroredSysCall}\nType: ${e.type}\n\n${e.stack}`,
);
if (
e.code === "ECONNREFUSED" &&
e.message.includes("http://127.0.0.1:11434")
) {
throw new Error(
"Failed to connect to local Ollama instance. To start Ollama, first download it at https://ollama.ai.",
);
}
}
throw new Error(e.message);
}


@ -21,14 +21,15 @@ class Ollama extends BaseLLM {
if (options.model === "AUTODETECT") {
return;
}
try {
this.fetch(this.getEndpoint("api/show"), {
method: "POST",
headers: {
Authorization: `Bearer ${this.apiKey}`,
},
body: JSON.stringify({ name: this._getModel() }),
}).then(async (response) => {
this.fetch(this.getEndpoint("api/show"), {
method: "POST",
headers: {
Authorization: `Bearer ${this.apiKey}`,
},
body: JSON.stringify({ name: this._getModel() }),
})
.then(async (response) => {
if (response.status !== 200) {
// console.warn(
// "Error calling Ollama /api/show endpoint: ",
@ -67,8 +68,10 @@ class Ollama extends BaseLLM {
}
}
}
})
.catch((e) => {
// console.warn("Error calling the Ollama /api/show endpoint: ", e);
});
} catch (e) {}
}
private _getModel() {


@ -2,11 +2,11 @@ import Handlebars from "handlebars";
import { v4 as uuidv4 } from "uuid";
import {
BaseCompletionOptions,
IdeSettings,
ILLM,
LLMOptions,
ModelDescription,
} from "../../index.js";
import { IdeSettings } from "../../protocol/ideWebview.js";
import { DEFAULT_MAX_TOKENS } from "../constants.js";
import { BaseLLM } from "../index.js";
import Anthropic from "./Anthropic.js";


@ -26,7 +26,7 @@
"commander": "^12.0.0",
"comment-json": "^4.2.3",
"dbinfoz": "^0.1.4",
"dotenv": "^16.3.1",
"dotenv": "^16.4.5",
"fastest-levenshtein": "^1.0.16",
"follow-redirects": "^1.15.5",
"handlebars": "^4.7.8",


@ -3,7 +3,7 @@
"version": "1.0.13",
"description": "The Continue Core contains functionality that can be shared across web, VS Code, or Node.js server. It is written in TypeScript and contains much of the functionality that was previously inside of the legacy Continue Python Server.",
"scripts": {
"test": "NODE_OPTIONS=\"$NODE_OPTIONS --experimental-vm-modules\" jest ./test",
"test": "NODE_OPTIONS=\"$NODE_OPTIONS --experimental-vm-modules\" jest",
"build:npm": "tsc -p ./tsconfig.npm.json",
"lint": "eslint . --ext ts",
"lint:fix": "eslint . --ext ts --fix"
@ -46,7 +46,7 @@
"commander": "^12.0.0",
"comment-json": "^4.2.3",
"dbinfoz": "^0.1.4",
"dotenv": "^16.3.1",
"dotenv": "^16.4.5",
"fastest-levenshtein": "^1.0.16",
"follow-redirects": "^1.15.5",
"handlebars": "^4.7.8",


@ -4,6 +4,7 @@ import type {
ContextItemWithId,
ContextSubmenuItem,
DiffLine,
IdeSettings,
LLMFullCompletionOptions,
MessageContent,
ModelDescription,
@ -14,7 +15,6 @@ import type {
SiteIndexingConfig,
} from "..";
import type { AutocompleteInput } from "../autocomplete/completionProvider";
import type { IdeSettings } from "./ideWebview";
export type ProtocolGeneratorType<T> = AsyncGenerator<{
done?: boolean;


@ -3,12 +3,12 @@ import type {
DiffLine,
FileType,
IdeInfo,
IdeSettings,
IndexTag,
Problem,
Range,
Thread,
} from "..";
import type { IdeSettings } from "./ideWebview";
export type ToIdeFromWebviewOrCoreProtocol = {
// Methods from IDE type


@ -3,12 +3,6 @@ import type { RangeInFileWithContents } from "../commands/util";
import { ToIdeFromWebviewOrCoreProtocol } from "./ide.js";
import { ToWebviewFromIdeOrCoreProtocol } from "./webview.js";
export interface IdeSettings {
remoteConfigServerUrl: string | undefined;
remoteConfigSyncPeriod: number;
userToken: string;
}
export type ToIdeFromWebviewProtocol = ToIdeFromWebviewOrCoreProtocol & {
onLoad: [
undefined,


@ -102,7 +102,7 @@ async function* generateLines(lines: string[]): AsyncGenerator<string> {
}
}
describe("streamDiff", () => {
describe.skip("streamDiff", () => {
for (let i = 0; i < oldCode.length; i++) {
test(`outputs valid diff #${i}`, async () => {
const oldLines = oldCode[i].split("\n");


@ -2,8 +2,9 @@ import * as dotenv from "dotenv";
import { CompletionOptions } from "../index.js";
import { BaseLLM } from "../llm/index.js";
import OpenAI from "../llm/llms/OpenAI.js";
jest.setTimeout(100_000);
// jest.setTimeout(100_000);
dotenv.config();
@ -61,17 +62,17 @@ describe("LLM", () => {
// testLLM(
// new FreeTrial({
// model: "gpt-3.5-turbo",
// })
// }),
// );
// testLLM(
// new Anthropic({
// model: "claude-2",
// apiKey: process.env.ANTHROPIC_API_KEY,
// })
// );
// testLLM(
// new OpenAI({ apiKey: process.env.OPENAI_API_KEY, model: "gpt-3.5-turbo" })
// }),
// );
testLLM(
new OpenAI({ apiKey: process.env.OPENAI_API_KEY, model: "gpt-3.5-turbo" }),
);
// TODO: Fix Replicate
// testLLM(
// new Replicate({


@ -0,0 +1,76 @@
// File generated by Continue
import { dedentAndGetCommonWhitespace } from "../../util";
describe("dedentAndGetCommonWhitespace", () => {
let originalString: string;
beforeEach(() => {
// Setup any global variables or states if needed
originalString = " line1\n line2\n line3";
});
afterEach(() => {
// Tear down any changes to global variables or states if needed
originalString = "";
});
test("should dedent and return common whitespace for a simple case", () => {
const input = " line1\n line2\n line3";
const output = dedentAndGetCommonWhitespace(input);
expect(output).toEqual(["line1\nline2\nline3", " "]);
});
test("should handle empty string", () => {
const input = "";
const output = dedentAndGetCommonWhitespace(input);
expect(output).toEqual(["", ""]);
});
test("should handle string with only whitespace", () => {
const input = " ";
const output = dedentAndGetCommonWhitespace(input);
expect(output).toEqual(["", ""]);
});
test("should handle string with mixed whitespace and content", () => {
const input = " line1\n line2\n line3";
const output = dedentAndGetCommonWhitespace(input);
expect(output).toEqual([" line1\nline2\n line3", " "]);
});
test("should handle string with no common leading whitespace", () => {
const input = "line1\n line2\n line3";
const output = dedentAndGetCommonWhitespace(input);
expect(output).toEqual([input, ""]);
});
test("should handle string with empty lines", () => {
const input = " line1\n\n line3";
const output = dedentAndGetCommonWhitespace(input);
expect(output).toEqual(["line1\n\nline3", " "]);
});
test("should handle string with only empty lines", () => {
const input = "\n\n";
const output = dedentAndGetCommonWhitespace(input);
expect(output).toEqual(["\n\n", ""]);
});
test("should handle string with tabs as whitespace", () => {
const input = "\tline1\n\tline2\n\tline3";
const output = dedentAndGetCommonWhitespace(input);
expect(output).toEqual(["line1\nline2\nline3", "\t"]);
});
test("should handle string with mixed tabs and spaces", () => {
const input = "\t line1\n\t line2\n\t line3";
const output = dedentAndGetCommonWhitespace(input);
expect(output).toEqual(["line1\nline2\nline3", "\t "]);
});
test("should handle string with different leading whitespace lengths", () => {
const input = " line1\n line2\n line3";
const output = dedentAndGetCommonWhitespace(input);
expect(output).toEqual([" line1\nline2\n line3", " "]);
});
});


@ -0,0 +1,51 @@
// File generated by Continue
import { getLastNPathParts } from "../../util";
describe("getLastNPathParts", () => {
const SEP_REGEX = /[\\/]+/;
beforeAll(() => {
// Setup code if needed
});
afterAll(() => {
// Teardown code if needed
});
test("returns the last N parts of a filepath with forward slashes", () => {
const filepath = "home/user/documents/project/file.txt";
expect(getLastNPathParts(filepath, 2)).toBe("project/file.txt");
});
test("returns the last N parts of a filepath with backward slashes", () => {
const filepath = "C:\\home\\user\\documents\\project\\file.txt";
expect(getLastNPathParts(filepath, 3)).toBe("documents/project/file.txt");
});
test("returns the last part if N is 1", () => {
const filepath = "/home/user/documents/project/file.txt";
expect(getLastNPathParts(filepath, 1)).toBe("file.txt");
});
test("returns the entire path if N is greater than the number of parts", () => {
const filepath = "home/user/documents/project/file.txt";
expect(getLastNPathParts(filepath, 10)).toBe(
"home/user/documents/project/file.txt",
);
});
test("returns an empty string if N is 0", () => {
const filepath = "home/user/documents/project/file.txt";
expect(getLastNPathParts(filepath, 0)).toBe("");
});
test("handles paths with mixed forward and backward slashes", () => {
const filepath = "home\\user/documents\\project/file.txt";
expect(getLastNPathParts(filepath, 3)).toBe("documents/project/file.txt");
});
test("handles edge case with empty filepath", () => {
const filepath = "";
expect(getLastNPathParts(filepath, 2)).toBe("");
});
});


@ -0,0 +1,40 @@
// This file is generated by Continue
import { longestCommonSubsequence } from "../../util/lcs";
describe("longestCommonSubsequence", () => {
beforeAll(() => {
// Setup if needed
});
afterAll(() => {
// Teardown if needed
});
test("should return the longest common subsequence for non-empty strings", () => {
expect(longestCommonSubsequence("abcde", "ace")).toBe("ace");
expect(longestCommonSubsequence("abc", "abc")).toBe("abc");
expect(longestCommonSubsequence("abc", "def")).toBe("");
});
test("should handle cases where one or both strings are empty", () => {
expect(longestCommonSubsequence("", "abc")).toBe("");
expect(longestCommonSubsequence("abc", "")).toBe("");
expect(longestCommonSubsequence("", "")).toBe("");
});
test("should handle cases with no common subsequence", () => {
expect(longestCommonSubsequence("abc", "def")).toBe("");
expect(longestCommonSubsequence("xyz", "abc")).toBe("");
});
test("should handle cases with special characters", () => {
expect(longestCommonSubsequence("a!@#b$c", "!@#$")).toBe("!@#$");
expect(longestCommonSubsequence("a!@#b$c", "xyz")).toBe("");
});
test("should handle long strings efficiently", () => {
const str1 = "a".repeat(1000) + "b".repeat(1000);
const str2 = "a".repeat(1000) + "c".repeat(1000);
expect(longestCommonSubsequence(str1, str2)).toBe("a".repeat(1000));
});
});


@ -0,0 +1,100 @@
// File generated by Continue
import { mergeJson } from "../../util/merge";
describe("mergeJson", () => {
it("should merge two simple JSON objects", () => {
const first = { a: 1, b: 2 };
const second = { b: 3, c: 4 };
const result = mergeJson(first, second);
expect(result).toEqual({ a: 1, b: 3, c: 4 });
});
it('should overwrite values when mergeBehavior is "overwrite"', () => {
const first = { a: 1, b: 2 };
const second = { b: 3, c: 4 };
const result = mergeJson(first, second, "overwrite");
expect(result).toEqual({ a: 1, b: 3, c: 4 });
});
it("should merge nested objects", () => {
const first = { a: { b: 1 } };
const second = { a: { c: 2 } };
const result = mergeJson(first, second);
expect(result).toEqual({ a: { b: 1, c: 2 } });
});
it("should merge arrays without mergeKeys", () => {
const first = { a: [1, 2] };
const second = { a: [3, 4] };
const result = mergeJson(first, second);
expect(result).toEqual({ a: [1, 2, 3, 4] });
});
it("should merge arrays with mergeKeys", () => {
const first = {
a: [
{ id: 1, value: "first" },
{ id: 2, value: "second" },
],
};
const second = {
a: [
{ id: 2, value: "updated" },
{ id: 3, value: "third" },
],
};
const mergeKeys = {
a: (item1: any, item2: any) => item1.id === item2.id,
};
const result = mergeJson(first, second, undefined, mergeKeys);
expect(result).toEqual({
a: [
{ id: 1, value: "first" },
{ id: 2, value: "updated" },
{ id: 3, value: "third" },
],
});
});
it("should handle non-object values correctly", () => {
const first = { a: 1, b: "string", c: true };
const second = { a: 2, b: "new string", c: false };
const result = mergeJson(first, second);
expect(result).toEqual({ a: 2, b: "new string", c: false });
});
it("should handle null and undefined values correctly", () => {
const first = { a: 1, b: null };
const second = { b: 2, c: undefined };
const result = mergeJson(first, second);
expect(result).toEqual({ a: 1, b: 2, c: undefined });
});
it("should handle empty objects", () => {
const first = {};
const second = { a: 1 };
const result = mergeJson(first, second);
expect(result).toEqual({ a: 1 });
});
it("should handle empty second object", () => {
const first = { a: 1 };
const second = {};
const result = mergeJson(first, second);
expect(result).toEqual({ a: 1 });
});
it("should handle errors gracefully", () => {
const first = { a: 1 };
const second = { b: () => {} }; // Functions are not serializable in JSON
const result = mergeJson(first, second);
expect(result).toEqual({ a: 1, b: expect.any(Function) });
});
it("should merge deeply nested objects", () => {
const first = { a: { b: { c: 1 } } };
const second = { a: { b: { d: 2 } } };
const result = mergeJson(first, second);
expect(result).toEqual({ a: { b: { c: 1, d: 2 } } });
});
});


@ -0,0 +1,105 @@
// This file is generated by Continue
import { getRangeInString } from "../../../util/ranges";
describe.skip("getRangeInString", () => {
let content: string;
beforeEach(() => {
content = `Line 1
Line 2
Line 3
Line 4
Line 5`;
});
afterEach(() => {
content = "";
});
test("should return substring within the same line", () => {
const range = {
start: { line: 1, character: 0 },
end: { line: 1, character: 4 },
};
expect(getRangeInString(content, range)).toBe("Line");
});
test("should return substring spanning multiple lines", () => {
const range = {
start: { line: 1, character: 2 },
end: { line: 3, character: 3 },
};
const expected = `ne 2
Line 3
Line 4`;
expect(getRangeInString(content, range)).toBe(expected);
});
test("should handle range that starts and ends at the same character", () => {
const range = {
start: { line: 2, character: 0 },
end: { line: 2, character: 0 },
};
expect(getRangeInString(content, range)).toBe("L");
});
test("should handle range that spans entire content", () => {
const range = {
start: { line: 0, character: 0 },
end: { line: 4, character: 5 },
};
expect(getRangeInString(content, range)).toBe(content);
});
test("should handle range that spans to the end of the last line", () => {
const range = {
start: { line: 3, character: 2 },
end: { line: 4, character: 5 },
};
const expected = `ne 4
Line 5`;
expect(getRangeInString(content, range)).toBe(expected);
});
test("should handle empty content", () => {
content = "";
const range = {
start: { line: 0, character: 0 },
end: { line: 0, character: 0 },
};
expect(getRangeInString(content, range)).toBe("");
});
test("should handle invalid range (start line > end line)", () => {
const range = {
start: { line: 3, character: 0 },
end: { line: 2, character: 0 },
};
expect(getRangeInString(content, range)).toBe("");
});
test("should handle invalid range (start character > end character on same line)", () => {
const range = {
start: { line: 1, character: 5 },
end: { line: 1, character: 2 },
};
expect(getRangeInString(content, range)).toBe("");
});
test("should handle range that starts and ends at the same line but different characters", () => {
const range = {
start: { line: 1, character: 2 },
end: { line: 1, character: 6 },
};
expect(getRangeInString(content, range)).toBe("ne 2");
});
test("should handle range that starts and ends at the same line and same characters", () => {
const range = {
start: { line: 1, character: 2 },
end: { line: 1, character: 2 },
};
expect(getRangeInString(content, range)).toBe("n");
});
});


@ -0,0 +1,123 @@
// This file is generated by Continue
import { intersection } from "../../../util/ranges";
interface Position {
line: number;
character: number;
}
interface Range {
start: Position;
end: Position;
}
describe("intersection", () => {
let rangeA: Range;
let rangeB: Range;
beforeEach(() => {
rangeA = {
start: { line: 1, character: 0 },
end: { line: 3, character: 5 },
};
rangeB = {
start: { line: 2, character: 2 },
end: { line: 4, character: 0 },
};
});
test("returns correct intersection for overlapping ranges", () => {
const result = intersection(rangeA, rangeB);
expect(result).toEqual({
start: { line: 2, character: 2 },
end: { line: 3, character: 5 },
});
});
test("returns null for non-overlapping ranges", () => {
rangeA = {
start: { line: 1, character: 0 },
end: { line: 2, character: 0 },
};
rangeB = {
start: { line: 3, character: 0 },
end: { line: 4, character: 0 },
};
const result = intersection(rangeA, rangeB);
expect(result).toBeNull();
});
// TODO
test.skip("returns correct intersection for single line overlap", () => {
rangeA = {
start: { line: 1, character: 0 },
end: { line: 1, character: 5 },
};
rangeB = {
start: { line: 1, character: 3 },
end: { line: 2, character: 0 },
};
const result = intersection(rangeA, rangeB);
expect(result).toEqual({
start: { line: 1, character: 3 },
end: { line: 1, character: 5 },
});
});
test("returns null for single line non-overlapping ranges", () => {
rangeA = {
start: { line: 1, character: 0 },
end: { line: 1, character: 2 },
};
rangeB = {
start: { line: 1, character: 3 },
end: { line: 1, character: 5 },
};
const result = intersection(rangeA, rangeB);
expect(result).toBeNull();
});
test("returns correct intersection when one range is fully within another", () => {
rangeA = {
start: { line: 1, character: 0 },
end: { line: 4, character: 5 },
};
rangeB = {
start: { line: 2, character: 2 },
end: { line: 3, character: 3 },
};
const result = intersection(rangeA, rangeB);
expect(result).toEqual({
start: { line: 2, character: 2 },
end: { line: 3, character: 3 },
});
});
test("returns correct intersection when ranges touch at the edge", () => {
rangeA = {
start: { line: 1, character: 0 },
end: { line: 2, character: 0 },
};
rangeB = {
start: { line: 2, character: 0 },
end: { line: 3, character: 0 },
};
const result = intersection(rangeA, rangeB);
expect(result).toEqual({
start: { line: 2, character: 0 },
end: { line: 2, character: 0 },
});
});
});


@ -0,0 +1,70 @@
import { maxPosition, minPosition } from "../../../util/ranges";
// This file is generated by Continue
interface Position {
line: number;
character: number;
}
describe("Position comparison functions", () => {
let pos1: Position;
let pos2: Position;
let pos3: Position;
let pos4: Position;
beforeAll(() => {
// Setup: Initializing positions
pos1 = { line: 1, character: 5 };
pos2 = { line: 2, character: 3 };
pos3 = { line: 1, character: 7 };
pos4 = { line: 2, character: 3 }; // Same as pos2 to test equality
});
describe("maxPosition", () => {
it("should return the position with the greater line number", () => {
expect(maxPosition(pos1, pos2)).toEqual(pos2);
});
it("should return the position with the greater character number when lines are equal", () => {
expect(maxPosition(pos1, pos3)).toEqual(pos3);
});
it("should return the first position when both positions are equal", () => {
expect(maxPosition(pos2, pos4)).toEqual(pos2);
});
it("should handle positions with negative line numbers", () => {
const posNegative = { line: -1, character: 0 };
expect(maxPosition(posNegative, pos1)).toEqual(pos1);
});
it("should handle positions with negative character numbers", () => {
const posNegativeChar = { line: 1, character: -1 };
expect(maxPosition(posNegativeChar, pos1)).toEqual(pos1);
});
});
describe("minPosition", () => {
it("should return the position with the lesser line number", () => {
expect(minPosition(pos1, pos2)).toEqual(pos1);
});
it("should return the position with the lesser character number when lines are equal", () => {
expect(minPosition(pos1, pos3)).toEqual(pos1);
});
it("should return the first position when both positions are equal", () => {
expect(minPosition(pos2, pos4)).toEqual(pos2);
});
it("should handle positions with negative line numbers", () => {
const posNegative = { line: -1, character: 0 };
expect(minPosition(posNegative, pos1)).toEqual(posNegative);
});
it("should handle positions with negative character numbers", () => {
const posNegativeChar = { line: 1, character: -1 };
expect(minPosition(posNegativeChar, pos1)).toEqual(posNegativeChar);
});
});
});
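A minimal sketch of `maxPosition`/`minPosition` consistent with the expectations above, including the "first argument wins ties" behaviour that the equality tests require. Again, this is a hypothetical sketch rather than the repository implementation.

```typescript
// Hypothetical sketch of maxPosition()/minPosition() matching the tests above.
interface Position {
  line: number;
  character: number;
}

// Returns the later of the two positions; the first argument wins ties.
export function maxPosition(a: Position, b: Position): Position {
  if (a.line !== b.line) {
    return a.line > b.line ? a : b;
  }
  return a.character >= b.character ? a : b;
}

// Returns the earlier of the two positions; the first argument wins ties.
export function minPosition(a: Position, b: Position): Position {
  if (a.line !== b.line) {
    return a.line < b.line ? a : b;
  }
  return a.character <= b.character ? a : b;
}
```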


@ -0,0 +1,92 @@
import { union } from "../../../util/ranges";
// This file is generated by Continue
interface Position {
line: number;
character: number;
}
interface Range {
start: Position;
end: Position;
}
describe("union", () => {
let rangeA: Range;
let rangeB: Range;
beforeEach(() => {
rangeA = {
start: { line: 1, character: 0 },
end: { line: 3, character: 5 },
};
rangeB = {
start: { line: 2, character: 3 },
end: { line: 4, character: 2 },
};
});
test("should return the union of two overlapping ranges", () => {
const result = union(rangeA, rangeB);
expect(result).toEqual({
start: { line: 1, character: 0 },
end: { line: 4, character: 2 },
});
});
test("should return the union when ranges are identical", () => {
const result = union(rangeA, rangeA);
expect(result).toEqual(rangeA);
});
test("should return the union when ranges do not overlap but are adjacent", () => {
rangeB = {
start: { line: 3, character: 5 },
end: { line: 4, character: 2 },
};
const result = union(rangeA, rangeB);
expect(result).toEqual({
start: { line: 1, character: 0 },
end: { line: 4, character: 2 },
});
});
test("should return the union when one range is completely within the other", () => {
rangeB = {
start: { line: 2, character: 1 },
end: { line: 2, character: 4 },
};
const result = union(rangeA, rangeB);
expect(result).toEqual(rangeA);
});
test("should return the union when ranges do not overlap and are not adjacent", () => {
rangeB = {
start: { line: 4, character: 3 },
end: { line: 5, character: 1 },
};
const result = union(rangeA, rangeB);
expect(result).toEqual({
start: { line: 1, character: 0 },
end: { line: 5, character: 1 },
});
});
// TODO
test.skip("should handle edge case where start and end lines are the same", () => {
rangeA = {
start: { line: 1, character: 0 },
end: { line: 1, character: 5 },
};
rangeB = {
start: { line: 1, character: 3 },
end: { line: 1, character: 7 },
};
const result = union(rangeA, rangeB);
expect(result).toEqual({
start: { line: 1, character: 0 },
end: { line: 1, character: 7 },
});
});
});
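A corresponding sketch of `union` that satisfies the non-skipped expectations above: the smallest range covering both inputs, including any gap between non-adjacent ranges. Hypothetical, not necessarily the repository implementation.

```typescript
// Hypothetical sketch of union(), derived from the test expectations above.
interface Position {
  line: number;
  character: number;
}
interface Range {
  start: Position;
  end: Position;
}

function comparePositions(a: Position, b: Position): number {
  return a.line !== b.line ? a.line - b.line : a.character - b.character;
}

// Smallest range covering both inputs; gaps between non-adjacent ranges are included.
function union(a: Range, b: Range): Range {
  const start = comparePositions(a.start, b.start) <= 0 ? a.start : b.start;
  const end = comparePositions(a.end, b.end) >= 0 ? a.end : b.end;
  return { start, end };
}
```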


@ -1,4 +1,6 @@
import fs from "fs";
import { open } from "sqlite";
import sqlite3 from "sqlite3";
import { DatabaseConnection } from "../indexing/refreshIndex.js";
import { getDevDataSqlitePath } from "./paths.js";
@ -72,8 +74,6 @@ export class DevDataSqliteDb {
return DevDataSqliteDb.db;
}
const { open } = require("sqlite");
const sqlite3 = require("sqlite3");
DevDataSqliteDb.db = await open({
filename: devDataSqlitePath,
driver: sqlite3.Database,


@ -1,4 +1,4 @@
import { http, https } from "follow-redirects";
import * as followRedirects from "follow-redirects";
import { HttpProxyAgent } from "http-proxy-agent";
import { globalAgent } from "https";
import { HttpsProxyAgent } from "https-proxy-agent";
@ -7,6 +7,8 @@ import * as fs from "node:fs";
import tls from "node:tls";
import { RequestOptions } from "../index.js";
const { http, https } = (followRedirects as any).default;
export function fetchwithRequestOptions(
url_: URL | string,
init?: RequestInit,


@ -4,15 +4,29 @@ import {
FileType,
IDE,
IdeInfo,
IdeSettings,
IndexTag,
Problem,
Range,
Thread,
} from "../index.js";
} from "../index.d.js";
import { getContinueGlobalPath } from "./paths.js";
class FileSystemIde implements IDE {
static workspaceDir = "/tmp/continue";
constructor() {
fs.mkdirSync(FileSystemIde.workspaceDir, { recursive: true });
}
async getIdeSettings(): Promise<IdeSettings> {
return {
remoteConfigServerUrl: undefined,
remoteConfigSyncPeriod: 60,
userToken: "",
};
}
async getGitHubAuthToken(): Promise<string | undefined> {
return undefined;
}
@ -68,7 +82,7 @@ class FileSystemIde implements IDE {
}
isTelemetryEnabled(): Promise<boolean> {
return Promise.resolve(false);
return Promise.resolve(true);
}
getUniqueId(): Promise<string> {
@ -115,7 +129,7 @@ class FileSystemIde implements IDE {
useGitIgnore?: boolean,
): Promise<string[]> {
return new Promise((resolve, reject) => {
fs.readdir("/tmp/continue", (err, files) => {
fs.readdir(FileSystemIde.workspaceDir, (err, files) => {
if (err) {
reject(err);
}
@ -126,7 +140,7 @@ class FileSystemIde implements IDE {
getWorkspaceDirs(): Promise<string[]> {
return new Promise((resolve, reject) => {
fs.mkdtemp("/tmp/continue", (err, folder) => {
fs.mkdtemp(FileSystemIde.workspaceDir, (err, folder) => {
if (err) {
reject(err);
}


@ -27,29 +27,6 @@ export function removeQuotesAndEscapes(output: string): string {
return output;
}
export function proxyFetch(url: string, init?: RequestInit): Promise<Response> {
if (!(window as any)._fetch) {
throw new Error("Proxy fetch not initialized");
}
if (!(url.startsWith("http://") || url.startsWith("https://"))) {
// Relative URL
const fullUrl = `${window.vscMediaUrl}/${url}`;
return (window as any)._fetch(fullUrl, init);
}
const proxyServerUrl =
(window as any).proxyServerUrl || "http://localhost:65433";
const headers = new Headers(init?.headers);
headers.append("x-continue-url", url);
return (window as any)._fetch(proxyServerUrl, {
...init,
headers,
});
}
export function dedentAndGetCommonWhitespace(s: string): [string, string] {
const lines = s.split("\n");
if (lines.length === 0 || (lines[0].trim() === "" && lines.length === 1)) {
@ -96,6 +73,9 @@ export function getBasename(filepath: string): string {
}
export function getLastNPathParts(filepath: string, n: number): string {
if (n <= 0) {
return "";
}
return filepath.split(SEP_REGEX).slice(-n).join("/");
}
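A brief self-contained usage sketch of the guarded `getLastNPathParts` above. `SEP_REGEX` here is an assumption (a separator matching `/` or `\`) standing in for the one defined elsewhere in the file.

```typescript
// Usage sketch; SEP_REGEX is an assumed stand-in for the constant defined in the file above.
const SEP_REGEX = /[\\/]/;

function getLastNPathParts(filepath: string, n: number): string {
  if (n <= 0) {
    return "";
  }
  return filepath.split(SEP_REGEX).slice(-n).join("/");
}

console.log(getLastNPathParts("core/util/ranges.ts", 2)); // "util/ranges.ts"
console.log(getLastNPathParts("core/util/ranges.ts", 0)); // "" thanks to the new n <= 0 guard
```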


@ -3,6 +3,7 @@ import type {
FileType,
IDE,
IdeInfo,
IdeSettings,
IndexTag,
Problem,
Range,
@ -17,6 +18,9 @@ export class MessageIde implements IDE {
data: ToIdeFromWebviewOrCoreProtocol[T][0],
) => Promise<ToIdeFromWebviewOrCoreProtocol[T][1]>,
) {}
getIdeSettings(): Promise<IdeSettings> {
return this.request("getIdeSettings", undefined);
}
getGitHubAuthToken(): Promise<string | undefined> {
return this.request("getGitHubAuthToken", undefined);
}


@ -7,9 +7,13 @@ import { defaultConfig, defaultConfigJetBrains } from "../config/default.js";
import Types from "../config/types.js";
import { IdeType, SerializedContinueConfig } from "../index.js";
dotenv.config();
const CONTINUE_GLOBAL_DIR =
process.env.CONTINUE_GLOBAL_DIR ?? path.join(os.homedir(), ".continue");
export function getContinueGlobalPath(): string {
// This is ~/.continue on mac/linux
const continuePath = path.join(os.homedir(), ".continue");
const continuePath = CONTINUE_GLOBAL_DIR;
if (!fs.existsSync(continuePath)) {
fs.mkdirSync(continuePath);
}
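The hunk above makes the root Continue directory overridable via the `CONTINUE_GLOBAL_DIR` environment variable instead of hard-coding `~/.continue`. A minimal sketch consolidating the scattered diff lines into one coherent function (not the full `paths.ts` module):

```typescript
// Minimal consolidated sketch of the override shown above; not the full paths.ts.
import fs from "fs";
import os from "os";
import path from "path";

// Falls back to ~/.continue (on mac/linux) when CONTINUE_GLOBAL_DIR is not set.
const CONTINUE_GLOBAL_DIR =
  process.env.CONTINUE_GLOBAL_DIR ?? path.join(os.homedir(), ".continue");

export function getContinueGlobalPath(): string {
  const continuePath = CONTINUE_GLOBAL_DIR;
  if (!fs.existsSync(continuePath)) {
    fs.mkdirSync(continuePath);
  }
  return continuePath;
}
```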


@ -0,0 +1,191 @@
import { IDE } from "..";
import { ToIdeFromWebviewOrCoreProtocol } from "../protocol/ide";
import { Message } from "./messenger";
export class ReverseMessageIde {
private on<T extends keyof ToIdeFromWebviewOrCoreProtocol>(
messageType: T,
handler: (
data: ToIdeFromWebviewOrCoreProtocol[T][0],
) =>
| Promise<ToIdeFromWebviewOrCoreProtocol[T][1]>
| ToIdeFromWebviewOrCoreProtocol[T][1],
): void {
this._on(messageType, (msg) => {
const data = msg.data;
const result = handler(data);
return result;
});
}
constructor(
private readonly _on: <T extends keyof ToIdeFromWebviewOrCoreProtocol>(
messageType: T,
handler: (
message: Message<ToIdeFromWebviewOrCoreProtocol[T][0]>,
) =>
| Promise<ToIdeFromWebviewOrCoreProtocol[T][1]>
| ToIdeFromWebviewOrCoreProtocol[T][1],
) => void,
private readonly ide: IDE,
) {
this.initializeListeners();
}
private initializeListeners() {
this.on("getGitHubAuthToken", () => {
return this.ide.getGitHubAuthToken();
});
this.on("getLastModified", (data) => {
return this.ide.getLastModified(data.files);
});
this.on("getGitRootPath", (data) => {
return this.ide.getGitRootPath(data.dir);
});
this.on("listDir", (data) => {
return this.ide.listDir(data.dir);
});
this.on("infoPopup", (data) => {
return this.ide.infoPopup(data.message);
});
this.on("errorPopup", (data) => {
return this.ide.errorPopup(data.message);
});
this.on("getRepoName", (data) => {
return this.ide.getRepoName(data.dir);
});
this.on("getDebugLocals", (data) => {
return this.ide.getDebugLocals(data.threadIndex);
});
this.on("getTopLevelCallStackSources", (data) => {
return this.ide.getTopLevelCallStackSources(
data.threadIndex,
data.stackDepth,
);
});
this.on("getAvailableThreads", () => {
return this.ide.getAvailableThreads();
});
this.on("getTags", (data) => {
return this.ide.getTags(data);
});
this.on("getIdeInfo", () => {
return this.ide.getIdeInfo();
});
this.on("readRangeInFile", (data) => {
return this.ide.readRangeInFile(data.filepath, data.range);
});
this.on("isTelemetryEnabled", () => {
return this.ide.isTelemetryEnabled();
});
this.on("getUniqueId", () => {
return this.ide.getUniqueId();
});
this.on("getWorkspaceConfigs", () => {
return this.ide.getWorkspaceConfigs();
});
this.on("getIdeSettings", () => {
return this.ide.getIdeSettings();
});
this.on("getDiff", () => {
return this.ide.getDiff();
});
this.on("getTerminalContents", () => {
return this.ide.getTerminalContents();
});
this.on("listWorkspaceContents", (data) => {
return this.ide.listWorkspaceContents(data.directory, data.useGitIgnore);
});
this.on("getWorkspaceDirs", () => {
return this.ide.getWorkspaceDirs();
});
this.on("showLines", (data) => {
return this.ide.showLines(data.filepath, data.startLine, data.endLine);
});
this.on("listFolders", () => {
return this.ide.listFolders();
});
this.on("getContinueDir", () => {
return this.ide.getContinueDir();
});
this.on("writeFile", (data) => {
return this.ide.writeFile(data.path, data.contents);
});
this.on("showVirtualFile", (data) => {
return this.ide.showVirtualFile(data.name, data.content);
});
this.on("openFile", (data) => {
return this.ide.openFile(data.path);
});
this.on("runCommand", (data) => {
return this.ide.runCommand(data.command);
});
this.on("saveFile", (data) => {
return this.ide.saveFile(data.filepath);
});
this.on("readFile", (data) => {
return this.ide.readFile(data.filepath);
});
this.on("showDiff", (data) => {
return this.ide.showDiff(data.filepath, data.newContents, data.stepIndex);
});
this.on("getOpenFiles", () => {
return this.ide.getOpenFiles();
});
this.on("getCurrentFile", () => {
return this.ide.getCurrentFile();
});
this.on("getPinnedFiles", () => {
return this.ide.getPinnedFiles();
});
this.on("getSearchResults", (data) => {
return this.ide.getSearchResults(data.query);
});
this.on("getProblems", (data) => {
return this.ide.getProblems(data.filepath);
});
this.on("subprocess", (data) => {
return this.ide.subprocess(data.command);
});
this.on("getBranch", (data) => {
return this.ide.getBranch(data.dir);
});
}
}
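`ReverseMessageIde` registers one handler per IDE protocol message and forwards each call to a concrete `IDE` implementation, so a process that only has a message channel can be served by an in-process IDE. A hypothetical wiring sketch using a toy in-memory handler registry in place of the repository's real messenger; the import paths and the `FileSystemIde` default export are assumptions for illustration.

```typescript
// Hypothetical wiring sketch: serve an in-process IDE over a toy message bus.
import { IDE } from "..";
import { ReverseMessageIde } from "./ReverseMessageIde";
import FileSystemIde from "./filesystem"; // assumed export, for illustration

type AnyHandler = (msg: { messageType: string; data: any }) => any;
const handlers = new Map<string, AnyHandler>();

const ide: IDE = new FileSystemIde();

// ReverseMessageIde registers one handler per IDE message type via this callback.
new ReverseMessageIde((messageType, handler) => {
  handlers.set(messageType as string, handler as AnyHandler);
}, ide);

// An incoming protocol message is then dispatched to the matching handler.
export async function dispatch(messageType: string, data: unknown) {
  const handler = handlers.get(messageType);
  if (!handler) {
    throw new Error(`No IDE handler registered for ${messageType}`);
  }
  return handler({ messageType, data });
}

// Example: dispatch("getIdeSettings", undefined) resolves with the IDE's settings object.
```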


@ -288,6 +288,14 @@ Type `@locals` to reference the contents of the local variables with top n level
}
```
### Operating System
Type `@os` to reference the architecture and platform of your current operating system.
```json
{ "name": "os" }
```
### Requesting Context Providers
Not seeing what you want? Create an issue [here](https://github.com/continuedev/continue/issues/new?assignees=TyDunn&labels=enhancement&projects=&template=feature-request-%F0%9F%92%AA.md&title=) to request a new ContextProvider.

docs/package-lock.json (generated): 1350 changed lines; file diff suppressed because it is too large.


@ -15,9 +15,9 @@
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "^3.2.1",
"@docusaurus/plugin-client-redirects": "^3.2.1",
"@docusaurus/preset-classic": "^3.2.1",
"@docusaurus/core": "^3.4.0",
"@docusaurus/plugin-client-redirects": "^3.4.0",
"@docusaurus/preset-classic": "^3.4.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.1.0",
"docusaurus-json-schema-plugin": "^1.11.0",


@ -1319,13 +1319,15 @@
"outline",
"postgres",
"code",
"system",
"currentFile",
"url"
"url",
"database",
"os"
],
"markdownEnumDescriptions": [
"Reference the contents of the current changes as given by `git diff`",
"Reference the contents of the terminal",
"Reference the contents of the local variables with top n level (defaulting to 3) of call stack for that thread",
"Reference the contents of all open or pinned files.",
"Enter a search phrase and include the Google search results as context",
"Reference the results of a ripgrep search in your codebase",
@ -1335,16 +1337,15 @@
"Include important files from a folder in the prompt, as determined by similarity search",
"Reference GitHub issues from a repository",
"Retrieve important pages from a documentation website, as determined by similarity search",
"Index and retrieve the contents of any documentation site, using embeddings to find important snippets",
"Display a file tree of the current workspace",
"Include important highlighted sections from your code",
"Include a repo map showing important code objects",
"Displays important snippets of code from the currently open files",
"Displays definition lines from the currently open files",
"References Postgres table schema and sample rows",
"Reference specific functions and classes from throughout your codebase",
"Reference your operating system and cpu",
"Reference the contents of the currently active file",
"Reference the contents of a page at a URL"
"Reference the contents of a page at a URL",
"Reference table schemas",
"Operating system and CPU Information"
],
"type": "string"
},
@ -1460,56 +1461,63 @@
}
},
"then": {
"connections": {
"type": "array",
"description": "A list of database connections",
"items": {
"type": "object",
"properties": {
"params": {
"properties": {
"name": {
"type": "string",
"description": "A unique name for this database connection"
},
"connection_type": {
"type": "string",
"description": "The type of database (e.g., 'postgres', 'mysql')",
"enum": ["postgres", "mysql", "sqlite"]
},
"connection": {
"type": "object",
"properties": {
"user": {
"type": "string",
"description": "The database user name"
"connections": {
"type": "array",
"description": "A list of database connections",
"items": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "A unique name for this database connection"
},
"connection_type": {
"type": "string",
"description": "The type of database (e.g., 'postgres', 'mysql')",
"enum": ["postgres", "mysql", "sqlite"]
},
"connection": {
"type": "object",
"properties": {
"user": {
"type": "string",
"description": "The database user name"
},
"host": {
"type": "string",
"description": "The host address of the database server"
},
"database": {
"type": "string",
"description": "The name of the database to connect to"
},
"password": {
"type": "string",
"description": "The password for the database user"
},
"port": {
"type": "integer",
"description": "The port number to connect to at the host"
},
"filename": {
"type": "string",
"description": "File location for simple file DB's"
}
},
"required": []
}
},
"host": {
"type": "string",
"description": "The host address of the database server"
},
"database": {
"type": "string",
"description": "The name of the database to connect to"
},
"password": {
"type": "string",
"description": "The password for the database user"
},
"port": {
"type": "integer",
"description": "The port number to connect to at the host"
},
"filename": {
"type": "string",
"description": "File location for simple file DB's"
}
},
"required": []
"required": ["name", "connection_type", "connection"]
}
}
},
"required": ["name", "type", "connection"]
"required": ["connections"]
}
},
"required": ["connections"]
"required": ["params"]
}
},
{
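The reshaped schema above nests the database connections under `params`, matching how context providers are actually configured. For reference, a hypothetical entry that should validate against it, written as a TypeScript object literal so the `params` → `connections` nesting is explicit; all names and credentials below are placeholders.

```typescript
// Hypothetical "database" context provider entry for config.json (placeholder values).
const databaseContextProvider = {
  name: "database",
  params: {
    connections: [
      {
        name: "examplePostgres",
        connection_type: "postgres",
        connection: {
          user: "postgres",
          host: "localhost",
          port: 5432,
          database: "example_db",
          password: "<password>",
        },
      },
      {
        name: "localSqlite",
        connection_type: "sqlite",
        connection: {
          filename: "/path/to/example.sqlite",
        },
      },
    ],
  },
};
```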


@ -137,7 +137,7 @@ tasks {
}
publishPlugin {
dependsOn("patchChangelog")
// dependsOn("patchChangelog")
token = environment("PUBLISH_TOKEN")
// The pluginVersion is based on the SemVer (https://semver.org) and supports pre-release labels, like 2.1.7-alpha.3
// Specify pre-release label to publish the plugin in a custom Release Channel automatically. Read more:


@ -4,7 +4,7 @@ pluginGroup = com.github.continuedev.continueintellijextension
pluginName = continue-intellij-extension
pluginRepositoryUrl = https://github.com/continuedev/continue
# SemVer format -> https://semver.org
pluginVersion = 0.0.51
pluginVersion = 0.0.52
# Supported build number ranges and IntelliJ Platform versions -> https://plugins.jetbrains.com/docs/intellij/build-number-ranges.html
pluginSinceBuild = 223


@ -42,7 +42,7 @@ class AutocompleteEditorListener: EditorFactoryListener {
override fun editorCreated(event: EditorFactoryEvent) {
val editor = event.editor
val project = editor.project ?: return
val editorManager = project.let { FileEditorManager.getInstance(it) }
val editorManager = project.let { FileEditorManager.getInstance(it) } ?: return
val completionProvider = project.service<AutocompleteService>()
// Listen to changes to mouse position


@ -0,0 +1,48 @@
package com.github.continuedev.continueintellijextension.autocomplete
import com.intellij.codeInsight.lookup.impl.LookupImpl
import com.intellij.codeInsight.lookup.Lookup
import com.intellij.codeInsight.lookup.LookupEvent
import com.intellij.codeInsight.lookup.LookupListener
import com.intellij.codeInsight.lookup.LookupManagerListener
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.components.Service
import com.intellij.openapi.components.service
import com.intellij.openapi.project.Project
import java.util.concurrent.atomic.AtomicBoolean
@Service(Service.Level.PROJECT)
class AutocompleteLookupListener(project: Project) : LookupManagerListener {
private val isLookupShown = AtomicBoolean(true)
fun isLookupEmpty(): Boolean {
return isLookupShown.get()
}
init {
project.messageBus.connect().subscribe(LookupManagerListener.TOPIC, this)
}
override fun activeLookupChanged(oldLookup: Lookup?, newLookup: Lookup?) {
val newEditor = newLookup?.editor ?: return
if (newLookup is LookupImpl) {
newLookup.addLookupListener(
object : LookupListener {
override fun lookupShown(event: LookupEvent) {
super.lookupShown(event)
ApplicationManager.getApplication().invokeLater {
isLookupShown.set(false)
event.lookup.editor.project?.service<AutocompleteService>()?.hideCompletions(newEditor)
}
}
override fun lookupCanceled(event: LookupEvent) {
super.lookupCanceled(event)
ApplicationManager.getApplication().invokeLater {
isLookupShown.set(true)
}
}
})
}
}
}


@ -25,6 +25,7 @@ data class PendingCompletion (
@Service(Service.Level.PROJECT)
class AutocompleteService(private val project: Project) {
var pendingCompletion: PendingCompletion? = null;
private val autocompleteLookupListener = project.service<AutocompleteLookupListener>()
fun triggerCompletion(editor: Editor) {
val settings =
@ -79,6 +80,10 @@ class AutocompleteService(private val project: Project) {
}
private fun renderCompletion(editor: Editor, offset: Int, text: String) {
// Don't render completions when code completion dropdown is visible
if (!autocompleteLookupListener.isLookupEmpty()) {
return
}
ApplicationManager.getApplication().invokeLater {
WriteAction.run<Throwable> {
val properties = InlayProperties()
@ -136,4 +141,17 @@ class AutocompleteService(private val project: Project) {
}
}
}
fun hideCompletions(editor: Editor) {
editor.inlayModel.getInlineElementsInRange(0, editor.document.textLength).forEach {
if (it.renderer is ContinueCustomElementRenderer) {
it.dispose()
}
}
editor.inlayModel.getBlockElementsInRange(0, editor.document.textLength).forEach {
if (it.renderer is ContinueMultilineCustomElementRenderer) {
it.dispose()
}
}
}
}


@ -19,6 +19,9 @@
<projectService
id="DiffStreamService"
serviceImplementation="com.github.continuedev.continueintellijextension.editor.DiffStreamService"/>
<projectService
id="AutocompleteLookupListener"
serviceImplementation="com.github.continuedev.continueintellijextension.autocomplete.AutocompleteLookupListener"/>
</extensions>
<resource-bundle>messages.MyBundle</resource-bundle>


@ -1319,13 +1319,15 @@
"outline",
"postgres",
"code",
"system",
"currentFile",
"url"
"url",
"database",
"os"
],
"markdownEnumDescriptions": [
"Reference the contents of the current changes as given by `git diff`",
"Reference the contents of the terminal",
"Reference the contents of the local variables with top n level (defaulting to 3) of call stack for that thread",
"Reference the contents of all open or pinned files.",
"Enter a search phrase and include the Google search results as context",
"Reference the results of a ripgrep search in your codebase",
@ -1335,16 +1337,15 @@
"Include important files from a folder in the prompt, as determined by similarity search",
"Reference GitHub issues from a repository",
"Retrieve important pages from a documentation website, as determined by similarity search",
"Index and retrieve the contents of any documentation site, using embeddings to find important snippets",
"Display a file tree of the current workspace",
"Include important highlighted sections from your code",
"Include a repo map showing important code objects",
"Displays important snippets of code from the currently open files",
"Displays definition lines from the currently open files",
"References Postgres table schema and sample rows",
"Reference specific functions and classes from throughout your codebase",
"Reference your operating system and cpu",
"Reference the contents of the currently active file",
"Reference the contents of a page at a URL"
"Reference the contents of a page at a URL",
"Reference table schemas",
"Operating system and CPU Information"
],
"type": "string"
},
@ -1460,56 +1461,63 @@
}
},
"then": {
"connections": {
"type": "array",
"description": "A list of database connections",
"items": {
"type": "object",
"properties": {
"params": {
"properties": {
"name": {
"type": "string",
"description": "A unique name for this database connection"
},
"connection_type": {
"type": "string",
"description": "The type of database (e.g., 'postgres', 'mysql')",
"enum": ["postgres", "mysql", "sqlite"]
},
"connection": {
"type": "object",
"properties": {
"user": {
"type": "string",
"description": "The database user name"
"connections": {
"type": "array",
"description": "A list of database connections",
"items": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "A unique name for this database connection"
},
"connection_type": {
"type": "string",
"description": "The type of database (e.g., 'postgres', 'mysql')",
"enum": ["postgres", "mysql", "sqlite"]
},
"connection": {
"type": "object",
"properties": {
"user": {
"type": "string",
"description": "The database user name"
},
"host": {
"type": "string",
"description": "The host address of the database server"
},
"database": {
"type": "string",
"description": "The name of the database to connect to"
},
"password": {
"type": "string",
"description": "The password for the database user"
},
"port": {
"type": "integer",
"description": "The port number to connect to at the host"
},
"filename": {
"type": "string",
"description": "File location for simple file DB's"
}
},
"required": []
}
},
"host": {
"type": "string",
"description": "The host address of the database server"
},
"database": {
"type": "string",
"description": "The name of the database to connect to"
},
"password": {
"type": "string",
"description": "The password for the database user"
},
"port": {
"type": "integer",
"description": "The port number to connect to at the host"
},
"filename": {
"type": "string",
"description": "File location for simple file DB's"
}
},
"required": []
"required": ["name", "connection_type", "connection"]
}
}
},
"required": ["name", "type", "connection"]
"required": ["connections"]
}
},
"required": ["connections"]
"required": ["params"]
}
},
{


@ -1319,13 +1319,15 @@
"outline",
"postgres",
"code",
"system",
"currentFile",
"url"
"url",
"database",
"os"
],
"markdownEnumDescriptions": [
"Reference the contents of the current changes as given by `git diff`",
"Reference the contents of the terminal",
"Reference the contents of the local variables with top n level (defaulting to 3) of call stack for that thread",
"Reference the contents of all open or pinned files.",
"Enter a search phrase and include the Google search results as context",
"Reference the results of a ripgrep search in your codebase",
@ -1335,16 +1337,15 @@
"Include important files from a folder in the prompt, as determined by similarity search",
"Reference GitHub issues from a repository",
"Retrieve important pages from a documentation website, as determined by similarity search",
"Index and retrieve the contents of any documentation site, using embeddings to find important snippets",
"Display a file tree of the current workspace",
"Include important highlighted sections from your code",
"Include a repo map showing important code objects",
"Displays important snippets of code from the currently open files",
"Displays definition lines from the currently open files",
"References Postgres table schema and sample rows",
"Reference specific functions and classes from throughout your codebase",
"Reference your operating system and cpu",
"Reference the contents of the currently active file",
"Reference the contents of a page at a URL"
"Reference the contents of a page at a URL",
"Reference table schemas",
"Operating system and CPU Information"
],
"type": "string"
},
@ -1460,56 +1461,63 @@
}
},
"then": {
"connections": {
"type": "array",
"description": "A list of database connections",
"items": {
"type": "object",
"properties": {
"params": {
"properties": {
"name": {
"type": "string",
"description": "A unique name for this database connection"
},
"connection_type": {
"type": "string",
"description": "The type of database (e.g., 'postgres', 'mysql')",
"enum": ["postgres", "mysql", "sqlite"]
},
"connection": {
"type": "object",
"properties": {
"user": {
"type": "string",
"description": "The database user name"
"connections": {
"type": "array",
"description": "A list of database connections",
"items": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "A unique name for this database connection"
},
"connection_type": {
"type": "string",
"description": "The type of database (e.g., 'postgres', 'mysql')",
"enum": ["postgres", "mysql", "sqlite"]
},
"connection": {
"type": "object",
"properties": {
"user": {
"type": "string",
"description": "The database user name"
},
"host": {
"type": "string",
"description": "The host address of the database server"
},
"database": {
"type": "string",
"description": "The name of the database to connect to"
},
"password": {
"type": "string",
"description": "The password for the database user"
},
"port": {
"type": "integer",
"description": "The port number to connect to at the host"
},
"filename": {
"type": "string",
"description": "File location for simple file DB's"
}
},
"required": []
}
},
"host": {
"type": "string",
"description": "The host address of the database server"
},
"database": {
"type": "string",
"description": "The name of the database to connect to"
},
"password": {
"type": "string",
"description": "The password for the database user"
},
"port": {
"type": "integer",
"description": "The port number to connect to at the host"
},
"filename": {
"type": "string",
"description": "File location for simple file DB's"
}
},
"required": []
"required": ["name", "connection_type", "connection"]
}
}
},
"required": ["name", "type", "connection"]
"required": ["connections"]
}
},
"required": ["connections"]
"required": ["params"]
}
},
{


@ -1462,13 +1462,15 @@
"outline",
"postgres",
"code",
"system",
"currentFile",
"url"
"url",
"database",
"os"
],
"markdownEnumDescriptions": [
"Reference the contents of the current changes as given by `git diff`",
"Reference the contents of the terminal",
"Reference the contents of the local variables with top n level (defaulting to 3) of call stack for that thread",
"Reference the contents of all open or pinned files.",
"Enter a search phrase and include the Google search results as context",
"Reference the results of a ripgrep search in your codebase",
@ -1478,16 +1480,15 @@
"Include important files from a folder in the prompt, as determined by similarity search",
"Reference GitHub issues from a repository",
"Retrieve important pages from a documentation website, as determined by similarity search",
"Index and retrieve the contents of any documentation site, using embeddings to find important snippets",
"Display a file tree of the current workspace",
"Include important highlighted sections from your code",
"Include a repo map showing important code objects",
"Displays important snippets of code from the currently open files",
"Displays definition lines from the currently open files",
"References Postgres table schema and sample rows",
"Reference specific functions and classes from throughout your codebase",
"Reference your operating system and cpu",
"Reference the contents of the currently active file",
"Reference the contents of a page at a URL"
"Reference the contents of a page at a URL",
"Reference table schemas",
"Operating system and CPU Information"
],
"type": "string"
},
@ -1626,65 +1627,74 @@
}
},
"then": {
"connections": {
"type": "array",
"description": "A list of database connections",
"items": {
"type": "object",
"properties": {
"params": {
"properties": {
"name": {
"type": "string",
"description": "A unique name for this database connection"
},
"connection_type": {
"type": "string",
"description": "The type of database (e.g., 'postgres', 'mysql')",
"enum": [
"postgres",
"mysql",
"sqlite"
]
},
"connection": {
"type": "object",
"properties": {
"user": {
"type": "string",
"description": "The database user name"
"connections": {
"type": "array",
"description": "A list of database connections",
"items": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "A unique name for this database connection"
},
"connection_type": {
"type": "string",
"description": "The type of database (e.g., 'postgres', 'mysql')",
"enum": [
"postgres",
"mysql",
"sqlite"
]
},
"connection": {
"type": "object",
"properties": {
"user": {
"type": "string",
"description": "The database user name"
},
"host": {
"type": "string",
"description": "The host address of the database server"
},
"database": {
"type": "string",
"description": "The name of the database to connect to"
},
"password": {
"type": "string",
"description": "The password for the database user"
},
"port": {
"type": "integer",
"description": "The port number to connect to at the host"
},
"filename": {
"type": "string",
"description": "File location for simple file DB's"
}
},
"required": []
}
},
"host": {
"type": "string",
"description": "The host address of the database server"
},
"database": {
"type": "string",
"description": "The name of the database to connect to"
},
"password": {
"type": "string",
"description": "The password for the database user"
},
"port": {
"type": "integer",
"description": "The port number to connect to at the host"
},
"filename": {
"type": "string",
"description": "File location for simple file DB's"
}
},
"required": []
"required": [
"name",
"connection_type",
"connection"
]
}
}
},
"required": [
"name",
"type",
"connection"
"connections"
]
}
},
"required": [
"connections"
"params"
]
}
},


@ -1,12 +1,12 @@
{
"name": "continue",
"version": "0.9.164",
"version": "0.9.165",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "continue",
"version": "0.9.164",
"version": "0.9.165",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
@ -110,7 +110,7 @@
"commander": "^12.0.0",
"comment-json": "^4.2.3",
"dbinfoz": "^0.1.4",
"dotenv": "^16.3.1",
"dotenv": "^16.4.5",
"fastest-levenshtein": "^1.0.16",
"follow-redirects": "^1.15.5",
"handlebars": "^4.7.8",


@ -1,7 +1,7 @@
{
"name": "continue",
"icon": "media/icon.png",
"version": "0.9.164",
"version": "0.9.165",
"repository": {
"type": "git",
"url": "https://github.com/continuedev/continue"
@ -172,12 +172,6 @@
"title": "Toggle Autocomplete Enabled",
"group": "Continue"
},
{
"command": "continue.shareSession",
"category": "Continue",
"title": "Share Session",
"group": "Continue"
},
{
"command": "continue.selectFilesAsContext",
"category": "Continue",


@ -403,9 +403,6 @@ const commandsMap: (
input: text,
});
},
"continue.shareSession": () => {
sidebar.sendMainUserInput("/share");
},
"continue.selectRange": (startLine: number, endLine: number) => {
if (!vscode.window.activeTextEditor) {
return;


@ -47,7 +47,7 @@ export class VsCodeExtension {
this.extensionContext = context;
this.windowId = uuidv4();
const ideSettings = this.ide.getIdeSettings();
const ideSettings = this.ide.getIdeSettingsSync();
const { remoteConfigServerUrl } = ideSettings;
// Dependencies of core


@ -7,13 +7,13 @@ import type {
FileType,
IDE,
IdeInfo,
IdeSettings,
IndexTag,
Problem,
Thread,
} from "core";
import { Range } from "core";
import { defaultIgnoreFile } from "core/indexing/ignore";
import { IdeSettings } from "core/protocol/ideWebview";
import {
editConfigJson,
getConfigJsonPath,
@ -510,7 +510,7 @@ class VsCodeIde implements IDE {
.map(([name, type]) => [path.join(dir, name), type]) as any;
}
getIdeSettings(): IdeSettings {
getIdeSettingsSync(): IdeSettings {
const settings = vscode.workspace.getConfiguration("continue");
const remoteConfigServerUrl = settings.get<string | undefined>(
"remoteConfigServerUrl",
@ -526,6 +526,10 @@ class VsCodeIde implements IDE {
};
return ideSettings;
}
async getIdeSettings(): Promise<IdeSettings> {
return this.getIdeSettingsSync();
}
}
export { VsCodeIde };

gui/package-lock.json (generated): 2 changed lines

@ -103,7 +103,7 @@
"commander": "^12.0.0",
"comment-json": "^4.2.3",
"dbinfoz": "^0.1.4",
"dotenv": "^16.3.1",
"dotenv": "^16.4.5",
"fastest-levenshtein": "^1.0.16",
"follow-redirects": "^1.15.5",
"handlebars": "^4.7.8",


@ -82,4 +82,7 @@ npm run build
Pop-Location
Write-Output "`nInstalling docs dependencies..." -ForegroundColor White
Push-Location docs
npm install


@ -4,10 +4,12 @@
# - Run Task -> Install Dependencies
# - Debug -> Extension
set -e
echo "Installing Core extension dependencies..."
pushd core
npm install
npm link
popd
echo "Installing GUI extension dependencies..."
@ -15,11 +17,12 @@ pushd gui
npm install
npm link @continuedev/core
npm run build
popd
# VSCode Extension (will also package GUI)
echo "Installing VSCode extension dependencies..."
pushd extensions/vscode
# This does way too many things inline but is the common denominator between many of the scripts
npm install
npm link @continuedev/core
@ -31,4 +34,10 @@ popd
echo "Installing binary dependencies..."
pushd binary
npm install
npm run build
npm run build
popd
echo "Installing docs dependencies..."
pushd docs
npm install