Merge pull request #6022 from uinstinct/speedup-builds
chore: speedup builds for binary and prepackage
commit 78426cc224
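The common thread in the changes below: the slow steps (npm installs, per-target pkg bundling, prebuilt sqlite/esbuild/lancedb/ripgrep downloads) are moved into helper scripts that run as forked child processes and are awaited together with Promise.all, instead of running sequentially in one process. All of the new helpers share the same fork/IPC shape; the following is a condensed, illustrative sketch of that pattern (runTaskInChild and doWork are placeholder names, not files in this PR):

const { fork } = require("child_process");

// Child half: when forked, wait for the payload, do the work, report back.
process.on("message", (msg) => {
  doWork(msg.payload)
    .then(() => process.send({ done: true }))
    .catch((error) => {
      console.error(error); // surfaces in the parent because of stdio: "inherit"
      process.send({ error: true });
    });
});

// Parent half: fork this same file, hand over the payload, settle on the reply.
function runTaskInChild(payload) {
  const child = fork(__filename, { stdio: "inherit" });
  child.send({ payload });
  return new Promise((resolve, reject) => {
    child.on("message", (msg) => (msg.error ? reject() : resolve()));
  });
}

// Several tasks can then run concurrently, e.g.:
//   await Promise.all(targets.map((target) => runTaskInChild({ target })));

async function doWork(payload) {
  // placeholder for the real work (bundling, npm install, downloads)
}

Each of the new scripts in this PR (bundle-binary.js, install-copy-nodemodule.js, npm-install.js, download-copy-sqlite-esbuild.js, generate-copy-config.js) keeps both halves in one file: it forks itself and branches on the message payload.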
@@ -103,6 +103,9 @@
      "clear": true
    },
    "options": {
      "env": {
        "SKIP_INSTALLS": "true"
      },
      "cwd": "${workspaceFolder}/extensions/vscode"
    }
  },
binary/build.js
@@ -3,13 +3,13 @@ const fs = require("fs");
const path = require("path");
const ncp = require("ncp").ncp;
const { rimrafSync } = require("rimraf");
const {
  validateFilesPresent,
  execCmdSync,
  autodetectPlatformAndArch,
} = require("../scripts/util");
const { downloadRipgrep } = require("./utils/ripgrep");
const { validateFilesPresent } = require("../scripts/util");
const { ALL_TARGETS, TARGET_TO_LANCEDB } = require("./utils/targets");
const { fork } = require("child_process");
const {
  installAndCopyNodeModules,
} = require("../extensions/vscode/scripts/install-copy-nodemodule");
const { bundleBinary } = require("./utils/bundle-binary");

const bin = path.join(__dirname, "bin");
const out = path.join(__dirname, "out");
@@ -29,8 +29,6 @@ function cleanSlate() {
const esbuildOutputFile = "out/index.js";
let targets = [...ALL_TARGETS];

const [currentPlatform, currentArch] = autodetectPlatformAndArch();

const assetBackups = [
  "node_modules/win-ca/lib/crypt32-ia32.node.bak",
  "node_modules/win-ca/lib/crypt32-x64.node.bak",
@@ -78,85 +76,6 @@ async function buildWithEsbuild() {
  });
}

async function installNodeModuleInTempDirAndCopyToCurrent(packageName, toCopy) {
  console.log(`Copying ${packageName} to ${toCopy}`);
  // This is a way to install only one package without npm trying to install all the dependencies
  // Create a temporary directory for installing the package
  const adjustedName = packageName.replace(/@/g, "").replace("/", "-");
  const tempDir = path.join(
    __dirname,
    "tmp",
    `continue-node_modules-${adjustedName}`,
  );
  const currentDir = process.cwd();

  // // Remove the dir we will be copying to
  // rimrafSync(`node_modules/${toCopy}`);

  // // Ensure the temporary directory exists
  fs.mkdirSync(tempDir, { recursive: true });

  try {
    // Move to the temporary directory
    process.chdir(tempDir);

    // Initialize a new package.json and install the package
    execCmdSync(`npm init -y && npm i -f ${packageName} --no-save`);

    console.log(
      `Contents of: ${packageName}`,
      fs.readdirSync(path.join(tempDir, "node_modules", toCopy)),
    );

    // Without this it seems the file isn't completely written to disk
    await new Promise((resolve) => setTimeout(resolve, 2000));

    // Copy the installed package back to the current directory
    await new Promise((resolve, reject) => {
      ncp(
        path.join(tempDir, "node_modules", toCopy),
        path.join(currentDir, "node_modules", toCopy),
        { dereference: true },
        (error) => {
          if (error) {
            console.error(
              `[error] Error copying ${packageName} package`,
              error,
            );
            reject(error);
          } else {
            resolve();
          }
        },
      );
    });
  } finally {
    // Clean up the temporary directory
    // rimrafSync(tempDir);

    // Return to the original directory
    process.chdir(currentDir);
  }
}

/**
 * Downloads and installs ripgrep binaries for the specified target
 *
 * @param {string} target - Target platform-arch (e.g., 'darwin-x64')
 * @param {string} targetDir - Directory to install ripgrep to
 * @returns {Promise<void>}
 */
async function downloadRipgrepForTarget(target, targetDir) {
  console.log(`[info] Downloading ripgrep for ${target}...`);
  try {
    await downloadRipgrep(target, targetDir);
    console.log(`[info] Successfully installed ripgrep for ${target}`);
  } catch (error) {
    console.error(`[error] Failed to download ripgrep for ${target}:`, error);
    throw error;
  }
}

(async () => {
  if (esbuildOnly) {
    await buildWithEsbuild();
@@ -181,16 +100,21 @@ async function downloadRipgrepForTarget(target, targetDir) {
    ),
  );

  console.log("[info] Downloading prebuilt lancedb...");
  const copyLanceDBPromises = [];
  for (const target of targets) {
    if (TARGET_TO_LANCEDB[target]) {
      console.log(`[info] Downloading for ${target}...`);
      await installNodeModuleInTempDirAndCopyToCurrent(
        TARGET_TO_LANCEDB[target],
        "@lancedb",
      );
    if (!TARGET_TO_LANCEDB[target]) {
      continue;
    }
    console.log(`[info] Downloading for ${target}...`);
    copyLanceDBPromises.push(
      installAndCopyNodeModules(TARGET_TO_LANCEDB[target], "@lancedb"),
    );
  }
  await Promise.all(copyLanceDBPromises).catch(() => {
    console.error("[error] Failed to copy LanceDB");
    process.exit(1);
  });
  console.log("[info] Copied all LanceDB");

  // tree-sitter-wasm
  const treeSitterWasmsDir = path.join(out, "tree-sitter-wasms");
@@ -252,59 +176,16 @@ async function downloadRipgrepForTarget(target, targetDir) {
    "out/llamaTokenizerWorkerPool.mjs",
  );

  const buildBinaryPromises = [];
  console.log("[info] Building binaries with pkg...");
  for (const target of targets) {
    const targetDir = `bin/${target}`;
    fs.mkdirSync(targetDir, { recursive: true });
    console.log(`[info] Building ${target}...`);
    execCmdSync(
      `npx pkg --no-bytecode --public-packages "*" --public --compress GZip pkgJson/${target} --out-path ${targetDir}`,
    );

    // Download and unzip prebuilt sqlite3 binary for the target
    console.log("[info] Downloading node-sqlite3");

    const downloadUrl =
      // node-sqlite3 doesn't have a pre-built binary for win32-arm64
      target === "win32-arm64"
        ? "https://continue-server-binaries.s3.us-west-1.amazonaws.com/win32-arm64/node_sqlite3.tar.gz"
        : `https://github.com/TryGhost/node-sqlite3/releases/download/v5.1.7/sqlite3-v5.1.7-napi-v6-${
            target
          }.tar.gz`;

    execCmdSync(`curl -L -o ${targetDir}/build.tar.gz ${downloadUrl}`);
    execCmdSync(`cd ${targetDir} && tar -xvzf build.tar.gz`);

    // Copy to build directory for testing
    try {
      const [platform, arch] = target.split("-");
      if (platform === currentPlatform && arch === currentArch) {
        fs.copyFileSync(
          `${targetDir}/build/Release/node_sqlite3.node`,
          `build/node_sqlite3.node`,
        );
      }
    } catch (error) {
      console.log("[warn] Could not copy node_sqlite to build");
      console.log(error);
    }

    fs.unlinkSync(`${targetDir}/build.tar.gz`);

    // copy @lancedb to bin folders
    console.log("[info] Copying @lancedb files to bin");
    fs.copyFileSync(
      `node_modules/${TARGET_TO_LANCEDB[target]}/index.node`,
      `${targetDir}/index.node`,
    );

    // Download and install ripgrep for the target
    await downloadRipgrepForTarget(target, targetDir);

    // Informs the `continue-binary` of where to look for node_sqlite3.node
    // https://www.npmjs.com/package/bindings#:~:text=The%20searching%20for,file%20is%20found
    fs.writeFileSync(`${targetDir}/package.json`, "");
    buildBinaryPromises.push(bundleBinary(target));
  }
  await Promise.all(buildBinaryPromises).catch(() => {
    console.error("[error] Failed to build binaries");
    process.exit(1);
  });
  console.log("[info] All binaries built");

  // Cleanup - this is needed when running locally
  fs.rmSync("out/package.json");
@@ -331,4 +212,5 @@ async function downloadRipgrepForTarget(target, targetDir) {
  validateFilesPresent(pathsToVerify);

  console.log("[info] Done!");
  process.exit(0);
})();
binary/utils/bundle-binary.js (new file)
@@ -0,0 +1,103 @@
/**
 * @file Builds the binary for the specified target. It is also intended to run as a child process.
 */

const {
  execCmdSync,
  autodetectPlatformAndArch,
} = require("../../scripts/util");
const { downloadRipgrep } = require("./ripgrep");
const { TARGET_TO_LANCEDB } = require("../utils/targets");
const fs = require("fs");
const {
  downloadSqlite,
} = require("../../extensions/vscode/scripts/download-copy-sqlite-esbuild");
const { fork } = require("child_process");

async function downloadNodeSqlite(target, targetDir) {
  const [currentPlatform, currentArch] = autodetectPlatformAndArch();

  // Download and unzip prebuilt sqlite3 binary for the target
  console.log("[info] Downloading node-sqlite3");

  await downloadSqlite(target, `${targetDir}/build.tar.gz`);

  execCmdSync(`cd ${targetDir} && tar -xvzf build.tar.gz`);

  // Copy to build directory for testing
  try {
    const [platform, arch] = target.split("-");
    if (platform === currentPlatform && arch === currentArch) {
      fs.copyFileSync(
        `${targetDir}/build/Release/node_sqlite3.node`,
        `build/node_sqlite3.node`,
      );
    }
  } catch (error) {
    console.log("[warn] Could not copy node_sqlite to build");
    console.log(error);
  }
  fs.unlinkSync(`${targetDir}/build.tar.gz`);
}

/**
 * @param {string} target the platform to build for
 */
async function bundleForBinary(target) {
  const targetDir = `bin/${target}`;
  fs.mkdirSync(targetDir, { recursive: true });
  console.log(`[info] Building ${target}...`);
  execCmdSync(
    `npx pkg --no-bytecode --public-packages "*" --public --compress GZip pkgJson/${target} --out-path ${targetDir}`,
  );

  // copy @lancedb to bin folders
  console.log("[info] Copying @lancedb files to bin");
  fs.copyFileSync(
    `node_modules/${TARGET_TO_LANCEDB[target]}/index.node`,
    `${targetDir}/index.node`,
  );

  const downloadPromises = [];
  downloadPromises.push(downloadRipgrep(target, targetDir));
  downloadPromises.push(downloadNodeSqlite(target, targetDir));
  await Promise.all(downloadPromises);

  // Informs the `continue-binary` of where to look for node_sqlite3.node
  // https://www.npmjs.com/package/bindings#:~:text=The%20searching%20for,file%20is%20found
  fs.writeFileSync(`${targetDir}/package.json`, "");
}

process.on("message", (msg) => {
  bundleForBinary(msg.payload.target)
    .then(() => process.send({ done: true }))
    .catch((error) => {
      console.error(error); // show the error in the parent process
      process.send({ error: true });
    });
});

/**
 * @param {string} target the platform to bundle for
 */
async function bundleBinary(target) {
  const child = fork(__filename, { stdio: "inherit" });
  child.send({
    payload: {
      target,
    },
  });
  return new Promise((resolve, reject) => {
    child.on("message", (msg) => {
      if (msg.error) {
        reject();
      } else {
        resolve();
      }
    });
  });
}

module.exports = {
  bundleBinary,
};
@@ -9,6 +9,7 @@ src/client
exe
bin
assets
tmp

gui/**
extensions/vscode/scripts/download-copy-sqlite-esbuild.js (new file)
@@ -0,0 +1,191 @@
const { fork } = require("child_process");
const fs = require("fs");
const https = require("https");
const path = require("path");

const { rimrafSync } = require("rimraf");

const { execCmdSync } = require("../../../scripts/util");

/**
 * download a file using nodejs http
 * @param {string} url
 * @param {string} outputPath
 * @param {number} maxRedirects
 */
async function downloadFile(url, outputPath, maxRedirects = 5) {
  return new Promise((resolve, reject) => {
    const downloadWithRedirects = (currentUrl, redirectCount = 0) => {
      if (redirectCount > maxRedirects) {
        return reject(new Error(`Too many redirects (${maxRedirects})`));
      }

      const request = https.get(currentUrl, (response) => {
        if (
          response.statusCode >= 300 &&
          response.statusCode < 400 &&
          response.headers.location
        ) {
          return downloadWithRedirects(
            response.headers.location,
            redirectCount + 1,
          );
        }

        if (response.statusCode !== 200) {
          return reject(
            new Error(`HTTP ${response.statusCode}: ${response.statusMessage}`),
          );
        }

        const outputDir = path.dirname(outputPath);
        if (!fs.existsSync(outputDir)) {
          fs.mkdirSync(outputDir, { recursive: true });
        }

        const writeStream = fs.createWriteStream(outputPath);

        const totalSize = parseInt(response.headers["content-length"], 10);
        let downloadedSize = 0;

        response.on("data", (chunk) => {
          downloadedSize += chunk.length;
          if (totalSize) {
            const percent = ((downloadedSize / totalSize) * 100).toFixed(1);
            process.stdout.write(
              `\rDownloading: ${percent}% (${downloadedSize}/${totalSize} bytes)`,
            );
          }
        });

        response.pipe(writeStream);

        writeStream.on("finish", () => {
          console.log(`\nDownload completed: ${outputPath}`);
          resolve(outputPath);
        });

        writeStream.on("error", reject);
        response.on("error", reject);
      });

      request.on("error", reject);
      request.setTimeout(30000, () => {
        request.destroy();
        reject(new Error("Request timeout"));
      });
    };

    downloadWithRedirects(url);
  });
}

/**
 *
 * @param {string} target platform specific target
 * @param {string} targetDir the directory to download into
 */
async function downloadSqlite(target, targetDir) {
  const downloadUrl =
    // node-sqlite3 doesn't have a pre-built binary for win32-arm64
    target === "win32-arm64"
      ? "https://continue-server-binaries.s3.us-west-1.amazonaws.com/win32-arm64/node_sqlite3.tar.gz"
      : `https://github.com/TryGhost/node-sqlite3/releases/download/v5.1.7/sqlite3-v5.1.7-napi-v6-${
          target
        }.tar.gz`;
  await downloadFile(downloadUrl, targetDir);
}

async function installAndCopySqlite(target) {
  // Replace the installed with pre-built
  console.log("[info] Downloading pre-built sqlite3 binary");
  rimrafSync("../../core/node_modules/sqlite3/build");
  await downloadSqlite(target, "../../core/node_modules/sqlite3/build.tar.gz");
  execCmdSync("cd ../../core/node_modules/sqlite3 && tar -xvzf build.tar.gz");
  fs.unlinkSync("../../core/node_modules/sqlite3/build.tar.gz");
}

async function installAndCopyEsbuild(target) {
  // Download and unzip esbuild
  console.log("[info] Downloading pre-built esbuild binary");
  rimrafSync("node_modules/@esbuild");
  fs.mkdirSync("node_modules/@esbuild", { recursive: true });
  await downloadFile(
    `https://continue-server-binaries.s3.us-west-1.amazonaws.com/${target}/esbuild.zip`,
    "node_modules/@esbuild/esbuild.zip",
  );
  execCmdSync("cd node_modules/@esbuild && unzip esbuild.zip");
  fs.unlinkSync("node_modules/@esbuild/esbuild.zip");
}

process.on("message", (msg) => {
  const { operation, target } = msg.payload;
  if (operation === "sqlite") {
    installAndCopySqlite(target)
      .then(() => process.send({ done: true }))
      .catch((error) => {
        console.error(error); // show the error in the parent process
        process.send({ error: true });
      });
  }
  if (operation === "esbuild") {
    installAndCopyEsbuild(target)
      .then(() => process.send({ done: true }))
      .catch((error) => {
        console.error(error); // show the error in the parent process
        process.send({ error: true });
      });
  }
});

/**
 * @param {string} target the platform to build for
 */
async function copySqlite(target) {
  const child = fork(__filename, { stdio: "inherit", cwd: process.cwd() });
  child.send({
    payload: {
      operation: "sqlite",
      target,
    },
  });

  return new Promise((resolve, reject) => {
    child.on("message", (msg) => {
      if (msg.error) {
        reject();
      } else {
        resolve();
      }
    });
  });
}

/**
 * @param {string} target the platform to build for
 */
async function copyEsbuild(target) {
  const child = fork(__filename, { stdio: "inherit", cwd: process.cwd() });
  child.send({
    payload: {
      operation: "esbuild",
      target,
    },
  });

  return new Promise((resolve, reject) => {
    child.on("message", (msg) => {
      if (msg.error) {
        reject();
      } else {
        resolve();
      }
    });
  });
}

module.exports = {
  downloadSqlite,
  copySqlite,
  copyEsbuild,
};
@@ -1,4 +1,5 @@
const fs = require("fs");

const { writeBuildTimestamp } = require("./utils");

const esbuild = require("esbuild");
@@ -49,7 +50,7 @@ const esbuildConfig = {
  ],
};

(async () => {
void (async () => {
  // Create .buildTimestamp.js before starting the first build
  writeBuildTimestamp();
  // Bundles the extension into one file
extensions/vscode/scripts/generate-copy-config.js (new file)
@@ -0,0 +1,126 @@
/**
 * @file Generate config.yaml file from template. Also intended to run as a child process.
 */

const { fork } = require("child_process");
const fs = require("fs");
const path = require("path");

const { execCmdSync } = require("../../../scripts/util");

const { continueDir } = require("./utils");

async function generateConfigYamlSchema() {
  process.chdir(path.join(continueDir, "packages", "config-yaml"));
  execCmdSync("npm install");
  execCmdSync("npm run build");
  execCmdSync("npm run generate-schema");
  fs.copyFileSync(
    path.join("schema", "config-yaml-schema.json"),
    path.join(continueDir, "extensions", "vscode", "config-yaml-schema.json"),
  );
  console.log("[info] Generated config.yaml schema");
}

async function copyConfigSchema() {
  process.chdir(path.join(continueDir, "extensions", "vscode"));
  // Modify and copy for .continuerc.json
  const schema = JSON.parse(fs.readFileSync("config_schema.json", "utf8"));
  schema.$defs.SerializedContinueConfig.properties.mergeBehavior = {
    type: "string",
    enum: ["merge", "overwrite"],
    default: "merge",
    title: "Merge behavior",
    markdownDescription:
      "If set to 'merge', .continuerc.json will be applied on top of config.json (arrays and objects are merged). If set to 'overwrite', then every top-level property of .continuerc.json will overwrite that property from config.json.",
    "x-intellij-html-description":
      "<p>If set to <code>merge</code>, <code>.continuerc.json</code> will be applied on top of <code>config.json</code> (arrays and objects are merged). If set to <code>overwrite</code>, then every top-level property of <code>.continuerc.json</code> will overwrite that property from <code>config.json</code>.</p>",
  };
  fs.writeFileSync("continue_rc_schema.json", JSON.stringify(schema, null, 2));

  // Copy config schemas to intellij
  fs.copyFileSync(
    "config_schema.json",
    path.join(
      "..",
      "intellij",
      "src",
      "main",
      "resources",
      "config_schema.json",
    ),
  );
  fs.copyFileSync(
    "continue_rc_schema.json",
    path.join(
      "..",
      "intellij",
      "src",
      "main",
      "resources",
      "continue_rc_schema.json",
    ),
  );
}

process.on("message", (msg) => {
  const { operation } = msg.payload;
  if (operation === "generate") {
    generateConfigYamlSchema()
      .then(() => process.send({ done: true }))
      .catch((error) => {
        console.error(error); // show the error in the parent process
        process.send({ error: true });
      });
  }
  if (operation === "copy") {
    copyConfigSchema()
      .then(() => process.send({ done: true }))
      .catch((error) => {
        console.error(error); // show the error in the parent process
        process.send({ error: true });
      });
  }
});

async function generateAndCopyConfigYamlSchema() {
  // Generate and copy over config-yaml-schema.json
  const generateConfigYamlChild = fork(
    path.join(__dirname, "generate-copy-config.js"),
    {
      stdio: "inherit",
    },
  );
  generateConfigYamlChild.send({ payload: { operation: "generate" } });

  await new Promise((resolve, reject) => {
    generateConfigYamlChild.on("message", (msg) => {
      if (msg.error) {
        reject();
      }
      resolve();
    });
  });

  // Copy config schemas to intellij
  const copyConfigSchemaChild = fork(
    path.join(__dirname, "generate-copy-config.js"),
    {
      stdio: "inherit",
    },
  );
  copyConfigSchemaChild.send({ payload: { operation: "copy" } });

  await new Promise((resolve, reject) => {
    copyConfigSchemaChild.on("message", (msg) => {
      if (msg.error) {
        reject();
      }
      resolve();
    });
  });
}

module.exports = {
  generateAndCopyConfigYamlSchema,
};
extensions/vscode/scripts/install-copy-nodemodule.js (new file)
@@ -0,0 +1,113 @@
/**
 * @file Copy lancedb to the current directory. It is also intended to run as a child process.
 */

const { fork } = require("child_process");
const fs = require("fs");
const path = require("path");

const ncp = require("ncp").ncp;

const { execCmdSync } = require("../../../scripts/util");

async function installNodeModuleInTempDirAndCopyToCurrent(packageName, toCopy) {
  console.log(`Copying ${packageName} to ${toCopy}`);
  // This is a way to install only one package without npm trying to install all the dependencies
  // Create a temporary directory for installing the package
  const adjustedName = packageName.replace(/@/g, "").replace("/", "-");
  const currentDir = process.cwd();
  const tempDir = path.join(
    currentDir,
    "tmp",
    `continue-node_modules-${adjustedName}`,
  );

  // // Remove the dir we will be copying to
  // rimrafSync(`node_modules/${toCopy}`);

  // // Ensure the temporary directory exists
  fs.mkdirSync(tempDir, { recursive: true });

  try {
    // Move to the temporary directory
    process.chdir(tempDir);

    // Initialize a new package.json and install the package
    execCmdSync(`npm init -y && npm i -f ${packageName} --no-save`);

    console.log(
      `Contents of: ${packageName}`,
      fs.readdirSync(path.join(tempDir, "node_modules", toCopy)),
    );

    // Without this it seems the file isn't completely written to disk
    await new Promise((resolve) => setTimeout(resolve, 2000));

    // Copy the installed package back to the current directory
    await new Promise((resolve, reject) => {
      ncp(
        path.join(tempDir, "node_modules", toCopy),
        path.join(currentDir, "node_modules", toCopy),
        { dereference: true },
        (error) => {
          if (error) {
            console.error(
              `[error] Error copying ${packageName} package`,
              error,
            );
            reject(error);
          } else {
            resolve();
          }
        },
      );
    });
  } finally {
    // Clean up the temporary directory
    // rimrafSync(tempDir);

    // Return to the original directory
    process.chdir(currentDir);
  }
}

process.on("message", (msg) => {
  installNodeModuleInTempDirAndCopyToCurrent(
    msg.payload.packageName,
    msg.payload.toCopy,
  )
    .then(() => process.send({ done: true }))
    .catch((error) => {
      console.error(error); // show the error in the parent process
      process.send({ error: true });
    });
});

/**
 * invoke a child process to install a node module into temporary directory and copy it over into node modules
 * @param {string} packageName the module to install and copy
 * @param {string} toCopy directory to copy into inside node modules
 */
async function installAndCopyNodeModules(packageName, toCopy) {
  const child = fork(__filename, { stdio: "inherit", cwd: process.cwd() });
  child.send({
    payload: {
      packageName,
      toCopy,
    },
  });

  return new Promise((resolve, reject) => {
    child.on("message", (msg) => {
      if (msg.error) {
        reject();
      } else {
        resolve();
      }
    });
  });
}

module.exports = {
  installAndCopyNodeModules,
};
extensions/vscode/scripts/npm-install.js (new file)
@@ -0,0 +1,79 @@
/**
 * @file Install node modules for the VS Code extension and gui. This is also intended to run as a child process.
 */

const { fork } = require("child_process");
const path = require("path");

const { execCmdSync } = require("../../../scripts/util");

const { continueDir } = require("./utils");

async function installNodeModulesInGui() {
  process.chdir(path.join(continueDir, "gui"));
  execCmdSync("npm install");
  console.log("[info] npm install in gui completed");
}

async function installNodeModulesInVscode() {
  process.chdir(path.join(continueDir, "extensions", "vscode"));
  execCmdSync("npm install");
  console.log("[info] npm install in extensions/vscode completed");
}

process.on("message", (msg) => {
  const { targetDir } = msg.payload;
  if (targetDir === "gui") {
    installNodeModulesInGui()
      .then(() => process.send({ done: true }))
      .catch((error) => {
        console.error(error); // show the error in the parent process
        process.send({ error: true });
      });
  } else if (targetDir === "vscode") {
    installNodeModulesInVscode()
      .then(() => process.send({ done: true }))
      .catch((error) => {
        console.error(error); // show the error in the parent process
        process.send({ error: true });
      });
  }
});

async function npmInstall() {
  const installVscodeChild = fork(__filename, {
    stdio: "inherit",
  });
  installVscodeChild.send({ payload: { targetDir: "vscode" } });

  const installGuiChild = fork(__filename, {
    stdio: "inherit",
  });
  installGuiChild.send({ payload: { targetDir: "gui" } });

  await Promise.all([
    new Promise((resolve, reject) => {
      installVscodeChild.on("message", (msg) => {
        if (msg.error) {
          reject();
        }
        resolve();
      });
    }),
    new Promise((resolve, reject) => {
      installGuiChild.on("message", (msg) => {
        if (msg.error) {
          reject();
        }
        resolve();
      });
    }),
  ]).catch((error) => {
    console.error(error);
    process.exit(1);
  });
}

module.exports = {
  npmInstall,
};
extensions/vscode/scripts/prepackage.js
@@ -10,11 +10,11 @@ const {
  autodetectPlatformAndArch,
} = require("../../../scripts/util/index");

const {
  copyConfigSchema,
  writeBuildTimestamp,
  generateConfigYamlSchema,
} = require("./utils");
const { copySqlite, copyEsbuild } = require("./download-copy-sqlite-esbuild");
const { generateAndCopyConfigYamlSchema } = require("./generate-copy-config");
const { installAndCopyNodeModules } = require("./install-copy-nodemodule");
const { npmInstall } = require("./npm-install");
const { writeBuildTimestamp, continueDir } = require("./utils");

// Clear folders that will be packaged to ensure clean slate
rimrafSync(path.join(__dirname, "..", "bin"));
@@ -27,6 +27,8 @@ if (!fs.existsSync(guiDist)) {
  fs.mkdirSync(guiDist, { recursive: true });
}

const skipInstalls = process.env.SKIP_INSTALLS === "true";

// Get the target to package for
let target = undefined;
const args = process.argv;
@@ -36,10 +38,10 @@ if (args[2] === "--target") {

let os;
let arch;
if (!target) {
  [os, arch] = autodetectPlatformAndArch();
} else {
if (target) {
  [os, arch] = target.split("-");
} else {
  [os, arch] = autodetectPlatformAndArch();
}

if (os === "alpine") {
@@ -67,28 +69,14 @@ const isMacTarget = target?.startsWith("darwin");
void (async () => {
  console.log("[info] Packaging extension for target ", target);

  // Generate and copy over config-yaml-schema.json
  generateConfigYamlSchema();

  // Copy config schemas to intellij
  copyConfigSchema();

  if (!process.cwd().endsWith("vscode")) {
    // This is sometimes run from root dir instead (e.g. in VS Code tasks)
    process.chdir("extensions/vscode");
  }

  // Make sure we have an initial timestamp file
  writeBuildTimestamp();

  // Install node_modules //
  execCmdSync("npm install");
  console.log("[info] npm install in extensions/vscode completed");
  if (!skipInstalls) {
    await Promise.all([generateAndCopyConfigYamlSchema(), npmInstall()]);
  }

  process.chdir("../../gui");

  execCmdSync("npm install");
  console.log("[info] npm install in gui completed");
  process.chdir(path.join(continueDir, "gui"));

  if (isInGitHubAction) {
    execCmdSync("npm run build");
@@ -280,122 +268,32 @@ void (async () => {
    );
  });

  async function installNodeModuleInTempDirAndCopyToCurrent(
    packageName,
    toCopy,
  ) {
    console.log(`Copying ${packageName} to ${toCopy}`);
    // This is a way to install only one package without npm trying to install all the dependencies
    // Create a temporary directory for installing the package
    const adjustedName = packageName.replace(/@/g, "").replace("/", "-");

    const tempDir = `/tmp/continue-node_modules-${adjustedName}`;
    const currentDir = process.cwd();

    // Remove the dir we will be copying to
    rimrafSync(`node_modules/${toCopy}`);

    // Ensure the temporary directory exists
    fs.mkdirSync(tempDir, { recursive: true });

    try {
      // Move to the temporary directory
      process.chdir(tempDir);

      // Initialize a new package.json and install the package
      execCmdSync(`npm init -y && npm i -f ${packageName} --no-save`);

  if (!skipInstalls) {
    // GitHub Actions doesn't support ARM, so we need to download pre-saved binaries
    // 02/07/25 - the above comment is out of date, there is now support for ARM runners on GitHub Actions
    if (isArmTarget) {
      // lancedb binary
      const packageToInstall = {
        "darwin-arm64": "@lancedb/vectordb-darwin-arm64",
        "linux-arm64": "@lancedb/vectordb-linux-arm64-gnu",
        "win32-arm64": "@lancedb/vectordb-win32-arm64-msvc",
      }[target];
      console.log(
        `Contents of: ${packageName}`,
        fs.readdirSync(path.join(tempDir, "node_modules", toCopy)),
        "[info] Downloading pre-built lancedb binary: " + packageToInstall,
      );

      // Without this it seems the file isn't completely written to disk
      // Ideally we validate file integrity in the validation at the end
      await new Promise((resolve) => setTimeout(resolve, 2000));

      // Copy the installed package back to the current directory
      await new Promise((resolve, reject) => {
        ncp(
          path.join(tempDir, "node_modules", toCopy),
          path.join(currentDir, "node_modules", toCopy),
          { dereference: true },
          (error) => {
            if (error) {
              console.error(
                `[error] Error copying ${packageName} package`,
                error,
              );
              reject(error);
            } else {
              resolve();
            }
          },
        );
      });
    } finally {
      // Clean up the temporary directory
      // rimrafSync(tempDir);

      // Return to the original directory
      process.chdir(currentDir);
      await Promise.all([
        copyEsbuild(target),
        copySqlite(target),
        installAndCopyNodeModules(packageToInstall, "@lancedb"),
      ]);
    } else {
      // Download esbuild from npm in tmp and copy over
      console.log("[info] npm installing esbuild binary");
      await installAndCopyNodeModules("esbuild@0.17.19", "@esbuild");
    }
  }

  // GitHub Actions doesn't support ARM, so we need to download pre-saved binaries
  // 02/07/25 - the above comment is out of date, there is now support for ARM runners on GitHub Actions
  if (isArmTarget) {
    // lancedb binary
    const packageToInstall = {
      "darwin-arm64": "@lancedb/vectordb-darwin-arm64",
      "linux-arm64": "@lancedb/vectordb-linux-arm64-gnu",
      "win32-arm64": "@lancedb/vectordb-win32-arm64-msvc",
    }[target];
    console.log(
      "[info] Downloading pre-built lancedb binary: " + packageToInstall,
    );

    await installNodeModuleInTempDirAndCopyToCurrent(
      packageToInstall,
      "@lancedb",
    );

    // Replace the installed with pre-built
    console.log("[info] Downloading pre-built sqlite3 binary");
    rimrafSync("../../core/node_modules/sqlite3/build");
    const downloadUrl = {
      "darwin-arm64":
        "https://github.com/TryGhost/node-sqlite3/releases/download/v5.1.7/sqlite3-v5.1.7-napi-v6-darwin-arm64.tar.gz",
      "linux-arm64":
        "https://github.com/TryGhost/node-sqlite3/releases/download/v5.1.7/sqlite3-v5.1.7-napi-v3-linux-arm64.tar.gz",
      // node-sqlite3 doesn't have a pre-built binary for win32-arm64
      "win32-arm64":
        "https://continue-server-binaries.s3.us-west-1.amazonaws.com/win32-arm64/node_sqlite3.tar.gz",
    }[target];
    execCmdSync(
      `curl -L -o ../../core/node_modules/sqlite3/build.tar.gz ${downloadUrl}`,
    );
    execCmdSync("cd ../../core/node_modules/sqlite3 && tar -xvzf build.tar.gz");
    fs.unlinkSync("../../core/node_modules/sqlite3/build.tar.gz");

    // Download and unzip esbuild
    console.log("[info] Downloading pre-built esbuild binary");
    rimrafSync("node_modules/@esbuild");
    fs.mkdirSync("node_modules/@esbuild", { recursive: true });
    execCmdSync(
      `curl -o node_modules/@esbuild/esbuild.zip https://continue-server-binaries.s3.us-west-1.amazonaws.com/${target}/esbuild.zip`,
    );
    execCmdSync(`cd node_modules/@esbuild && unzip esbuild.zip`);
    fs.unlinkSync("node_modules/@esbuild/esbuild.zip");
  } else {
    // Download esbuild from npm in tmp and copy over
    console.log("npm installing esbuild binary");
    await installNodeModuleInTempDirAndCopyToCurrent(
      "esbuild@0.17.19",
      "@esbuild",
    );
  }

  console.log("[info] Copying sqlite node binding from core");
  await new Promise((resolve, reject) => {
    ncp(
@@ -465,7 +363,7 @@ void (async () => {
  );

  // delete esbuild/bin because platform-specific @esbuild is downloaded
  fs.rmdirSync(`out/node_modules/esbuild/bin`, { recursive: true });
  fs.rmSync(`out/node_modules/esbuild/bin`, { recursive: true });

  console.log(`[info] Copied ${NODE_MODULES_TO_COPY.join(", ")}`);
@@ -534,4 +432,6 @@ void (async () => {
    `out/node_modules/@lancedb/vectordb-${target}${isWinTarget ? "-msvc" : ""}${isLinuxTarget ? "-gnu" : ""}/index.node`,
    `out/node_modules/esbuild/lib/main.js`,
  ]);

  process.exit(0);
})();
extensions/vscode/scripts/utils.js
@@ -1,68 +1,13 @@
const fs = require("fs");
const ncp = require("ncp").ncp;
const path = require("path");

const ncp = require("ncp").ncp;
const { rimrafSync } = require("rimraf");
const {
  validateFilesPresent,
  execCmdSync,
  autodetectPlatformAndArch,
} = require("../../../scripts/util/index");

const { execCmdSync } = require("../../../scripts/util/index");

const continueDir = path.join(__dirname, "..", "..", "..");

function generateConfigYamlSchema() {
  process.chdir(path.join(continueDir, "packages", "config-yaml"));
  execCmdSync("npm install");
  execCmdSync("npm run build");
  execCmdSync("npm run generate-schema");
  fs.copyFileSync(
    path.join("schema", "config-yaml-schema.json"),
    path.join(continueDir, "extensions", "vscode", "config-yaml-schema.json"),
  );
  console.log("[info] Generated config.yaml schema");
}

function copyConfigSchema() {
  process.chdir(path.join(continueDir, "extensions", "vscode"));
  // Modify and copy for .continuerc.json
  const schema = JSON.parse(fs.readFileSync("config_schema.json", "utf8"));
  schema.$defs.SerializedContinueConfig.properties.mergeBehavior = {
    type: "string",
    enum: ["merge", "overwrite"],
    default: "merge",
    title: "Merge behavior",
    markdownDescription:
      "If set to 'merge', .continuerc.json will be applied on top of config.json (arrays and objects are merged). If set to 'overwrite', then every top-level property of .continuerc.json will overwrite that property from config.json.",
    "x-intellij-html-description":
      "<p>If set to <code>merge</code>, <code>.continuerc.json</code> will be applied on top of <code>config.json</code> (arrays and objects are merged). If set to <code>overwrite</code>, then every top-level property of <code>.continuerc.json</code> will overwrite that property from <code>config.json</code>.</p>",
  };
  fs.writeFileSync("continue_rc_schema.json", JSON.stringify(schema, null, 2));

  // Copy config schemas to intellij
  fs.copyFileSync(
    "config_schema.json",
    path.join(
      "..",
      "intellij",
      "src",
      "main",
      "resources",
      "config_schema.json",
    ),
  );
  fs.copyFileSync(
    "continue_rc_schema.json",
    path.join(
      "..",
      "intellij",
      "src",
      "main",
      "resources",
      "continue_rc_schema.json",
    ),
  );
}

function copyTokenizers() {
  fs.copyFileSync(
    path.join(__dirname, "../../../core/llm/llamaTokenizerWorkerPool.mjs"),
@@ -77,22 +22,6 @@ function copyTokenizers() {
  console.log("[info] Copied llamaTokenizer");
}

function installNodeModules() {
  // Make sure we are in the right directory
  if (!process.cwd().endsWith("vscode")) {
    process.chdir(path.join(continueDir, "extensions", "vscode"));
  }

  // Install node_modules //
  execCmdSync("npm install");
  console.log("[info] npm install in extensions/vscode completed");

  process.chdir(path.join(continueDir, "gui"));

  execCmdSync("npm install");
  console.log("[info] npm install in gui completed");
}

async function buildGui(isGhAction) {
  // Make sure we are in the right directory
  if (!process.cwd().endsWith("gui")) {
@@ -536,15 +465,13 @@ async function copyScripts() {
// in the build
function writeBuildTimestamp() {
  fs.writeFileSync(
    "src/.buildTimestamp.ts",
    path.join(continueDir, "extensions/vscode", "src/.buildTimestamp.ts"),
    `export default "${new Date().toISOString()}";\n`,
  );
}

module.exports = {
  generateConfigYamlSchema,
  copyConfigSchema,
  installNodeModules,
  continueDir,
  buildGui,
  copyOnnxRuntimeFromNodeModules,
  copyTreeSitterWasms,
@@ -57,7 +57,7 @@ pushd extensions/vscode
# This does way too many things inline but is the common denominator between many of the scripts
npm install
npm link @continuedev/core
npm run prepackage
# npm run prepackage # not required since npm run package has prescript of prepackage
npm run package
popd