From e76c50f20e3201af2aa98bfcd4496e885204dfd2 Mon Sep 17 00:00:00 2001 From: Aman Karmani Date: Wed, 22 Jan 2025 11:12:39 -0800 Subject: [PATCH] build: switch `build/tsconfig.json` to `module: nodenext` (#238426) * build/tsconfig: switch to module: nodenext for bun compat * build: rewrite imports for nodenext compat * build: re-generate --- .../common/computeBuiltInDepsCacheKey.js | 13 +- .../common/computeBuiltInDepsCacheKey.ts | 6 +- .../common/computeNodeModulesCacheKey.js | 29 +-- .../common/computeNodeModulesCacheKey.ts | 6 +- .../azure-pipelines/common/listNodeModules.js | 15 +- .../azure-pipelines/common/listNodeModules.ts | 4 +- build/azure-pipelines/common/publish.js | 124 ++++++---- build/azure-pipelines/common/publish.ts | 12 +- build/azure-pipelines/common/sign-win32.js | 9 +- build/azure-pipelines/common/sign-win32.ts | 2 +- build/azure-pipelines/common/sign.js | 39 ++-- build/azure-pipelines/common/sign.ts | 10 +- build/azure-pipelines/distro/mixin-npm.js | 17 +- build/azure-pipelines/distro/mixin-npm.ts | 4 +- build/azure-pipelines/distro/mixin-quality.js | 21 +- build/azure-pipelines/distro/mixin-quality.ts | 4 +- .../publish-types/check-version.js | 7 +- .../publish-types/check-version.ts | 2 +- .../publish-types/update-types.js | 19 +- .../publish-types/update-types.ts | 6 +- build/azure-pipelines/upload-cdn.js | 60 ++++- build/azure-pipelines/upload-cdn.ts | 6 +- build/azure-pipelines/upload-nlsmetadata.js | 50 +++- build/azure-pipelines/upload-nlsmetadata.ts | 4 +- build/azure-pipelines/upload-sourcemaps.js | 66 ++++-- build/azure-pipelines/upload-sourcemaps.ts | 8 +- build/darwin/create-universal-app.js | 29 +-- build/darwin/create-universal-app.ts | 6 +- build/darwin/sign.js | 57 ++--- build/darwin/sign.ts | 6 +- build/darwin/verify-macho.js | 46 +++- build/darwin/verify-macho.ts | 2 +- build/lib/asar.js | 31 +-- build/lib/asar.ts | 8 +- build/lib/builtInExtensions.js | 126 ++++++---- build/lib/builtInExtensions.ts | 18 +- build/lib/builtInExtensionsCG.js | 35 +-- build/lib/builtInExtensionsCG.ts | 8 +- build/lib/bundle.js | 17 +- build/lib/bundle.ts | 6 +- build/lib/compilation.js | 134 +++++++---- build/lib/compilation.ts | 16 +- build/lib/date.js | 19 +- build/lib/date.ts | 4 +- build/lib/dependencies.js | 21 +- build/lib/dependencies.ts | 6 +- build/lib/electron.js | 66 ++++-- build/lib/electron.ts | 8 +- build/lib/extensions.js | 220 ++++++++++-------- build/lib/extensions.ts | 26 +-- build/lib/fetch.js | 39 ++-- build/lib/fetch.ts | 12 +- build/lib/formatter.js | 21 +- build/lib/formatter.ts | 6 +- build/lib/getVersion.js | 35 ++- build/lib/git.js | 21 +- build/lib/git.ts | 4 +- build/lib/i18n.js | 97 ++++---- build/lib/i18n.ts | 18 +- build/lib/inlineMeta.js | 7 +- build/lib/inlineMeta.ts | 4 +- build/lib/layersChecker.js | 19 +- build/lib/layersChecker.ts | 2 +- build/lib/mangle/index.js | 101 ++++---- build/lib/mangle/index.ts | 10 +- build/lib/mangle/renameWorker.js | 11 +- build/lib/mangle/renameWorker.ts | 4 +- build/lib/mangle/staticLanguageServiceHost.js | 31 +-- build/lib/mangle/staticLanguageServiceHost.ts | 4 +- build/lib/monaco-api.js | 33 +-- build/lib/monaco-api.ts | 8 +- build/lib/nls.js | 72 ++++-- build/lib/nls.ts | 6 +- build/lib/node.js | 15 +- build/lib/node.ts | 4 +- build/lib/optimize.js | 72 ++++-- build/lib/optimize.ts | 8 +- build/lib/policies.js | 27 ++- build/lib/policies.ts | 6 +- build/lib/postcss.js | 11 +- build/lib/postcss.ts | 6 +- build/lib/preLaunch.js | 9 +- build/lib/preLaunch.ts | 2 +- build/lib/reporter.js | 27 ++- 
build/lib/reporter.ts | 10 +- build/lib/snapshotLoader.js | 4 +- build/lib/snapshotLoader.ts | 2 +- build/lib/standalone.js | 110 ++++++--- build/lib/standalone.ts | 4 +- build/lib/stats.js | 27 ++- build/lib/stats.ts | 8 +- build/lib/stylelint/validateVariableNames.js | 7 +- build/lib/stylelint/validateVariableNames.ts | 2 +- build/lib/task.js | 11 +- build/lib/task.ts | 4 +- build/lib/test/i18n.test.js | 62 +++-- build/lib/test/i18n.test.ts | 4 +- build/lib/treeshaking.js | 37 +-- build/lib/treeshaking.ts | 4 +- build/lib/tsb/builder.js | 98 +++++--- build/lib/tsb/builder.ts | 12 +- build/lib/tsb/index.js | 62 +++-- build/lib/tsb/index.ts | 8 +- build/lib/tsb/transpiler.js | 37 +-- build/lib/tsb/transpiler.ts | 8 +- build/lib/typings/event-stream.d.ts | 2 +- build/lib/util.js | 121 +++++----- build/lib/util.ts | 20 +- build/lib/watch/index.js | 1 + build/lib/watch/watch-win32.js | 39 ++-- build/lib/watch/watch-win32.ts | 12 +- build/linux/debian/calculate-deps.js | 11 +- build/linux/debian/calculate-deps.ts | 4 +- build/linux/debian/install-sysroot.js | 67 +++--- build/linux/debian/install-sysroot.ts | 8 +- build/linux/dependencies-generator.js | 15 +- build/linux/dependencies-generator.ts | 2 +- build/linux/libcxx-fetcher.js | 33 +-- build/linux/libcxx-fetcher.ts | 8 +- build/tsconfig.json | 2 +- build/win32/explorer-appx-fetcher.js | 25 +- build/win32/explorer-appx-fetcher.ts | 8 +- 122 files changed, 1907 insertions(+), 1202 deletions(-) diff --git a/build/azure-pipelines/common/computeBuiltInDepsCacheKey.js b/build/azure-pipelines/common/computeBuiltInDepsCacheKey.js index 2d747f56cc7..10fa9087454 100644 --- a/build/azure-pipelines/common/computeBuiltInDepsCacheKey.js +++ b/build/azure-pipelines/common/computeBuiltInDepsCacheKey.js @@ -3,12 +3,15 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const fs = require("fs"); -const path = require("path"); -const crypto = require("crypto"); -const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../../product.json'), 'utf8')); -const shasum = crypto.createHash('sha256'); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const crypto_1 = __importDefault(require("crypto")); +const productjson = JSON.parse(fs_1.default.readFileSync(path_1.default.join(__dirname, '../../../product.json'), 'utf8')); +const shasum = crypto_1.default.createHash('sha256'); for (const ext of productjson.builtInExtensions) { shasum.update(`${ext.name}@${ext.version}`); } diff --git a/build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts b/build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts index 53d6c501ea9..8abaaccb654 100644 --- a/build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts +++ b/build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts @@ -3,9 +3,9 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; -import * as crypto from 'crypto'; +import fs from 'fs'; +import path from 'path'; +import crypto from 'crypto'; const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../../product.json'), 'utf8')); const shasum = crypto.createHash('sha256'); diff --git a/build/azure-pipelines/common/computeNodeModulesCacheKey.js b/build/azure-pipelines/common/computeNodeModulesCacheKey.js index 976e096fad2..c09c13be9d4 100644 --- a/build/azure-pipelines/common/computeNodeModulesCacheKey.js +++ b/build/azure-pipelines/common/computeNodeModulesCacheKey.js @@ -3,21 +3,24 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const fs = require("fs"); -const path = require("path"); -const crypto = require("crypto"); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const crypto_1 = __importDefault(require("crypto")); const { dirs } = require('../../npm/dirs'); -const ROOT = path.join(__dirname, '../../../'); -const shasum = crypto.createHash('sha256'); -shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt'))); -shasum.update(fs.readFileSync(path.join(ROOT, '.npmrc'))); -shasum.update(fs.readFileSync(path.join(ROOT, 'build', '.npmrc'))); -shasum.update(fs.readFileSync(path.join(ROOT, 'remote', '.npmrc'))); +const ROOT = path_1.default.join(__dirname, '../../../'); +const shasum = crypto_1.default.createHash('sha256'); +shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, 'build/.cachesalt'))); +shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, '.npmrc'))); +shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, 'build', '.npmrc'))); +shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, 'remote', '.npmrc'))); // Add `package.json` and `package-lock.json` files for (const dir of dirs) { - const packageJsonPath = path.join(ROOT, dir, 'package.json'); - const packageJson = JSON.parse(fs.readFileSync(packageJsonPath).toString()); + const packageJsonPath = path_1.default.join(ROOT, dir, 'package.json'); + const packageJson = JSON.parse(fs_1.default.readFileSync(packageJsonPath).toString()); const relevantPackageJsonSections = { dependencies: packageJson.dependencies, devDependencies: packageJson.devDependencies, @@ -26,8 +29,8 @@ for (const dir of dirs) { distro: packageJson.distro }; shasum.update(JSON.stringify(relevantPackageJsonSections)); - const packageLockPath = path.join(ROOT, dir, 'package-lock.json'); - shasum.update(fs.readFileSync(packageLockPath)); + const packageLockPath = path_1.default.join(ROOT, dir, 'package-lock.json'); + shasum.update(fs_1.default.readFileSync(packageLockPath)); } // Add any other command line arguments for (let i = 2; i < process.argv.length; i++) { diff --git a/build/azure-pipelines/common/computeNodeModulesCacheKey.ts b/build/azure-pipelines/common/computeNodeModulesCacheKey.ts index 0940c929b54..57b35dc78de 100644 --- a/build/azure-pipelines/common/computeNodeModulesCacheKey.ts +++ 
b/build/azure-pipelines/common/computeNodeModulesCacheKey.ts @@ -3,9 +3,9 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; -import * as crypto from 'crypto'; +import fs from 'fs'; +import path from 'path'; +import crypto from 'crypto'; const { dirs } = require('../../npm/dirs'); const ROOT = path.join(__dirname, '../../../'); diff --git a/build/azure-pipelines/common/listNodeModules.js b/build/azure-pipelines/common/listNodeModules.js index aaa44c51a12..301b5f930b6 100644 --- a/build/azure-pipelines/common/listNodeModules.js +++ b/build/azure-pipelines/common/listNodeModules.js @@ -3,16 +3,19 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const fs = require("fs"); -const path = require("path"); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); if (process.argv.length !== 3) { console.error('Usage: node listNodeModules.js OUTPUT_FILE'); process.exit(-1); } -const ROOT = path.join(__dirname, '../../../'); +const ROOT = path_1.default.join(__dirname, '../../../'); function findNodeModulesFiles(location, inNodeModules, result) { - const entries = fs.readdirSync(path.join(ROOT, location)); + const entries = fs_1.default.readdirSync(path_1.default.join(ROOT, location)); for (const entry of entries) { const entryPath = `${location}/${entry}`; if (/(^\/out)|(^\/src$)|(^\/.git$)|(^\/.build$)/.test(entryPath)) { @@ -20,7 +23,7 @@ function findNodeModulesFiles(location, inNodeModules, result) { } let stat; try { - stat = fs.statSync(path.join(ROOT, entryPath)); + stat = fs_1.default.statSync(path_1.default.join(ROOT, entryPath)); } catch (err) { continue; @@ -37,5 +40,5 @@ function findNodeModulesFiles(location, inNodeModules, result) { } const result = []; findNodeModulesFiles('', false, result); -fs.writeFileSync(process.argv[2], result.join('\n') + '\n'); +fs_1.default.writeFileSync(process.argv[2], result.join('\n') + '\n'); //# sourceMappingURL=listNodeModules.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/listNodeModules.ts b/build/azure-pipelines/common/listNodeModules.ts index aca461f8b5f..fb85b25cfd1 100644 --- a/build/azure-pipelines/common/listNodeModules.ts +++ b/build/azure-pipelines/common/listNodeModules.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; +import fs from 'fs'; +import path from 'path'; if (process.argv.length !== 3) { console.error('Usage: node listNodeModules.js OUTPUT_FILE'); diff --git a/build/azure-pipelines/common/publish.js b/build/azure-pipelines/common/publish.js index bcebd076c28..48093086c34 100644 --- a/build/azure-pipelines/common/publish.js +++ b/build/azure-pipelines/common/publish.js @@ -3,21 +3,57 @@ * Copyright (c) Microsoft Corporation. All rights reserved. 
* Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const fs = require("fs"); -const path = require("path"); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); const stream_1 = require("stream"); const promises_1 = require("node:stream/promises"); -const yauzl = require("yauzl"); -const crypto = require("crypto"); +const yauzl_1 = __importDefault(require("yauzl")); +const crypto_1 = __importDefault(require("crypto")); const retry_1 = require("./retry"); const cosmos_1 = require("@azure/cosmos"); -const cp = require("child_process"); -const os = require("os"); +const child_process_1 = __importDefault(require("child_process")); +const os_1 = __importDefault(require("os")); const node_worker_threads_1 = require("node:worker_threads"); const msal_node_1 = require("@azure/msal-node"); const storage_blob_1 = require("@azure/storage-blob"); -const jws = require("jws"); +const jws = __importStar(require("jws")); const node_timers_1 = require("node:timers"); function e(name) { const result = process.env[name]; @@ -28,7 +64,7 @@ function e(name) { } function hashStream(hashName, stream) { return new Promise((c, e) => { - const shasum = crypto.createHash(hashName); + const shasum = crypto_1.default.createHash(hashName); stream .on('data', shasum.update.bind(shasum)) .on('error', e) @@ -50,38 +86,38 @@ function getCertificateBuffer(input) { } function getThumbprint(input, algorithm) { const buffer = getCertificateBuffer(input); - return crypto.createHash(algorithm).update(buffer).digest(); + return crypto_1.default.createHash(algorithm).update(buffer).digest(); } function getKeyFromPFX(pfx) { - const pfxCertificatePath = path.join(os.tmpdir(), 'cert.pfx'); - const pemKeyPath = path.join(os.tmpdir(), 'key.pem'); + const pfxCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pfx'); + const pemKeyPath = path_1.default.join(os_1.default.tmpdir(), 'key.pem'); try { const 
pfxCertificate = Buffer.from(pfx, 'base64'); - fs.writeFileSync(pfxCertificatePath, pfxCertificate); - cp.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nocerts -nodes -out "${pemKeyPath}" -passin pass:`); - const raw = fs.readFileSync(pemKeyPath, 'utf-8'); + fs_1.default.writeFileSync(pfxCertificatePath, pfxCertificate); + child_process_1.default.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nocerts -nodes -out "${pemKeyPath}" -passin pass:`); + const raw = fs_1.default.readFileSync(pemKeyPath, 'utf-8'); const result = raw.match(/-----BEGIN PRIVATE KEY-----[\s\S]+?-----END PRIVATE KEY-----/g)[0]; return result; } finally { - fs.rmSync(pfxCertificatePath, { force: true }); - fs.rmSync(pemKeyPath, { force: true }); + fs_1.default.rmSync(pfxCertificatePath, { force: true }); + fs_1.default.rmSync(pemKeyPath, { force: true }); } } function getCertificatesFromPFX(pfx) { - const pfxCertificatePath = path.join(os.tmpdir(), 'cert.pfx'); - const pemCertificatePath = path.join(os.tmpdir(), 'cert.pem'); + const pfxCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pfx'); + const pemCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pem'); try { const pfxCertificate = Buffer.from(pfx, 'base64'); - fs.writeFileSync(pfxCertificatePath, pfxCertificate); - cp.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nokeys -out "${pemCertificatePath}" -passin pass:`); - const raw = fs.readFileSync(pemCertificatePath, 'utf-8'); + fs_1.default.writeFileSync(pfxCertificatePath, pfxCertificate); + child_process_1.default.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nokeys -out "${pemCertificatePath}" -passin pass:`); + const raw = fs_1.default.readFileSync(pemCertificatePath, 'utf-8'); const matches = raw.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g); return matches ? 
matches.reverse() : []; } finally { - fs.rmSync(pfxCertificatePath, { force: true }); - fs.rmSync(pemCertificatePath, { force: true }); + fs_1.default.rmSync(pfxCertificatePath, { force: true }); + fs_1.default.rmSync(pemCertificatePath, { force: true }); } } class ESRPReleaseService { @@ -122,7 +158,7 @@ class ESRPReleaseService { this.containerClient = containerClient; } async createRelease(version, filePath, friendlyFileName) { - const correlationId = crypto.randomUUID(); + const correlationId = crypto_1.default.randomUUID(); const blobClient = this.containerClient.getBlockBlobClient(correlationId); this.log(`Uploading ${filePath} to ${blobClient.url}`); await blobClient.uploadFile(filePath); @@ -161,8 +197,8 @@ class ESRPReleaseService { } } async submitRelease(version, filePath, friendlyFileName, correlationId, blobClient) { - const size = fs.statSync(filePath).size; - const hash = await hashStream('sha256', fs.createReadStream(filePath)); + const size = fs_1.default.statSync(filePath).size; + const hash = await hashStream('sha256', fs_1.default.createReadStream(filePath)); const message = { customerCorrelationId: correlationId, esrpCorrelationId: correlationId, @@ -192,7 +228,7 @@ class ESRPReleaseService { intent: 'filedownloadlinkgeneration' }, files: [{ - name: path.basename(filePath), + name: path_1.default.basename(filePath), friendlyFileName, tenantFileLocation: blobClient.url, tenantFileLocationType: 'AzureBlob', @@ -268,19 +304,19 @@ class State { set = new Set(); constructor() { const pipelineWorkspacePath = e('PIPELINE_WORKSPACE'); - const previousState = fs.readdirSync(pipelineWorkspacePath) + const previousState = fs_1.default.readdirSync(pipelineWorkspacePath) .map(name => /^artifacts_processed_(\d+)$/.exec(name)) .filter((match) => !!match) .map(match => ({ name: match[0], attempt: Number(match[1]) })) .sort((a, b) => b.attempt - a.attempt)[0]; if (previousState) { - const previousStatePath = path.join(pipelineWorkspacePath, previousState.name, previousState.name + '.txt'); - fs.readFileSync(previousStatePath, 'utf8').split(/\n/).filter(name => !!name).forEach(name => this.set.add(name)); + const previousStatePath = path_1.default.join(pipelineWorkspacePath, previousState.name, previousState.name + '.txt'); + fs_1.default.readFileSync(previousStatePath, 'utf8').split(/\n/).filter(name => !!name).forEach(name => this.set.add(name)); } const stageAttempt = e('SYSTEM_STAGEATTEMPT'); - this.statePath = path.join(pipelineWorkspacePath, `artifacts_processed_${stageAttempt}`, `artifacts_processed_${stageAttempt}.txt`); - fs.mkdirSync(path.dirname(this.statePath), { recursive: true }); - fs.writeFileSync(this.statePath, [...this.set.values()].map(name => `${name}\n`).join('')); + this.statePath = path_1.default.join(pipelineWorkspacePath, `artifacts_processed_${stageAttempt}`, `artifacts_processed_${stageAttempt}.txt`); + fs_1.default.mkdirSync(path_1.default.dirname(this.statePath), { recursive: true }); + fs_1.default.writeFileSync(this.statePath, [...this.set.values()].map(name => `${name}\n`).join('')); } get size() { return this.set.size; @@ -290,7 +326,7 @@ class State { } add(name) { this.set.add(name); - fs.appendFileSync(this.statePath, `${name}\n`); + fs_1.default.appendFileSync(this.statePath, `${name}\n`); } [Symbol.iterator]() { return this.set[Symbol.iterator](); @@ -336,7 +372,7 @@ async function downloadArtifact(artifact, downloadPath) { if (!res.ok) { throw new Error(`Unexpected status code: ${res.status}`); } - await (0, 
promises_1.pipeline)(stream_1.Readable.fromWeb(res.body), fs.createWriteStream(downloadPath)); + await (0, promises_1.pipeline)(stream_1.Readable.fromWeb(res.body), fs_1.default.createWriteStream(downloadPath)); } finally { clearTimeout(timeout); @@ -344,7 +380,7 @@ async function downloadArtifact(artifact, downloadPath) { } async function unzip(packagePath, outputPath) { return new Promise((resolve, reject) => { - yauzl.open(packagePath, { lazyEntries: true, autoClose: true }, (err, zipfile) => { + yauzl_1.default.open(packagePath, { lazyEntries: true, autoClose: true }, (err, zipfile) => { if (err) { return reject(err); } @@ -358,9 +394,9 @@ async function unzip(packagePath, outputPath) { if (err) { return reject(err); } - const filePath = path.join(outputPath, entry.fileName); - fs.mkdirSync(path.dirname(filePath), { recursive: true }); - const ostream = fs.createWriteStream(filePath); + const filePath = path_1.default.join(outputPath, entry.fileName); + fs_1.default.mkdirSync(path_1.default.dirname(filePath), { recursive: true }); + const ostream = fs_1.default.createWriteStream(filePath); ostream.on('finish', () => { result.push(filePath); zipfile.readEntry(); @@ -523,7 +559,7 @@ async function processArtifact(artifact, filePath) { const { cosmosDBAccessToken, blobServiceAccessToken } = JSON.parse(e('PUBLISH_AUTH_TOKENS')); const quality = e('VSCODE_QUALITY'); const version = e('BUILD_SOURCEVERSION'); - const friendlyFileName = `${quality}/${version}/${path.basename(filePath)}`; + const friendlyFileName = `${quality}/${version}/${path_1.default.basename(filePath)}`; const blobServiceClient = new storage_blob_1.BlobServiceClient(`https://${e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')}.blob.core.windows.net/`, { getToken: async () => blobServiceAccessToken }); const leasesContainerClient = blobServiceClient.getContainerClient('leases'); await leasesContainerClient.createIfNotExists(); @@ -546,8 +582,8 @@ async function processArtifact(artifact, filePath) { const isLegacy = artifact.name.includes('_legacy'); const platform = getPlatform(product, os, arch, unprocessedType, isLegacy); const type = getRealType(unprocessedType); - const size = fs.statSync(filePath).size; - const stream = fs.createReadStream(filePath); + const size = fs_1.default.statSync(filePath).size; + const stream = fs_1.default.createReadStream(filePath); const [hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]); // CodeQL [SM04514] Using SHA1 only for legacy reasons, we are actually only respecting SHA256 const asset = { platform, type, url, hash: hash.toString('hex'), sha256hash: sha256hash.toString('hex'), size, supportsFastUpdate: true }; log('Creating asset...'); @@ -627,12 +663,12 @@ async function main() { continue; } console.log(`[${artifact.name}] Found new artifact`); - const artifactZipPath = path.join(e('AGENT_TEMPDIRECTORY'), `${artifact.name}.zip`); + const artifactZipPath = path_1.default.join(e('AGENT_TEMPDIRECTORY'), `${artifact.name}.zip`); await (0, retry_1.retry)(async (attempt) => { const start = Date.now(); console.log(`[${artifact.name}] Downloading (attempt ${attempt})...`); await downloadArtifact(artifact, artifactZipPath); - const archiveSize = fs.statSync(artifactZipPath).size; + const archiveSize = fs_1.default.statSync(artifactZipPath).size; const downloadDurationS = (Date.now() - start) / 1000; const downloadSpeedKBS = Math.round((archiveSize / 1024) / downloadDurationS); console.log(`[${artifact.name}] Successfully downloaded after 
${Math.floor(downloadDurationS)} seconds(${downloadSpeedKBS} KB/s).`); diff --git a/build/azure-pipelines/common/publish.ts b/build/azure-pipelines/common/publish.ts index b8b99c3855b..79444bebf13 100644 --- a/build/azure-pipelines/common/publish.ts +++ b/build/azure-pipelines/common/publish.ts @@ -3,17 +3,17 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; +import fs from 'fs'; +import path from 'path'; import { Readable } from 'stream'; import type { ReadableStream } from 'stream/web'; import { pipeline } from 'node:stream/promises'; -import * as yauzl from 'yauzl'; -import * as crypto from 'crypto'; +import yauzl from 'yauzl'; +import crypto from 'crypto'; import { retry } from './retry'; import { CosmosClient } from '@azure/cosmos'; -import * as cp from 'child_process'; -import * as os from 'os'; +import cp from 'child_process'; +import os from 'os'; import { Worker, isMainThread, workerData } from 'node:worker_threads'; import { ConfidentialClientApplication } from '@azure/msal-node'; import { BlobClient, BlobServiceClient, BlockBlobClient, ContainerClient } from '@azure/storage-blob'; diff --git a/build/azure-pipelines/common/sign-win32.js b/build/azure-pipelines/common/sign-win32.js index aa197bb1198..f4e3f27c1f2 100644 --- a/build/azure-pipelines/common/sign-win32.js +++ b/build/azure-pipelines/common/sign-win32.js @@ -3,13 +3,16 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); const sign_1 = require("./sign"); -const path = require("path"); +const path_1 = __importDefault(require("path")); (0, sign_1.main)([ process.env['EsrpCliDllPath'], 'sign-windows', - path.dirname(process.argv[2]), - path.basename(process.argv[2]) + path_1.default.dirname(process.argv[2]), + path_1.default.basename(process.argv[2]) ]); //# sourceMappingURL=sign-win32.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/sign-win32.ts b/build/azure-pipelines/common/sign-win32.ts index c2f3dbda151..ad88435b5a3 100644 --- a/build/azure-pipelines/common/sign-win32.ts +++ b/build/azure-pipelines/common/sign-win32.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { main } from './sign'; -import * as path from 'path'; +import path from 'path'; main([ process.env['EsrpCliDllPath']!, diff --git a/build/azure-pipelines/common/sign.js b/build/azure-pipelines/common/sign.js index df25de29399..fd87772b3b8 100644 --- a/build/azure-pipelines/common/sign.js +++ b/build/azure-pipelines/common/sign.js @@ -3,25 +3,28 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.Temp = void 0; exports.main = main; -const cp = require("child_process"); -const fs = require("fs"); -const crypto = require("crypto"); -const path = require("path"); -const os = require("os"); +const child_process_1 = __importDefault(require("child_process")); +const fs_1 = __importDefault(require("fs")); +const crypto_1 = __importDefault(require("crypto")); +const path_1 = __importDefault(require("path")); +const os_1 = __importDefault(require("os")); class Temp { _files = []; tmpNameSync() { - const file = path.join(os.tmpdir(), crypto.randomBytes(20).toString('hex')); + const file = path_1.default.join(os_1.default.tmpdir(), crypto_1.default.randomBytes(20).toString('hex')); this._files.push(file); return file; } dispose() { for (const file of this._files) { try { - fs.unlinkSync(file); + fs_1.default.unlinkSync(file); } catch (err) { // noop @@ -126,20 +129,20 @@ function getParams(type) { function main([esrpCliPath, type, folderPath, pattern]) { const tmp = new Temp(); process.on('exit', () => tmp.dispose()); - const key = crypto.randomBytes(32); - const iv = crypto.randomBytes(16); - const cipher = crypto.createCipheriv('aes-256-cbc', key, iv); + const key = crypto_1.default.randomBytes(32); + const iv = crypto_1.default.randomBytes(16); + const cipher = crypto_1.default.createCipheriv('aes-256-cbc', key, iv); const encryptedToken = cipher.update(process.env['SYSTEM_ACCESSTOKEN'].trim(), 'utf8', 'hex') + cipher.final('hex'); const encryptionDetailsPath = tmp.tmpNameSync(); - fs.writeFileSync(encryptionDetailsPath, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') })); + fs_1.default.writeFileSync(encryptionDetailsPath, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') })); const encryptedTokenPath = tmp.tmpNameSync(); - fs.writeFileSync(encryptedTokenPath, encryptedToken); + fs_1.default.writeFileSync(encryptedTokenPath, encryptedToken); const patternPath = tmp.tmpNameSync(); - fs.writeFileSync(patternPath, pattern); + fs_1.default.writeFileSync(patternPath, pattern); const paramsPath = tmp.tmpNameSync(); - fs.writeFileSync(paramsPath, JSON.stringify(getParams(type))); - const dotnetVersion = cp.execSync('dotnet --version', { encoding: 'utf8' }).trim(); - const adoTaskVersion = path.basename(path.dirname(path.dirname(esrpCliPath))); + fs_1.default.writeFileSync(paramsPath, JSON.stringify(getParams(type))); + const dotnetVersion = child_process_1.default.execSync('dotnet --version', { encoding: 'utf8' }).trim(); + const adoTaskVersion = path_1.default.basename(path_1.default.dirname(path_1.default.dirname(esrpCliPath))); const federatedTokenData = { jobId: process.env['SYSTEM_JOBID'], planId: process.env['SYSTEM_PLANID'], @@ -149,7 +152,7 @@ function main([esrpCliPath, type, folderPath, pattern]) { managedIdentityId: process.env['VSCODE_ESRP_CLIENT_ID'], managedIdentityTenantId: process.env['VSCODE_ESRP_TENANT_ID'], serviceConnectionId: process.env['VSCODE_ESRP_SERVICE_CONNECTION_ID'], - tempDirectory: os.tmpdir(), + tempDirectory: os_1.default.tmpdir(), systemAccessToken: encryptedTokenPath, encryptionKey: encryptionDetailsPath }; @@ -188,7 +191,7 @@ function main([esrpCliPath, type, folderPath, pattern]) { '-federatedTokenData', JSON.stringify(federatedTokenData) ]; try { - cp.execFileSync('dotnet', args, { stdio: 'inherit' }); + child_process_1.default.execFileSync('dotnet', args, { stdio: 'inherit' }); } catch (err) { console.error('ESRP failed'); 
diff --git a/build/azure-pipelines/common/sign.ts b/build/azure-pipelines/common/sign.ts index e5f42e87da2..19a288483c8 100644 --- a/build/azure-pipelines/common/sign.ts +++ b/build/azure-pipelines/common/sign.ts @@ -3,11 +3,11 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as cp from 'child_process'; -import * as fs from 'fs'; -import * as crypto from 'crypto'; -import * as path from 'path'; -import * as os from 'os'; +import cp from 'child_process'; +import fs from 'fs'; +import crypto from 'crypto'; +import path from 'path'; +import os from 'os'; export class Temp { private _files: string[] = []; diff --git a/build/azure-pipelines/distro/mixin-npm.js b/build/azure-pipelines/distro/mixin-npm.js index 0c61bb3dcf4..87958a5d449 100644 --- a/build/azure-pipelines/distro/mixin-npm.js +++ b/build/azure-pipelines/distro/mixin-npm.js @@ -3,24 +3,27 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const fs = require("fs"); -const path = require("path"); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); const { dirs } = require('../../npm/dirs'); function log(...args) { console.log(`[${new Date().toLocaleTimeString('en', { hour12: false })}]`, '[distro]', ...args); } function mixin(mixinPath) { - if (!fs.existsSync(`${mixinPath}/node_modules`)) { + if (!fs_1.default.existsSync(`${mixinPath}/node_modules`)) { log(`Skipping distro npm dependencies: ${mixinPath} (no node_modules)`); return; } log(`Mixing in distro npm dependencies: ${mixinPath}`); - const distroPackageJson = JSON.parse(fs.readFileSync(`${mixinPath}/package.json`, 'utf8')); - const targetPath = path.relative('.build/distro/npm', mixinPath); + const distroPackageJson = JSON.parse(fs_1.default.readFileSync(`${mixinPath}/package.json`, 'utf8')); + const targetPath = path_1.default.relative('.build/distro/npm', mixinPath); for (const dependency of Object.keys(distroPackageJson.dependencies)) { - fs.rmSync(`./${targetPath}/node_modules/${dependency}`, { recursive: true, force: true }); - fs.cpSync(`${mixinPath}/node_modules/${dependency}`, `./${targetPath}/node_modules/${dependency}`, { recursive: true, force: true, dereference: true }); + fs_1.default.rmSync(`./${targetPath}/node_modules/${dependency}`, { recursive: true, force: true }); + fs_1.default.cpSync(`${mixinPath}/node_modules/${dependency}`, `./${targetPath}/node_modules/${dependency}`, { recursive: true, force: true, dereference: true }); } log(`Mixed in distro npm dependencies: ${mixinPath} ✔︎`); } diff --git a/build/azure-pipelines/distro/mixin-npm.ts b/build/azure-pipelines/distro/mixin-npm.ts index da5eb24ca28..6e32f10db50 100644 --- a/build/azure-pipelines/distro/mixin-npm.ts +++ b/build/azure-pipelines/distro/mixin-npm.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; +import fs from 'fs'; +import path from 'path'; const { dirs } = require('../../npm/dirs') as { dirs: string[] }; function log(...args: any[]): void { diff --git a/build/azure-pipelines/distro/mixin-quality.js b/build/azure-pipelines/distro/mixin-quality.js index 6e011b5a1e9..335f63ca1fc 100644 --- a/build/azure-pipelines/distro/mixin-quality.js +++ b/build/azure-pipelines/distro/mixin-quality.js @@ -3,9 +3,12 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const fs = require("fs"); -const path = require("path"); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); function log(...args) { console.log(`[${new Date().toLocaleTimeString('en', { hour12: false })}]`, '[distro]', ...args); } @@ -16,12 +19,12 @@ function main() { } log(`Mixing in distro quality...`); const basePath = `.build/distro/mixin/${quality}`; - for (const name of fs.readdirSync(basePath)) { - const distroPath = path.join(basePath, name); - const ossPath = path.relative(basePath, distroPath); + for (const name of fs_1.default.readdirSync(basePath)) { + const distroPath = path_1.default.join(basePath, name); + const ossPath = path_1.default.relative(basePath, distroPath); if (ossPath === 'product.json') { - const distro = JSON.parse(fs.readFileSync(distroPath, 'utf8')); - const oss = JSON.parse(fs.readFileSync(ossPath, 'utf8')); + const distro = JSON.parse(fs_1.default.readFileSync(distroPath, 'utf8')); + const oss = JSON.parse(fs_1.default.readFileSync(ossPath, 'utf8')); let builtInExtensions = oss.builtInExtensions; if (Array.isArray(distro.builtInExtensions)) { log('Overwriting built-in extensions:', distro.builtInExtensions.map(e => e.name)); @@ -41,10 +44,10 @@ function main() { log('Inheriting OSS built-in extensions', builtInExtensions.map(e => e.name)); } const result = { webBuiltInExtensions: oss.webBuiltInExtensions, ...distro, builtInExtensions }; - fs.writeFileSync(ossPath, JSON.stringify(result, null, '\t'), 'utf8'); + fs_1.default.writeFileSync(ossPath, JSON.stringify(result, null, '\t'), 'utf8'); } else { - fs.cpSync(distroPath, ossPath, { force: true, recursive: true }); + fs_1.default.cpSync(distroPath, ossPath, { force: true, recursive: true }); } log(distroPath, '✔︎'); } diff --git a/build/azure-pipelines/distro/mixin-quality.ts b/build/azure-pipelines/distro/mixin-quality.ts index b9b3c4f6c42..29c90f00a65 100644 --- a/build/azure-pipelines/distro/mixin-quality.ts +++ b/build/azure-pipelines/distro/mixin-quality.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; +import fs from 'fs'; +import path from 'path'; interface IBuiltInExtension { readonly name: string; diff --git a/build/azure-pipelines/publish-types/check-version.js b/build/azure-pipelines/publish-types/check-version.js index 9e93a7fa4c9..5bd80a69bbf 100644 --- a/build/azure-pipelines/publish-types/check-version.js +++ b/build/azure-pipelines/publish-types/check-version.js @@ -3,11 +3,14 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const cp = require("child_process"); +const child_process_1 = __importDefault(require("child_process")); let tag = ''; try { - tag = cp + tag = child_process_1.default .execSync('git describe --tags `git rev-list --tags --max-count=1`') .toString() .trim(); diff --git a/build/azure-pipelines/publish-types/check-version.ts b/build/azure-pipelines/publish-types/check-version.ts index 35c5a511593..4496ed93af1 100644 --- a/build/azure-pipelines/publish-types/check-version.ts +++ b/build/azure-pipelines/publish-types/check-version.ts @@ -3,7 +3,7 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as cp from 'child_process'; +import cp from 'child_process'; let tag = ''; try { diff --git a/build/azure-pipelines/publish-types/update-types.js b/build/azure-pipelines/publish-types/update-types.js index ed2deded3fc..29f9bfcf66e 100644 --- a/build/azure-pipelines/publish-types/update-types.js +++ b/build/azure-pipelines/publish-types/update-types.js @@ -3,19 +3,22 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const fs = require("fs"); -const cp = require("child_process"); -const path = require("path"); +const fs_1 = __importDefault(require("fs")); +const child_process_1 = __importDefault(require("child_process")); +const path_1 = __importDefault(require("path")); let tag = ''; try { - tag = cp + tag = child_process_1.default .execSync('git describe --tags `git rev-list --tags --max-count=1`') .toString() .trim(); const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vscode-dts/vscode.d.ts`; - const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts'); - cp.execSync(`curl ${dtsUri} --output ${outPath}`); + const outPath = path_1.default.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts'); + child_process_1.default.execSync(`curl ${dtsUri} --output ${outPath}`); updateDTSFile(outPath, tag); console.log(`Done updating vscode.d.ts at ${outPath}`); } @@ -25,9 +28,9 @@ catch (err) { process.exit(1); } function updateDTSFile(outPath, tag) { - const oldContent = fs.readFileSync(outPath, 'utf-8'); + const oldContent = fs_1.default.readFileSync(outPath, 'utf-8'); const newContent = getNewFileContent(oldContent, tag); - fs.writeFileSync(outPath, newContent); + fs_1.default.writeFileSync(outPath, newContent); } function repeat(str, times) { const result = new Array(times); diff --git a/build/azure-pipelines/publish-types/update-types.ts b/build/azure-pipelines/publish-types/update-types.ts index a727647e64a..0f99b07cf9a 100644 --- a/build/azure-pipelines/publish-types/update-types.ts +++ b/build/azure-pipelines/publish-types/update-types.ts @@ -3,9 +3,9 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as cp from 'child_process'; -import * as path from 'path'; +import fs from 'fs'; +import cp from 'child_process'; +import path from 'path'; let tag = ''; try { diff --git a/build/azure-pipelines/upload-cdn.js b/build/azure-pipelines/upload-cdn.js index 8ec40a0108e..a0ec9d93516 100644 --- a/build/azure-pipelines/upload-cdn.js +++ b/build/azure-pipelines/upload-cdn.js @@ -3,13 +3,49 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const es = require("event-stream"); -const Vinyl = require("vinyl"); -const vfs = require("vinyl-fs"); -const filter = require("gulp-filter"); -const gzip = require("gulp-gzip"); -const mime = require("mime"); +const es = __importStar(require("event-stream")); +const vinyl_1 = __importDefault(require("vinyl")); +const vfs = __importStar(require("vinyl-fs")); +const gulp_filter_1 = __importDefault(require("gulp-filter")); +const gulp_gzip_1 = __importDefault(require("gulp-gzip")); +const mime = __importStar(require("mime")); const identity_1 = require("@azure/identity"); const azure = require('gulp-azure-storage'); const commit = process.env['BUILD_SOURCEVERSION']; @@ -83,13 +119,13 @@ async function main() { } }); const all = vfs.src('**', { cwd: '../vscode-web', base: '../vscode-web', dot: true }) - .pipe(filter(f => !f.isDirectory())); + .pipe((0, gulp_filter_1.default)(f => !f.isDirectory())); const compressed = all - .pipe(filter(f => MimeTypesToCompress.has(mime.lookup(f.path)))) - .pipe(gzip({ append: false })) + .pipe((0, gulp_filter_1.default)(f => MimeTypesToCompress.has(mime.lookup(f.path)))) + .pipe((0, gulp_gzip_1.default)({ append: false })) .pipe(azure.upload(options(true))); const uncompressed = all - .pipe(filter(f => !MimeTypesToCompress.has(mime.lookup(f.path)))) + .pipe((0, gulp_filter_1.default)(f => !MimeTypesToCompress.has(mime.lookup(f.path)))) .pipe(azure.upload(options(false))); const out = es.merge(compressed, uncompressed) .pipe(es.through(function (f) { @@ -99,13 +135,13 @@ async function main() { })); console.log(`Uploading files to CDN...`); // debug await wait(out); - const listing = new Vinyl({ + const listing = new vinyl_1.default({ path: 'files.txt', contents: Buffer.from(files.join('\n')), stat: { mode: 0o666 } }); const filesOut = es.readArray([listing]) - .pipe(gzip({ append: false })) + .pipe((0, gulp_gzip_1.default)({ append: false })) .pipe(azure.upload(options(true))); console.log(`Uploading: files.txt (${files.length} files)`); // debug await wait(filesOut); diff --git a/build/azure-pipelines/upload-cdn.ts b/build/azure-pipelines/upload-cdn.ts index a4a5857afe5..719ecd09c36 100644 --- a/build/azure-pipelines/upload-cdn.ts +++ b/build/azure-pipelines/upload-cdn.ts @@ -4,10 +4,10 @@ *--------------------------------------------------------------------------------------------*/ import * as es from 'event-stream'; -import * as Vinyl from 'vinyl'; +import Vinyl from 'vinyl'; import * as vfs from 'vinyl-fs'; -import * as filter from 'gulp-filter'; -import * as gzip from 'gulp-gzip'; +import filter from 'gulp-filter'; +import gzip from 'gulp-gzip'; 
import * as mime from 'mime'; import { ClientAssertionCredential } from '@azure/identity'; const azure = require('gulp-azure-storage'); diff --git a/build/azure-pipelines/upload-nlsmetadata.js b/build/azure-pipelines/upload-nlsmetadata.js index de75dcb8b3a..aac93a05732 100644 --- a/build/azure-pipelines/upload-nlsmetadata.js +++ b/build/azure-pipelines/upload-nlsmetadata.js @@ -3,11 +3,47 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const es = require("event-stream"); -const vfs = require("vinyl-fs"); -const merge = require("gulp-merge-json"); -const gzip = require("gulp-gzip"); +const es = __importStar(require("event-stream")); +const vfs = __importStar(require("vinyl-fs")); +const gulp_merge_json_1 = __importDefault(require("gulp-merge-json")); +const gulp_gzip_1 = __importDefault(require("gulp-gzip")); const identity_1 = require("@azure/identity"); const path = require("path"); const fs_1 = require("fs"); @@ -21,7 +57,7 @@ function main() { // it includes metadata for translators for `keys`. but for our purpose // we want only the `keys` and `messages` as `string`. 
es.merge(vfs.src('out-build/nls.keys.json', { base: 'out-build' }), vfs.src('out-build/nls.messages.json', { base: 'out-build' })) - .pipe(merge({ + .pipe((0, gulp_merge_json_1.default)({ fileName: 'vscode.json', jsonSpace: '', concatArrays: true, @@ -37,7 +73,7 @@ function main() { } })), // extensions - vfs.src('.build/extensions/**/nls.metadata.json', { base: '.build/extensions' }), vfs.src('.build/extensions/**/nls.metadata.header.json', { base: '.build/extensions' }), vfs.src('.build/extensions/**/package.nls.json', { base: '.build/extensions' })).pipe(merge({ + vfs.src('.build/extensions/**/nls.metadata.json', { base: '.build/extensions' }), vfs.src('.build/extensions/**/nls.metadata.header.json', { base: '.build/extensions' }), vfs.src('.build/extensions/**/package.nls.json', { base: '.build/extensions' })).pipe((0, gulp_merge_json_1.default)({ fileName: 'combined.nls.metadata.json', jsonSpace: '', concatArrays: true, @@ -95,7 +131,7 @@ function main() { })); const nlsMessagesJs = vfs.src('out-build/nls.messages.js', { base: 'out-build' }); es.merge(combinedMetadataJson, nlsMessagesJs) - .pipe(gzip({ append: false })) + .pipe((0, gulp_gzip_1.default)({ append: false })) .pipe(vfs.dest('./nlsMetadata')) .pipe(es.through(function (data) { console.log(`Uploading ${data.path}`); diff --git a/build/azure-pipelines/upload-nlsmetadata.ts b/build/azure-pipelines/upload-nlsmetadata.ts index 89a9eb6c536..5c13f73a006 100644 --- a/build/azure-pipelines/upload-nlsmetadata.ts +++ b/build/azure-pipelines/upload-nlsmetadata.ts @@ -6,8 +6,8 @@ import * as es from 'event-stream'; import * as Vinyl from 'vinyl'; import * as vfs from 'vinyl-fs'; -import * as merge from 'gulp-merge-json'; -import * as gzip from 'gulp-gzip'; +import merge from 'gulp-merge-json'; +import gzip from 'gulp-gzip'; import { ClientAssertionCredential } from '@azure/identity'; import path = require('path'); import { readFileSync } from 'fs'; diff --git a/build/azure-pipelines/upload-sourcemaps.js b/build/azure-pipelines/upload-sourcemaps.js index 6f5f73fb8b0..68ee13dcf2d 100644 --- a/build/azure-pipelines/upload-sourcemaps.js +++ b/build/azure-pipelines/upload-sourcemaps.js @@ -3,23 +3,59 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const es = require("event-stream"); -const vfs = require("vinyl-fs"); -const util = require("../lib/util"); +const path_1 = __importDefault(require("path")); +const event_stream_1 = __importDefault(require("event-stream")); +const vinyl_fs_1 = __importDefault(require("vinyl-fs")); +const util = __importStar(require("../lib/util")); // @ts-ignore -const deps = require("../lib/dependencies"); +const deps = __importStar(require("../lib/dependencies")); const identity_1 = require("@azure/identity"); const azure = require('gulp-azure-storage'); -const root = path.dirname(path.dirname(__dirname)); +const root = path_1.default.dirname(path_1.default.dirname(__dirname)); const commit = process.env['BUILD_SOURCEVERSION']; const credential = new identity_1.ClientAssertionCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], () => Promise.resolve(process.env['AZURE_ID_TOKEN'])); // optionally allow to pass in explicit base/maps to upload const [, , base, maps] = process.argv; function src(base, maps = `${base}/**/*.map`) { - return vfs.src(maps, { base }) - .pipe(es.mapSync((f) => { + return vinyl_fs_1.default.src(maps, { base }) + .pipe(event_stream_1.default.mapSync((f) => { f.path = `${f.base}/core/${f.relative}`; return f; })); @@ -31,12 +67,12 @@ function main() { const vs = src('out-vscode-min'); // client source-maps only sources.push(vs); const productionDependencies = deps.getProductionDependencies(root); - const productionDependenciesSrc = productionDependencies.map(d => path.relative(root, d)).map(d => `./${d}/**/*.map`); - const nodeModules = vfs.src(productionDependenciesSrc, { base: '.' }) - .pipe(util.cleanNodeModules(path.join(root, 'build', '.moduleignore'))) - .pipe(util.cleanNodeModules(path.join(root, 'build', `.moduleignore.${process.platform}`))); + const productionDependenciesSrc = productionDependencies.map(d => path_1.default.relative(root, d)).map(d => `./${d}/**/*.map`); + const nodeModules = vinyl_fs_1.default.src(productionDependenciesSrc, { base: '.' 
}) + .pipe(util.cleanNodeModules(path_1.default.join(root, 'build', '.moduleignore'))) + .pipe(util.cleanNodeModules(path_1.default.join(root, 'build', `.moduleignore.${process.platform}`))); sources.push(nodeModules); - const extensionsOut = vfs.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' }); + const extensionsOut = vinyl_fs_1.default.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' }); sources.push(extensionsOut); } // specific client base/maps @@ -44,8 +80,8 @@ function main() { sources.push(src(base, maps)); } return new Promise((c, e) => { - es.merge(...sources) - .pipe(es.through(function (data) { + event_stream_1.default.merge(...sources) + .pipe(event_stream_1.default.through(function (data) { console.log('Uploading Sourcemap', data.relative); // debug this.emit('data', data); })) diff --git a/build/azure-pipelines/upload-sourcemaps.ts b/build/azure-pipelines/upload-sourcemaps.ts index 2eb5e696983..b4a9f38e129 100644 --- a/build/azure-pipelines/upload-sourcemaps.ts +++ b/build/azure-pipelines/upload-sourcemaps.ts @@ -3,10 +3,10 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as path from 'path'; -import * as es from 'event-stream'; -import * as Vinyl from 'vinyl'; -import * as vfs from 'vinyl-fs'; +import path from 'path'; +import es from 'event-stream'; +import Vinyl from 'vinyl'; +import vfs from 'vinyl-fs'; import * as util from '../lib/util'; // @ts-ignore import * as deps from '../lib/dependencies'; diff --git a/build/darwin/create-universal-app.js b/build/darwin/create-universal-app.js index bced5a7166f..535d46eb174 100644 --- a/build/darwin/create-universal-app.js +++ b/build/darwin/create-universal-app.js @@ -3,24 +3,27 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const fs = require("fs"); -const minimatch = require("minimatch"); +const path_1 = __importDefault(require("path")); +const fs_1 = __importDefault(require("fs")); +const minimatch_1 = __importDefault(require("minimatch")); const vscode_universal_bundler_1 = require("vscode-universal-bundler"); -const root = path.dirname(path.dirname(__dirname)); +const root = path_1.default.dirname(path_1.default.dirname(__dirname)); async function main(buildDir) { const arch = process.env['VSCODE_ARCH']; if (!buildDir) { throw new Error('Build dir not provided'); } - const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8')); + const product = JSON.parse(fs_1.default.readFileSync(path_1.default.join(root, 'product.json'), 'utf8')); const appName = product.nameLong + '.app'; - const x64AppPath = path.join(buildDir, 'VSCode-darwin-x64', appName); - const arm64AppPath = path.join(buildDir, 'VSCode-darwin-arm64', appName); - const asarRelativePath = path.join('Contents', 'Resources', 'app', 'node_modules.asar'); - const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName); - const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json'); + const x64AppPath = path_1.default.join(buildDir, 'VSCode-darwin-x64', appName); + const arm64AppPath = path_1.default.join(buildDir, 'VSCode-darwin-arm64', appName); + const asarRelativePath = path_1.default.join('Contents', 'Resources', 'app', 'node_modules.asar'); + const outAppPath = path_1.default.join(buildDir, `VSCode-darwin-${arch}`, appName); + const productJsonPath = path_1.default.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json'); const filesToSkip = [ '**/CodeResources', '**/Credits.rtf', @@ -37,18 +40,18 @@ async function main(buildDir) { x64ArchFiles: '*/kerberos.node', filesToSkipComparison: (file) => { for (const expected of filesToSkip) { - if (minimatch(file, expected)) { + if ((0, minimatch_1.default)(file, expected)) { return true; } } return false; } }); - const productJson = JSON.parse(fs.readFileSync(productJsonPath, 'utf8')); + const productJson = JSON.parse(fs_1.default.readFileSync(productJsonPath, 'utf8')); Object.assign(productJson, { darwinUniversalAssetId: 'darwin-universal' }); - fs.writeFileSync(productJsonPath, JSON.stringify(productJson, null, '\t')); + fs_1.default.writeFileSync(productJsonPath, JSON.stringify(productJson, null, '\t')); } if (require.main === module) { main(process.argv[2]).catch(err => { diff --git a/build/darwin/create-universal-app.ts b/build/darwin/create-universal-app.ts index e05f780b38d..9e013cdb10c 100644 --- a/build/darwin/create-universal-app.ts +++ b/build/darwin/create-universal-app.ts @@ -3,9 +3,9 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as path from 'path'; -import * as fs from 'fs'; -import * as minimatch from 'minimatch'; +import path from 'path'; +import fs from 'fs'; +import minimatch from 'minimatch'; import { makeUniversalApp } from 'vscode-universal-bundler'; const root = path.dirname(path.dirname(__dirname)); diff --git a/build/darwin/sign.js b/build/darwin/sign.js index feb5834ff85..dff30fd0e18 100644 --- a/build/darwin/sign.js +++ b/build/darwin/sign.js @@ -3,14 +3,17 @@ * Copyright (c) Microsoft Corporation. 
All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const fs = require("fs"); -const path = require("path"); -const codesign = require("electron-osx-sign"); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const electron_osx_sign_1 = __importDefault(require("electron-osx-sign")); const cross_spawn_promise_1 = require("@malept/cross-spawn-promise"); -const root = path.dirname(path.dirname(__dirname)); +const root = path_1.default.dirname(path_1.default.dirname(__dirname)); function getElectronVersion() { - const npmrc = fs.readFileSync(path.join(root, '.npmrc'), 'utf8'); + const npmrc = fs_1.default.readFileSync(path_1.default.join(root, '.npmrc'), 'utf8'); const target = /^target="(.*)"$/m.exec(npmrc)[1]; return target; } @@ -24,25 +27,25 @@ async function main(buildDir) { if (!tempDir) { throw new Error('$AGENT_TEMPDIRECTORY not set'); } - const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8')); - const baseDir = path.dirname(__dirname); - const appRoot = path.join(buildDir, `VSCode-darwin-${arch}`); + const product = JSON.parse(fs_1.default.readFileSync(path_1.default.join(root, 'product.json'), 'utf8')); + const baseDir = path_1.default.dirname(__dirname); + const appRoot = path_1.default.join(buildDir, `VSCode-darwin-${arch}`); const appName = product.nameLong + '.app'; - const appFrameworkPath = path.join(appRoot, appName, 'Contents', 'Frameworks'); + const appFrameworkPath = path_1.default.join(appRoot, appName, 'Contents', 'Frameworks'); const helperAppBaseName = product.nameShort; const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app'; const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app'; const pluginHelperAppName = helperAppBaseName + ' Helper (Plugin).app'; - const infoPlistPath = path.resolve(appRoot, appName, 'Contents', 'Info.plist'); + const infoPlistPath = path_1.default.resolve(appRoot, appName, 'Contents', 'Info.plist'); const defaultOpts = { - app: path.join(appRoot, appName), + app: path_1.default.join(appRoot, appName), platform: 'darwin', - entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'app-entitlements.plist'), - 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'app-entitlements.plist'), + entitlements: path_1.default.join(baseDir, 'azure-pipelines', 'darwin', 'app-entitlements.plist'), + 'entitlements-inherit': path_1.default.join(baseDir, 'azure-pipelines', 'darwin', 'app-entitlements.plist'), hardenedRuntime: true, 'pre-auto-entitlements': false, 'pre-embed-provisioning-profile': false, - keychain: path.join(tempDir, 'buildagent.keychain'), + keychain: path_1.default.join(tempDir, 'buildagent.keychain'), version: getElectronVersion(), identity, 'gatekeeper-assess': false @@ -58,21 +61,21 @@ async function main(buildDir) { }; const gpuHelperOpts = { ...defaultOpts, - app: path.join(appFrameworkPath, gpuHelperAppName), - entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'), - 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'), + app: path_1.default.join(appFrameworkPath, 
gpuHelperAppName), + entitlements: path_1.default.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'), + 'entitlements-inherit': path_1.default.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'), }; const rendererHelperOpts = { ...defaultOpts, - app: path.join(appFrameworkPath, rendererHelperAppName), - entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'), - 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'), + app: path_1.default.join(appFrameworkPath, rendererHelperAppName), + entitlements: path_1.default.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'), + 'entitlements-inherit': path_1.default.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'), }; const pluginHelperOpts = { ...defaultOpts, - app: path.join(appFrameworkPath, pluginHelperAppName), - entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'), - 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'), + app: path_1.default.join(appFrameworkPath, pluginHelperAppName), + entitlements: path_1.default.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'), + 'entitlements-inherit': path_1.default.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'), }; // Only overwrite plist entries for x64 and arm64 builds, // universal will get its copy from the x64 build. @@ -99,10 +102,10 @@ async function main(buildDir) { `${infoPlistPath}` ]); } - await codesign.signAsync(gpuHelperOpts); - await codesign.signAsync(rendererHelperOpts); - await codesign.signAsync(pluginHelperOpts); - await codesign.signAsync(appOpts); + await electron_osx_sign_1.default.signAsync(gpuHelperOpts); + await electron_osx_sign_1.default.signAsync(rendererHelperOpts); + await electron_osx_sign_1.default.signAsync(pluginHelperOpts); + await electron_osx_sign_1.default.signAsync(appOpts); } if (require.main === module) { main(process.argv[2]).catch(err => { diff --git a/build/darwin/sign.ts b/build/darwin/sign.ts index 5b3413b79e1..ecf162743ef 100644 --- a/build/darwin/sign.ts +++ b/build/darwin/sign.ts @@ -3,9 +3,9 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; -import * as codesign from 'electron-osx-sign'; +import fs from 'fs'; +import path from 'path'; +import codesign from 'electron-osx-sign'; import { spawn } from '@malept/cross-spawn-promise'; const root = path.dirname(path.dirname(__dirname)); diff --git a/build/darwin/verify-macho.js b/build/darwin/verify-macho.js index 947184324e2..2df99a35142 100644 --- a/build/darwin/verify-macho.js +++ b/build/darwin/verify-macho.js @@ -3,9 +3,45 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const path = require("path"); +const assert_1 = __importDefault(require("assert")); +const path = __importStar(require("path")); const promises_1 = require("fs/promises"); const cross_spawn_promise_1 = require("@malept/cross-spawn-promise"); const MACHO_PREFIX = 'Mach-O '; @@ -78,7 +114,7 @@ async function checkMachOFiles(appPath, arch) { } else if (header_magic === MACHO_UNIVERSAL_MAGIC_LE) { const num_binaries = header.readUInt32BE(4); - assert.equal(num_binaries, 2); + assert_1.default.equal(num_binaries, 2); const file_entries_size = file_header_entry_size * num_binaries; const file_entries = Buffer.alloc(file_entries_size); read(p, file_entries, 0, file_entries_size, 8).then(_ => { @@ -103,8 +139,8 @@ async function checkMachOFiles(appPath, arch) { return invalidFiles; } const archToCheck = process.argv[2]; -assert(process.env['APP_PATH'], 'APP_PATH not set'); -assert(archToCheck === 'x64' || archToCheck === 'arm64' || archToCheck === 'universal', `Invalid architecture ${archToCheck} to check`); +(0, assert_1.default)(process.env['APP_PATH'], 'APP_PATH not set'); +(0, assert_1.default)(archToCheck === 'x64' || archToCheck === 'arm64' || archToCheck === 'universal', `Invalid architecture ${archToCheck} to check`); checkMachOFiles(process.env['APP_PATH'], archToCheck).then(invalidFiles => { if (invalidFiles.length > 0) { console.error('\x1b[31mThe following files are built for the wrong architecture:\x1b[0m'); diff --git a/build/darwin/verify-macho.ts b/build/darwin/verify-macho.ts index f418c44a230..9e2f4b518f2 100644 --- a/build/darwin/verify-macho.ts +++ b/build/darwin/verify-macho.ts @@ -3,7 +3,7 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as assert from 'assert'; +import assert from 'assert'; import * as path from 'path'; import { open, stat, readdir, realpath } from 'fs/promises'; import { spawn, ExitCodeError } from '@malept/cross-spawn-promise'; diff --git a/build/lib/asar.js b/build/lib/asar.js index 19285ef7100..20c982a6621 100644 --- a/build/lib/asar.js +++ b/build/lib/asar.js @@ -3,18 +3,21 @@ * Copyright (c) Microsoft Corporation. All rights reserved. 
* Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.createAsar = createAsar; -const path = require("path"); -const es = require("event-stream"); +const path_1 = __importDefault(require("path")); +const event_stream_1 = __importDefault(require("event-stream")); const pickle = require('chromium-pickle-js'); const Filesystem = require('asar/lib/filesystem'); -const VinylFile = require("vinyl"); -const minimatch = require("minimatch"); +const vinyl_1 = __importDefault(require("vinyl")); +const minimatch_1 = __importDefault(require("minimatch")); function createAsar(folderPath, unpackGlobs, skipGlobs, duplicateGlobs, destFilename) { const shouldUnpackFile = (file) => { for (let i = 0; i < unpackGlobs.length; i++) { - if (minimatch(file.relative, unpackGlobs[i])) { + if ((0, minimatch_1.default)(file.relative, unpackGlobs[i])) { return true; } } @@ -22,7 +25,7 @@ function createAsar(folderPath, unpackGlobs, skipGlobs, duplicateGlobs, destFile }; const shouldSkipFile = (file) => { for (const skipGlob of skipGlobs) { - if (minimatch(file.relative, skipGlob)) { + if ((0, minimatch_1.default)(file.relative, skipGlob)) { return true; } } @@ -32,7 +35,7 @@ function createAsar(folderPath, unpackGlobs, skipGlobs, duplicateGlobs, destFile // node_modules.asar and node_modules const shouldDuplicateFile = (file) => { for (const duplicateGlob of duplicateGlobs) { - if (minimatch(file.relative, duplicateGlob)) { + if ((0, minimatch_1.default)(file.relative, duplicateGlob)) { return true; } } @@ -75,7 +78,7 @@ function createAsar(folderPath, unpackGlobs, skipGlobs, duplicateGlobs, destFile // Create a closure capturing `onFileInserted`. 
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}).then(() => onFileInserted(), () => onFileInserted()); }; - return es.through(function (file) { + return event_stream_1.default.through(function (file) { if (file.stat.isDirectory()) { return; } @@ -83,7 +86,7 @@ function createAsar(folderPath, unpackGlobs, skipGlobs, duplicateGlobs, destFile throw new Error(`unknown item in stream!`); } if (shouldSkipFile(file)) { - this.queue(new VinylFile({ + this.queue(new vinyl_1.default({ base: '.', path: file.path, stat: file.stat, @@ -92,7 +95,7 @@ function createAsar(folderPath, unpackGlobs, skipGlobs, duplicateGlobs, destFile return; } if (shouldDuplicateFile(file)) { - this.queue(new VinylFile({ + this.queue(new vinyl_1.default({ base: '.', path: file.path, stat: file.stat, @@ -103,10 +106,10 @@ function createAsar(folderPath, unpackGlobs, skipGlobs, duplicateGlobs, destFile insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack); if (shouldUnpack) { // The file goes outside of xx.asar, in a folder xx.asar.unpacked - const relative = path.relative(folderPath, file.path); - this.queue(new VinylFile({ + const relative = path_1.default.relative(folderPath, file.path); + this.queue(new vinyl_1.default({ base: '.', - path: path.join(destFilename + '.unpacked', relative), + path: path_1.default.join(destFilename + '.unpacked', relative), stat: file.stat, contents: file.contents })); @@ -129,7 +132,7 @@ function createAsar(folderPath, unpackGlobs, skipGlobs, duplicateGlobs, destFile } const contents = Buffer.concat(out); out.length = 0; - this.queue(new VinylFile({ + this.queue(new vinyl_1.default({ base: '.', path: destFilename, contents: contents diff --git a/build/lib/asar.ts b/build/lib/asar.ts index 0b225ab1624..5f2df925bde 100644 --- a/build/lib/asar.ts +++ b/build/lib/asar.ts @@ -3,12 +3,12 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as path from 'path'; -import * as es from 'event-stream'; +import path from 'path'; +import es from 'event-stream'; const pickle = require('chromium-pickle-js'); const Filesystem = require('asar/lib/filesystem'); -import * as VinylFile from 'vinyl'; -import * as minimatch from 'minimatch'; +import VinylFile from 'vinyl'; +import minimatch from 'minimatch'; declare class AsarFilesystem { readonly header: unknown; diff --git a/build/lib/builtInExtensions.js b/build/lib/builtInExtensions.js index ac784c03506..400ca6885a8 100644 --- a/build/lib/builtInExtensions.js +++ b/build/lib/builtInExtensions.js @@ -3,39 +3,75 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.getExtensionStream = getExtensionStream; exports.getBuiltInExtensions = getBuiltInExtensions; -const fs = require("fs"); -const path = require("path"); -const os = require("os"); -const rimraf = require("rimraf"); -const es = require("event-stream"); -const rename = require("gulp-rename"); -const vfs = require("vinyl-fs"); -const ext = require("./extensions"); -const fancyLog = require("fancy-log"); -const ansiColors = require("ansi-colors"); -const root = path.dirname(path.dirname(__dirname)); -const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8')); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const os_1 = __importDefault(require("os")); +const rimraf_1 = __importDefault(require("rimraf")); +const event_stream_1 = __importDefault(require("event-stream")); +const gulp_rename_1 = __importDefault(require("gulp-rename")); +const vinyl_fs_1 = __importDefault(require("vinyl-fs")); +const ext = __importStar(require("./extensions")); +const fancy_log_1 = __importDefault(require("fancy-log")); +const ansi_colors_1 = __importDefault(require("ansi-colors")); +const root = path_1.default.dirname(path_1.default.dirname(__dirname)); +const productjson = JSON.parse(fs_1.default.readFileSync(path_1.default.join(__dirname, '../../product.json'), 'utf8')); const builtInExtensions = productjson.builtInExtensions || []; const webBuiltInExtensions = productjson.webBuiltInExtensions || []; -const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json'); +const controlFilePath = path_1.default.join(os_1.default.homedir(), '.vscode-oss-dev', 'extensions', 'control.json'); const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE']; function log(...messages) { if (ENABLE_LOGGING) { - fancyLog(...messages); + (0, fancy_log_1.default)(...messages); } } function getExtensionPath(extension) { - return path.join(root, '.build', 'builtInExtensions', extension.name); + return path_1.default.join(root, '.build', 'builtInExtensions', extension.name); } function isUpToDate(extension) { - const packagePath = path.join(getExtensionPath(extension), 'package.json'); - if (!fs.existsSync(packagePath)) { + const packagePath = path_1.default.join(getExtensionPath(extension), 'package.json'); + if (!fs_1.default.existsSync(packagePath)) { return false; } - const packageContents = fs.readFileSync(packagePath, { encoding: 'utf8' }); + const packageContents = fs_1.default.readFileSync(packagePath, { encoding: 'utf8' }); try { const diskVersion = 
JSON.parse(packageContents).version; return (diskVersion === extension.version); @@ -47,71 +83,71 @@ function isUpToDate(extension) { function getExtensionDownloadStream(extension) { const galleryServiceUrl = productjson.extensionsGallery?.serviceUrl; return (galleryServiceUrl ? ext.fromMarketplace(galleryServiceUrl, extension) : ext.fromGithub(extension)) - .pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`)); + .pipe((0, gulp_rename_1.default)(p => p.dirname = `${extension.name}/${p.dirname}`)); } function getExtensionStream(extension) { // if the extension exists on disk, use those files instead of downloading anew if (isUpToDate(extension)) { - log('[extensions]', `${extension.name}@${extension.version} up to date`, ansiColors.green('✔︎')); - return vfs.src(['**'], { cwd: getExtensionPath(extension), dot: true }) - .pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`)); + log('[extensions]', `${extension.name}@${extension.version} up to date`, ansi_colors_1.default.green('✔︎')); + return vinyl_fs_1.default.src(['**'], { cwd: getExtensionPath(extension), dot: true }) + .pipe((0, gulp_rename_1.default)(p => p.dirname = `${extension.name}/${p.dirname}`)); } return getExtensionDownloadStream(extension); } function syncMarketplaceExtension(extension) { const galleryServiceUrl = productjson.extensionsGallery?.serviceUrl; - const source = ansiColors.blue(galleryServiceUrl ? '[marketplace]' : '[github]'); + const source = ansi_colors_1.default.blue(galleryServiceUrl ? '[marketplace]' : '[github]'); if (isUpToDate(extension)) { - log(source, `${extension.name}@${extension.version}`, ansiColors.green('✔︎')); - return es.readArray([]); + log(source, `${extension.name}@${extension.version}`, ansi_colors_1.default.green('✔︎')); + return event_stream_1.default.readArray([]); } - rimraf.sync(getExtensionPath(extension)); + rimraf_1.default.sync(getExtensionPath(extension)); return getExtensionDownloadStream(extension) - .pipe(vfs.dest('.build/builtInExtensions')) - .on('end', () => log(source, extension.name, ansiColors.green('✔︎'))); + .pipe(vinyl_fs_1.default.dest('.build/builtInExtensions')) + .on('end', () => log(source, extension.name, ansi_colors_1.default.green('✔︎'))); } function syncExtension(extension, controlState) { if (extension.platforms) { const platforms = new Set(extension.platforms); if (!platforms.has(process.platform)) { - log(ansiColors.gray('[skip]'), `${extension.name}@${extension.version}: Platform '${process.platform}' not supported: [${extension.platforms}]`, ansiColors.green('✔︎')); - return es.readArray([]); + log(ansi_colors_1.default.gray('[skip]'), `${extension.name}@${extension.version}: Platform '${process.platform}' not supported: [${extension.platforms}]`, ansi_colors_1.default.green('✔︎')); + return event_stream_1.default.readArray([]); } } switch (controlState) { case 'disabled': - log(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name)); - return es.readArray([]); + log(ansi_colors_1.default.blue('[disabled]'), ansi_colors_1.default.gray(extension.name)); + return event_stream_1.default.readArray([]); case 'marketplace': return syncMarketplaceExtension(extension); default: - if (!fs.existsSync(controlState)) { - log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`)); - return es.readArray([]); + if (!fs_1.default.existsSync(controlState)) { + log(ansi_colors_1.default.red(`Error: Built-in extension '${extension.name}' is configured to run from 
'${controlState}' but that path does not exist.`)); + return event_stream_1.default.readArray([]); } - else if (!fs.existsSync(path.join(controlState, 'package.json'))) { - log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`)); - return es.readArray([]); + else if (!fs_1.default.existsSync(path_1.default.join(controlState, 'package.json'))) { + log(ansi_colors_1.default.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`)); + return event_stream_1.default.readArray([]); } - log(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎')); - return es.readArray([]); + log(ansi_colors_1.default.blue('[local]'), `${extension.name}: ${ansi_colors_1.default.cyan(controlState)}`, ansi_colors_1.default.green('✔︎')); + return event_stream_1.default.readArray([]); } } function readControlFile() { try { - return JSON.parse(fs.readFileSync(controlFilePath, 'utf8')); + return JSON.parse(fs_1.default.readFileSync(controlFilePath, 'utf8')); } catch (err) { return {}; } } function writeControlFile(control) { - fs.mkdirSync(path.dirname(controlFilePath), { recursive: true }); - fs.writeFileSync(controlFilePath, JSON.stringify(control, null, 2)); + fs_1.default.mkdirSync(path_1.default.dirname(controlFilePath), { recursive: true }); + fs_1.default.writeFileSync(controlFilePath, JSON.stringify(control, null, 2)); } function getBuiltInExtensions() { log('Synchronizing built-in extensions...'); - log(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`); + log(`You can manage built-in extensions with the ${ansi_colors_1.default.cyan('--builtin')} flag`); const control = readControlFile(); const streams = []; for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) { @@ -121,7 +157,7 @@ function getBuiltInExtensions() { } writeControlFile(control); return new Promise((resolve, reject) => { - es.merge(streams) + event_stream_1.default.merge(streams) .on('error', reject) .on('end', resolve); }); diff --git a/build/lib/builtInExtensions.ts b/build/lib/builtInExtensions.ts index 8b831d42d44..9b1ec7356ef 100644 --- a/build/lib/builtInExtensions.ts +++ b/build/lib/builtInExtensions.ts @@ -3,16 +3,16 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; -import * as os from 'os'; -import * as rimraf from 'rimraf'; -import * as es from 'event-stream'; -import * as rename from 'gulp-rename'; -import * as vfs from 'vinyl-fs'; +import fs from 'fs'; +import path from 'path'; +import os from 'os'; +import rimraf from 'rimraf'; +import es from 'event-stream'; +import rename from 'gulp-rename'; +import vfs from 'vinyl-fs'; import * as ext from './extensions'; -import * as fancyLog from 'fancy-log'; -import * as ansiColors from 'ansi-colors'; +import fancyLog from 'fancy-log'; +import ansiColors from 'ansi-colors'; import { Stream } from 'stream'; export interface IExtensionDefinition { diff --git a/build/lib/builtInExtensionsCG.js b/build/lib/builtInExtensionsCG.js index 6a1e5ea539e..3dc0ae27f0a 100644 --- a/build/lib/builtInExtensionsCG.js +++ b/build/lib/builtInExtensionsCG.js @@ -3,14 +3,17 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const fs = require("fs"); -const path = require("path"); -const url = require("url"); -const ansiColors = require("ansi-colors"); -const root = path.dirname(path.dirname(__dirname)); -const rootCG = path.join(root, 'extensionsCG'); -const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8')); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const url_1 = __importDefault(require("url")); +const ansi_colors_1 = __importDefault(require("ansi-colors")); +const root = path_1.default.dirname(path_1.default.dirname(__dirname)); +const rootCG = path_1.default.join(root, 'extensionsCG'); +const productjson = JSON.parse(fs_1.default.readFileSync(path_1.default.join(__dirname, '../../product.json'), 'utf8')); const builtInExtensions = productjson.builtInExtensions || []; const webBuiltInExtensions = productjson.webBuiltInExtensions || []; const token = process.env['GITHUB_TOKEN']; @@ -18,7 +21,7 @@ const contentBasePath = 'raw.githubusercontent.com'; const contentFileNames = ['package.json', 'package-lock.json']; async function downloadExtensionDetails(extension) { const extensionLabel = `${extension.name}@${extension.version}`; - const repository = url.parse(extension.repo).path.substr(1); + const repository = url_1.default.parse(extension.repo).path.substr(1); const repositoryContentBaseUrl = `https://${token ? 
`${token}@` : ''}${contentBasePath}/${repository}/v${extension.version}`; async function getContent(fileName) { try { @@ -42,16 +45,16 @@ async function downloadExtensionDetails(extension) { const results = await Promise.all(promises); for (const result of results) { if (result.body) { - const extensionFolder = path.join(rootCG, extension.name); - fs.mkdirSync(extensionFolder, { recursive: true }); - fs.writeFileSync(path.join(extensionFolder, result.fileName), result.body); - console.log(` - ${result.fileName} ${ansiColors.green('✔︎')}`); + const extensionFolder = path_1.default.join(rootCG, extension.name); + fs_1.default.mkdirSync(extensionFolder, { recursive: true }); + fs_1.default.writeFileSync(path_1.default.join(extensionFolder, result.fileName), result.body); + console.log(` - ${result.fileName} ${ansi_colors_1.default.green('✔︎')}`); } else if (result.body === undefined) { - console.log(` - ${result.fileName} ${ansiColors.yellow('⚠️')}`); + console.log(` - ${result.fileName} ${ansi_colors_1.default.yellow('⚠️')}`); } else { - console.log(` - ${result.fileName} ${ansiColors.red('🛑')}`); + console.log(` - ${result.fileName} ${ansi_colors_1.default.red('🛑')}`); } } // Validation @@ -68,10 +71,10 @@ async function main() { } } main().then(() => { - console.log(`Built-in extensions component data downloaded ${ansiColors.green('✔︎')}`); + console.log(`Built-in extensions component data downloaded ${ansi_colors_1.default.green('✔︎')}`); process.exit(0); }, err => { - console.log(`Built-in extensions component data could not be downloaded ${ansiColors.red('🛑')}`); + console.log(`Built-in extensions component data could not be downloaded ${ansi_colors_1.default.red('🛑')}`); console.error(err); process.exit(1); }); diff --git a/build/lib/builtInExtensionsCG.ts b/build/lib/builtInExtensionsCG.ts index 9d11dea3dca..4628b365a2e 100644 --- a/build/lib/builtInExtensionsCG.ts +++ b/build/lib/builtInExtensionsCG.ts @@ -3,10 +3,10 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; -import * as url from 'url'; -import ansiColors = require('ansi-colors'); +import fs from 'fs'; +import path from 'path'; +import url from 'url'; +import ansiColors from 'ansi-colors'; import { IExtensionDefinition } from './builtInExtensions'; const root = path.dirname(path.dirname(__dirname)); diff --git a/build/lib/bundle.js b/build/lib/bundle.js index 7f7e55963ac..f1490f4ad4b 100644 --- a/build/lib/bundle.js +++ b/build/lib/bundle.js @@ -3,12 +3,15 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.bundle = bundle; exports.removeAllTSBoilerplate = removeAllTSBoilerplate; -const fs = require("fs"); -const path = require("path"); -const vm = require("vm"); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const vm_1 = __importDefault(require("vm")); /** * Bundle `entryPoints` given config `config`. 
*/ @@ -30,8 +33,8 @@ function bundle(entryPoints, config, callback) { allMentionedModulesMap[excludedModule] = true; }); }); - const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js')); - const r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});'); + const code = require('fs').readFileSync(path_1.default.join(__dirname, '../../src/vs/loader.js')); + const r = vm_1.default.runInThisContext('(function(require, module, exports) { ' + code + '\n});'); const loaderModule = { exports: {} }; r.call({}, require, loaderModule, loaderModule.exports); const loader = loaderModule.exports; @@ -149,7 +152,7 @@ function extractStrings(destFiles) { _path = pieces[0]; } if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) { - const res = path.join(path.dirname(module), _path).replace(/\\/g, '/'); + const res = path_1.default.join(path_1.default.dirname(module), _path).replace(/\\/g, '/'); return prefix + res; } return prefix + _path; @@ -359,7 +362,7 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend, } function readFileAndRemoveBOM(path) { const BOM_CHAR_CODE = 65279; - let contents = fs.readFileSync(path, 'utf8'); + let contents = fs_1.default.readFileSync(path, 'utf8'); // Remove BOM if (contents.charCodeAt(0) === BOM_CHAR_CODE) { contents = contents.substring(1); diff --git a/build/lib/bundle.ts b/build/lib/bundle.ts index 47a686d0047..68182e6b85d 100644 --- a/build/lib/bundle.ts +++ b/build/lib/bundle.ts @@ -3,9 +3,9 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; -import * as vm from 'vm'; +import fs from 'fs'; +import path from 'path'; +import vm from 'vm'; interface IPosition { line: number; diff --git a/build/lib/compilation.js b/build/lib/compilation.js index 7b9d73facbb..841dbe13ecf 100644 --- a/build/lib/compilation.js +++ b/build/lib/compilation.js @@ -3,24 +3,60 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.watchApiProposalNamesTask = exports.compileApiProposalNamesTask = void 0; exports.transpileTask = transpileTask; exports.compileTask = compileTask; exports.watchTask = watchTask; -const es = require("event-stream"); -const fs = require("fs"); -const gulp = require("gulp"); -const path = require("path"); -const monacodts = require("./monaco-api"); -const nls = require("./nls"); +const event_stream_1 = __importDefault(require("event-stream")); +const fs_1 = __importDefault(require("fs")); +const gulp_1 = __importDefault(require("gulp")); +const path_1 = __importDefault(require("path")); +const monacodts = __importStar(require("./monaco-api")); +const nls = __importStar(require("./nls")); const reporter_1 = require("./reporter"); -const util = require("./util"); -const fancyLog = require("fancy-log"); -const ansiColors = require("ansi-colors"); -const os = require("os"); -const File = require("vinyl"); -const task = require("./task"); +const util = __importStar(require("./util")); +const fancy_log_1 = __importDefault(require("fancy-log")); +const ansi_colors_1 = __importDefault(require("ansi-colors")); +const os_1 = __importDefault(require("os")); +const vinyl_1 = __importDefault(require("vinyl")); +const task = __importStar(require("./task")); const index_1 = require("./mangle/index"); const postcss_1 = require("./postcss"); const ts = require("typescript"); @@ -28,7 +64,7 @@ const watch = require('./watch'); // --- gulp-tsb: compile and transpile -------------------------------- const reporter = (0, reporter_1.createReporter)(); function getTypeScriptCompilerOptions(src) { - const rootDir = path.join(__dirname, `../../${src}`); + const rootDir = path_1.default.join(__dirname, `../../${src}`); const options = {}; options.verbose = false; options.sourceMap = true; @@ -38,13 +74,13 @@ function getTypeScriptCompilerOptions(src) { options.rootDir = rootDir; options.baseUrl = rootDir; options.sourceRoot = util.toFileUri(rootDir); - options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1; + options.newLine = /\r\n/.test(fs_1.default.readFileSync(__filename, 'utf8')) ? 
0 : 1; return options; } function createCompile(src, { build, emitError, transpileOnly, preserveEnglish }) { const tsb = require('./tsb'); const sourcemaps = require('gulp-sourcemaps'); - const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json'); + const projectPath = path_1.default.join(__dirname, '../../', src, 'tsconfig.json'); const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) }; if (!build) { overrideOptions.inlineSourceMap = true; @@ -62,7 +98,7 @@ function createCompile(src, { build, emitError, transpileOnly, preserveEnglish } const isCSS = (f) => f.path.endsWith('.css') && !f.path.includes('fixtures'); const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path))); const postcssNesting = require('postcss-nesting'); - const input = es.through(); + const input = event_stream_1.default.through(); const output = input .pipe(util.$if(isUtf8Test, bom())) // this is required to preserve BOM in test files that loose it otherwise .pipe(util.$if(!build && isRuntimeJs, util.appendOwnPathSourceURL())) @@ -80,7 +116,7 @@ function createCompile(src, { build, emitError, transpileOnly, preserveEnglish } }))) .pipe(tsFilter.restore) .pipe(reporter.end(!!emitError)); - return es.duplex(input, output); + return event_stream_1.default.duplex(input, output); } pipeline.tsProjectSrc = () => { return compilation.src({ base: src }); @@ -91,31 +127,31 @@ function createCompile(src, { build, emitError, transpileOnly, preserveEnglish } function transpileTask(src, out, esbuild) { const task = () => { const transpile = createCompile(src, { build: false, emitError: true, transpileOnly: { esbuild }, preserveEnglish: false }); - const srcPipe = gulp.src(`${src}/**`, { base: `${src}` }); + const srcPipe = gulp_1.default.src(`${src}/**`, { base: `${src}` }); return srcPipe .pipe(transpile()) - .pipe(gulp.dest(out)); + .pipe(gulp_1.default.dest(out)); }; - task.taskName = `transpile-${path.basename(src)}`; + task.taskName = `transpile-${path_1.default.basename(src)}`; return task; } function compileTask(src, out, build, options = {}) { const task = () => { - if (os.totalmem() < 4_000_000_000) { + if (os_1.default.totalmem() < 4_000_000_000) { throw new Error('compilation requires 4GB of RAM'); } const compile = createCompile(src, { build, emitError: true, transpileOnly: false, preserveEnglish: !!options.preserveEnglish }); - const srcPipe = gulp.src(`${src}/**`, { base: `${src}` }); + const srcPipe = gulp_1.default.src(`${src}/**`, { base: `${src}` }); const generator = new MonacoGenerator(false); if (src === 'src') { generator.execute(); } // mangle: TypeScript to TypeScript - let mangleStream = es.through(); + let mangleStream = event_stream_1.default.through(); if (build && !options.disableMangle) { - let ts2tsMangler = new index_1.Mangler(compile.projectPath, (...data) => fancyLog(ansiColors.blue('[mangler]'), ...data), { mangleExports: true, manglePrivateFields: true }); + let ts2tsMangler = new index_1.Mangler(compile.projectPath, (...data) => (0, fancy_log_1.default)(ansi_colors_1.default.blue('[mangler]'), ...data), { mangleExports: true, manglePrivateFields: true }); const newContentsByFileName = ts2tsMangler.computeNewFileContents(new Set(['saveState'])); - mangleStream = es.through(async function write(data) { + mangleStream = event_stream_1.default.through(async function write(data) { const tsNormalPath = ts.normalizePath(data.path); const newContents = (await newContentsByFileName).get(tsNormalPath); if (newContents !== 
undefined) { @@ -134,27 +170,27 @@ function compileTask(src, out, build, options = {}) { .pipe(mangleStream) .pipe(generator.stream) .pipe(compile()) - .pipe(gulp.dest(out)); + .pipe(gulp_1.default.dest(out)); }; - task.taskName = `compile-${path.basename(src)}`; + task.taskName = `compile-${path_1.default.basename(src)}`; return task; } function watchTask(out, build, srcPath = 'src') { const task = () => { const compile = createCompile(srcPath, { build, emitError: false, transpileOnly: false, preserveEnglish: false }); - const src = gulp.src(`${srcPath}/**`, { base: srcPath }); + const src = gulp_1.default.src(`${srcPath}/**`, { base: srcPath }); const watchSrc = watch(`${srcPath}/**`, { base: srcPath, readDelay: 200 }); const generator = new MonacoGenerator(true); generator.execute(); return watchSrc .pipe(generator.stream) .pipe(util.incremental(compile, src, true)) - .pipe(gulp.dest(out)); + .pipe(gulp_1.default.dest(out)); }; - task.taskName = `watch-${path.basename(out)}`; + task.taskName = `watch-${path_1.default.basename(out)}`; return task; } -const REPO_SRC_FOLDER = path.join(__dirname, '../../src'); +const REPO_SRC_FOLDER = path_1.default.join(__dirname, '../../src'); class MonacoGenerator { _isWatch; stream; @@ -163,7 +199,7 @@ class MonacoGenerator { _declarationResolver; constructor(isWatch) { this._isWatch = isWatch; - this.stream = es.through(); + this.stream = event_stream_1.default.through(); this._watchedFiles = {}; const onWillReadFile = (moduleId, filePath) => { if (!this._isWatch) { @@ -173,7 +209,7 @@ class MonacoGenerator { return; } this._watchedFiles[filePath] = true; - fs.watchFile(filePath, () => { + fs_1.default.watchFile(filePath, () => { this._declarationResolver.invalidateCache(moduleId); this._executeSoon(); }); @@ -186,7 +222,7 @@ class MonacoGenerator { }; this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider); if (this._isWatch) { - fs.watchFile(monacodts.RECIPE_PATH, () => { + fs_1.default.watchFile(monacodts.RECIPE_PATH, () => { this._executeSoon(); }); } @@ -211,7 +247,7 @@ class MonacoGenerator { return r; } _log(message, ...rest) { - fancyLog(ansiColors.cyan('[monaco.d.ts]'), message, ...rest); + (0, fancy_log_1.default)(ansi_colors_1.default.cyan('[monaco.d.ts]'), message, ...rest); } execute() { const startTime = Date.now(); @@ -223,8 +259,8 @@ class MonacoGenerator { if (result.isTheSame) { return; } - fs.writeFileSync(result.filePath, result.content); - fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums); + fs_1.default.writeFileSync(result.filePath, result.content); + fs_1.default.writeFileSync(path_1.default.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums); this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`); if (!this._isWatch) { this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.'); @@ -234,21 +270,21 @@ class MonacoGenerator { function generateApiProposalNames() { let eol; try { - const src = fs.readFileSync('src/vs/platform/extensions/common/extensionsApiProposals.ts', 'utf-8'); + const src = fs_1.default.readFileSync('src/vs/platform/extensions/common/extensionsApiProposals.ts', 'utf-8'); const match = /\r?\n/m.exec(src); - eol = match ? match[0] : os.EOL; + eol = match ? 
match[0] : os_1.default.EOL; } catch { - eol = os.EOL; + eol = os_1.default.EOL; } const pattern = /vscode\.proposed\.([a-zA-Z\d]+)\.d\.ts$/; const versionPattern = /^\s*\/\/\s*version\s*:\s*(\d+)\s*$/mi; const proposals = new Map(); - const input = es.through(); + const input = event_stream_1.default.through(); const output = input .pipe(util.filter((f) => pattern.test(f.path))) - .pipe(es.through((f) => { - const name = path.basename(f.path); + .pipe(event_stream_1.default.through((f) => { + const name = path_1.default.basename(f.path); const match = pattern.exec(name); if (!match) { return; @@ -281,27 +317,27 @@ function generateApiProposalNames() { 'export type ApiProposalName = keyof typeof _allApiProposals;', '', ].join(eol); - this.emit('data', new File({ + this.emit('data', new vinyl_1.default({ path: 'vs/platform/extensions/common/extensionsApiProposals.ts', contents: Buffer.from(contents) })); this.emit('end'); })); - return es.duplex(input, output); + return event_stream_1.default.duplex(input, output); } const apiProposalNamesReporter = (0, reporter_1.createReporter)('api-proposal-names'); exports.compileApiProposalNamesTask = task.define('compile-api-proposal-names', () => { - return gulp.src('src/vscode-dts/**') + return gulp_1.default.src('src/vscode-dts/**') .pipe(generateApiProposalNames()) - .pipe(gulp.dest('src')) + .pipe(gulp_1.default.dest('src')) .pipe(apiProposalNamesReporter.end(true)); }); exports.watchApiProposalNamesTask = task.define('watch-api-proposal-names', () => { - const task = () => gulp.src('src/vscode-dts/**') + const task = () => gulp_1.default.src('src/vscode-dts/**') .pipe(generateApiProposalNames()) .pipe(apiProposalNamesReporter.end(true)); return watch('src/vscode-dts/**', { readDelay: 200 }) .pipe(util.debounce(task)) - .pipe(gulp.dest('src')); + .pipe(gulp_1.default.dest('src')); }); //# sourceMappingURL=compilation.js.map \ No newline at end of file diff --git a/build/lib/compilation.ts b/build/lib/compilation.ts index 124bcc17c17..6e1fcab5186 100644 --- a/build/lib/compilation.ts +++ b/build/lib/compilation.ts @@ -3,18 +3,18 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as es from 'event-stream'; -import * as fs from 'fs'; -import * as gulp from 'gulp'; -import * as path from 'path'; +import es from 'event-stream'; +import fs from 'fs'; +import gulp from 'gulp'; +import path from 'path'; import * as monacodts from './monaco-api'; import * as nls from './nls'; import { createReporter } from './reporter'; import * as util from './util'; -import * as fancyLog from 'fancy-log'; -import * as ansiColors from 'ansi-colors'; -import * as os from 'os'; -import * as File from 'vinyl'; +import fancyLog from 'fancy-log'; +import ansiColors from 'ansi-colors'; +import os from 'os'; +import File from 'vinyl'; import * as task from './task'; import { Mangler } from './mangle/index'; import { RawSourceMap } from 'source-map'; diff --git a/build/lib/date.js b/build/lib/date.js index 77fff0e5073..1ed884fb7ee 100644 --- a/build/lib/date.js +++ b/build/lib/date.js @@ -3,12 +3,15 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.writeISODate = writeISODate; exports.readISODate = readISODate; -const path = require("path"); -const fs = require("fs"); -const root = path.join(__dirname, '..', '..'); +const path_1 = __importDefault(require("path")); +const fs_1 = __importDefault(require("fs")); +const root = path_1.default.join(__dirname, '..', '..'); /** * Writes a `outDir/date` file with the contents of the build * so that other tasks during the build process can use it and @@ -16,17 +19,17 @@ const root = path.join(__dirname, '..', '..'); */ function writeISODate(outDir) { const result = () => new Promise((resolve, _) => { - const outDirectory = path.join(root, outDir); - fs.mkdirSync(outDirectory, { recursive: true }); + const outDirectory = path_1.default.join(root, outDir); + fs_1.default.mkdirSync(outDirectory, { recursive: true }); const date = new Date().toISOString(); - fs.writeFileSync(path.join(outDirectory, 'date'), date, 'utf8'); + fs_1.default.writeFileSync(path_1.default.join(outDirectory, 'date'), date, 'utf8'); resolve(); }); result.taskName = 'build-date-file'; return result; } function readISODate(outDir) { - const outDirectory = path.join(root, outDir); - return fs.readFileSync(path.join(outDirectory, 'date'), 'utf8'); + const outDirectory = path_1.default.join(root, outDir); + return fs_1.default.readFileSync(path_1.default.join(outDirectory, 'date'), 'utf8'); } //# sourceMappingURL=date.js.map \ No newline at end of file diff --git a/build/lib/date.ts b/build/lib/date.ts index 998e89f8e6a..8a933178952 100644 --- a/build/lib/date.ts +++ b/build/lib/date.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as path from 'path'; -import * as fs from 'fs'; +import path from 'path'; +import fs from 'fs'; const root = path.join(__dirname, '..', '..'); diff --git a/build/lib/dependencies.js b/build/lib/dependencies.js index 9bcd1204eab..04a09f98708 100644 --- a/build/lib/dependencies.js +++ b/build/lib/dependencies.js @@ -3,16 +3,19 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.getProductionDependencies = getProductionDependencies; -const fs = require("fs"); -const path = require("path"); -const cp = require("child_process"); -const root = fs.realpathSync(path.dirname(path.dirname(__dirname))); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const child_process_1 = __importDefault(require("child_process")); +const root = fs_1.default.realpathSync(path_1.default.dirname(path_1.default.dirname(__dirname))); function getNpmProductionDependencies(folder) { let raw; try { - raw = cp.execSync('npm ls --all --omit=dev --parseable', { cwd: folder, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, null] }); + raw = child_process_1.default.execSync('npm ls --all --omit=dev --parseable', { cwd: folder, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, null] }); } catch (err) { const regex = /^npm ERR! .*$/gm; @@ -34,16 +37,16 @@ function getNpmProductionDependencies(folder) { raw = err.stdout; } return raw.split(/\r?\n/).filter(line => { - return !!line.trim() && path.relative(root, line) !== path.relative(root, folder); + return !!line.trim() && path_1.default.relative(root, line) !== path_1.default.relative(root, folder); }); } function getProductionDependencies(folderPath) { const result = getNpmProductionDependencies(folderPath); // Account for distro npm dependencies - const realFolderPath = fs.realpathSync(folderPath); - const relativeFolderPath = path.relative(root, realFolderPath); + const realFolderPath = fs_1.default.realpathSync(folderPath); + const relativeFolderPath = path_1.default.relative(root, realFolderPath); const distroFolderPath = `${root}/.build/distro/npm/${relativeFolderPath}`; - if (fs.existsSync(distroFolderPath)) { + if (fs_1.default.existsSync(distroFolderPath)) { result.push(...getNpmProductionDependencies(distroFolderPath)); } return [...new Set(result)]; diff --git a/build/lib/dependencies.ts b/build/lib/dependencies.ts index 45368ffd26d..a5bc70088a7 100644 --- a/build/lib/dependencies.ts +++ b/build/lib/dependencies.ts @@ -3,9 +3,9 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; -import * as cp from 'child_process'; +import fs from 'fs'; +import path from 'path'; +import cp from 'child_process'; const root = fs.realpathSync(path.dirname(path.dirname(__dirname))); function getNpmProductionDependencies(folder: string): string[] { diff --git a/build/lib/electron.js b/build/lib/electron.js index 99252e4e64a..f0eb583f2cb 100644 --- a/build/lib/electron.js +++ b/build/lib/electron.js @@ -3,19 +3,55 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.config = void 0; -const fs = require("fs"); -const path = require("path"); -const vfs = require("vinyl-fs"); -const filter = require("gulp-filter"); -const util = require("./util"); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const vinyl_fs_1 = __importDefault(require("vinyl-fs")); +const gulp_filter_1 = __importDefault(require("gulp-filter")); +const util = __importStar(require("./util")); const getVersion_1 = require("./getVersion"); function isDocumentSuffix(str) { return str === 'document' || str === 'script' || str === 'file' || str === 'source code'; } -const root = path.dirname(path.dirname(__dirname)); -const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8')); +const root = path_1.default.dirname(path_1.default.dirname(__dirname)); +const product = JSON.parse(fs_1.default.readFileSync(path_1.default.join(root, 'product.json'), 'utf8')); const commit = (0, getVersion_1.getVersion)(root); function createTemplate(input) { return (params) => { @@ -24,7 +60,7 @@ function createTemplate(input) { }); }; } -const darwinCreditsTemplate = product.darwinCredits && createTemplate(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8')); +const darwinCreditsTemplate = product.darwinCredits && createTemplate(fs_1.default.readFileSync(path_1.default.join(root, product.darwinCredits), 'utf8')); /** * Generate a `DarwinDocumentType` given a list of file extensions, an icon name, and an optional suffix or file type name. 
* @param extensions A list of file extensions, such as `['bat', 'cmd']` @@ -183,7 +219,7 @@ exports.config = { token: process.env['GITHUB_TOKEN'], repo: product.electronRepository || undefined, validateChecksum: true, - checksumFile: path.join(root, 'build', 'checksums', 'electron.txt'), + checksumFile: path_1.default.join(root, 'build', 'checksums', 'electron.txt'), }; function getElectron(arch) { return () => { @@ -196,18 +232,18 @@ function getElectron(arch) { ffmpegChromium: false, keepDefaultApp: true }; - return vfs.src('package.json') + return vinyl_fs_1.default.src('package.json') .pipe(json({ name: product.nameShort })) .pipe(electron(electronOpts)) - .pipe(filter(['**', '!**/app/package.json'])) - .pipe(vfs.dest('.build/electron')); + .pipe((0, gulp_filter_1.default)(['**', '!**/app/package.json'])) + .pipe(vinyl_fs_1.default.dest('.build/electron')); }; } async function main(arch = process.arch) { const version = electronVersion; - const electronPath = path.join(root, '.build', 'electron'); - const versionFile = path.join(electronPath, 'version'); - const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`; + const electronPath = path_1.default.join(root, '.build', 'electron'); + const versionFile = path_1.default.join(electronPath, 'version'); + const isUpToDate = fs_1.default.existsSync(versionFile) && fs_1.default.readFileSync(versionFile, 'utf8') === `${version}`; if (!isUpToDate) { await util.rimraf(electronPath)(); await util.streamToPromise(getElectron(arch)()); diff --git a/build/lib/electron.ts b/build/lib/electron.ts index da2387f68f6..57b27022df8 100644 --- a/build/lib/electron.ts +++ b/build/lib/electron.ts @@ -3,10 +3,10 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; -import * as vfs from 'vinyl-fs'; -import * as filter from 'gulp-filter'; +import fs from 'fs'; +import path from 'path'; +import vfs from 'vinyl-fs'; +import filter from 'gulp-filter'; import * as util from './util'; import { getVersion } from './getVersion'; diff --git a/build/lib/extensions.js b/build/lib/extensions.js index 8630c8fa061..6afa72e5bfa 100644 --- a/build/lib/extensions.js +++ b/build/lib/extensions.js @@ -3,6 +3,42 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.fromMarketplace = fromMarketplace; exports.fromGithub = fromGithub; @@ -14,35 +50,35 @@ exports.scanBuiltinExtensions = scanBuiltinExtensions; exports.translatePackageJSON = translatePackageJSON; exports.webpackExtensions = webpackExtensions; exports.buildExtensionMedia = buildExtensionMedia; -const es = require("event-stream"); -const fs = require("fs"); -const cp = require("child_process"); -const glob = require("glob"); -const gulp = require("gulp"); -const path = require("path"); -const File = require("vinyl"); +const event_stream_1 = __importDefault(require("event-stream")); +const fs_1 = __importDefault(require("fs")); +const child_process_1 = __importDefault(require("child_process")); +const glob_1 = __importDefault(require("glob")); +const gulp_1 = __importDefault(require("gulp")); +const path_1 = __importDefault(require("path")); +const vinyl_1 = __importDefault(require("vinyl")); const stats_1 = require("./stats"); -const util2 = require("./util"); +const util2 = __importStar(require("./util")); const vzip = require('gulp-vinyl-zip'); -const filter = require("gulp-filter"); -const rename = require("gulp-rename"); -const fancyLog = require("fancy-log"); -const ansiColors = require("ansi-colors"); -const buffer = require('gulp-buffer'); -const jsoncParser = require("jsonc-parser"); +const gulp_filter_1 = __importDefault(require("gulp-filter")); +const gulp_rename_1 = __importDefault(require("gulp-rename")); +const fancy_log_1 = __importDefault(require("fancy-log")); +const ansi_colors_1 = __importDefault(require("ansi-colors")); +const gulp_buffer_1 = __importDefault(require("gulp-buffer")); +const jsoncParser = __importStar(require("jsonc-parser")); const dependencies_1 = require("./dependencies"); const builtInExtensions_1 = require("./builtInExtensions"); const getVersion_1 = require("./getVersion"); const fetch_1 = require("./fetch"); -const root = path.dirname(path.dirname(__dirname)); +const root = path_1.default.dirname(path_1.default.dirname(__dirname)); const commit = (0, getVersion_1.getVersion)(root); const sourceMappingURLBase = `https://main.vscode-cdn.net/sourcemaps/${commit}`; function minifyExtensionResources(input) { - const jsonFilter = filter(['**/*.json', '**/*.code-snippets'], { restore: true }); + const jsonFilter = (0, gulp_filter_1.default)(['**/*.json', '**/*.code-snippets'], { restore: true }); return input .pipe(jsonFilter) - .pipe(buffer()) - .pipe(es.mapSync((f) => { + .pipe((0, gulp_buffer_1.default)()) + .pipe(event_stream_1.default.mapSync((f) => { const errors = []; const value = jsoncParser.parse(f.contents.toString('utf8'), errors, { 
allowTrailingComma: true }); if (errors.length === 0) { @@ -54,11 +90,11 @@ function minifyExtensionResources(input) { .pipe(jsonFilter.restore); } function updateExtensionPackageJSON(input, update) { - const packageJsonFilter = filter('extensions/*/package.json', { restore: true }); + const packageJsonFilter = (0, gulp_filter_1.default)('extensions/*/package.json', { restore: true }); return input .pipe(packageJsonFilter) - .pipe(buffer()) - .pipe(es.mapSync((f) => { + .pipe((0, gulp_buffer_1.default)()) + .pipe(event_stream_1.default.mapSync((f) => { const data = JSON.parse(f.contents.toString('utf8')); f.contents = Buffer.from(JSON.stringify(update(data))); return f; @@ -67,7 +103,7 @@ function updateExtensionPackageJSON(input, update) { } function fromLocal(extensionPath, forWeb, disableMangle) { const webpackConfigFileName = forWeb ? 'extension-browser.webpack.config.js' : 'extension.webpack.config.js'; - const isWebPacked = fs.existsSync(path.join(extensionPath, webpackConfigFileName)); + const isWebPacked = fs_1.default.existsSync(path_1.default.join(extensionPath, webpackConfigFileName)); let input = isWebPacked ? fromLocalWebpack(extensionPath, webpackConfigFileName, disableMangle) : fromLocalNormal(extensionPath); @@ -88,11 +124,11 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName, disableMangle) { const vsce = require('@vscode/vsce'); const webpack = require('webpack'); const webpackGulp = require('webpack-stream'); - const result = es.through(); + const result = event_stream_1.default.through(); const packagedDependencies = []; - const packageJsonConfig = require(path.join(extensionPath, 'package.json')); + const packageJsonConfig = require(path_1.default.join(extensionPath, 'package.json')); if (packageJsonConfig.dependencies) { - const webpackRootConfig = require(path.join(extensionPath, webpackConfigFileName)); + const webpackRootConfig = require(path_1.default.join(extensionPath, webpackConfigFileName)); for (const key in webpackRootConfig.externals) { if (key in packageJsonConfig.dependencies) { packagedDependencies.push(key); @@ -106,19 +142,19 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName, disableMangle) { // as a temporary workaround. vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.None, packagedDependencies }).then(fileNames => { const files = fileNames - .map(fileName => path.join(extensionPath, fileName)) - .map(filePath => new File({ + .map(fileName => path_1.default.join(extensionPath, fileName)) + .map(filePath => new vinyl_1.default({ path: filePath, - stat: fs.statSync(filePath), + stat: fs_1.default.statSync(filePath), base: extensionPath, - contents: fs.createReadStream(filePath) + contents: fs_1.default.createReadStream(filePath) })); // check for a webpack configuration files, then invoke webpack // and merge its output with the files stream. 
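// Illustrative aside (assumed example, not part of the patch): the emitted code in these
// hunks now calls imported CommonJS functions as `(0, gulp_filter_1.default)(...)` instead
// of `gulp_filter_1.default(...)`. The comma expression yields the bare function, so the
// call carries no `this` receiver from the namespace object. Minimal TypeScript sketch:
const ns = {
	whoAmI(this: unknown): string {
		return this === ns ? 'called as a method' : 'called without a receiver';
	}
};
console.log(ns.whoAmI());       // "called as a method"
console.log((0, ns.whoAmI)());  // "called without a receiver"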
- const webpackConfigLocations = glob.sync(path.join(extensionPath, '**', webpackConfigFileName), { ignore: ['**/node_modules'] }); + const webpackConfigLocations = glob_1.default.sync(path_1.default.join(extensionPath, '**', webpackConfigFileName), { ignore: ['**/node_modules'] }); const webpackStreams = webpackConfigLocations.flatMap(webpackConfigPath => { const webpackDone = (err, stats) => { - fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`); + (0, fancy_log_1.default)(`Bundled extension: ${ansi_colors_1.default.yellow(path_1.default.join(path_1.default.basename(extensionPath), path_1.default.relative(extensionPath, webpackConfigPath)))}...`); if (err) { result.emit('error', err); } @@ -149,28 +185,28 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName, disableMangle) { } } } - const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path); + const relativeOutputPath = path_1.default.relative(extensionPath, webpackConfig.output.path); return webpackGulp(webpackConfig, webpack, webpackDone) - .pipe(es.through(function (data) { + .pipe(event_stream_1.default.through(function (data) { data.stat = data.stat || {}; data.base = extensionPath; this.emit('data', data); })) - .pipe(es.through(function (data) { + .pipe(event_stream_1.default.through(function (data) { // source map handling: // * rewrite sourceMappingURL // * save to disk so that upload-task picks this up - if (path.extname(data.basename) === '.js') { + if (path_1.default.extname(data.basename) === '.js') { const contents = data.contents.toString('utf8'); data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) { - return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`; + return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path_1.default.basename(extensionPath)}/${relativeOutputPath}/${g1}`; }), 'utf8'); } this.emit('data', data); })); }); }); - es.merge(...webpackStreams, es.readArray(files)) + event_stream_1.default.merge(...webpackStreams, event_stream_1.default.readArray(files)) // .pipe(es.through(function (data) { // // debug // console.log('out', data.path, data.contents.length); @@ -182,25 +218,25 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName, disableMangle) { console.error(packagedDependencies); result.emit('error', err); }); - return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath))); + return result.pipe((0, stats_1.createStatsStream)(path_1.default.basename(extensionPath))); } function fromLocalNormal(extensionPath) { const vsce = require('@vscode/vsce'); - const result = es.through(); + const result = event_stream_1.default.through(); vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Npm }) .then(fileNames => { const files = fileNames - .map(fileName => path.join(extensionPath, fileName)) - .map(filePath => new File({ + .map(fileName => path_1.default.join(extensionPath, fileName)) + .map(filePath => new vinyl_1.default({ path: filePath, - stat: fs.statSync(filePath), + stat: fs_1.default.statSync(filePath), base: extensionPath, - contents: fs.createReadStream(filePath) + contents: fs_1.default.createReadStream(filePath) })); - es.readArray(files).pipe(result); + event_stream_1.default.readArray(files).pipe(result); }) .catch(err => result.emit('error', err)); - return result.pipe((0, 
stats_1.createStatsStream)(path.basename(extensionPath))); + return result.pipe((0, stats_1.createStatsStream)(path_1.default.basename(extensionPath))); } const userAgent = 'VSCode Build'; const baseHeaders = { @@ -212,8 +248,8 @@ function fromMarketplace(serviceUrl, { name: extensionName, version, sha256, met const json = require('gulp-json-editor'); const [publisher, name] = extensionName.split('.'); const url = `${serviceUrl}/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`; - fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...'); - const packageJsonFilter = filter('package.json', { restore: true }); + (0, fancy_log_1.default)('Downloading extension:', ansi_colors_1.default.yellow(`${extensionName}@${version}`), '...'); + const packageJsonFilter = (0, gulp_filter_1.default)('package.json', { restore: true }); return (0, fetch_1.fetchUrls)('', { base: url, nodeFetchOptions: { @@ -222,28 +258,28 @@ function fromMarketplace(serviceUrl, { name: extensionName, version, sha256, met checksumSha256: sha256 }) .pipe(vzip.src()) - .pipe(filter('extension/**')) - .pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, ''))) + .pipe((0, gulp_filter_1.default)('extension/**')) + .pipe((0, gulp_rename_1.default)(p => p.dirname = p.dirname.replace(/^extension\/?/, ''))) .pipe(packageJsonFilter) - .pipe(buffer()) + .pipe((0, gulp_buffer_1.default)()) .pipe(json({ __metadata: metadata })) .pipe(packageJsonFilter.restore); } function fromGithub({ name, version, repo, sha256, metadata }) { const json = require('gulp-json-editor'); - fancyLog('Downloading extension from GH:', ansiColors.yellow(`${name}@${version}`), '...'); - const packageJsonFilter = filter('package.json', { restore: true }); + (0, fancy_log_1.default)('Downloading extension from GH:', ansi_colors_1.default.yellow(`${name}@${version}`), '...'); + const packageJsonFilter = (0, gulp_filter_1.default)('package.json', { restore: true }); return (0, fetch_1.fetchGithub)(new URL(repo).pathname, { version, name: name => name.endsWith('.vsix'), checksumSha256: sha256 }) - .pipe(buffer()) + .pipe((0, gulp_buffer_1.default)()) .pipe(vzip.src()) - .pipe(filter('extension/**')) - .pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, ''))) + .pipe((0, gulp_filter_1.default)('extension/**')) + .pipe((0, gulp_rename_1.default)(p => p.dirname = p.dirname.replace(/^extension\/?/, ''))) .pipe(packageJsonFilter) - .pipe(buffer()) + .pipe((0, gulp_buffer_1.default)()) .pipe(json({ __metadata: metadata })) .pipe(packageJsonFilter.restore); } @@ -269,7 +305,7 @@ const marketplaceWebExtensionsExclude = new Set([ 'ms-vscode.js-debug', 'ms-vscode.vscode-js-profile-table' ]); -const productJson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8')); +const productJson = JSON.parse(fs_1.default.readFileSync(path_1.default.join(__dirname, '../../product.json'), 'utf8')); const builtInExtensions = productJson.builtInExtensions || []; const webBuiltInExtensions = productJson.webBuiltInExtensions || []; /** @@ -326,7 +362,7 @@ function packageNativeLocalExtensionsStream(forWeb, disableMangle) { * @returns a stream */ function packageAllLocalExtensionsStream(forWeb, disableMangle) { - return es.merge([ + return event_stream_1.default.merge([ packageNonNativeLocalExtensionsStream(forWeb, disableMangle), packageNativeLocalExtensionsStream(forWeb, disableMangle) ]); @@ -338,20 +374,20 @@ function packageAllLocalExtensionsStream(forWeb, disableMangle) { */ function 
doPackageLocalExtensionsStream(forWeb, disableMangle, native) { const nativeExtensionsSet = new Set(nativeExtensions); - const localExtensionsDescriptions = (glob.sync('extensions/*/package.json') + const localExtensionsDescriptions = (glob_1.default.sync('extensions/*/package.json') .map(manifestPath => { - const absoluteManifestPath = path.join(root, manifestPath); - const extensionPath = path.dirname(path.join(root, manifestPath)); - const extensionName = path.basename(extensionPath); + const absoluteManifestPath = path_1.default.join(root, manifestPath); + const extensionPath = path_1.default.dirname(path_1.default.join(root, manifestPath)); + const extensionName = path_1.default.basename(extensionPath); return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath }; }) .filter(({ name }) => native ? nativeExtensionsSet.has(name) : !nativeExtensionsSet.has(name)) .filter(({ name }) => excludedExtensions.indexOf(name) === -1) .filter(({ name }) => builtInExtensions.every(b => b.name !== name)) .filter(({ manifestPath }) => (forWeb ? isWebExtension(require(manifestPath)) : true))); - const localExtensionsStream = minifyExtensionResources(es.merge(...localExtensionsDescriptions.map(extension => { + const localExtensionsStream = minifyExtensionResources(event_stream_1.default.merge(...localExtensionsDescriptions.map(extension => { return fromLocal(extension.path, forWeb, disableMangle) - .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`)); + .pipe((0, gulp_rename_1.default)(p => p.dirname = `extensions/${extension.name}/${p.dirname}`)); }))); let result; if (forWeb) { @@ -360,10 +396,10 @@ function doPackageLocalExtensionsStream(forWeb, disableMangle, native) { else { // also include shared production node modules const productionDependencies = (0, dependencies_1.getProductionDependencies)('extensions/'); - const dependenciesSrc = productionDependencies.map(d => path.relative(root, d)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]).flat(); - result = es.merge(localExtensionsStream, gulp.src(dependenciesSrc, { base: '.' }) - .pipe(util2.cleanNodeModules(path.join(root, 'build', '.moduleignore'))) - .pipe(util2.cleanNodeModules(path.join(root, 'build', `.moduleignore.${process.platform}`)))); + const dependenciesSrc = productionDependencies.map(d => path_1.default.relative(root, d)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]).flat(); + result = event_stream_1.default.merge(localExtensionsStream, gulp_1.default.src(dependenciesSrc, { base: '.' }) + .pipe(util2.cleanNodeModules(path_1.default.join(root, 'build', '.moduleignore'))) + .pipe(util2.cleanNodeModules(path_1.default.join(root, 'build', `.moduleignore.${process.platform}`)))); } return (result .pipe(util2.setExecutableBit(['**/*.sh']))); @@ -373,9 +409,9 @@ function packageMarketplaceExtensionsStream(forWeb) { ...builtInExtensions.filter(({ name }) => (forWeb ? !marketplaceWebExtensionsExclude.has(name) : true)), ...(forWeb ? 
webBuiltInExtensions : []) ]; - const marketplaceExtensionsStream = minifyExtensionResources(es.merge(...marketplaceExtensionsDescriptions + const marketplaceExtensionsStream = minifyExtensionResources(event_stream_1.default.merge(...marketplaceExtensionsDescriptions .map(extension => { - const src = (0, builtInExtensions_1.getExtensionStream)(extension).pipe(rename(p => p.dirname = `extensions/${p.dirname}`)); + const src = (0, builtInExtensions_1.getExtensionStream)(extension).pipe((0, gulp_rename_1.default)(p => p.dirname = `extensions/${p.dirname}`)); return updateExtensionPackageJSON(src, (data) => { delete data.scripts; delete data.dependencies; @@ -389,30 +425,30 @@ function packageMarketplaceExtensionsStream(forWeb) { function scanBuiltinExtensions(extensionsRoot, exclude = []) { const scannedExtensions = []; try { - const extensionsFolders = fs.readdirSync(extensionsRoot); + const extensionsFolders = fs_1.default.readdirSync(extensionsRoot); for (const extensionFolder of extensionsFolders) { if (exclude.indexOf(extensionFolder) >= 0) { continue; } - const packageJSONPath = path.join(extensionsRoot, extensionFolder, 'package.json'); - if (!fs.existsSync(packageJSONPath)) { + const packageJSONPath = path_1.default.join(extensionsRoot, extensionFolder, 'package.json'); + if (!fs_1.default.existsSync(packageJSONPath)) { continue; } - const packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8')); + const packageJSON = JSON.parse(fs_1.default.readFileSync(packageJSONPath).toString('utf8')); if (!isWebExtension(packageJSON)) { continue; } - const children = fs.readdirSync(path.join(extensionsRoot, extensionFolder)); + const children = fs_1.default.readdirSync(path_1.default.join(extensionsRoot, extensionFolder)); const packageNLSPath = children.filter(child => child === 'package.nls.json')[0]; - const packageNLS = packageNLSPath ? JSON.parse(fs.readFileSync(path.join(extensionsRoot, extensionFolder, packageNLSPath)).toString()) : undefined; + const packageNLS = packageNLSPath ? JSON.parse(fs_1.default.readFileSync(path_1.default.join(extensionsRoot, extensionFolder, packageNLSPath)).toString()) : undefined; const readme = children.filter(child => /^readme(\.txt|\.md|)$/i.test(child))[0]; const changelog = children.filter(child => /^changelog(\.txt|\.md|)$/i.test(child))[0]; scannedExtensions.push({ extensionPath: extensionFolder, packageJSON, packageNLS, - readmePath: readme ? path.join(extensionFolder, readme) : undefined, - changelogPath: changelog ? path.join(extensionFolder, changelog) : undefined, + readmePath: readme ? path_1.default.join(extensionFolder, readme) : undefined, + changelogPath: changelog ? path_1.default.join(extensionFolder, changelog) : undefined, }); } return scannedExtensions; @@ -423,7 +459,7 @@ function scanBuiltinExtensions(extensionsRoot, exclude = []) { } function translatePackageJSON(packageJSON, packageNLSPath) { const CharCode_PC = '%'.charCodeAt(0); - const packageNls = JSON.parse(fs.readFileSync(packageNLSPath).toString()); + const packageNls = JSON.parse(fs_1.default.readFileSync(packageNLSPath).toString()); const translate = (obj) => { for (const key in obj) { const val = obj[key]; @@ -444,7 +480,7 @@ function translatePackageJSON(packageJSON, packageNLSPath) { translate(packageJSON); return packageJSON; } -const extensionsPath = path.join(root, 'extensions'); +const extensionsPath = path_1.default.join(root, 'extensions'); // Additional projects to run esbuild on. 
These typically build code for webviews const esbuildMediaScripts = [ 'markdown-language-features/esbuild-notebook.js', @@ -463,7 +499,7 @@ async function webpackExtensions(taskName, isWatch, webpackConfigLocations) { for (const configOrFn of Array.isArray(configOrFnOrArray) ? configOrFnOrArray : [configOrFnOrArray]) { const config = typeof configOrFn === 'function' ? configOrFn({}, {}) : configOrFn; if (outputRoot) { - config.output.path = path.join(outputRoot, path.relative(path.dirname(configPath), config.output.path)); + config.output.path = path_1.default.join(outputRoot, path_1.default.relative(path_1.default.dirname(configPath), config.output.path)); } webpackConfigs.push(config); } @@ -475,18 +511,18 @@ async function webpackExtensions(taskName, isWatch, webpackConfigLocations) { for (const stats of fullStats.children) { const outputPath = stats.outputPath; if (outputPath) { - const relativePath = path.relative(extensionsPath, outputPath).replace(/\\/g, '/'); + const relativePath = path_1.default.relative(extensionsPath, outputPath).replace(/\\/g, '/'); const match = relativePath.match(/[^\/]+(\/server|\/client)?/); - fancyLog(`Finished ${ansiColors.green(taskName)} ${ansiColors.cyan(match[0])} with ${stats.errors.length} errors.`); + (0, fancy_log_1.default)(`Finished ${ansi_colors_1.default.green(taskName)} ${ansi_colors_1.default.cyan(match[0])} with ${stats.errors.length} errors.`); } if (Array.isArray(stats.errors)) { stats.errors.forEach((error) => { - fancyLog.error(error); + fancy_log_1.default.error(error); }); } if (Array.isArray(stats.warnings)) { stats.warnings.forEach((warning) => { - fancyLog.warn(warning); + fancy_log_1.default.warn(warning); }); } } @@ -506,7 +542,7 @@ async function webpackExtensions(taskName, isWatch, webpackConfigLocations) { else { webpack(webpackConfigs).run((err, stats) => { if (err) { - fancyLog.error(err); + fancy_log_1.default.error(err); reject(); } else { @@ -520,9 +556,9 @@ async function webpackExtensions(taskName, isWatch, webpackConfigLocations) { async function esbuildExtensions(taskName, isWatch, scripts) { function reporter(stdError, script) { const matches = (stdError || '').match(/\> (.+): error: (.+)?/g); - fancyLog(`Finished ${ansiColors.green(taskName)} ${script} with ${matches ? matches.length : 0} errors.`); + (0, fancy_log_1.default)(`Finished ${ansi_colors_1.default.green(taskName)} ${script} with ${matches ? 
matches.length : 0} errors.`); for (const match of matches || []) { - fancyLog.error(match); + fancy_log_1.default.error(match); } } const tasks = scripts.map(({ script, outputRoot }) => { @@ -534,7 +570,7 @@ async function esbuildExtensions(taskName, isWatch, scripts) { if (outputRoot) { args.push('--outputRoot', outputRoot); } - const proc = cp.execFile(process.argv[0], args, {}, (error, _stdout, stderr) => { + const proc = child_process_1.default.execFile(process.argv[0], args, {}, (error, _stdout, stderr) => { if (error) { return reject(error); } @@ -542,7 +578,7 @@ async function esbuildExtensions(taskName, isWatch, scripts) { return resolve(); }); proc.stdout.on('data', (data) => { - fancyLog(`${ansiColors.green(taskName)}: ${data.toString('utf8')}`); + (0, fancy_log_1.default)(`${ansi_colors_1.default.green(taskName)}: ${data.toString('utf8')}`); }); }); }); @@ -550,8 +586,8 @@ async function esbuildExtensions(taskName, isWatch, scripts) { } async function buildExtensionMedia(isWatch, outputRoot) { return esbuildExtensions('esbuilding extension media', isWatch, esbuildMediaScripts.map(p => ({ - script: path.join(extensionsPath, p), - outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined + script: path_1.default.join(extensionsPath, p), + outputRoot: outputRoot ? path_1.default.join(root, outputRoot, path_1.default.dirname(p)) : undefined }))); } //# sourceMappingURL=extensions.js.map \ No newline at end of file diff --git a/build/lib/extensions.ts b/build/lib/extensions.ts index a881d3153da..7ddfbb03587 100644 --- a/build/lib/extensions.ts +++ b/build/lib/extensions.ts @@ -3,24 +3,24 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as es from 'event-stream'; -import * as fs from 'fs'; -import * as cp from 'child_process'; -import * as glob from 'glob'; -import * as gulp from 'gulp'; -import * as path from 'path'; +import es from 'event-stream'; +import fs from 'fs'; +import cp from 'child_process'; +import glob from 'glob'; +import gulp from 'gulp'; +import path from 'path'; import { Stream } from 'stream'; -import * as File from 'vinyl'; +import File from 'vinyl'; import { createStatsStream } from './stats'; import * as util2 from './util'; const vzip = require('gulp-vinyl-zip'); -import filter = require('gulp-filter'); -import rename = require('gulp-rename'); -import * as fancyLog from 'fancy-log'; -import * as ansiColors from 'ansi-colors'; -const buffer = require('gulp-buffer'); +import filter from 'gulp-filter'; +import rename from 'gulp-rename'; +import fancyLog from 'fancy-log'; +import ansiColors from 'ansi-colors'; +import buffer from 'gulp-buffer'; import * as jsoncParser from 'jsonc-parser'; -import webpack = require('webpack'); +import webpack from 'webpack'; import { getProductionDependencies } from './dependencies'; import { IExtensionDefinition, getExtensionStream } from './builtInExtensions'; import { getVersion } from './getVersion'; diff --git a/build/lib/fetch.js b/build/lib/fetch.js index b7da65f4af2..078706cdd00 100644 --- a/build/lib/fetch.js +++ b/build/lib/fetch.js @@ -3,16 +3,19 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. 
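// Illustrative sketch of the source-level pattern behind every hunk in this patch (an
// assumed example, not copied from the diff): under `module: nodenext`, CommonJS modules
// are imported through their default export. TypeScript call sites stay the same; only the
// emitted CommonJS changes, gaining the __importDefault wrapper and `fs_1.default.…`
// member access shown above.
import fs from 'fs';       // was: import * as fs from 'fs';
import path from 'path';   // was: import * as path from 'path';

// Assumes a package.json exists in the working directory.
const pkg = JSON.parse(fs.readFileSync(path.join(process.cwd(), 'package.json'), 'utf8'));
console.log(pkg.name);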
*--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.fetchUrls = fetchUrls; exports.fetchUrl = fetchUrl; exports.fetchGithub = fetchGithub; -const es = require("event-stream"); -const VinylFile = require("vinyl"); -const log = require("fancy-log"); -const ansiColors = require("ansi-colors"); -const crypto = require("crypto"); -const through2 = require("through2"); +const event_stream_1 = __importDefault(require("event-stream")); +const vinyl_1 = __importDefault(require("vinyl")); +const fancy_log_1 = __importDefault(require("fancy-log")); +const ansi_colors_1 = __importDefault(require("ansi-colors")); +const crypto_1 = __importDefault(require("crypto")); +const through2_1 = __importDefault(require("through2")); function fetchUrls(urls, options) { if (options === undefined) { options = {}; @@ -23,7 +26,7 @@ function fetchUrls(urls, options) { if (!Array.isArray(urls)) { urls = [urls]; } - return es.readArray(urls).pipe(es.map((data, cb) => { + return event_stream_1.default.readArray(urls).pipe(event_stream_1.default.map((data, cb) => { const url = [options.base, data].join(''); fetchUrl(url, options).then(file => { cb(undefined, file); @@ -37,7 +40,7 @@ async function fetchUrl(url, options, retries = 10, retryDelay = 1000) { try { let startTime = 0; if (verbose) { - log(`Start fetching ${ansiColors.magenta(url)}${retries !== 10 ? ` (${10 - retries} retry)` : ''}`); + (0, fancy_log_1.default)(`Start fetching ${ansi_colors_1.default.magenta(url)}${retries !== 10 ? ` (${10 - retries} retry)` : ''}`); startTime = new Date().getTime(); } const controller = new AbortController(); @@ -48,33 +51,33 @@ async function fetchUrl(url, options, retries = 10, retryDelay = 1000) { signal: controller.signal /* Typings issue with lib.dom.d.ts */ }); if (verbose) { - log(`Fetch completed: Status ${response.status}. Took ${ansiColors.magenta(`${new Date().getTime() - startTime} ms`)}`); + (0, fancy_log_1.default)(`Fetch completed: Status ${response.status}. 
Took ${ansi_colors_1.default.magenta(`${new Date().getTime() - startTime} ms`)}`); } if (response.ok && (response.status >= 200 && response.status < 300)) { const contents = Buffer.from(await response.arrayBuffer()); if (options.checksumSha256) { - const actualSHA256Checksum = crypto.createHash('sha256').update(contents).digest('hex'); + const actualSHA256Checksum = crypto_1.default.createHash('sha256').update(contents).digest('hex'); if (actualSHA256Checksum !== options.checksumSha256) { - throw new Error(`Checksum mismatch for ${ansiColors.cyan(url)} (expected ${options.checksumSha256}, actual ${actualSHA256Checksum}))`); + throw new Error(`Checksum mismatch for ${ansi_colors_1.default.cyan(url)} (expected ${options.checksumSha256}, actual ${actualSHA256Checksum}))`); } else if (verbose) { - log(`Verified SHA256 checksums match for ${ansiColors.cyan(url)}`); + (0, fancy_log_1.default)(`Verified SHA256 checksums match for ${ansi_colors_1.default.cyan(url)}`); } } else if (verbose) { - log(`Skipping checksum verification for ${ansiColors.cyan(url)} because no expected checksum was provided`); + (0, fancy_log_1.default)(`Skipping checksum verification for ${ansi_colors_1.default.cyan(url)} because no expected checksum was provided`); } if (verbose) { - log(`Fetched response body buffer: ${ansiColors.magenta(`${contents.byteLength} bytes`)}`); + (0, fancy_log_1.default)(`Fetched response body buffer: ${ansi_colors_1.default.magenta(`${contents.byteLength} bytes`)}`); } - return new VinylFile({ + return new vinyl_1.default({ cwd: '/', base: options.base, path: url, contents }); } - let err = `Request ${ansiColors.magenta(url)} failed with status code: ${response.status}`; + let err = `Request ${ansi_colors_1.default.magenta(url)} failed with status code: ${response.status}`; if (response.status === 403) { err += ' (you may be rate limited)'; } @@ -86,7 +89,7 @@ async function fetchUrl(url, options, retries = 10, retryDelay = 1000) { } catch (e) { if (verbose) { - log(`Fetching ${ansiColors.cyan(url)} failed: ${e}`); + (0, fancy_log_1.default)(`Fetching ${ansi_colors_1.default.cyan(url)} failed: ${e}`); } if (retries > 0) { await new Promise(resolve => setTimeout(resolve, retryDelay)); @@ -117,7 +120,7 @@ function fetchGithub(repo, options) { base: 'https://api.github.com', verbose: options.verbose, nodeFetchOptions: { headers: ghApiHeaders } - }).pipe(through2.obj(async function (file, _enc, callback) { + }).pipe(through2_1.default.obj(async function (file, _enc, callback) { const assetFilter = typeof options.name === 'string' ? (name) => name === options.name : options.name; const asset = JSON.parse(file.contents.toString()).assets.find((a) => assetFilter(a.name)); if (!asset) { diff --git a/build/lib/fetch.ts b/build/lib/fetch.ts index 0c44b8e567f..47a65b88fb5 100644 --- a/build/lib/fetch.ts +++ b/build/lib/fetch.ts @@ -3,12 +3,12 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import * as es from 'event-stream'; -import * as VinylFile from 'vinyl'; -import * as log from 'fancy-log'; -import * as ansiColors from 'ansi-colors'; -import * as crypto from 'crypto'; -import * as through2 from 'through2'; +import es from 'event-stream'; +import VinylFile from 'vinyl'; +import log from 'fancy-log'; +import ansiColors from 'ansi-colors'; +import crypto from 'crypto'; +import through2 from 'through2'; import { Stream } from 'stream'; export interface IFetchOptions { diff --git a/build/lib/formatter.js b/build/lib/formatter.js index 29f265c8289..1085ea8f488 100644 --- a/build/lib/formatter.js +++ b/build/lib/formatter.js @@ -1,17 +1,20 @@ "use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.format = format; /*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -const fs = require("fs"); -const path = require("path"); -const ts = require("typescript"); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const typescript_1 = __importDefault(require("typescript")); class LanguageServiceHost { files = {}; addFile(fileName, text) { - this.files[fileName] = ts.ScriptSnapshot.fromString(text); + this.files[fileName] = typescript_1.default.ScriptSnapshot.fromString(text); } fileExists(path) { return !!this.files[path]; @@ -20,18 +23,18 @@ class LanguageServiceHost { return this.files[path]?.getText(0, this.files[path].getLength()); } // for ts.LanguageServiceHost - getCompilationSettings = () => ts.getDefaultCompilerOptions(); + getCompilationSettings = () => typescript_1.default.getDefaultCompilerOptions(); getScriptFileNames = () => Object.keys(this.files); getScriptVersion = (_fileName) => '0'; getScriptSnapshot = (fileName) => this.files[fileName]; getCurrentDirectory = () => process.cwd(); - getDefaultLibFileName = (options) => ts.getDefaultLibFilePath(options); + getDefaultLibFileName = (options) => typescript_1.default.getDefaultLibFilePath(options); } const defaults = { baseIndentSize: 0, indentSize: 4, tabSize: 4, - indentStyle: ts.IndentStyle.Smart, + indentStyle: typescript_1.default.IndentStyle.Smart, newLineCharacter: '\r\n', convertTabsToSpaces: false, insertSpaceAfterCommaDelimiter: true, @@ -54,14 +57,14 @@ const defaults = { const getOverrides = (() => { let value; return () => { - value ??= JSON.parse(fs.readFileSync(path.join(__dirname, '..', '..', 'tsfmt.json'), 'utf8')); + value ??= JSON.parse(fs_1.default.readFileSync(path_1.default.join(__dirname, '..', '..', 'tsfmt.json'), 'utf8')); return value; }; })(); function format(fileName, text) { const host = new LanguageServiceHost(); host.addFile(fileName, text); - const languageService = ts.createLanguageService(host); + const languageService = typescript_1.default.createLanguageService(host); const edits = languageService.getFormattingEditsForDocument(fileName, { ...defaults, ...getOverrides() }); edits .sort((a, b) => a.span.start - b.span.start) diff --git a/build/lib/formatter.ts b/build/lib/formatter.ts index 
0d9035b3d87..993722e5f92 100644 --- a/build/lib/formatter.ts +++ b/build/lib/formatter.ts @@ -2,9 +2,9 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; -import * as ts from 'typescript'; +import fs from 'fs'; +import path from 'path'; +import ts from 'typescript'; class LanguageServiceHost implements ts.LanguageServiceHost { diff --git a/build/lib/getVersion.js b/build/lib/getVersion.js index b50ead538a2..7606c17ab14 100644 --- a/build/lib/getVersion.js +++ b/build/lib/getVersion.js @@ -3,9 +3,42 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); Object.defineProperty(exports, "__esModule", { value: true }); exports.getVersion = getVersion; -const git = require("./git"); +const git = __importStar(require("./git")); function getVersion(root) { let version = process.env['BUILD_SOURCEVERSION']; if (!version || !/^[0-9a-f]{40}$/i.test(version.trim())) { diff --git a/build/lib/git.js b/build/lib/git.js index 798a408bdb9..30de97ed6e3 100644 --- a/build/lib/git.js +++ b/build/lib/git.js @@ -1,21 +1,24 @@ "use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.getVersion = getVersion; /*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -const path = require("path"); -const fs = require("fs"); +const path_1 = __importDefault(require("path")); +const fs_1 = __importDefault(require("fs")); /** * Returns the sha1 commit version of a repository or undefined in case of failure. */ function getVersion(repo) { - const git = path.join(repo, '.git'); - const headPath = path.join(git, 'HEAD'); + const git = path_1.default.join(repo, '.git'); + const headPath = path_1.default.join(git, 'HEAD'); let head; try { - head = fs.readFileSync(headPath, 'utf8').trim(); + head = fs_1.default.readFileSync(headPath, 'utf8').trim(); } catch (e) { return undefined; @@ -28,17 +31,17 @@ function getVersion(repo) { return undefined; } const ref = refMatch[1]; - const refPath = path.join(git, ref); + const refPath = path_1.default.join(git, ref); try { - return fs.readFileSync(refPath, 'utf8').trim(); + return fs_1.default.readFileSync(refPath, 'utf8').trim(); } catch (e) { // noop } - const packedRefsPath = path.join(git, 'packed-refs'); + const packedRefsPath = path_1.default.join(git, 'packed-refs'); let refsRaw; try { - refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim(); + refsRaw = fs_1.default.readFileSync(packedRefsPath, 'utf8').trim(); } catch (e) { return undefined; diff --git a/build/lib/git.ts b/build/lib/git.ts index dbb424f21df..a3c23d8c29b 100644 --- a/build/lib/git.ts +++ b/build/lib/git.ts @@ -2,8 +2,8 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as path from 'path'; -import * as fs from 'fs'; +import path from 'path'; +import fs from 'fs'; /** * Returns the sha1 commit version of a repository or undefined in case of failure. diff --git a/build/lib/i18n.js b/build/lib/i18n.js index 6964616291b..9483d319a50 100644 --- a/build/lib/i18n.js +++ b/build/lib/i18n.js @@ -3,6 +3,9 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.EXTERNAL_EXTENSIONS = exports.XLF = exports.Line = exports.extraLanguages = exports.defaultLanguages = void 0; exports.processNlsFiles = processNlsFiles; @@ -12,20 +15,20 @@ exports.createXlfFilesForExtensions = createXlfFilesForExtensions; exports.createXlfFilesForIsl = createXlfFilesForIsl; exports.prepareI18nPackFiles = prepareI18nPackFiles; exports.prepareIslFiles = prepareIslFiles; -const path = require("path"); -const fs = require("fs"); +const path_1 = __importDefault(require("path")); +const fs_1 = __importDefault(require("fs")); const event_stream_1 = require("event-stream"); -const jsonMerge = require("gulp-merge-json"); -const File = require("vinyl"); -const xml2js = require("xml2js"); -const gulp = require("gulp"); -const fancyLog = require("fancy-log"); -const ansiColors = require("ansi-colors"); -const iconv = require("@vscode/iconv-lite-umd"); +const gulp_merge_json_1 = __importDefault(require("gulp-merge-json")); +const vinyl_1 = __importDefault(require("vinyl")); +const xml2js_1 = __importDefault(require("xml2js")); +const gulp_1 = __importDefault(require("gulp")); +const fancy_log_1 = __importDefault(require("fancy-log")); +const ansi_colors_1 = __importDefault(require("ansi-colors")); +const iconv_lite_umd_1 = __importDefault(require("@vscode/iconv-lite-umd")); const l10n_dev_1 = require("@vscode/l10n-dev"); -const REPO_ROOT_PATH = path.join(__dirname, '../..'); +const REPO_ROOT_PATH = path_1.default.join(__dirname, '../..'); function log(message, ...rest) { - fancyLog(ansiColors.green('[i18n]'), message, ...rest); + (0, fancy_log_1.default)(ansi_colors_1.default.green('[i18n]'), message, ...rest); } exports.defaultLanguages = [ { id: 'zh-tw', folderName: 'cht', translationId: 'zh-hant' }, @@ -188,7 +191,7 @@ class XLF { } static parse = function (xlfString) { return new Promise((resolve, reject) => { - const parser = new xml2js.Parser(); + const parser = new xml2js_1.default.Parser(); const files = []; parser.parseString(xlfString, function (err, result) { if (err) { @@ -278,8 +281,8 @@ function stripComments(content) { return result; } function processCoreBundleFormat(base, fileHeader, languages, json, emitter) { - const languageDirectory = path.join(REPO_ROOT_PATH, '..', 'vscode-loc', 'i18n'); - if (!fs.existsSync(languageDirectory)) { + const languageDirectory = path_1.default.join(REPO_ROOT_PATH, '..', 'vscode-loc', 'i18n'); + if (!fs_1.default.existsSync(languageDirectory)) { log(`No VS Code localization repository found. 
Looking at ${languageDirectory}`); log(`To bundle translations please check out the vscode-loc repository as a sibling of the vscode repository.`); } @@ -289,10 +292,10 @@ function processCoreBundleFormat(base, fileHeader, languages, json, emitter) { log(`Generating nls bundles for: ${language.id}`); } const languageFolderName = language.translationId || language.id; - const i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json'); + const i18nFile = path_1.default.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json'); let allMessages; - if (fs.existsSync(i18nFile)) { - const content = stripComments(fs.readFileSync(i18nFile, 'utf8')); + if (fs_1.default.existsSync(i18nFile)) { + const content = stripComments(fs_1.default.readFileSync(i18nFile, 'utf8')); allMessages = JSON.parse(content); } let nlsIndex = 0; @@ -304,7 +307,7 @@ function processCoreBundleFormat(base, fileHeader, languages, json, emitter) { nlsIndex++; } } - emitter.queue(new File({ + emitter.queue(new vinyl_1.default({ contents: Buffer.from(`${fileHeader} globalThis._VSCODE_NLS_MESSAGES=${JSON.stringify(nlsResult)}; globalThis._VSCODE_NLS_LANGUAGE=${JSON.stringify(language.id)};`), @@ -315,10 +318,10 @@ globalThis._VSCODE_NLS_LANGUAGE=${JSON.stringify(language.id)};`), } function processNlsFiles(opts) { return (0, event_stream_1.through)(function (file) { - const fileName = path.basename(file.path); + const fileName = path_1.default.basename(file.path); if (fileName === 'bundleInfo.json') { // pick a root level file to put the core bundles (TODO@esm this file is not created anymore, pick another) try { - const json = JSON.parse(fs.readFileSync(path.join(REPO_ROOT_PATH, opts.out, 'nls.keys.json')).toString()); + const json = JSON.parse(fs_1.default.readFileSync(path_1.default.join(REPO_ROOT_PATH, opts.out, 'nls.keys.json')).toString()); if (NLSKeysFormat.is(json)) { processCoreBundleFormat(file.base, opts.fileHeader, opts.languages, json, this); } @@ -366,7 +369,7 @@ function getResource(sourceFile) { } function createXlfFilesForCoreBundle() { return (0, event_stream_1.through)(function (file) { - const basename = path.basename(file.path); + const basename = path_1.default.basename(file.path); if (basename === 'nls.metadata.json') { if (file.isBuffer()) { const xlfs = Object.create(null); @@ -393,7 +396,7 @@ function createXlfFilesForCoreBundle() { for (const resource in xlfs) { const xlf = xlfs[resource]; const filePath = `${xlf.project}/${resource.replace(/\//g, '_')}.xlf`; - const xlfFile = new File({ + const xlfFile = new vinyl_1.default({ path: filePath, contents: Buffer.from(xlf.toString(), 'utf8') }); @@ -413,7 +416,7 @@ function createXlfFilesForCoreBundle() { } function createL10nBundleForExtension(extensionFolderName, prefixWithBuildFolder) { const prefix = prefixWithBuildFolder ? 
'.build/' : ''; - return gulp + return gulp_1.default .src([ // For source code of extensions `${prefix}extensions/${extensionFolderName}/{src,client,server}/**/*.{ts,tsx}`, @@ -429,12 +432,12 @@ function createL10nBundleForExtension(extensionFolderName, prefixWithBuildFolder callback(); return; } - const extension = path.extname(file.relative); + const extension = path_1.default.extname(file.relative); if (extension !== '.json') { const contents = file.contents.toString('utf8'); (0, l10n_dev_1.getL10nJson)([{ contents, extension }]) .then((json) => { - callback(undefined, new File({ + callback(undefined, new vinyl_1.default({ path: `extensions/${extensionFolderName}/bundle.l10n.json`, contents: Buffer.from(JSON.stringify(json), 'utf8') })); @@ -464,7 +467,7 @@ function createL10nBundleForExtension(extensionFolderName, prefixWithBuildFolder } callback(undefined, file); })) - .pipe(jsonMerge({ + .pipe((0, gulp_merge_json_1.default)({ fileName: `extensions/${extensionFolderName}/bundle.l10n.json`, jsonSpace: '', concatArrays: true @@ -481,16 +484,16 @@ function createXlfFilesForExtensions() { let folderStreamEndEmitted = false; return (0, event_stream_1.through)(function (extensionFolder) { const folderStream = this; - const stat = fs.statSync(extensionFolder.path); + const stat = fs_1.default.statSync(extensionFolder.path); if (!stat.isDirectory()) { return; } - const extensionFolderName = path.basename(extensionFolder.path); + const extensionFolderName = path_1.default.basename(extensionFolder.path); if (extensionFolderName === 'node_modules') { return; } // Get extension id and use that as the id - const manifest = fs.readFileSync(path.join(extensionFolder.path, 'package.json'), 'utf-8'); + const manifest = fs_1.default.readFileSync(path_1.default.join(extensionFolder.path, 'package.json'), 'utf-8'); const manifestJson = JSON.parse(manifest); const extensionId = manifestJson.publisher + '.' 
+ manifestJson.name; counter++; @@ -501,17 +504,17 @@ function createXlfFilesForExtensions() { } return _l10nMap; } - (0, event_stream_1.merge)(gulp.src([`.build/extensions/${extensionFolderName}/package.nls.json`, `.build/extensions/${extensionFolderName}/**/nls.metadata.json`], { allowEmpty: true }), createL10nBundleForExtension(extensionFolderName, exports.EXTERNAL_EXTENSIONS.includes(extensionId))).pipe((0, event_stream_1.through)(function (file) { + (0, event_stream_1.merge)(gulp_1.default.src([`.build/extensions/${extensionFolderName}/package.nls.json`, `.build/extensions/${extensionFolderName}/**/nls.metadata.json`], { allowEmpty: true }), createL10nBundleForExtension(extensionFolderName, exports.EXTERNAL_EXTENSIONS.includes(extensionId))).pipe((0, event_stream_1.through)(function (file) { if (file.isBuffer()) { const buffer = file.contents; - const basename = path.basename(file.path); + const basename = path_1.default.basename(file.path); if (basename === 'package.nls.json') { const json = JSON.parse(buffer.toString('utf8')); getL10nMap().set(`extensions/${extensionId}/package`, json); } else if (basename === 'nls.metadata.json') { const json = JSON.parse(buffer.toString('utf8')); - const relPath = path.relative(`.build/extensions/${extensionFolderName}`, path.dirname(file.path)); + const relPath = path_1.default.relative(`.build/extensions/${extensionFolderName}`, path_1.default.dirname(file.path)); for (const file in json) { const fileContent = json[file]; const info = Object.create(null); @@ -536,8 +539,8 @@ function createXlfFilesForExtensions() { } }, function () { if (_l10nMap?.size > 0) { - const xlfFile = new File({ - path: path.join(extensionsProject, extensionId + '.xlf'), + const xlfFile = new vinyl_1.default({ + path: path_1.default.join(extensionsProject, extensionId + '.xlf'), contents: Buffer.from((0, l10n_dev_1.getL10nXlf)(_l10nMap), 'utf8') }); folderStream.queue(xlfFile); @@ -560,7 +563,7 @@ function createXlfFilesForExtensions() { function createXlfFilesForIsl() { return (0, event_stream_1.through)(function (file) { let projectName, resourceFile; - if (path.basename(file.path) === 'messages.en.isl') { + if (path_1.default.basename(file.path) === 'messages.en.isl') { projectName = setupProject; resourceFile = 'messages.xlf'; } @@ -602,8 +605,8 @@ function createXlfFilesForIsl() { const originalPath = file.path.substring(file.cwd.length + 1, file.path.split('.')[0].length).replace(/\\/g, '/'); xlf.addFile(originalPath, keys, messages); // Emit only upon all ISL files combined into single XLF instance - const newFilePath = path.join(projectName, resourceFile); - const xlfFile = new File({ path: newFilePath, contents: Buffer.from(xlf.toString(), 'utf-8') }); + const newFilePath = path_1.default.join(projectName, resourceFile); + const xlfFile = new vinyl_1.default({ path: newFilePath, contents: Buffer.from(xlf.toString(), 'utf-8') }); this.queue(xlfFile); }); } @@ -623,8 +626,8 @@ function createI18nFile(name, messages) { if (process.platform === 'win32') { content = content.replace(/\n/g, '\r\n'); } - return new File({ - path: path.join(name + '.i18n.json'), + return new vinyl_1.default({ + path: path_1.default.join(name + '.i18n.json'), contents: Buffer.from(content, 'utf8') }); } @@ -643,9 +646,9 @@ function prepareI18nPackFiles(resultingTranslationPaths) { const extensionsPacks = {}; const errors = []; return (0, event_stream_1.through)(function (xlf) { - let project = path.basename(path.dirname(path.dirname(xlf.relative))); + let project = 
path_1.default.basename(path_1.default.dirname(path_1.default.dirname(xlf.relative))); // strip `-new` since vscode-extensions-loc uses the `-new` suffix to indicate that it's from the new loc pipeline - const resource = path.basename(path.basename(xlf.relative, '.xlf'), '-new'); + const resource = path_1.default.basename(path_1.default.basename(xlf.relative, '.xlf'), '-new'); if (exports.EXTERNAL_EXTENSIONS.find(e => e === resource)) { project = extensionsProject; } @@ -720,11 +723,11 @@ function prepareIslFiles(language, innoSetupConfig) { function createIslFile(name, messages, language, innoSetup) { const content = []; let originalContent; - if (path.basename(name) === 'Default') { - originalContent = new TextModel(fs.readFileSync(name + '.isl', 'utf8')); + if (path_1.default.basename(name) === 'Default') { + originalContent = new TextModel(fs_1.default.readFileSync(name + '.isl', 'utf8')); } else { - originalContent = new TextModel(fs.readFileSync(name + '.en.isl', 'utf8')); + originalContent = new TextModel(fs_1.default.readFileSync(name + '.en.isl', 'utf8')); } originalContent.lines.forEach(line => { if (line.length > 0) { @@ -746,10 +749,10 @@ function createIslFile(name, messages, language, innoSetup) { } } }); - const basename = path.basename(name); + const basename = path_1.default.basename(name); const filePath = `${basename}.${language.id}.isl`; - const encoded = iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage); - return new File({ + const encoded = iconv_lite_umd_1.default.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage); + return new vinyl_1.default({ path: filePath, contents: Buffer.from(encoded), }); diff --git a/build/lib/i18n.ts b/build/lib/i18n.ts index cd7e522ad36..d2904ccf0fb 100644 --- a/build/lib/i18n.ts +++ b/build/lib/i18n.ts @@ -3,17 +3,17 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as path from 'path'; -import * as fs from 'fs'; +import path from 'path'; +import fs from 'fs'; import { map, merge, through, ThroughStream } from 'event-stream'; -import * as jsonMerge from 'gulp-merge-json'; -import * as File from 'vinyl'; -import * as xml2js from 'xml2js'; -import * as gulp from 'gulp'; -import * as fancyLog from 'fancy-log'; -import * as ansiColors from 'ansi-colors'; -import * as iconv from '@vscode/iconv-lite-umd'; +import jsonMerge from 'gulp-merge-json'; +import File from 'vinyl'; +import xml2js from 'xml2js'; +import gulp from 'gulp'; +import fancyLog from 'fancy-log'; +import ansiColors from 'ansi-colors'; +import iconv from '@vscode/iconv-lite-umd'; import { l10nJsonFormat, getL10nXlf, l10nJsonDetails, getL10nFilesFromXlf, getL10nJson } from '@vscode/l10n-dev'; const REPO_ROOT_PATH = path.join(__dirname, '../..'); diff --git a/build/lib/inlineMeta.js b/build/lib/inlineMeta.js index 5ec7e9e9c07..3b473ae091e 100644 --- a/build/lib/inlineMeta.js +++ b/build/lib/inlineMeta.js @@ -3,9 +3,12 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.inlineMeta = inlineMeta; -const es = require("event-stream"); +const event_stream_1 = __importDefault(require("event-stream")); const path_1 = require("path"); const packageJsonMarkerId = 'BUILD_INSERT_PACKAGE_CONFIGURATION'; // TODO in order to inline `product.json`, more work is @@ -16,7 +19,7 @@ const packageJsonMarkerId = 'BUILD_INSERT_PACKAGE_CONFIGURATION'; // - a `target` is added in `gulpfile.vscode.win32.js` // const productJsonMarkerId = 'BUILD_INSERT_PRODUCT_CONFIGURATION'; function inlineMeta(result, ctx) { - return result.pipe(es.through(function (file) { + return result.pipe(event_stream_1.default.through(function (file) { if (matchesFile(file, ctx)) { let content = file.contents.toString(); let markerFound = false; diff --git a/build/lib/inlineMeta.ts b/build/lib/inlineMeta.ts index dc061aca8d1..2a0db13d06e 100644 --- a/build/lib/inlineMeta.ts +++ b/build/lib/inlineMeta.ts @@ -3,9 +3,9 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as es from 'event-stream'; +import es from 'event-stream'; import { basename } from 'path'; -import * as File from 'vinyl'; +import File from 'vinyl'; export interface IInlineMetaContext { readonly targetPaths: string[]; diff --git a/build/lib/layersChecker.js b/build/lib/layersChecker.js index e52525bf61d..5cf5c58402c 100644 --- a/build/lib/layersChecker.js +++ b/build/lib/layersChecker.js @@ -3,8 +3,11 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const ts = require("typescript"); +const typescript_1 = __importDefault(require("typescript")); const fs_1 = require("fs"); const path_1 = require("path"); const minimatch_1 = require("minimatch"); @@ -295,8 +298,8 @@ let hasErrors = false; function checkFile(program, sourceFile, rule) { checkNode(sourceFile); function checkNode(node) { - if (node.kind !== ts.SyntaxKind.Identifier) { - return ts.forEachChild(node, checkNode); // recurse down + if (node.kind !== typescript_1.default.SyntaxKind.Identifier) { + return typescript_1.default.forEachChild(node, checkNode); // recurse down } const checker = program.getTypeChecker(); const symbol = checker.getSymbolAtLocation(node); @@ -352,11 +355,11 @@ function checkFile(program, sourceFile, rule) { } } function createProgram(tsconfigPath) { - const tsConfig = ts.readConfigFile(tsconfigPath, ts.sys.readFile); - const configHostParser = { fileExists: fs_1.existsSync, readDirectory: ts.sys.readDirectory, readFile: file => (0, fs_1.readFileSync)(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' }; - const tsConfigParsed = ts.parseJsonConfigFileContent(tsConfig.config, configHostParser, (0, path_1.resolve)((0, path_1.dirname)(tsconfigPath)), { noEmit: true }); - const compilerHost = ts.createCompilerHost(tsConfigParsed.options, true); - return ts.createProgram(tsConfigParsed.fileNames, tsConfigParsed.options, compilerHost); + const tsConfig = typescript_1.default.readConfigFile(tsconfigPath, typescript_1.default.sys.readFile); + const configHostParser = { fileExists: fs_1.existsSync, readDirectory: typescript_1.default.sys.readDirectory, readFile: file => (0, fs_1.readFileSync)(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' }; + const tsConfigParsed = typescript_1.default.parseJsonConfigFileContent(tsConfig.config, configHostParser, (0, path_1.resolve)((0, path_1.dirname)(tsconfigPath)), { noEmit: true }); + const compilerHost = typescript_1.default.createCompilerHost(tsConfigParsed.options, true); + return typescript_1.default.createProgram(tsConfigParsed.fileNames, tsConfigParsed.options, compilerHost); } // // Create program and start checking diff --git a/build/lib/layersChecker.ts b/build/lib/layersChecker.ts index e16a775b23e..63377328928 100644 --- a/build/lib/layersChecker.ts +++ b/build/lib/layersChecker.ts @@ -3,7 +3,7 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as ts from 'typescript'; +import ts from 'typescript'; import { readFileSync, existsSync } from 'fs'; import { resolve, dirname, join } from 'path'; import { match } from 'minimatch'; diff --git a/build/lib/mangle/index.js b/build/lib/mangle/index.js index 1c2c8cc3dd3..b93003221a4 100644 --- a/build/lib/mangle/index.js +++ b/build/lib/mangle/index.js @@ -3,16 +3,19 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.Mangler = void 0; -const v8 = require("node:v8"); -const fs = require("fs"); -const path = require("path"); +const node_v8_1 = __importDefault(require("node:v8")); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); const process_1 = require("process"); const source_map_1 = require("source-map"); -const ts = require("typescript"); +const typescript_1 = __importDefault(require("typescript")); const url_1 = require("url"); -const workerpool = require("workerpool"); +const workerpool_1 = __importDefault(require("workerpool")); const staticLanguageServiceHost_1 = require("./staticLanguageServiceHost"); const buildfile = require('../../buildfile'); class ShortIdent { @@ -66,29 +69,29 @@ class ClassData { this.node = node; const candidates = []; for (const member of node.members) { - if (ts.isMethodDeclaration(member)) { + if (typescript_1.default.isMethodDeclaration(member)) { // method `foo() {}` candidates.push(member); } - else if (ts.isPropertyDeclaration(member)) { + else if (typescript_1.default.isPropertyDeclaration(member)) { // property `foo = 234` candidates.push(member); } - else if (ts.isGetAccessor(member)) { + else if (typescript_1.default.isGetAccessor(member)) { // getter: `get foo() { ... }` candidates.push(member); } - else if (ts.isSetAccessor(member)) { + else if (typescript_1.default.isSetAccessor(member)) { // setter: `set foo() { ... }` candidates.push(member); } - else if (ts.isConstructorDeclaration(member)) { + else if (typescript_1.default.isConstructorDeclaration(member)) { // constructor-prop:`constructor(private foo) {}` for (const param of member.parameters) { - if (hasModifier(param, ts.SyntaxKind.PrivateKeyword) - || hasModifier(param, ts.SyntaxKind.ProtectedKeyword) - || hasModifier(param, ts.SyntaxKind.PublicKeyword) - || hasModifier(param, ts.SyntaxKind.ReadonlyKeyword)) { + if (hasModifier(param, typescript_1.default.SyntaxKind.PrivateKeyword) + || hasModifier(param, typescript_1.default.SyntaxKind.ProtectedKeyword) + || hasModifier(param, typescript_1.default.SyntaxKind.PublicKeyword) + || hasModifier(param, typescript_1.default.SyntaxKind.ReadonlyKeyword)) { candidates.push(param); } } @@ -109,8 +112,8 @@ class ClassData { } const { name } = node; let ident = name.getText(); - if (name.kind === ts.SyntaxKind.ComputedPropertyName) { - if (name.expression.kind !== ts.SyntaxKind.StringLiteral) { + if (name.kind === typescript_1.default.SyntaxKind.ComputedPropertyName) { + if (name.expression.kind !== typescript_1.default.SyntaxKind.StringLiteral) { // unsupported: [Symbol.foo] or [abc + 'field'] return; } @@ -120,10 +123,10 @@ class ClassData { return ident; } static _getFieldType(node) { - if (hasModifier(node, ts.SyntaxKind.PrivateKeyword)) { + if (hasModifier(node, typescript_1.default.SyntaxKind.PrivateKeyword)) { return 2 /* FieldType.Private */; } - else if (hasModifier(node, ts.SyntaxKind.ProtectedKeyword)) { + else if (hasModifier(node, typescript_1.default.SyntaxKind.ProtectedKeyword)) { return 1 /* FieldType.Protected */; } else { @@ -302,7 +305,7 @@ class DeclarationData { this.replacementName = fileIdents.next(); } getLocations(service) { - if (ts.isVariableDeclaration(this.node)) { + if (typescript_1.default.isVariableDeclaration(this.node)) { // If the const aliases any types, we need to rename those too const definitionResult = service.getDefinitionAndBoundSpan(this.fileName, 
this.node.name.getStart()); if (definitionResult?.definitions && definitionResult.definitions.length > 1) { @@ -350,20 +353,20 @@ class Mangler { this.projectPath = projectPath; this.log = log; this.config = config; - this.renameWorkerPool = workerpool.pool(path.join(__dirname, 'renameWorker.js'), { + this.renameWorkerPool = workerpool_1.default.pool(path_1.default.join(__dirname, 'renameWorker.js'), { maxWorkers: 1, minWorkers: 'max' }); } async computeNewFileContents(strictImplicitPublicHandling) { - const service = ts.createLanguageService(new staticLanguageServiceHost_1.StaticLanguageServiceHost(this.projectPath)); + const service = typescript_1.default.createLanguageService(new staticLanguageServiceHost_1.StaticLanguageServiceHost(this.projectPath)); // STEP: // - Find all classes and their field info. // - Find exported symbols. const fileIdents = new ShortIdent('$'); const visit = (node) => { if (this.config.manglePrivateFields) { - if (ts.isClassDeclaration(node) || ts.isClassExpression(node)) { + if (typescript_1.default.isClassDeclaration(node) || typescript_1.default.isClassExpression(node)) { const anchor = node.name ?? node; const key = `${node.getSourceFile().fileName}|${anchor.getStart()}`; if (this.allClassDataByKey.has(key)) { @@ -376,19 +379,19 @@ class Mangler { // Find exported classes, functions, and vars if (( // Exported class - ts.isClassDeclaration(node) - && hasModifier(node, ts.SyntaxKind.ExportKeyword) + typescript_1.default.isClassDeclaration(node) + && hasModifier(node, typescript_1.default.SyntaxKind.ExportKeyword) && node.name) || ( // Exported function - ts.isFunctionDeclaration(node) - && ts.isSourceFile(node.parent) - && hasModifier(node, ts.SyntaxKind.ExportKeyword) + typescript_1.default.isFunctionDeclaration(node) + && typescript_1.default.isSourceFile(node.parent) + && hasModifier(node, typescript_1.default.SyntaxKind.ExportKeyword) && node.name && node.body // On named function and not on the overload ) || ( // Exported variable - ts.isVariableDeclaration(node) - && hasModifier(node.parent.parent, ts.SyntaxKind.ExportKeyword) // Variable statement is exported - && ts.isSourceFile(node.parent.parent.parent)) + typescript_1.default.isVariableDeclaration(node) + && hasModifier(node.parent.parent, typescript_1.default.SyntaxKind.ExportKeyword) // Variable statement is exported + && typescript_1.default.isSourceFile(node.parent.parent.parent)) // Disabled for now because we need to figure out how to handle // enums that are used in monaco or extHost interfaces. /* || ( @@ -406,17 +409,17 @@ class Mangler { this.allExportedSymbols.add(new DeclarationData(node.getSourceFile().fileName, node, fileIdents)); } } - ts.forEachChild(node, visit); + typescript_1.default.forEachChild(node, visit); }; for (const file of service.getProgram().getSourceFiles()) { if (!file.isDeclarationFile) { - ts.forEachChild(file, visit); + typescript_1.default.forEachChild(file, visit); } } this.log(`Done collecting. Classes: ${this.allClassDataByKey.size}. 
Exported symbols: ${this.allExportedSymbols.size}`); // STEP: connect sub and super-types const setupParents = (data) => { - const extendsClause = data.node.heritageClauses?.find(h => h.token === ts.SyntaxKind.ExtendsKeyword); + const extendsClause = data.node.heritageClauses?.find(h => h.token === typescript_1.default.SyntaxKind.ExtendsKeyword); if (!extendsClause) { // no EXTENDS-clause return; @@ -497,7 +500,7 @@ class Mangler { .then((locations) => ({ newName, locations }))); }; for (const data of this.allClassDataByKey.values()) { - if (hasModifier(data.node, ts.SyntaxKind.DeclareKeyword)) { + if (hasModifier(data.node, typescript_1.default.SyntaxKind.DeclareKeyword)) { continue; } fields: for (const [name, info] of data.fields) { @@ -545,7 +548,7 @@ class Mangler { let savedBytes = 0; for (const item of service.getProgram().getSourceFiles()) { const { mapRoot, sourceRoot } = service.getProgram().getCompilerOptions(); - const projectDir = path.dirname(this.projectPath); + const projectDir = path_1.default.dirname(this.projectPath); const sourceMapRoot = mapRoot ?? (0, url_1.pathToFileURL)(sourceRoot ?? projectDir).toString(); // source maps let generator; @@ -557,7 +560,7 @@ class Mangler { } else { // source map generator - const relativeFileName = normalize(path.relative(projectDir, item.fileName)); + const relativeFileName = normalize(path_1.default.relative(projectDir, item.fileName)); const mappingsByLine = new Map(); // apply renames edits.sort((a, b) => b.offset - a.offset); @@ -596,7 +599,7 @@ class Mangler { }); } // source map generation, make sure to get mappings per line correct - generator = new source_map_1.SourceMapGenerator({ file: path.basename(item.fileName), sourceRoot: sourceMapRoot }); + generator = new source_map_1.SourceMapGenerator({ file: path_1.default.basename(item.fileName), sourceRoot: sourceMapRoot }); generator.setSourceContent(relativeFileName, item.getFullText()); for (const [, mappings] of mappingsByLine) { let lineDelta = 0; @@ -614,19 +617,19 @@ class Mangler { } service.dispose(); this.renameWorkerPool.terminate(); - this.log(`Done: ${savedBytes / 1000}kb saved, memory-usage: ${JSON.stringify(v8.getHeapStatistics())}`); + this.log(`Done: ${savedBytes / 1000}kb saved, memory-usage: ${JSON.stringify(node_v8_1.default.getHeapStatistics())}`); return result; } } exports.Mangler = Mangler; // --- ast utils function hasModifier(node, kind) { - const modifiers = ts.canHaveModifiers(node) ? ts.getModifiers(node) : undefined; + const modifiers = typescript_1.default.canHaveModifiers(node) ? 
typescript_1.default.getModifiers(node) : undefined; return Boolean(modifiers?.find(mode => mode.kind === kind)); } function isInAmbientContext(node) { for (let p = node.parent; p; p = p.parent) { - if (ts.isModuleDeclaration(p)) { + if (typescript_1.default.isModuleDeclaration(p)) { return true; } } @@ -636,21 +639,21 @@ function normalize(path) { return path.replace(/\\/g, '/'); } async function _run() { - const root = path.join(__dirname, '..', '..', '..'); - const projectBase = path.join(root, 'src'); - const projectPath = path.join(projectBase, 'tsconfig.json'); - const newProjectBase = path.join(path.dirname(projectBase), path.basename(projectBase) + '2'); - fs.cpSync(projectBase, newProjectBase, { recursive: true }); + const root = path_1.default.join(__dirname, '..', '..', '..'); + const projectBase = path_1.default.join(root, 'src'); + const projectPath = path_1.default.join(projectBase, 'tsconfig.json'); + const newProjectBase = path_1.default.join(path_1.default.dirname(projectBase), path_1.default.basename(projectBase) + '2'); + fs_1.default.cpSync(projectBase, newProjectBase, { recursive: true }); const mangler = new Mangler(projectPath, console.log, { mangleExports: true, manglePrivateFields: true, }); for (const [fileName, contents] of await mangler.computeNewFileContents(new Set(['saveState']))) { - const newFilePath = path.join(newProjectBase, path.relative(projectBase, fileName)); - await fs.promises.mkdir(path.dirname(newFilePath), { recursive: true }); - await fs.promises.writeFile(newFilePath, contents.out); + const newFilePath = path_1.default.join(newProjectBase, path_1.default.relative(projectBase, fileName)); + await fs_1.default.promises.mkdir(path_1.default.dirname(newFilePath), { recursive: true }); + await fs_1.default.promises.writeFile(newFilePath, contents.out); if (contents.sourceMap) { - await fs.promises.writeFile(newFilePath + '.map', contents.sourceMap); + await fs_1.default.promises.writeFile(newFilePath + '.map', contents.sourceMap); } } } diff --git a/build/lib/mangle/index.ts b/build/lib/mangle/index.ts index f291bd63f6b..a6f066e2d2a 100644 --- a/build/lib/mangle/index.ts +++ b/build/lib/mangle/index.ts @@ -3,14 +3,14 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as v8 from 'node:v8'; -import * as fs from 'fs'; -import * as path from 'path'; +import v8 from 'node:v8'; +import fs from 'fs'; +import path from 'path'; import { argv } from 'process'; import { Mapping, SourceMapGenerator } from 'source-map'; -import * as ts from 'typescript'; +import ts from 'typescript'; import { pathToFileURL } from 'url'; -import * as workerpool from 'workerpool'; +import workerpool from 'workerpool'; import { StaticLanguageServiceHost } from './staticLanguageServiceHost'; const buildfile = require('../../buildfile'); diff --git a/build/lib/mangle/renameWorker.js b/build/lib/mangle/renameWorker.js index 6cd429b8c9a..8bd59a4e2d5 100644 --- a/build/lib/mangle/renameWorker.js +++ b/build/lib/mangle/renameWorker.js @@ -3,20 +3,23 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const ts = require("typescript"); -const workerpool = require("workerpool"); +const typescript_1 = __importDefault(require("typescript")); +const workerpool_1 = __importDefault(require("workerpool")); const staticLanguageServiceHost_1 = require("./staticLanguageServiceHost"); let service; function findRenameLocations(projectPath, fileName, position) { if (!service) { - service = ts.createLanguageService(new staticLanguageServiceHost_1.StaticLanguageServiceHost(projectPath)); + service = typescript_1.default.createLanguageService(new staticLanguageServiceHost_1.StaticLanguageServiceHost(projectPath)); } return service.findRenameLocations(fileName, position, false, false, { providePrefixAndSuffixTextForRename: true, }) ?? []; } -workerpool.worker({ +workerpool_1.default.worker({ findRenameLocations }); //# sourceMappingURL=renameWorker.js.map \ No newline at end of file diff --git a/build/lib/mangle/renameWorker.ts b/build/lib/mangle/renameWorker.ts index 29b34e8c514..0cce5677593 100644 --- a/build/lib/mangle/renameWorker.ts +++ b/build/lib/mangle/renameWorker.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as ts from 'typescript'; -import * as workerpool from 'workerpool'; +import ts from 'typescript'; +import workerpool from 'workerpool'; import { StaticLanguageServiceHost } from './staticLanguageServiceHost'; let service: ts.LanguageService | undefined; diff --git a/build/lib/mangle/staticLanguageServiceHost.js b/build/lib/mangle/staticLanguageServiceHost.js index 1f338f0e61c..7777888dd06 100644 --- a/build/lib/mangle/staticLanguageServiceHost.js +++ b/build/lib/mangle/staticLanguageServiceHost.js @@ -3,10 +3,13 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.StaticLanguageServiceHost = void 0; -const ts = require("typescript"); -const path = require("path"); +const typescript_1 = __importDefault(require("typescript")); +const path_1 = __importDefault(require("path")); class StaticLanguageServiceHost { projectPath; _cmdLine; @@ -14,11 +17,11 @@ class StaticLanguageServiceHost { constructor(projectPath) { this.projectPath = projectPath; const existingOptions = {}; - const parsed = ts.readConfigFile(projectPath, ts.sys.readFile); + const parsed = typescript_1.default.readConfigFile(projectPath, typescript_1.default.sys.readFile); if (parsed.error) { throw parsed.error; } - this._cmdLine = ts.parseJsonConfigFileContent(parsed.config, ts.sys, path.dirname(projectPath), existingOptions); + this._cmdLine = typescript_1.default.parseJsonConfigFileContent(parsed.config, typescript_1.default.sys, path_1.default.dirname(projectPath), existingOptions); if (this._cmdLine.errors.length > 0) { throw parsed.error; } @@ -38,28 +41,28 @@ class StaticLanguageServiceHost { getScriptSnapshot(fileName) { let result = this._scriptSnapshots.get(fileName); if (result === undefined) { - const content = ts.sys.readFile(fileName); + const content = typescript_1.default.sys.readFile(fileName); if (content === undefined) { return undefined; } - result = ts.ScriptSnapshot.fromString(content); + result = typescript_1.default.ScriptSnapshot.fromString(content); this._scriptSnapshots.set(fileName, result); } return result; } getCurrentDirectory() { - return path.dirname(this.projectPath); + return path_1.default.dirname(this.projectPath); } getDefaultLibFileName(options) { - return ts.getDefaultLibFilePath(options); + return typescript_1.default.getDefaultLibFilePath(options); } - directoryExists = ts.sys.directoryExists; - getDirectories = ts.sys.getDirectories; - fileExists = ts.sys.fileExists; - readFile = ts.sys.readFile; - readDirectory = ts.sys.readDirectory; + directoryExists = typescript_1.default.sys.directoryExists; + getDirectories = typescript_1.default.sys.getDirectories; + fileExists = typescript_1.default.sys.fileExists; + readFile = typescript_1.default.sys.readFile; + readDirectory = typescript_1.default.sys.readDirectory; // this is necessary to make source references work. - realpath = ts.sys.realpath; + realpath = typescript_1.default.sys.realpath; } exports.StaticLanguageServiceHost = StaticLanguageServiceHost; //# sourceMappingURL=staticLanguageServiceHost.js.map \ No newline at end of file diff --git a/build/lib/mangle/staticLanguageServiceHost.ts b/build/lib/mangle/staticLanguageServiceHost.ts index c2793342ce3..b41b4e52133 100644 --- a/build/lib/mangle/staticLanguageServiceHost.ts +++ b/build/lib/mangle/staticLanguageServiceHost.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as ts from 'typescript'; -import * as path from 'path'; +import ts from 'typescript'; +import path from 'path'; export class StaticLanguageServiceHost implements ts.LanguageServiceHost { diff --git a/build/lib/monaco-api.js b/build/lib/monaco-api.js index 2052806c46b..84cc556cb62 100644 --- a/build/lib/monaco-api.js +++ b/build/lib/monaco-api.js @@ -3,21 +3,24 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. 
See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.DeclarationResolver = exports.FSProvider = exports.RECIPE_PATH = void 0; exports.run3 = run3; exports.execute = execute; -const fs = require("fs"); -const path = require("path"); -const fancyLog = require("fancy-log"); -const ansiColors = require("ansi-colors"); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const fancy_log_1 = __importDefault(require("fancy-log")); +const ansi_colors_1 = __importDefault(require("ansi-colors")); const dtsv = '3'; const tsfmt = require('../../tsfmt.json'); -const SRC = path.join(__dirname, '../../src'); -exports.RECIPE_PATH = path.join(__dirname, '../monaco/monaco.d.ts.recipe'); -const DECLARATION_PATH = path.join(__dirname, '../../src/vs/monaco.d.ts'); +const SRC = path_1.default.join(__dirname, '../../src'); +exports.RECIPE_PATH = path_1.default.join(__dirname, '../monaco/monaco.d.ts.recipe'); +const DECLARATION_PATH = path_1.default.join(__dirname, '../../src/vs/monaco.d.ts'); function logErr(message, ...rest) { - fancyLog(ansiColors.yellow(`[monaco.d.ts]`), message, ...rest); + (0, fancy_log_1.default)(ansi_colors_1.default.yellow(`[monaco.d.ts]`), message, ...rest); } function isDeclaration(ts, a) { return (a.kind === ts.SyntaxKind.InterfaceDeclaration @@ -464,7 +467,7 @@ function generateDeclarationFile(ts, recipe, sourceFileGetter) { }; } function _run(ts, sourceFileGetter) { - const recipe = fs.readFileSync(exports.RECIPE_PATH).toString(); + const recipe = fs_1.default.readFileSync(exports.RECIPE_PATH).toString(); const t = generateDeclarationFile(ts, recipe, sourceFileGetter); if (!t) { return null; @@ -472,7 +475,7 @@ function _run(ts, sourceFileGetter) { const result = t.result; const usageContent = t.usageContent; const enums = t.enums; - const currentContent = fs.readFileSync(DECLARATION_PATH).toString(); + const currentContent = fs_1.default.readFileSync(DECLARATION_PATH).toString(); const one = currentContent.replace(/\r\n/gm, '\n'); const other = result.replace(/\r\n/gm, '\n'); const isTheSame = (one === other); @@ -486,13 +489,13 @@ function _run(ts, sourceFileGetter) { } class FSProvider { existsSync(filePath) { - return fs.existsSync(filePath); + return fs_1.default.existsSync(filePath); } statSync(filePath) { - return fs.statSync(filePath); + return fs_1.default.statSync(filePath); } readFileSync(_moduleId, filePath) { - return fs.readFileSync(filePath); + return fs_1.default.readFileSync(filePath); } } exports.FSProvider = FSProvider; @@ -532,9 +535,9 @@ class DeclarationResolver { } _getFileName(moduleId) { if (/\.d\.ts$/.test(moduleId)) { - return path.join(SRC, moduleId); + return path_1.default.join(SRC, moduleId); } - return path.join(SRC, `${moduleId}.ts`); + return path_1.default.join(SRC, `${moduleId}.ts`); } _getDeclarationSourceFile(moduleId) { const fileName = this._getFileName(moduleId); diff --git a/build/lib/monaco-api.ts b/build/lib/monaco-api.ts index 288bec0f858..5dc9a04266c 100644 --- a/build/lib/monaco-api.ts +++ b/build/lib/monaco-api.ts @@ -3,11 +3,11 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; +import fs from 'fs'; import type * as ts from 'typescript'; -import * as path from 'path'; -import * as fancyLog from 'fancy-log'; -import * as ansiColors from 'ansi-colors'; +import path from 'path'; +import fancyLog from 'fancy-log'; +import ansiColors from 'ansi-colors'; const dtsv = '3'; diff --git a/build/lib/nls.js b/build/lib/nls.js index 6ddcd46167a..af648b40ed8 100644 --- a/build/lib/nls.js +++ b/build/lib/nls.js @@ -3,14 +3,50 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.nls = nls; -const lazy = require("lazy.js"); +const lazy_js_1 = __importDefault(require("lazy.js")); const event_stream_1 = require("event-stream"); -const File = require("vinyl"); -const sm = require("source-map"); -const path = require("path"); -const sort = require("gulp-sort"); +const vinyl_1 = __importDefault(require("vinyl")); +const sm = __importStar(require("source-map")); +const path = __importStar(require("path")); +const gulp_sort_1 = __importDefault(require("gulp-sort")); var CollectStepResult; (function (CollectStepResult) { CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes"; @@ -46,7 +82,7 @@ function nls(options) { let base; const input = (0, event_stream_1.through)(); const output = input - .pipe(sort()) // IMPORTANT: to ensure stable NLS metadata generation, we must sort the files because NLS messages are globally extracted and indexed across all files + .pipe((0, gulp_sort_1.default)()) // IMPORTANT: to ensure stable NLS metadata generation, we must sort the files because NLS messages are globally extracted and indexed across all files .pipe((0, event_stream_1.through)(function (f) { if (!f.sourceMap) { return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`)); @@ -67,7 +103,7 @@ function nls(options) { this.emit('data', _nls.patchFile(f, typescript, options)); }, function () { for (const file of [ - new File({ + new vinyl_1.default({ contents: Buffer.from(JSON.stringify({ keys: _nls.moduleToNLSKeys, messages: _nls.moduleToNLSMessages, @@ -75,17 +111,17 @@ function nls(options) { base, path: `${base}/nls.metadata.json` }), - new File({ + new vinyl_1.default({ contents: Buffer.from(JSON.stringify(_nls.allNLSMessages)), base, path: `${base}/nls.messages.json` }), - new File({ + new vinyl_1.default({ contents: Buffer.from(JSON.stringify(_nls.allNLSModulesAndKeys)), base, path: `${base}/nls.keys.json` }), - new File({ + new vinyl_1.default({ contents: Buffer.from(`/*--------------------------------------------------------- * Copyright (C) Microsoft Corporation. All rights reserved. *--------------------------------------------------------*/ @@ -111,7 +147,7 @@ var _nls; _nls.allNLSModulesAndKeys = []; let allNLSMessagesIndex = 0; function fileFrom(file, contents, path = file.path) { - return new File({ + return new vinyl_1.default({ contents: Buffer.from(contents), base: file.base, cwd: file.cwd, @@ -163,7 +199,7 @@ var _nls; const service = ts.createLanguageService(serviceHost); const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true); // all imports - const imports = lazy(collect(ts, sourceFile, n => isImportNode(ts, n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse)); + const imports = (0, lazy_js_1.default)(collect(ts, sourceFile, n => isImportNode(ts, n) ? 
CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse)); // import nls = require('vs/nls'); const importEqualsDeclarations = imports .filter(n => n.kind === ts.SyntaxKind.ImportEqualsDeclaration) @@ -188,7 +224,7 @@ var _nls; .filter(r => !r.isWriteAccess) // find the deepest call expressions AST nodes that contain those references .map(r => collect(ts, sourceFile, n => isCallExpressionWithinTextSpanCollectStep(ts, r.textSpan, n))) - .map(a => lazy(a).last()) + .map(a => (0, lazy_js_1.default)(a).last()) .filter(n => !!n) .map(n => n) // only `localize` calls @@ -214,7 +250,7 @@ var _nls; const localizeCallExpressions = localizeReferences .concat(namedLocalizeReferences) .map(r => collect(ts, sourceFile, n => isCallExpressionWithinTextSpanCollectStep(ts, r.textSpan, n))) - .map(a => lazy(a).last()) + .map(a => (0, lazy_js_1.default)(a).last()) .filter(n => !!n) .map(n => n); // collect everything @@ -281,14 +317,14 @@ var _nls; } } toString() { - return lazy(this.lines).zip(this.lineEndings) + return (0, lazy_js_1.default)(this.lines).zip(this.lineEndings) .flatten().toArray().join(''); } } function patchJavascript(patches, contents) { const model = new TextModel(contents); // patch the localize calls - lazy(patches).reverse().each(p => model.apply(p)); + (0, lazy_js_1.default)(patches).reverse().each(p => model.apply(p)); return model.toString(); } function patchSourcemap(patches, rsm, smc) { @@ -349,7 +385,7 @@ var _nls; const end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end))); return { span: { start, end }, content: c.content }; }; - const localizePatches = lazy(localizeCalls) + const localizePatches = (0, lazy_js_1.default)(localizeCalls) .map(lc => (options.preserveEnglish ? [ { range: lc.keySpan, content: `${allNLSMessagesIndex++}` } // localize('key', "message") => localize(, "message") ] : [ @@ -358,7 +394,7 @@ var _nls; ])) .flatten() .map(toPatch); - const localize2Patches = lazy(localize2Calls) + const localize2Patches = (0, lazy_js_1.default)(localize2Calls) .map(lc => ({ range: lc.keySpan, content: `${allNLSMessagesIndex++}` } // localize2('key', "message") => localize(, "message") )) .map(toPatch); diff --git a/build/lib/nls.ts b/build/lib/nls.ts index cac832903a3..4194eb3c489 100644 --- a/build/lib/nls.ts +++ b/build/lib/nls.ts @@ -4,12 +4,12 @@ *--------------------------------------------------------------------------------------------*/ import type * as ts from 'typescript'; -import * as lazy from 'lazy.js'; +import lazy from 'lazy.js'; import { duplex, through } from 'event-stream'; -import * as File from 'vinyl'; +import File from 'vinyl'; import * as sm from 'source-map'; import * as path from 'path'; -import * as sort from 'gulp-sort'; +import sort from 'gulp-sort'; declare class FileSourceMap extends File { public sourceMap: sm.RawSourceMap; diff --git a/build/lib/node.js b/build/lib/node.js index 74a54a3c170..01a381183ff 100644 --- a/build/lib/node.js +++ b/build/lib/node.js @@ -3,16 +3,19 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const fs = require("fs"); -const root = path.dirname(path.dirname(__dirname)); -const npmrcPath = path.join(root, 'remote', '.npmrc'); -const npmrc = fs.readFileSync(npmrcPath, 'utf8'); +const path_1 = __importDefault(require("path")); +const fs_1 = __importDefault(require("fs")); +const root = path_1.default.dirname(path_1.default.dirname(__dirname)); +const npmrcPath = path_1.default.join(root, 'remote', '.npmrc'); +const npmrc = fs_1.default.readFileSync(npmrcPath, 'utf8'); const version = /^target="(.*)"$/m.exec(npmrc)[1]; const platform = process.platform; const arch = process.arch; const node = platform === 'win32' ? 'node.exe' : 'node'; -const nodePath = path.join(root, '.build', 'node', `v${version}`, `${platform}-${arch}`, node); +const nodePath = path_1.default.join(root, '.build', 'node', `v${version}`, `${platform}-${arch}`, node); console.log(nodePath); //# sourceMappingURL=node.js.map \ No newline at end of file diff --git a/build/lib/node.ts b/build/lib/node.ts index 4beb13ae91b..a2fdc361aa1 100644 --- a/build/lib/node.ts +++ b/build/lib/node.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as path from 'path'; -import * as fs from 'fs'; +import path from 'path'; +import fs from 'fs'; const root = path.dirname(path.dirname(__dirname)); const npmrcPath = path.join(root, 'remote', '.npmrc'); diff --git a/build/lib/optimize.js b/build/lib/optimize.js index 83f34dc0745..d45ff0d67d3 100644 --- a/build/lib/optimize.js +++ b/build/lib/optimize.js @@ -3,22 +3,58 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.bundleTask = bundleTask; exports.minifyTask = minifyTask; -const es = require("event-stream"); -const gulp = require("gulp"); -const filter = require("gulp-filter"); -const path = require("path"); -const fs = require("fs"); -const pump = require("pump"); -const VinylFile = require("vinyl"); -const bundle = require("./bundle"); +const es = __importStar(require("event-stream")); +const gulp = __importStar(require("gulp")); +const gulp_filter_1 = __importDefault(require("gulp-filter")); +const path = __importStar(require("path")); +const fs = __importStar(require("fs")); +const pump_1 = __importDefault(require("pump")); +const vinyl_1 = __importDefault(require("vinyl")); +const bundle = __importStar(require("./bundle")); const postcss_1 = require("./postcss"); -const esbuild = require("esbuild"); -const sourcemaps = require("gulp-sourcemaps"); -const fancyLog = require("fancy-log"); -const ansiColors = require("ansi-colors"); +const esbuild = __importStar(require("esbuild")); +const sourcemaps = __importStar(require("gulp-sourcemaps")); +const fancy_log_1 = __importDefault(require("fancy-log")); +const ansiColors = __importStar(require("ansi-colors")); const REPO_ROOT_PATH = path.join(__dirname, '../..'); const DEFAULT_FILE_HEADER = [ '/*!--------------------------------------------------------', @@ -44,7 +80,7 @@ function bundleESMTask(opts) { const files = []; const tasks = []; for (const entryPoint of entryPoints) { - fancyLog(`Bundled entry point: ${ansiColors.yellow(entryPoint.name)}...`); + (0, fancy_log_1.default)(`Bundled entry point: ${ansiColors.yellow(entryPoint.name)}...`); // support for 'dest' via esbuild#in/out const dest = entryPoint.dest?.replace(/\.[^/.]+$/, '') ?? entryPoint.name; // banner contents @@ -128,7 +164,7 @@ function bundleESMTask(opts) { path: file.path, base: path.join(REPO_ROOT_PATH, opts.src) }; - files.push(new VinylFile(fileProps)); + files.push(new vinyl_1.default(fileProps)); } }); tasks.push(task); @@ -160,10 +196,10 @@ function minifyTask(src, sourceMapBaseUrl) { return cb => { const cssnano = require('cssnano'); const svgmin = require('gulp-svgmin'); - const jsFilter = filter('**/*.js', { restore: true }); - const cssFilter = filter('**/*.css', { restore: true }); - const svgFilter = filter('**/*.svg', { restore: true }); - pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), es.map((f, cb) => { + const jsFilter = (0, gulp_filter_1.default)('**/*.js', { restore: true }); + const cssFilter = (0, gulp_filter_1.default)('**/*.css', { restore: true }); + const svgFilter = (0, gulp_filter_1.default)('**/*.svg', { restore: true }); + (0, pump_1.default)(gulp.src([src + '/**', '!' 
+ src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), es.map((f, cb) => { esbuild.build({ entryPoints: [f.path], minify: true, diff --git a/build/lib/optimize.ts b/build/lib/optimize.ts index 8c49fa81888..55566d4f241 100644 --- a/build/lib/optimize.ts +++ b/build/lib/optimize.ts @@ -5,16 +5,16 @@ import * as es from 'event-stream'; import * as gulp from 'gulp'; -import * as filter from 'gulp-filter'; +import filter from 'gulp-filter'; import * as path from 'path'; import * as fs from 'fs'; -import * as pump from 'pump'; -import * as VinylFile from 'vinyl'; +import pump from 'pump'; +import VinylFile from 'vinyl'; import * as bundle from './bundle'; import { gulpPostcss } from './postcss'; import * as esbuild from 'esbuild'; import * as sourcemaps from 'gulp-sourcemaps'; -import * as fancyLog from 'fancy-log'; +import fancyLog from 'fancy-log'; import * as ansiColors from 'ansi-colors'; const REPO_ROOT_PATH = path.join(__dirname, '../..'); diff --git a/build/lib/policies.js b/build/lib/policies.js index 1560dc7415d..d52015c550b 100644 --- a/build/lib/policies.js +++ b/build/lib/policies.js @@ -3,13 +3,16 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); const child_process_1 = require("child_process"); const fs_1 = require("fs"); -const path = require("path"); -const byline = require("byline"); +const path_1 = __importDefault(require("path")); +const byline_1 = __importDefault(require("byline")); const ripgrep_1 = require("@vscode/ripgrep"); -const Parser = require("tree-sitter"); +const tree_sitter_1 = __importDefault(require("tree-sitter")); const { typescript } = require('tree-sitter-typescript'); const product = require('../../product.json'); const packageJson = require('../../package.json'); @@ -258,7 +261,7 @@ const StringArrayQ = { } }; function getProperty(qtype, node, key) { - const query = new Parser.Query(typescript, `( + const query = new tree_sitter_1.default.Query(typescript, `( (pair key: [(property_identifier)(string)] @key value: ${qtype.Q} @@ -331,7 +334,7 @@ function getPolicy(moduleName, configurationNode, settingNode, policyNode, categ return result; } function getPolicies(moduleName, node) { - const query = new Parser.Query(typescript, ` + const query = new tree_sitter_1.default.Query(typescript, ` ( (call_expression function: (member_expression property: (property_identifier) @registerConfigurationFn) (#eq? 
@registerConfigurationFn registerConfiguration) @@ -360,7 +363,7 @@ async function getFiles(root) { return new Promise((c, e) => { const result = []; const rg = (0, child_process_1.spawn)(ripgrep_1.rgPath, ['-l', 'registerConfiguration\\(', '-g', 'src/**/*.ts', '-g', '!src/**/test/**', root]); - const stream = byline(rg.stdout.setEncoding('utf8')); + const stream = (0, byline_1.default)(rg.stdout.setEncoding('utf8')); stream.on('data', path => result.push(path)); stream.on('error', err => e(err)); stream.on('end', () => c(result)); @@ -494,13 +497,13 @@ async function getNLS(extensionGalleryServiceUrl, resourceUrlTemplate, languageI return await getSpecificNLS(resourceUrlTemplate, languageId, latestCompatibleVersion); } async function parsePolicies() { - const parser = new Parser(); + const parser = new tree_sitter_1.default(); parser.setLanguage(typescript); const files = await getFiles(process.cwd()); - const base = path.join(process.cwd(), 'src'); + const base = path_1.default.join(process.cwd(), 'src'); const policies = []; for (const file of files) { - const moduleName = path.relative(base, file).replace(/\.ts$/i, '').replace(/\\/g, '/'); + const moduleName = path_1.default.relative(base, file).replace(/\.ts$/i, '').replace(/\\/g, '/'); const contents = await fs_1.promises.readFile(file, { encoding: 'utf8' }); const tree = parser.parse(contents); policies.push(...getPolicies(moduleName, tree.rootNode)); @@ -529,11 +532,11 @@ async function main() { const root = '.build/policies/win32'; await fs_1.promises.rm(root, { recursive: true, force: true }); await fs_1.promises.mkdir(root, { recursive: true }); - await fs_1.promises.writeFile(path.join(root, `${product.win32RegValueName}.admx`), admx.replace(/\r?\n/g, '\n')); + await fs_1.promises.writeFile(path_1.default.join(root, `${product.win32RegValueName}.admx`), admx.replace(/\r?\n/g, '\n')); for (const { languageId, contents } of adml) { - const languagePath = path.join(root, languageId === 'en-us' ? 'en-us' : Languages[languageId]); + const languagePath = path_1.default.join(root, languageId === 'en-us' ? 'en-us' : Languages[languageId]); await fs_1.promises.mkdir(languagePath, { recursive: true }); - await fs_1.promises.writeFile(path.join(languagePath, `${product.win32RegValueName}.adml`), contents.replace(/\r?\n/g, '\n')); + await fs_1.promises.writeFile(path_1.default.join(languagePath, `${product.win32RegValueName}.adml`), contents.replace(/\r?\n/g, '\n')); } } if (require.main === module) { diff --git a/build/lib/policies.ts b/build/lib/policies.ts index f602c8a0d6e..57941d8e967 100644 --- a/build/lib/policies.ts +++ b/build/lib/policies.ts @@ -5,10 +5,10 @@ import { spawn } from 'child_process'; import { promises as fs } from 'fs'; -import * as path from 'path'; -import * as byline from 'byline'; +import path from 'path'; +import byline from 'byline'; import { rgPath } from '@vscode/ripgrep'; -import * as Parser from 'tree-sitter'; +import Parser from 'tree-sitter'; const { typescript } = require('tree-sitter-typescript'); const product = require('../../product.json'); const packageJson = require('../../package.json'); diff --git a/build/lib/postcss.js b/build/lib/postcss.js index 356015ab159..210a184e5f5 100644 --- a/build/lib/postcss.js +++ b/build/lib/postcss.js @@ -1,15 +1,18 @@ "use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.gulpPostcss = gulpPostcss; /*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -const postcss = require("postcss"); -const es = require("event-stream"); +const postcss_1 = __importDefault(require("postcss")); +const event_stream_1 = __importDefault(require("event-stream")); function gulpPostcss(plugins, handleError) { - const instance = postcss(plugins); - return es.map((file, callback) => { + const instance = (0, postcss_1.default)(plugins); + return event_stream_1.default.map((file, callback) => { if (file.isNull()) { return callback(null, file); } diff --git a/build/lib/postcss.ts b/build/lib/postcss.ts index cf3121e221e..9ec2188d13a 100644 --- a/build/lib/postcss.ts +++ b/build/lib/postcss.ts @@ -2,9 +2,9 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as postcss from 'postcss'; -import * as File from 'vinyl'; -import * as es from 'event-stream'; +import postcss from 'postcss'; +import File from 'vinyl'; +import es from 'event-stream'; export function gulpPostcss(plugins: postcss.AcceptedPlugin[], handleError?: (err: Error) => void) { const instance = postcss(plugins); diff --git a/build/lib/preLaunch.js b/build/lib/preLaunch.js index 4791514fdfe..75207fe50c0 100644 --- a/build/lib/preLaunch.js +++ b/build/lib/preLaunch.js @@ -3,13 +3,16 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); // @ts-check -const path = require("path"); +const path_1 = __importDefault(require("path")); const child_process_1 = require("child_process"); const fs_1 = require("fs"); const npm = process.platform === 'win32' ? 
'npm.cmd' : 'npm'; -const rootDir = path.resolve(__dirname, '..', '..'); +const rootDir = path_1.default.resolve(__dirname, '..', '..'); function runProcess(command, args = []) { return new Promise((resolve, reject) => { const child = (0, child_process_1.spawn)(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env, shell: process.platform === 'win32' }); @@ -19,7 +22,7 @@ function runProcess(command, args = []) { } async function exists(subdir) { try { - await fs_1.promises.stat(path.join(rootDir, subdir)); + await fs_1.promises.stat(path_1.default.join(rootDir, subdir)); return true; } catch { diff --git a/build/lib/preLaunch.ts b/build/lib/preLaunch.ts index e0ea274458a..0c178afcb59 100644 --- a/build/lib/preLaunch.ts +++ b/build/lib/preLaunch.ts @@ -5,7 +5,7 @@ // @ts-check -import * as path from 'path'; +import path from 'path'; import { spawn } from 'child_process'; import { promises as fs } from 'fs'; diff --git a/build/lib/reporter.js b/build/lib/reporter.js index 9d4a1b4fd79..16bb44ec539 100644 --- a/build/lib/reporter.js +++ b/build/lib/reporter.js @@ -3,13 +3,16 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.createReporter = createReporter; -const es = require("event-stream"); -const fancyLog = require("fancy-log"); -const ansiColors = require("ansi-colors"); -const fs = require("fs"); -const path = require("path"); +const event_stream_1 = __importDefault(require("event-stream")); +const fancy_log_1 = __importDefault(require("fancy-log")); +const ansi_colors_1 = __importDefault(require("ansi-colors")); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); class ErrorLog { id; constructor(id) { @@ -23,7 +26,7 @@ class ErrorLog { return; } this.startTime = new Date().getTime(); - fancyLog(`Starting ${ansiColors.green('compilation')}${this.id ? ansiColors.blue(` ${this.id}`) : ''}...`); + (0, fancy_log_1.default)(`Starting ${ansi_colors_1.default.green('compilation')}${this.id ? ansi_colors_1.default.blue(` ${this.id}`) : ''}...`); } onEnd() { if (--this.count > 0) { @@ -37,10 +40,10 @@ class ErrorLog { errors.map(err => { if (!seen.has(err)) { seen.add(err); - fancyLog(`${ansiColors.red('Error')}: ${err}`); + (0, fancy_log_1.default)(`${ansi_colors_1.default.red('Error')}: ${err}`); } }); - fancyLog(`Finished ${ansiColors.green('compilation')}${this.id ? ansiColors.blue(` ${this.id}`) : ''} with ${errors.length} errors after ${ansiColors.magenta((new Date().getTime() - this.startTime) + ' ms')}`); + (0, fancy_log_1.default)(`Finished ${ansi_colors_1.default.green('compilation')}${this.id ? ansi_colors_1.default.blue(` ${this.id}`) : ''} with ${errors.length} errors after ${ansi_colors_1.default.magenta((new Date().getTime() - this.startTime) + ' ms')}`); const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/s; const messages = errors .map(err => regex.exec(err)) @@ -49,7 +52,7 @@ class ErrorLog { .map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message })); try { const logFileName = 'log' + (this.id ? 
`_${this.id}` : ''); - fs.writeFileSync(path.join(buildLogFolder, logFileName), JSON.stringify(messages)); + fs_1.default.writeFileSync(path_1.default.join(buildLogFolder, logFileName), JSON.stringify(messages)); } catch (err) { //noop @@ -65,9 +68,9 @@ function getErrorLog(id = '') { } return errorLog; } -const buildLogFolder = path.join(path.dirname(path.dirname(__dirname)), '.build'); +const buildLogFolder = path_1.default.join(path_1.default.dirname(path_1.default.dirname(__dirname)), '.build'); try { - fs.mkdirSync(buildLogFolder); + fs_1.default.mkdirSync(buildLogFolder); } catch (err) { // ignore @@ -81,7 +84,7 @@ function createReporter(id) { result.end = (emitError) => { errors.length = 0; errorLog.onStart(); - return es.through(undefined, function () { + return event_stream_1.default.through(undefined, function () { errorLog.onEnd(); if (emitError && errors.length > 0) { if (!errors.__logged__) { diff --git a/build/lib/reporter.ts b/build/lib/reporter.ts index 382e0c78546..c21fd841c0d 100644 --- a/build/lib/reporter.ts +++ b/build/lib/reporter.ts @@ -3,11 +3,11 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as es from 'event-stream'; -import * as fancyLog from 'fancy-log'; -import * as ansiColors from 'ansi-colors'; -import * as fs from 'fs'; -import * as path from 'path'; +import es from 'event-stream'; +import fancyLog from 'fancy-log'; +import ansiColors from 'ansi-colors'; +import fs from 'fs'; +import path from 'path'; class ErrorLog { constructor(public id: string) { diff --git a/build/lib/snapshotLoader.js b/build/lib/snapshotLoader.js index 0e58ceedffa..7d9b3f154f1 100644 --- a/build/lib/snapshotLoader.js +++ b/build/lib/snapshotLoader.js @@ -3,6 +3,8 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.snaps = void 0; var snaps; (function (snaps) { const fs = require('fs'); @@ -52,5 +54,5 @@ var snaps; fs.writeFileSync(wrappedInputFilepath, wrappedInputFile); cp.execFileSync(mksnapshot, [wrappedInputFilepath, `--startup_blob`, startupBlobFilepath]); } -})(snaps || (snaps = {})); +})(snaps || (exports.snaps = snaps = {})); //# sourceMappingURL=snapshotLoader.js.map \ No newline at end of file diff --git a/build/lib/snapshotLoader.ts b/build/lib/snapshotLoader.ts index c3d66dba7e1..3cb2191144d 100644 --- a/build/lib/snapshotLoader.ts +++ b/build/lib/snapshotLoader.ts @@ -3,7 +3,7 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -namespace snaps { +export namespace snaps { const fs = require('fs'); const path = require('path'); diff --git a/build/lib/standalone.js b/build/lib/standalone.js index 16ae1e2b2d8..0e7a9ecc782 100644 --- a/build/lib/standalone.js +++ b/build/lib/standalone.js @@ -3,14 +3,50 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.extractEditor = extractEditor; exports.createESMSourcesAndResources2 = createESMSourcesAndResources2; -const fs = require("fs"); -const path = require("path"); -const tss = require("./treeshaking"); -const REPO_ROOT = path.join(__dirname, '../../'); -const SRC_DIR = path.join(REPO_ROOT, 'src'); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const tss = __importStar(require("./treeshaking")); +const REPO_ROOT = path_1.default.join(__dirname, '../../'); +const SRC_DIR = path_1.default.join(REPO_ROOT, 'src'); const dirCache = {}; function writeFile(filePath, contents) { function ensureDirs(dirPath) { @@ -18,21 +54,21 @@ function writeFile(filePath, contents) { return; } dirCache[dirPath] = true; - ensureDirs(path.dirname(dirPath)); - if (fs.existsSync(dirPath)) { + ensureDirs(path_1.default.dirname(dirPath)); + if (fs_1.default.existsSync(dirPath)) { return; } - fs.mkdirSync(dirPath); + fs_1.default.mkdirSync(dirPath); } - ensureDirs(path.dirname(filePath)); - fs.writeFileSync(filePath, contents); + ensureDirs(path_1.default.dirname(filePath)); + fs_1.default.writeFileSync(filePath, contents); } function extractEditor(options) { const ts = require('typescript'); - const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.monaco.json')).toString()); + const tsConfig = JSON.parse(fs_1.default.readFileSync(path_1.default.join(options.sourcesRoot, 'tsconfig.monaco.json')).toString()); let compilerOptions; if (tsConfig.extends) { - compilerOptions = Object.assign({}, require(path.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions); + compilerOptions = Object.assign({}, require(path_1.default.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions); delete tsConfig.extends; } else { @@ -62,7 +98,7 @@ function extractEditor(options) { const result = tss.shake(options); for (const fileName in result) { if 
(result.hasOwnProperty(fileName)) { - writeFile(path.join(options.destRoot, fileName), result[fileName]); + writeFile(path_1.default.join(options.destRoot, fileName), result[fileName]); } } const copied = {}; @@ -71,12 +107,12 @@ function extractEditor(options) { return; } copied[fileName] = true; - const srcPath = path.join(options.sourcesRoot, fileName); - const dstPath = path.join(options.destRoot, fileName); - writeFile(dstPath, fs.readFileSync(srcPath)); + const srcPath = path_1.default.join(options.sourcesRoot, fileName); + const dstPath = path_1.default.join(options.destRoot, fileName); + writeFile(dstPath, fs_1.default.readFileSync(srcPath)); }; const writeOutputFile = (fileName, contents) => { - writeFile(path.join(options.destRoot, fileName), contents); + writeFile(path_1.default.join(options.destRoot, fileName), contents); }; for (const fileName in result) { if (result.hasOwnProperty(fileName)) { @@ -86,14 +122,14 @@ function extractEditor(options) { const importedFileName = info.importedFiles[i].fileName; let importedFilePath = importedFileName; if (/(^\.\/)|(^\.\.\/)/.test(importedFilePath)) { - importedFilePath = path.join(path.dirname(fileName), importedFilePath); + importedFilePath = path_1.default.join(path_1.default.dirname(fileName), importedFilePath); } if (/\.css$/.test(importedFilePath)) { transportCSS(importedFilePath, copyFile, writeOutputFile); } else { - const pathToCopy = path.join(options.sourcesRoot, importedFilePath); - if (fs.existsSync(pathToCopy) && !fs.statSync(pathToCopy).isDirectory()) { + const pathToCopy = path_1.default.join(options.sourcesRoot, importedFilePath); + if (fs_1.default.existsSync(pathToCopy) && !fs_1.default.statSync(pathToCopy).isDirectory()) { copyFile(importedFilePath); } } @@ -107,18 +143,18 @@ function extractEditor(options) { ].forEach(copyFile); } function createESMSourcesAndResources2(options) { - const SRC_FOLDER = path.join(REPO_ROOT, options.srcFolder); - const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder); - const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder); + const SRC_FOLDER = path_1.default.join(REPO_ROOT, options.srcFolder); + const OUT_FOLDER = path_1.default.join(REPO_ROOT, options.outFolder); + const OUT_RESOURCES_FOLDER = path_1.default.join(REPO_ROOT, options.outResourcesFolder); const getDestAbsoluteFilePath = (file) => { const dest = options.renames[file.replace(/\\/g, '/')] || file; if (dest === 'tsconfig.json') { - return path.join(OUT_FOLDER, `tsconfig.json`); + return path_1.default.join(OUT_FOLDER, `tsconfig.json`); } if (/\.ts$/.test(dest)) { - return path.join(OUT_FOLDER, dest); + return path_1.default.join(OUT_FOLDER, dest); } - return path.join(OUT_RESOURCES_FOLDER, dest); + return path_1.default.join(OUT_RESOURCES_FOLDER, dest); }; const allFiles = walkDirRecursive(SRC_FOLDER); for (const file of allFiles) { @@ -126,15 +162,15 @@ function createESMSourcesAndResources2(options) { continue; } if (file === 'tsconfig.json') { - const tsConfig = JSON.parse(fs.readFileSync(path.join(SRC_FOLDER, file)).toString()); + const tsConfig = JSON.parse(fs_1.default.readFileSync(path_1.default.join(SRC_FOLDER, file)).toString()); tsConfig.compilerOptions.module = 'es2022'; - tsConfig.compilerOptions.outDir = path.join(path.relative(OUT_FOLDER, OUT_RESOURCES_FOLDER), 'vs').replace(/\\/g, '/'); + tsConfig.compilerOptions.outDir = path_1.default.join(path_1.default.relative(OUT_FOLDER, OUT_RESOURCES_FOLDER), 'vs').replace(/\\/g, '/'); write(getDestAbsoluteFilePath(file), 
JSON.stringify(tsConfig, null, '\t')); continue; } if (/\.ts$/.test(file) || /\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) { // Transport the files directly - write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file))); + write(getDestAbsoluteFilePath(file), fs_1.default.readFileSync(path_1.default.join(SRC_FOLDER, file))); continue; } console.log(`UNKNOWN FILE: ${file}`); @@ -148,10 +184,10 @@ function createESMSourcesAndResources2(options) { return result; } function _walkDirRecursive(dir, result, trimPos) { - const files = fs.readdirSync(dir); + const files = fs_1.default.readdirSync(dir); for (let i = 0; i < files.length; i++) { - const file = path.join(dir, files[i]); - if (fs.statSync(file).isDirectory()) { + const file = path_1.default.join(dir, files[i]); + if (fs_1.default.statSync(file).isDirectory()) { _walkDirRecursive(file, result, trimPos); } else { @@ -206,8 +242,8 @@ function transportCSS(module, enqueue, write) { if (!/\.css/.test(module)) { return false; } - const filename = path.join(SRC_DIR, module); - const fileContents = fs.readFileSync(filename).toString(); + const filename = path_1.default.join(SRC_DIR, module); + const fileContents = fs_1.default.readFileSync(filename).toString(); const inlineResources = 'base64'; // see https://github.com/microsoft/monaco-editor/issues/148 const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64'); write(module, newContents); @@ -217,12 +253,12 @@ function transportCSS(module, enqueue, write) { const fontMatch = url.match(/^(.*).ttf\?(.*)$/); if (fontMatch) { const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter - const fontPath = path.join(path.dirname(module), relativeFontPath); + const fontPath = path_1.default.join(path_1.default.dirname(module), relativeFontPath); enqueue(fontPath); return relativeFontPath; } - const imagePath = path.join(path.dirname(module), url); - const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath)); + const imagePath = path_1.default.join(path_1.default.dirname(module), url); + const fileContents = fs_1.default.readFileSync(path_1.default.join(SRC_DIR, imagePath)); const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png'; let DATA = ';base64,' + fileContents.toString('base64'); if (!forceBase64 && /\.svg$/.test(url)) { diff --git a/build/lib/standalone.ts b/build/lib/standalone.ts index 8736583fb09..b2ae02f1007 100644 --- a/build/lib/standalone.ts +++ b/build/lib/standalone.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; +import fs from 'fs'; +import path from 'path'; import * as tss from './treeshaking'; const REPO_ROOT = path.join(__dirname, '../../'); diff --git a/build/lib/stats.js b/build/lib/stats.js index e089cb0c1b4..3f6d953ae40 100644 --- a/build/lib/stats.js +++ b/build/lib/stats.js @@ -3,11 +3,14 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.createStatsStream = createStatsStream; -const es = require("event-stream"); -const fancyLog = require("fancy-log"); -const ansiColors = require("ansi-colors"); +const event_stream_1 = __importDefault(require("event-stream")); +const fancy_log_1 = __importDefault(require("fancy-log")); +const ansi_colors_1 = __importDefault(require("ansi-colors")); class Entry { name; totalCount; @@ -28,13 +31,13 @@ class Entry { } else { if (this.totalCount === 1) { - return `Stats for '${ansiColors.grey(this.name)}': ${Math.round(this.totalSize / 1204)}KB`; + return `Stats for '${ansi_colors_1.default.grey(this.name)}': ${Math.round(this.totalSize / 1204)}KB`; } else { const count = this.totalCount < 100 - ? ansiColors.green(this.totalCount.toString()) - : ansiColors.red(this.totalCount.toString()); - return `Stats for '${ansiColors.grey(this.name)}': ${count} files, ${Math.round(this.totalSize / 1204)}KB`; + ? ansi_colors_1.default.green(this.totalCount.toString()) + : ansi_colors_1.default.red(this.totalCount.toString()); + return `Stats for '${ansi_colors_1.default.grey(this.name)}': ${count} files, ${Math.round(this.totalSize / 1204)}KB`; } } } @@ -43,7 +46,7 @@ const _entries = new Map(); function createStatsStream(group, log) { const entry = new Entry(group, 0, 0); _entries.set(entry.name, entry); - return es.through(function (data) { + return event_stream_1.default.through(function (data) { const file = data; if (typeof file.path === 'string') { entry.totalCount += 1; @@ -61,13 +64,13 @@ function createStatsStream(group, log) { }, function () { if (log) { if (entry.totalCount === 1) { - fancyLog(`Stats for '${ansiColors.grey(entry.name)}': ${Math.round(entry.totalSize / 1204)}KB`); + (0, fancy_log_1.default)(`Stats for '${ansi_colors_1.default.grey(entry.name)}': ${Math.round(entry.totalSize / 1204)}KB`); } else { const count = entry.totalCount < 100 - ? ansiColors.green(entry.totalCount.toString()) - : ansiColors.red(entry.totalCount.toString()); - fancyLog(`Stats for '${ansiColors.grey(entry.name)}': ${count} files, ${Math.round(entry.totalSize / 1204)}KB`); + ? ansi_colors_1.default.green(entry.totalCount.toString()) + : ansi_colors_1.default.red(entry.totalCount.toString()); + (0, fancy_log_1.default)(`Stats for '${ansi_colors_1.default.grey(entry.name)}': ${count} files, ${Math.round(entry.totalSize / 1204)}KB`); } } this.emit('end'); diff --git a/build/lib/stats.ts b/build/lib/stats.ts index fe4b22453b5..8db55d3e777 100644 --- a/build/lib/stats.ts +++ b/build/lib/stats.ts @@ -3,10 +3,10 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as es from 'event-stream'; -import * as fancyLog from 'fancy-log'; -import * as ansiColors from 'ansi-colors'; -import * as File from 'vinyl'; +import es from 'event-stream'; +import fancyLog from 'fancy-log'; +import ansiColors from 'ansi-colors'; +import File from 'vinyl'; class Entry { constructor(readonly name: string, public totalCount: number, public totalSize: number) { } diff --git a/build/lib/stylelint/validateVariableNames.js b/build/lib/stylelint/validateVariableNames.js index 6a50d1d6894..b0e064e7b56 100644 --- a/build/lib/stylelint/validateVariableNames.js +++ b/build/lib/stylelint/validateVariableNames.js @@ -3,15 +3,18 @@ * Copyright (c) Microsoft Corporation. All rights reserved. 
* Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.getVariableNameValidator = getVariableNameValidator; const fs_1 = require("fs"); -const path = require("path"); +const path_1 = __importDefault(require("path")); const RE_VAR_PROP = /var\(\s*(--([\w\-\.]+))/g; let knownVariables; function getKnownVariableNames() { if (!knownVariables) { - const knownVariablesFileContent = (0, fs_1.readFileSync)(path.join(__dirname, './vscode-known-variables.json'), 'utf8').toString(); + const knownVariablesFileContent = (0, fs_1.readFileSync)(path_1.default.join(__dirname, './vscode-known-variables.json'), 'utf8').toString(); const knownVariablesInfo = JSON.parse(knownVariablesFileContent); knownVariables = new Set([...knownVariablesInfo.colors, ...knownVariablesInfo.others]); } diff --git a/build/lib/stylelint/validateVariableNames.ts b/build/lib/stylelint/validateVariableNames.ts index 6d9fa8a7cef..b28aed13f4b 100644 --- a/build/lib/stylelint/validateVariableNames.ts +++ b/build/lib/stylelint/validateVariableNames.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { readFileSync } from 'fs'; -import path = require('path'); +import path from 'path'; const RE_VAR_PROP = /var\(\s*(--([\w\-\.]+))/g; diff --git a/build/lib/task.js b/build/lib/task.js index 597b2a0d397..6887714681a 100644 --- a/build/lib/task.js +++ b/build/lib/task.js @@ -3,12 +3,15 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.series = series; exports.parallel = parallel; exports.define = define; -const fancyLog = require("fancy-log"); -const ansiColors = require("ansi-colors"); +const fancy_log_1 = __importDefault(require("fancy-log")); +const ansi_colors_1 = __importDefault(require("ansi-colors")); function _isPromise(p) { if (typeof p.then === 'function') { return true; @@ -21,14 +24,14 @@ function _renderTime(time) { async function _execute(task) { const name = task.taskName || task.displayName || ``; if (!task._tasks) { - fancyLog('Starting', ansiColors.cyan(name), '...'); + (0, fancy_log_1.default)('Starting', ansi_colors_1.default.cyan(name), '...'); } const startTime = process.hrtime(); await _doExecute(task); const elapsedArr = process.hrtime(startTime); const elapsedNanoseconds = (elapsedArr[0] * 1e9 + elapsedArr[1]); if (!task._tasks) { - fancyLog(`Finished`, ansiColors.cyan(name), 'after', ansiColors.magenta(_renderTime(elapsedNanoseconds / 1e6))); + (0, fancy_log_1.default)(`Finished`, ansi_colors_1.default.cyan(name), 'after', ansi_colors_1.default.magenta(_renderTime(elapsedNanoseconds / 1e6))); } } async function _doExecute(task) { diff --git a/build/lib/task.ts b/build/lib/task.ts index 7d2a4dee016..6af23983178 100644 --- a/build/lib/task.ts +++ b/build/lib/task.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fancyLog from 'fancy-log'; -import * as ansiColors from 'ansi-colors'; +import fancyLog from 'fancy-log'; +import ansiColors from 'ansi-colors'; export interface BaseTask { displayName?: string; diff --git a/build/lib/test/i18n.test.js b/build/lib/test/i18n.test.js index b8f4a2bedef..41aa8a7f668 100644 --- a/build/lib/test/i18n.test.js +++ b/build/lib/test/i18n.test.js @@ -3,9 +3,45 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const i18n = require("../i18n"); +const assert_1 = __importDefault(require("assert")); +const i18n = __importStar(require("../i18n")); suite('XLF Parser Tests', () => { const sampleXlf = 'Key #1Key #2 &'; const sampleTranslatedXlf = 'Key #1Кнопка #1Key #2 &Кнопка #2 &'; @@ -17,25 +53,25 @@ suite('XLF Parser Tests', () => { const xlf = new i18n.XLF('vscode-workbench'); xlf.addFile(name, keys, messages); const xlfString = xlf.toString(); - assert.strictEqual(xlfString.replace(/\s{2,}/g, ''), sampleXlf); + assert_1.default.strictEqual(xlfString.replace(/\s{2,}/g, ''), sampleXlf); }); test('XLF to keys & messages conversion', () => { i18n.XLF.parse(sampleTranslatedXlf).then(function (resolvedFiles) { - assert.deepStrictEqual(resolvedFiles[0].messages, translatedMessages); - assert.strictEqual(resolvedFiles[0].name, name); + assert_1.default.deepStrictEqual(resolvedFiles[0].messages, translatedMessages); + assert_1.default.strictEqual(resolvedFiles[0].name, name); }); }); test('JSON file source path to Transifex resource match', () => { const editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench'; const platform = { name: 'vs/platform', project: editorProject }, editorContrib = { name: 'vs/editor/contrib', project: editorProject }, editor = { name: 'vs/editor', project: editorProject }, base = { name: 'vs/base', project: editorProject }, code = { name: 'vs/code', project: workbenchProject }, workbenchParts = { name: 'vs/workbench/contrib/html', project: workbenchProject }, workbenchServices = { name: 'vs/workbench/services/textfile', project: workbenchProject }, workbench = { name: 'vs/workbench', project: workbenchProject }; - assert.deepStrictEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform); - assert.deepStrictEqual(i18n.getResource('vs/editor/contrib/clipboard/browser/clipboard'), editorContrib); - assert.deepStrictEqual(i18n.getResource('vs/editor/common/modes/modesRegistry'), editor); - assert.deepStrictEqual(i18n.getResource('vs/base/common/errorMessage'), base); - assert.deepStrictEqual(i18n.getResource('vs/code/electron-main/window'), code); - assert.deepStrictEqual(i18n.getResource('vs/workbench/contrib/html/browser/webview'), workbenchParts); - assert.deepStrictEqual(i18n.getResource('vs/workbench/services/textfile/node/testFileService'), workbenchServices); - assert.deepStrictEqual(i18n.getResource('vs/workbench/browser/parts/panel/panelActions'), workbench); + assert_1.default.deepStrictEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform); + 
assert_1.default.deepStrictEqual(i18n.getResource('vs/editor/contrib/clipboard/browser/clipboard'), editorContrib); + assert_1.default.deepStrictEqual(i18n.getResource('vs/editor/common/modes/modesRegistry'), editor); + assert_1.default.deepStrictEqual(i18n.getResource('vs/base/common/errorMessage'), base); + assert_1.default.deepStrictEqual(i18n.getResource('vs/code/electron-main/window'), code); + assert_1.default.deepStrictEqual(i18n.getResource('vs/workbench/contrib/html/browser/webview'), workbenchParts); + assert_1.default.deepStrictEqual(i18n.getResource('vs/workbench/services/textfile/node/testFileService'), workbenchServices); + assert_1.default.deepStrictEqual(i18n.getResource('vs/workbench/browser/parts/panel/panelActions'), workbench); }); }); //# sourceMappingURL=i18n.test.js.map \ No newline at end of file diff --git a/build/lib/test/i18n.test.ts b/build/lib/test/i18n.test.ts index b8a68323dd7..4e4545548b8 100644 --- a/build/lib/test/i18n.test.ts +++ b/build/lib/test/i18n.test.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import assert = require('assert'); -import i18n = require('../i18n'); +import assert from 'assert'; +import * as i18n from '../i18n'; suite('XLF Parser Tests', () => { const sampleXlf = 'Key #1Key #2 &'; diff --git a/build/lib/treeshaking.js b/build/lib/treeshaking.js index af06f4e3ec5..d51eee91f1e 100644 --- a/build/lib/treeshaking.js +++ b/build/lib/treeshaking.js @@ -3,13 +3,16 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.ShakeLevel = void 0; exports.toStringShakeLevel = toStringShakeLevel; exports.shake = shake; -const fs = require("fs"); -const path = require("path"); -const TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts')); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const TYPESCRIPT_LIB_FOLDER = path_1.default.dirname(require.resolve('typescript/lib/lib.d.ts')); var ShakeLevel; (function (ShakeLevel) { ShakeLevel[ShakeLevel["Files"] = 0] = "Files"; @@ -30,7 +33,7 @@ function printDiagnostics(options, diagnostics) { for (const diag of diagnostics) { let result = ''; if (diag.file) { - result += `${path.join(options.sourcesRoot, diag.file.fileName)}`; + result += `${path_1.default.join(options.sourcesRoot, diag.file.fileName)}`; } if (diag.file && diag.start) { const location = diag.file.getLineAndCharacterOfPosition(diag.start); @@ -72,8 +75,8 @@ function createTypeScriptLanguageService(ts, options) { }); // Add additional typings options.typings.forEach((typing) => { - const filePath = path.join(options.sourcesRoot, typing); - FILES[typing] = fs.readFileSync(filePath).toString(); + const filePath = path_1.default.join(options.sourcesRoot, typing); + FILES[typing] = fs_1.default.readFileSync(filePath).toString(); }); // Resolve libs const RESOLVED_LIBS = processLibFiles(ts, options); @@ -104,19 +107,19 @@ function discoverAndReadFiles(ts, options) { if (options.redirects[moduleId]) { redirectedModuleId = options.redirects[moduleId]; } - const dts_filename = path.join(options.sourcesRoot, redirectedModuleId + '.d.ts'); - if (fs.existsSync(dts_filename)) { - const dts_filecontents = fs.readFileSync(dts_filename).toString(); + const dts_filename = path_1.default.join(options.sourcesRoot, redirectedModuleId + '.d.ts'); + if (fs_1.default.existsSync(dts_filename)) { + const dts_filecontents = fs_1.default.readFileSync(dts_filename).toString(); FILES[`${moduleId}.d.ts`] = dts_filecontents; continue; } - const js_filename = path.join(options.sourcesRoot, redirectedModuleId + '.js'); - if (fs.existsSync(js_filename)) { + const js_filename = path_1.default.join(options.sourcesRoot, redirectedModuleId + '.js'); + if (fs_1.default.existsSync(js_filename)) { // This is an import for a .js file, so ignore it... 
continue; } - const ts_filename = path.join(options.sourcesRoot, redirectedModuleId + '.ts'); - const ts_filecontents = fs.readFileSync(ts_filename).toString(); + const ts_filename = path_1.default.join(options.sourcesRoot, redirectedModuleId + '.ts'); + const ts_filecontents = fs_1.default.readFileSync(ts_filename).toString(); const info = ts.preProcessFile(ts_filecontents); for (let i = info.importedFiles.length - 1; i >= 0; i--) { const importedFileName = info.importedFiles[i].fileName; @@ -126,7 +129,7 @@ function discoverAndReadFiles(ts, options) { } let importedModuleId = importedFileName; if (/(^\.\/)|(^\.\.\/)/.test(importedModuleId)) { - importedModuleId = path.join(path.dirname(moduleId), importedModuleId); + importedModuleId = path_1.default.join(path_1.default.dirname(moduleId), importedModuleId); if (importedModuleId.endsWith('.js')) { // ESM: code imports require to be relative and have a '.js' file extension importedModuleId = importedModuleId.substr(0, importedModuleId.length - 3); } @@ -148,8 +151,8 @@ function processLibFiles(ts, options) { const key = `defaultLib:${filename}`; if (!result[key]) { // add this file - const filepath = path.join(TYPESCRIPT_LIB_FOLDER, filename); - const sourceText = fs.readFileSync(filepath).toString(); + const filepath = path_1.default.join(TYPESCRIPT_LIB_FOLDER, filename); + const sourceText = fs_1.default.readFileSync(filepath).toString(); result[key] = sourceText; // precess dependencies and "recurse" const info = ts.preProcessFile(sourceText); @@ -459,7 +462,7 @@ function markNodes(ts, languageService, options) { if (importText.endsWith('.js')) { // ESM: code imports require to be relative and to have a '.js' file extension importText = importText.substr(0, importText.length - 3); } - fullPath = path.join(path.dirname(nodeSourceFile.fileName), importText) + '.ts'; + fullPath = path_1.default.join(path_1.default.dirname(nodeSourceFile.fileName), importText) + '.ts'; } else { fullPath = importText + '.ts'; diff --git a/build/lib/treeshaking.ts b/build/lib/treeshaking.ts index cd17c5f0278..ac71bb205da 100644 --- a/build/lib/treeshaking.ts +++ b/build/lib/treeshaking.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; +import fs from 'fs'; +import path from 'path'; import type * as ts from 'typescript'; const TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts')); diff --git a/build/lib/tsb/builder.js b/build/lib/tsb/builder.js index e7a2519d1c9..f720699680d 100644 --- a/build/lib/tsb/builder.js +++ b/build/lib/tsb/builder.js @@ -3,16 +3,52 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.CancellationToken = void 0; exports.createTypeScriptBuilder = createTypeScriptBuilder; -const fs = require("fs"); -const path = require("path"); -const crypto = require("crypto"); -const utils = require("./utils"); -const colors = require("ansi-colors"); -const ts = require("typescript"); -const Vinyl = require("vinyl"); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const crypto_1 = __importDefault(require("crypto")); +const utils = __importStar(require("./utils")); +const ansi_colors_1 = __importDefault(require("ansi-colors")); +const typescript_1 = __importDefault(require("typescript")); +const vinyl_1 = __importDefault(require("vinyl")); const source_map_1 = require("source-map"); var CancellationToken; (function (CancellationToken) { @@ -28,7 +64,7 @@ function createTypeScriptBuilder(config, projectFile, cmd) { const host = new LanguageServiceHost(cmd, projectFile, _log); const outHost = new LanguageServiceHost({ ...cmd, options: { ...cmd.options, sourceRoot: cmd.options.outDir } }, cmd.options.outDir ?? 
'', _log); let lastCycleCheckVersion; - const service = ts.createLanguageService(host, ts.createDocumentRegistry()); + const service = typescript_1.default.createLanguageService(host, typescript_1.default.createDocumentRegistry()); const lastBuildVersion = Object.create(null); const lastDtsHash = Object.create(null); const userWantsDeclarations = cmd.options.declaration; @@ -92,7 +128,7 @@ function createTypeScriptBuilder(config, projectFile, cmd) { if (/\.d\.ts$/.test(fileName)) { // if it's already a d.ts file just emit it signature const snapshot = host.getScriptSnapshot(fileName); - const signature = crypto.createHash('sha256') + const signature = crypto_1.default.createHash('sha256') .update(snapshot.getText(0, snapshot.getLength())) .digest('base64'); return resolve({ @@ -109,7 +145,7 @@ function createTypeScriptBuilder(config, projectFile, cmd) { continue; } if (/\.d\.ts$/.test(file.name)) { - signature = crypto.createHash('sha256') + signature = crypto_1.default.createHash('sha256') .update(file.text) .digest('base64'); if (!userWantsDeclarations) { @@ -117,7 +153,7 @@ function createTypeScriptBuilder(config, projectFile, cmd) { continue; } } - const vinyl = new Vinyl({ + const vinyl = new vinyl_1.default({ path: file.name, contents: Buffer.from(file.text), base: !config._emitWithoutBasePath && baseFor(host.getScriptSnapshot(fileName)) || undefined @@ -125,9 +161,9 @@ function createTypeScriptBuilder(config, projectFile, cmd) { if (!emitSourceMapsInStream && /\.js$/.test(file.name)) { const sourcemapFile = output.outputFiles.filter(f => /\.js\.map$/.test(f.name))[0]; if (sourcemapFile) { - const extname = path.extname(vinyl.relative); - const basename = path.basename(vinyl.relative, extname); - const dirname = path.dirname(vinyl.relative); + const extname = path_1.default.extname(vinyl.relative); + const basename = path_1.default.basename(vinyl.relative, extname); + const dirname = path_1.default.dirname(vinyl.relative); const tsname = (dirname === '.' ? 
'' : dirname + '/') + basename + '.ts'; let sourceMap = JSON.parse(sourcemapFile.text); sourceMap.sources[0] = tsname.replace(/\\/g, '/'); @@ -359,7 +395,7 @@ function createTypeScriptBuilder(config, projectFile, cmd) { delete oldErrors[projectFile]; if (oneCycle) { const cycleError = { - category: ts.DiagnosticCategory.Error, + category: typescript_1.default.DiagnosticCategory.Error, code: 1, file: undefined, start: undefined, @@ -383,7 +419,7 @@ function createTypeScriptBuilder(config, projectFile, cmd) { // print stats const headNow = process.memoryUsage().heapUsed; const MB = 1024 * 1024; - _log('[tsb]', `time: ${colors.yellow((Date.now() - t1) + 'ms')} + \nmem: ${colors.cyan(Math.ceil(headNow / MB) + 'MB')} ${colors.bgcyan('delta: ' + Math.ceil((headNow - headUsed) / MB))}`); + _log('[tsb]', `time: ${ansi_colors_1.default.yellow((Date.now() - t1) + 'ms')} + \nmem: ${ansi_colors_1.default.cyan(Math.ceil(headNow / MB) + 'MB')} ${ansi_colors_1.default.bgcyan('delta: ' + Math.ceil((headNow - headUsed) / MB))}`); headUsed = headNow; }); } @@ -480,11 +516,11 @@ class LanguageServiceHost { let result = this._snapshots[filename]; if (!result && resolve) { try { - result = new VinylScriptSnapshot(new Vinyl({ + result = new VinylScriptSnapshot(new vinyl_1.default({ path: filename, - contents: fs.readFileSync(filename), + contents: fs_1.default.readFileSync(filename), base: this.getCompilationSettings().outDir, - stat: fs.statSync(filename) + stat: fs_1.default.statSync(filename) })); this.addScriptSnapshot(filename, result); } @@ -529,16 +565,16 @@ class LanguageServiceHost { return delete this._snapshots[filename]; } getCurrentDirectory() { - return path.dirname(this._projectPath); + return path_1.default.dirname(this._projectPath); } getDefaultLibFileName(options) { - return ts.getDefaultLibFilePath(options); + return typescript_1.default.getDefaultLibFilePath(options); } - directoryExists = ts.sys.directoryExists; - getDirectories = ts.sys.getDirectories; - fileExists = ts.sys.fileExists; - readFile = ts.sys.readFile; - readDirectory = ts.sys.readDirectory; + directoryExists = typescript_1.default.sys.directoryExists; + getDirectories = typescript_1.default.sys.getDirectories; + fileExists = typescript_1.default.sys.fileExists; + readFile = typescript_1.default.sys.readFile; + readDirectory = typescript_1.default.sys.readDirectory; // ---- dependency management collectDependents(filename, target) { while (this._dependenciesRecomputeList.length) { @@ -570,18 +606,18 @@ class LanguageServiceHost { this._log('processFile', `Missing snapshot for: ${filename}`); return; } - const info = ts.preProcessFile(snapshot.getText(0, snapshot.getLength()), true); + const info = typescript_1.default.preProcessFile(snapshot.getText(0, snapshot.getLength()), true); // (0) clear out old dependencies this._dependencies.resetNode(filename); // (1) ///-references info.referencedFiles.forEach(ref => { - const resolvedPath = path.resolve(path.dirname(filename), ref.fileName); + const resolvedPath = path_1.default.resolve(path_1.default.dirname(filename), ref.fileName); const normalizedPath = normalize(resolvedPath); this._dependencies.inertEdge(filename, normalizedPath); }); // (2) import-require statements info.importedFiles.forEach(ref => { - if (!ref.fileName.startsWith('.') || path.extname(ref.fileName) === '') { + if (!ref.fileName.startsWith('.') || path_1.default.extname(ref.fileName) === '') { // node module? 
return; } @@ -589,8 +625,8 @@ class LanguageServiceHost { let dirname = filename; let found = false; while (!found && dirname.indexOf(stopDirname) === 0) { - dirname = path.dirname(dirname); - let resolvedPath = path.resolve(dirname, ref.fileName); + dirname = path_1.default.dirname(dirname); + let resolvedPath = path_1.default.resolve(dirname, ref.fileName); if (resolvedPath.endsWith('.js')) { resolvedPath = resolvedPath.slice(0, -3); } diff --git a/build/lib/tsb/builder.ts b/build/lib/tsb/builder.ts index 509284d0cdc..403d2cec932 100644 --- a/build/lib/tsb/builder.ts +++ b/build/lib/tsb/builder.ts @@ -3,13 +3,13 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as fs from 'fs'; -import * as path from 'path'; -import * as crypto from 'crypto'; +import fs from 'fs'; +import path from 'path'; +import crypto from 'crypto'; import * as utils from './utils'; -import * as colors from 'ansi-colors'; -import * as ts from 'typescript'; -import * as Vinyl from 'vinyl'; +import colors from 'ansi-colors'; +import ts from 'typescript'; +import Vinyl from 'vinyl'; import { RawSourceMap, SourceMapConsumer, SourceMapGenerator } from 'source-map'; export interface IConfiguration { diff --git a/build/lib/tsb/index.js b/build/lib/tsb/index.js index 204c06e80ac..843b76c823f 100644 --- a/build/lib/tsb/index.js +++ b/build/lib/tsb/index.js @@ -3,17 +3,53 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.create = create; -const Vinyl = require("vinyl"); -const through = require("through"); -const builder = require("./builder"); -const ts = require("typescript"); +const vinyl_1 = __importDefault(require("vinyl")); +const through_1 = __importDefault(require("through")); +const builder = __importStar(require("./builder")); +const typescript_1 = __importDefault(require("typescript")); const stream_1 = require("stream"); const path_1 = require("path"); const utils_1 = require("./utils"); const fs_1 = require("fs"); -const log = require("fancy-log"); +const fancy_log_1 = __importDefault(require("fancy-log")); const transpiler_1 = require("./transpiler"); const colors = require("ansi-colors"); class EmptyDuplex extends stream_1.Duplex { @@ -32,31 +68,31 @@ function create(projectPath, existingOptions, config, onError = _defaultOnError) onError(diag.message); } else if (!diag.file || !diag.start) { - onError(ts.flattenDiagnosticMessageText(diag.messageText, '\n')); + onError(typescript_1.default.flattenDiagnosticMessageText(diag.messageText, '\n')); } else { const lineAndCh = diag.file.getLineAndCharacterOfPosition(diag.start); - onError(utils_1.strings.format('{0}({1},{2}): {3}', diag.file.fileName, lineAndCh.line + 1, lineAndCh.character + 1, ts.flattenDiagnosticMessageText(diag.messageText, '\n'))); + onError(utils_1.strings.format('{0}({1},{2}): {3}', diag.file.fileName, lineAndCh.line + 1, lineAndCh.character + 1, typescript_1.default.flattenDiagnosticMessageText(diag.messageText, '\n'))); } } - const parsed = ts.readConfigFile(projectPath, ts.sys.readFile); + const parsed = typescript_1.default.readConfigFile(projectPath, typescript_1.default.sys.readFile); if (parsed.error) { printDiagnostic(parsed.error); return createNullCompiler(); } - const cmdLine = ts.parseJsonConfigFileContent(parsed.config, ts.sys, (0, path_1.dirname)(projectPath), existingOptions); + const cmdLine = typescript_1.default.parseJsonConfigFileContent(parsed.config, typescript_1.default.sys, (0, path_1.dirname)(projectPath), existingOptions); if (cmdLine.errors.length > 0) { cmdLine.errors.forEach(printDiagnostic); return createNullCompiler(); } function logFn(topic, message) { if (config.verbose) { - log(colors.cyan(topic), message); + (0, fancy_log_1.default)(colors.cyan(topic), message); } } // FULL COMPILE stream doing transpile, syntax and semantic diagnostics function createCompileStream(builder, token) { - return through(function (file) { + return (0, through_1.default)(function (file) { // give the file to the compiler if (file.isStream()) { this.emit('error', 'no support for streams'); @@ -70,7 +106,7 @@ function create(projectPath, existingOptions, config, onError = _defaultOnError) } // TRANSPILE ONLY stream doing just TS to JS conversion function createTranspileStream(transpiler) { - return through(function (file) { + return (0, through_1.default)(function (file) { // give the file to the compiler if (file.isStream()) { this.emit('error', 'no support for streams'); @@ -116,7 +152,7 @@ function create(projectPath, existingOptions, config, onError = _defaultOnError) let path; for (; more && _pos < _fileNames.length; _pos++) { path = _fileNames[_pos]; - more = this.push(new Vinyl({ + more = this.push(new vinyl_1.default({ path, contents: (0, fs_1.readFileSync)(path), stat: (0, fs_1.statSync)(path), diff --git a/build/lib/tsb/index.ts b/build/lib/tsb/index.ts index 53c752d2655..e577d386cd9 100644 --- 
a/build/lib/tsb/index.ts +++ b/build/lib/tsb/index.ts @@ -3,15 +3,15 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as Vinyl from 'vinyl'; -import * as through from 'through'; +import Vinyl from 'vinyl'; +import through from 'through'; import * as builder from './builder'; -import * as ts from 'typescript'; +import ts from 'typescript'; import { Readable, Writable, Duplex } from 'stream'; import { dirname } from 'path'; import { strings } from './utils'; import { readFileSync, statSync } from 'fs'; -import * as log from 'fancy-log'; +import log from 'fancy-log'; import { ESBuildTranspiler, ITranspiler, TscTranspiler } from './transpiler'; import colors = require('ansi-colors'); diff --git a/build/lib/tsb/transpiler.js b/build/lib/tsb/transpiler.js index a4439b8d7ae..adccb104416 100644 --- a/build/lib/tsb/transpiler.js +++ b/build/lib/tsb/transpiler.js @@ -3,28 +3,31 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.ESBuildTranspiler = exports.TscTranspiler = void 0; -const esbuild = require("esbuild"); -const ts = require("typescript"); -const threads = require("node:worker_threads"); -const Vinyl = require("vinyl"); +const esbuild_1 = __importDefault(require("esbuild")); +const typescript_1 = __importDefault(require("typescript")); +const node_worker_threads_1 = __importDefault(require("node:worker_threads")); +const vinyl_1 = __importDefault(require("vinyl")); const node_os_1 = require("node:os"); function transpile(tsSrc, options) { const isAmd = /\n(import|export)/m.test(tsSrc); - if (!isAmd && options.compilerOptions?.module === ts.ModuleKind.AMD) { + if (!isAmd && options.compilerOptions?.module === typescript_1.default.ModuleKind.AMD) { // enforce NONE module-system for not-amd cases - options = { ...options, ...{ compilerOptions: { ...options.compilerOptions, module: ts.ModuleKind.None } } }; + options = { ...options, ...{ compilerOptions: { ...options.compilerOptions, module: typescript_1.default.ModuleKind.None } } }; } - const out = ts.transpileModule(tsSrc, options); + const out = typescript_1.default.transpileModule(tsSrc, options); return { jsSrc: out.outputText, diag: out.diagnostics ?? [] }; } -if (!threads.isMainThread) { +if (!node_worker_threads_1.default.isMainThread) { // WORKER - threads.parentPort?.addListener('message', (req) => { + node_worker_threads_1.default.parentPort?.addListener('message', (req) => { const res = { jsSrcs: [], diagnostics: [] @@ -34,7 +37,7 @@ if (!threads.isMainThread) { res.jsSrcs.push(out.jsSrc); res.diagnostics.push(out.diag); } - threads.parentPort.postMessage(res); + node_worker_threads_1.default.parentPort.postMessage(res); }); } class OutputFileNameOracle { @@ -43,7 +46,7 @@ class OutputFileNameOracle { this.getOutputFileName = (file) => { try { // windows: path-sep normalizing - file = ts.normalizePath(file); + file = typescript_1.default.normalizePath(file); if (!cmdLine.options.configFilePath) { // this is needed for the INTERNAL getOutputFileNames-call below... 
cmdLine.options.configFilePath = configFilePath; @@ -53,7 +56,7 @@ class OutputFileNameOracle { file = file.slice(0, -5) + '.ts'; cmdLine.fileNames.push(file); } - const outfile = ts.getOutputFileNames(cmdLine, file, true)[0]; + const outfile = typescript_1.default.getOutputFileNames(cmdLine, file, true)[0]; if (isDts) { cmdLine.fileNames.pop(); } @@ -70,7 +73,7 @@ class OutputFileNameOracle { class TranspileWorker { static pool = 1; id = TranspileWorker.pool++; - _worker = new threads.Worker(__filename); + _worker = new node_worker_threads_1.default.Worker(__filename); _pending; _durations = []; constructor(outFileFn) { @@ -107,7 +110,7 @@ class TranspileWorker { } const outBase = options.compilerOptions?.outDir ?? file.base; const outPath = outFileFn(file.path); - outFiles.push(new Vinyl({ + outFiles.push(new vinyl_1.default({ path: outPath, base: outBase, contents: Buffer.from(jsSrc), @@ -249,7 +252,7 @@ class ESBuildTranspiler { compilerOptions: { ...this._cmdLine.options, ...{ - module: isExtension ? ts.ModuleKind.CommonJS : undefined + module: isExtension ? typescript_1.default.ModuleKind.CommonJS : undefined } } }), @@ -270,7 +273,7 @@ class ESBuildTranspiler { throw Error('file.contents must be a Buffer'); } const t1 = Date.now(); - this._jobs.push(esbuild.transform(file.contents, { + this._jobs.push(esbuild_1.default.transform(file.contents, { ...this._transformOpts, sourcefile: file.path, }).then(result => { @@ -281,7 +284,7 @@ class ESBuildTranspiler { } const outBase = this._cmdLine.options.outDir ?? file.base; const outPath = this._outputFileNames.getOutputFileName(file.path); - this.onOutfile(new Vinyl({ + this.onOutfile(new vinyl_1.default({ path: outPath, base: outBase, contents: Buffer.from(result.code), diff --git a/build/lib/tsb/transpiler.ts b/build/lib/tsb/transpiler.ts index ae841dcf88b..16a3b347538 100644 --- a/build/lib/tsb/transpiler.ts +++ b/build/lib/tsb/transpiler.ts @@ -3,10 +3,10 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as esbuild from 'esbuild'; -import * as ts from 'typescript'; -import * as threads from 'node:worker_threads'; -import * as Vinyl from 'vinyl'; +import esbuild from 'esbuild'; +import ts from 'typescript'; +import threads from 'node:worker_threads'; +import Vinyl from 'vinyl'; import { cpus } from 'node:os'; interface TranspileReq { diff --git a/build/lib/typings/event-stream.d.ts b/build/lib/typings/event-stream.d.ts index 260051be52e..2b021ef258e 100644 --- a/build/lib/typings/event-stream.d.ts +++ b/build/lib/typings/event-stream.d.ts @@ -1,7 +1,7 @@ declare module "event-stream" { import { Stream } from 'stream'; import { ThroughStream as _ThroughStream } from 'through'; - import * as File from 'vinyl'; + import File from 'vinyl'; export interface ThroughStream extends _ThroughStream { queue(data: File | null): any; diff --git a/build/lib/util.js b/build/lib/util.js index 82e4189dd1a..8b6f0396281 100644 --- a/build/lib/util.js +++ b/build/lib/util.js @@ -3,6 +3,9 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.incremental = incremental; exports.debounce = debounce; @@ -23,20 +26,20 @@ exports.rebase = rebase; exports.filter = filter; exports.streamToPromise = streamToPromise; exports.getElectronVersion = getElectronVersion; -const es = require("event-stream"); -const _debounce = require("debounce"); -const _filter = require("gulp-filter"); -const rename = require("gulp-rename"); -const path = require("path"); -const fs = require("fs"); -const _rimraf = require("rimraf"); +const event_stream_1 = __importDefault(require("event-stream")); +const debounce_1 = __importDefault(require("debounce")); +const gulp_filter_1 = __importDefault(require("gulp-filter")); +const gulp_rename_1 = __importDefault(require("gulp-rename")); +const path_1 = __importDefault(require("path")); +const fs_1 = __importDefault(require("fs")); +const rimraf_1 = __importDefault(require("rimraf")); const url_1 = require("url"); -const ternaryStream = require("ternary-stream"); -const root = path.dirname(path.dirname(__dirname)); +const ternary_stream_1 = __importDefault(require("ternary-stream")); +const root = path_1.default.dirname(path_1.default.dirname(__dirname)); const NoCancellationToken = { isCancellationRequested: () => false }; function incremental(streamProvider, initial, supportsCancellation) { - const input = es.through(); - const output = es.through(); + const input = event_stream_1.default.through(); + const output = event_stream_1.default.through(); let state = 'idle'; let buffer = Object.create(null); const token = !supportsCancellation ? undefined : { isCancellationRequested: () => Object.keys(buffer).length > 0 }; @@ -45,7 +48,7 @@ function incremental(streamProvider, initial, supportsCancellation) { const stream = !supportsCancellation ? streamProvider() : streamProvider(isCancellable ? 
token : NoCancellationToken); input .pipe(stream) - .pipe(es.through(undefined, () => { + .pipe(event_stream_1.default.through(undefined, () => { state = 'idle'; eventuallyRun(); })) @@ -54,14 +57,14 @@ function incremental(streamProvider, initial, supportsCancellation) { if (initial) { run(initial, false); } - const eventuallyRun = _debounce(() => { + const eventuallyRun = (0, debounce_1.default)(() => { const paths = Object.keys(buffer); if (paths.length === 0) { return; } const data = paths.map(path => buffer[path]); buffer = Object.create(null); - run(es.readArray(data), true); + run(event_stream_1.default.readArray(data), true); }, 500); input.on('data', (f) => { buffer[f.path] = f; @@ -69,16 +72,16 @@ function incremental(streamProvider, initial, supportsCancellation) { eventuallyRun(); } }); - return es.duplex(input, output); + return event_stream_1.default.duplex(input, output); } function debounce(task, duration = 500) { - const input = es.through(); - const output = es.through(); + const input = event_stream_1.default.through(); + const output = event_stream_1.default.through(); let state = 'idle'; const run = () => { state = 'running'; task() - .pipe(es.through(undefined, () => { + .pipe(event_stream_1.default.through(undefined, () => { const shouldRunAgain = state === 'stale'; state = 'idle'; if (shouldRunAgain) { @@ -88,7 +91,7 @@ function debounce(task, duration = 500) { .pipe(output); }; run(); - const eventuallyRun = _debounce(() => run(), duration); + const eventuallyRun = (0, debounce_1.default)(() => run(), duration); input.on('data', () => { if (state === 'idle') { eventuallyRun(); @@ -97,13 +100,13 @@ function debounce(task, duration = 500) { state = 'stale'; } }); - return es.duplex(input, output); + return event_stream_1.default.duplex(input, output); } function fixWin32DirectoryPermissions() { if (!/win32/.test(process.platform)) { - return es.through(); + return event_stream_1.default.through(); } - return es.mapSync(f => { + return event_stream_1.default.mapSync(f => { if (f.stat && f.stat.isDirectory && f.stat.isDirectory()) { f.stat.mode = 16877; } @@ -111,7 +114,7 @@ function fixWin32DirectoryPermissions() { }); } function setExecutableBit(pattern) { - const setBit = es.mapSync(f => { + const setBit = event_stream_1.default.mapSync(f => { if (!f.stat) { f.stat = { isFile() { return true; } }; } @@ -121,13 +124,13 @@ function setExecutableBit(pattern) { if (!pattern) { return setBit; } - const input = es.through(); - const filter = _filter(pattern, { restore: true }); + const input = event_stream_1.default.through(); + const filter = (0, gulp_filter_1.default)(pattern, { restore: true }); const output = input .pipe(filter) .pipe(setBit) .pipe(filter.restore); - return es.duplex(input, output); + return event_stream_1.default.duplex(input, output); } function toFileUri(filePath) { const match = filePath.match(/^([a-z])\:(.*)$/i); @@ -137,27 +140,27 @@ function toFileUri(filePath) { return 'file://' + filePath.replace(/\\/g, '/'); } function skipDirectories() { - return es.mapSync(f => { + return event_stream_1.default.mapSync(f => { if (!f.isDirectory()) { return f; } }); } function cleanNodeModules(rulePath) { - const rules = fs.readFileSync(rulePath, 'utf8') + const rules = fs_1.default.readFileSync(rulePath, 'utf8') .split(/\r?\n/g) .map(line => line.trim()) .filter(line => line && !/^#/.test(line)); const excludes = rules.filter(line => !/^!/.test(line)).map(line => `!**/node_modules/${line}`); const includes = rules.filter(line => /^!/.test(line)).map(line 
=> `**/node_modules/${line.substr(1)}`); - const input = es.through(); - const output = es.merge(input.pipe(_filter(['**', ...excludes])), input.pipe(_filter(includes))); - return es.duplex(input, output); + const input = event_stream_1.default.through(); + const output = event_stream_1.default.merge(input.pipe((0, gulp_filter_1.default)(['**', ...excludes])), input.pipe((0, gulp_filter_1.default)(includes))); + return event_stream_1.default.duplex(input, output); } function loadSourcemaps() { - const input = es.through(); + const input = event_stream_1.default.through(); const output = input - .pipe(es.map((f, cb) => { + .pipe(event_stream_1.default.map((f, cb) => { if (f.sourceMap) { cb(undefined, f); return; @@ -185,7 +188,7 @@ function loadSourcemaps() { return; } f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8'); - fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', (err, contents) => { + fs_1.default.readFile(path_1.default.join(path_1.default.dirname(f.path), lastMatch[1]), 'utf8', (err, contents) => { if (err) { return cb(err); } @@ -193,54 +196,54 @@ function loadSourcemaps() { cb(undefined, f); }); })); - return es.duplex(input, output); + return event_stream_1.default.duplex(input, output); } function stripSourceMappingURL() { - const input = es.through(); + const input = event_stream_1.default.through(); const output = input - .pipe(es.mapSync(f => { + .pipe(event_stream_1.default.mapSync(f => { const contents = f.contents.toString('utf8'); f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8'); return f; })); - return es.duplex(input, output); + return event_stream_1.default.duplex(input, output); } /** Splits items in the stream based on the predicate, sending them to onTrue if true, or onFalse otherwise */ -function $if(test, onTrue, onFalse = es.through()) { +function $if(test, onTrue, onFalse = event_stream_1.default.through()) { if (typeof test === 'boolean') { return test ? 
onTrue : onFalse; } - return ternaryStream(test, onTrue, onFalse); + return (0, ternary_stream_1.default)(test, onTrue, onFalse); } /** Operator that appends the js files' original path a sourceURL, so debug locations map */ function appendOwnPathSourceURL() { - const input = es.through(); + const input = event_stream_1.default.through(); const output = input - .pipe(es.mapSync(f => { + .pipe(event_stream_1.default.mapSync(f => { if (!(f.contents instanceof Buffer)) { throw new Error(`contents of ${f.path} are not a buffer`); } f.contents = Buffer.concat([f.contents, Buffer.from(`\n//# sourceURL=${(0, url_1.pathToFileURL)(f.path)}`)]); return f; })); - return es.duplex(input, output); + return event_stream_1.default.duplex(input, output); } function rewriteSourceMappingURL(sourceMappingURLBase) { - const input = es.through(); + const input = event_stream_1.default.through(); const output = input - .pipe(es.mapSync(f => { + .pipe(event_stream_1.default.mapSync(f => { const contents = f.contents.toString('utf8'); - const str = `//# sourceMappingURL=${sourceMappingURLBase}/${path.dirname(f.relative).replace(/\\/g, '/')}/$1`; + const str = `//# sourceMappingURL=${sourceMappingURLBase}/${path_1.default.dirname(f.relative).replace(/\\/g, '/')}/$1`; f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, str)); return f; })); - return es.duplex(input, output); + return event_stream_1.default.duplex(input, output); } function rimraf(dir) { const result = () => new Promise((c, e) => { let retries = 0; const retry = () => { - _rimraf(dir, { maxBusyTries: 1 }, (err) => { + (0, rimraf_1.default)(dir, { maxBusyTries: 1 }, (err) => { if (!err) { return c(); } @@ -252,14 +255,14 @@ function rimraf(dir) { }; retry(); }); - result.taskName = `clean-${path.basename(dir).toLowerCase()}`; + result.taskName = `clean-${path_1.default.basename(dir).toLowerCase()}`; return result; } function _rreaddir(dirPath, prepend, result) { - const entries = fs.readdirSync(dirPath, { withFileTypes: true }); + const entries = fs_1.default.readdirSync(dirPath, { withFileTypes: true }); for (const entry of entries) { if (entry.isDirectory()) { - _rreaddir(path.join(dirPath, entry.name), `${prepend}/${entry.name}`, result); + _rreaddir(path_1.default.join(dirPath, entry.name), `${prepend}/${entry.name}`, result); } else { result.push(`${prepend}/${entry.name}`); @@ -272,20 +275,20 @@ function rreddir(dirPath) { return result; } function ensureDir(dirPath) { - if (fs.existsSync(dirPath)) { + if (fs_1.default.existsSync(dirPath)) { return; } - ensureDir(path.dirname(dirPath)); - fs.mkdirSync(dirPath); + ensureDir(path_1.default.dirname(dirPath)); + fs_1.default.mkdirSync(dirPath); } function rebase(count) { - return rename(f => { + return (0, gulp_rename_1.default)(f => { const parts = f.dirname ? 
f.dirname.split(/[\/\\]/) : []; - f.dirname = parts.slice(count).join(path.sep); + f.dirname = parts.slice(count).join(path_1.default.sep); }); } function filter(fn) { - const result = es.through(function (data) { + const result = event_stream_1.default.through(function (data) { if (fn(data)) { this.emit('data', data); } @@ -293,7 +296,7 @@ function filter(fn) { result.restore.push(data); } }); - result.restore = es.through(); + result.restore = event_stream_1.default.through(); return result; } function streamToPromise(stream) { @@ -303,7 +306,7 @@ function streamToPromise(stream) { }); } function getElectronVersion() { - const npmrc = fs.readFileSync(path.join(root, '.npmrc'), 'utf8'); + const npmrc = fs_1.default.readFileSync(path_1.default.join(root, '.npmrc'), 'utf8'); const electronVersion = /^target="(.*)"$/m.exec(npmrc)[1]; const msBuildId = /^ms_build_id="(.*)"$/m.exec(npmrc)[1]; return { electronVersion, msBuildId }; diff --git a/build/lib/util.ts b/build/lib/util.ts index 08921834676..ad81730b3de 100644 --- a/build/lib/util.ts +++ b/build/lib/util.ts @@ -3,18 +3,18 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as es from 'event-stream'; -import _debounce = require('debounce'); -import * as _filter from 'gulp-filter'; -import * as rename from 'gulp-rename'; -import * as path from 'path'; -import * as fs from 'fs'; -import * as _rimraf from 'rimraf'; -import * as VinylFile from 'vinyl'; +import es from 'event-stream'; +import _debounce from 'debounce'; +import _filter from 'gulp-filter'; +import rename from 'gulp-rename'; +import path from 'path'; +import fs from 'fs'; +import _rimraf from 'rimraf'; +import VinylFile from 'vinyl'; import { ThroughStream } from 'through'; -import * as sm from 'source-map'; +import sm from 'source-map'; import { pathToFileURL } from 'url'; -import * as ternaryStream from 'ternary-stream'; +import ternaryStream from 'ternary-stream'; const root = path.dirname(path.dirname(__dirname)); diff --git a/build/lib/watch/index.js b/build/lib/watch/index.js index 86d2611febf..69eca78fd70 100644 --- a/build/lib/watch/index.js +++ b/build/lib/watch/index.js @@ -3,6 +3,7 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +Object.defineProperty(exports, "__esModule", { value: true }); const watch = process.platform === 'win32' ? require('./watch-win32') : require('vscode-gulp-watch'); module.exports = function () { return watch.apply(null, arguments); diff --git a/build/lib/watch/watch-win32.js b/build/lib/watch/watch-win32.js index 934d8e8110f..7b77981d620 100644 --- a/build/lib/watch/watch-win32.js +++ b/build/lib/watch/watch-win32.js @@ -3,14 +3,17 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const cp = require("child_process"); -const fs = require("fs"); -const File = require("vinyl"); -const es = require("event-stream"); -const filter = require("gulp-filter"); -const watcherPath = path.join(__dirname, 'watcher.exe'); +const path_1 = __importDefault(require("path")); +const child_process_1 = __importDefault(require("child_process")); +const fs_1 = __importDefault(require("fs")); +const vinyl_1 = __importDefault(require("vinyl")); +const event_stream_1 = __importDefault(require("event-stream")); +const gulp_filter_1 = __importDefault(require("gulp-filter")); +const watcherPath = path_1.default.join(__dirname, 'watcher.exe'); function toChangeType(type) { switch (type) { case '0': return 'change'; @@ -19,8 +22,8 @@ function toChangeType(type) { } } function watch(root) { - const result = es.through(); - let child = cp.spawn(watcherPath, [root]); + const result = event_stream_1.default.through(); + let child = child_process_1.default.spawn(watcherPath, [root]); child.stdout.on('data', function (data) { const lines = data.toString('utf8').split('\n'); for (let i = 0; i < lines.length; i++) { @@ -34,8 +37,8 @@ function watch(root) { if (/^\.git/.test(changePath) || /(^|\\)out($|\\)/.test(changePath)) { continue; } - const changePathFull = path.join(root, changePath); - const file = new File({ + const changePathFull = path_1.default.join(root, changePath); + const file = new vinyl_1.default({ path: changePathFull, base: root }); @@ -60,20 +63,20 @@ function watch(root) { const cache = Object.create(null); module.exports = function (pattern, options) { options = options || {}; - const cwd = path.normalize(options.cwd || process.cwd()); + const cwd = path_1.default.normalize(options.cwd || process.cwd()); let watcher = cache[cwd]; if (!watcher) { watcher = cache[cwd] = watch(cwd); } - const rebase = !options.base ? es.through() : es.mapSync(function (f) { + const rebase = !options.base ? event_stream_1.default.through() : event_stream_1.default.mapSync(function (f) { f.base = options.base; return f; }); return watcher - .pipe(filter(['**', '!.git{,/**}'], { dot: options.dot })) // ignore all things git - .pipe(filter(pattern, { dot: options.dot })) - .pipe(es.map(function (file, cb) { - fs.stat(file.path, function (err, stat) { + .pipe((0, gulp_filter_1.default)(['**', '!.git{,/**}'], { dot: options.dot })) // ignore all things git + .pipe((0, gulp_filter_1.default)(pattern, { dot: options.dot })) + .pipe(event_stream_1.default.map(function (file, cb) { + fs_1.default.stat(file.path, function (err, stat) { if (err && err.code === 'ENOENT') { return cb(undefined, file); } @@ -83,7 +86,7 @@ module.exports = function (pattern, options) { if (!stat.isFile()) { return cb(); } - fs.readFile(file.path, function (err, contents) { + fs_1.default.readFile(file.path, function (err, contents) { if (err && err.code === 'ENOENT') { return cb(undefined, file); } diff --git a/build/lib/watch/watch-win32.ts b/build/lib/watch/watch-win32.ts index afde6a79f22..bbfde6afba9 100644 --- a/build/lib/watch/watch-win32.ts +++ b/build/lib/watch/watch-win32.ts @@ -3,12 +3,12 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import * as path from 'path'; -import * as cp from 'child_process'; -import * as fs from 'fs'; -import * as File from 'vinyl'; -import * as es from 'event-stream'; -import * as filter from 'gulp-filter'; +import path from 'path'; +import cp from 'child_process'; +import fs from 'fs'; +import File from 'vinyl'; +import es from 'event-stream'; +import filter from 'gulp-filter'; import { Stream } from 'stream'; const watcherPath = path.join(__dirname, 'watcher.exe'); diff --git a/build/linux/debian/calculate-deps.js b/build/linux/debian/calculate-deps.js index bbcb6bfc3de..34276ce7705 100644 --- a/build/linux/debian/calculate-deps.js +++ b/build/linux/debian/calculate-deps.js @@ -3,13 +3,16 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.generatePackageDeps = generatePackageDeps; const child_process_1 = require("child_process"); const fs_1 = require("fs"); const os_1 = require("os"); -const path = require("path"); -const manifests = require("../../../cgmanifest.json"); +const path_1 = __importDefault(require("path")); +const cgmanifest_json_1 = __importDefault(require("../../../cgmanifest.json")); const dep_lists_1 = require("./dep-lists"); function generatePackageDeps(files, arch, chromiumSysroot, vscodeSysroot) { const dependencies = files.map(file => calculatePackageDeps(file, arch, chromiumSysroot, vscodeSysroot)); @@ -29,7 +32,7 @@ function calculatePackageDeps(binaryPath, arch, chromiumSysroot, vscodeSysroot) console.error('Tried to stat ' + binaryPath + ' but failed.'); } // Get the Chromium dpkg-shlibdeps file. - const chromiumManifest = manifests.registrations.filter(registration => { + const chromiumManifest = cgmanifest_json_1.default.registrations.filter(registration => { return registration.component.type === 'git' && registration.component.git.name === 'chromium'; }); const dpkgShlibdepsUrl = `https://raw.githubusercontent.com/chromium/chromium/${chromiumManifest[0].version}/third_party/dpkg-shlibdeps/dpkg-shlibdeps.pl`; @@ -52,7 +55,7 @@ function calculatePackageDeps(binaryPath, arch, chromiumSysroot, vscodeSysroot) } cmd.push(`-l${chromiumSysroot}/usr/lib`); cmd.push(`-L${vscodeSysroot}/debian/libxkbfile1/DEBIAN/shlibs`); - cmd.push('-O', '-e', path.resolve(binaryPath)); + cmd.push('-O', '-e', path_1.default.resolve(binaryPath)); const dpkgShlibdepsResult = (0, child_process_1.spawnSync)('perl', cmd, { cwd: chromiumSysroot }); if (dpkgShlibdepsResult.status !== 0) { throw new Error(`dpkg-shlibdeps failed with exit code ${dpkgShlibdepsResult.status}. 
stderr:\n${dpkgShlibdepsResult.stderr} `); diff --git a/build/linux/debian/calculate-deps.ts b/build/linux/debian/calculate-deps.ts index 92f8065f262..addc38696a8 100644 --- a/build/linux/debian/calculate-deps.ts +++ b/build/linux/debian/calculate-deps.ts @@ -6,8 +6,8 @@ import { spawnSync } from 'child_process'; import { constants, statSync } from 'fs'; import { tmpdir } from 'os'; -import path = require('path'); -import * as manifests from '../../../cgmanifest.json'; +import path from 'path'; +import manifests from '../../../cgmanifest.json'; import { additionalDeps } from './dep-lists'; import { DebianArchString } from './types'; diff --git a/build/linux/debian/install-sysroot.js b/build/linux/debian/install-sysroot.js index 354c67a2909..16d8d01468f 100644 --- a/build/linux/debian/install-sysroot.js +++ b/build/linux/debian/install-sysroot.js @@ -3,20 +3,23 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.getVSCodeSysroot = getVSCodeSysroot; exports.getChromiumSysroot = getChromiumSysroot; const child_process_1 = require("child_process"); const os_1 = require("os"); -const fs = require("fs"); -const https = require("https"); -const path = require("path"); +const fs_1 = __importDefault(require("fs")); +const https_1 = __importDefault(require("https")); +const path_1 = __importDefault(require("path")); const crypto_1 = require("crypto"); -const ansiColors = require("ansi-colors"); +const ansi_colors_1 = __importDefault(require("ansi-colors")); // Based on https://source.chromium.org/chromium/chromium/src/+/main:build/linux/sysroot_scripts/install-sysroot.py. 
const URL_PREFIX = 'https://msftelectronbuild.z5.web.core.windows.net'; const URL_PATH = 'sysroots/toolchain'; -const REPO_ROOT = path.dirname(path.dirname(path.dirname(__dirname))); +const REPO_ROOT = path_1.default.dirname(path_1.default.dirname(path_1.default.dirname(__dirname))); const ghApiHeaders = { Accept: 'application/vnd.github.v3+json', 'User-Agent': 'VSCode Build', @@ -29,7 +32,7 @@ const ghDownloadHeaders = { Accept: 'application/octet-stream', }; function getElectronVersion() { - const npmrc = fs.readFileSync(path.join(REPO_ROOT, '.npmrc'), 'utf8'); + const npmrc = fs_1.default.readFileSync(path_1.default.join(REPO_ROOT, '.npmrc'), 'utf8'); const electronVersion = /^target="(.*)"$/m.exec(npmrc)[1]; const msBuildId = /^ms_build_id="(.*)"$/m.exec(npmrc)[1]; return { electronVersion, msBuildId }; @@ -37,11 +40,11 @@ function getElectronVersion() { function getSha(filename) { const hash = (0, crypto_1.createHash)('sha256'); // Read file 1 MB at a time - const fd = fs.openSync(filename, 'r'); + const fd = fs_1.default.openSync(filename, 'r'); const buffer = Buffer.alloc(1024 * 1024); let position = 0; let bytesRead = 0; - while ((bytesRead = fs.readSync(fd, buffer, 0, buffer.length, position)) === buffer.length) { + while ((bytesRead = fs_1.default.readSync(fd, buffer, 0, buffer.length, position)) === buffer.length) { hash.update(buffer); position += bytesRead; } @@ -49,7 +52,7 @@ function getSha(filename) { return hash.digest('hex'); } function getVSCodeSysrootChecksum(expectedName) { - const checksums = fs.readFileSync(path.join(REPO_ROOT, 'build', 'checksums', 'vscode-sysroot.txt'), 'utf8'); + const checksums = fs_1.default.readFileSync(path_1.default.join(REPO_ROOT, 'build', 'checksums', 'vscode-sysroot.txt'), 'utf8'); for (const line of checksums.split('\n')) { const [checksum, name] = line.split(/\s+/); if (name === expectedName) { @@ -86,22 +89,22 @@ async function fetchUrl(options, retries = 10, retryDelay = 1000) { }); if (assetResponse.ok && (assetResponse.status >= 200 && assetResponse.status < 300)) { const assetContents = Buffer.from(await assetResponse.arrayBuffer()); - console.log(`Fetched response body buffer: ${ansiColors.magenta(`${assetContents.byteLength} bytes`)}`); + console.log(`Fetched response body buffer: ${ansi_colors_1.default.magenta(`${assetContents.byteLength} bytes`)}`); if (options.checksumSha256) { const actualSHA256Checksum = (0, crypto_1.createHash)('sha256').update(assetContents).digest('hex'); if (actualSHA256Checksum !== options.checksumSha256) { - throw new Error(`Checksum mismatch for ${ansiColors.cyan(asset.url)} (expected ${options.checksumSha256}, actual ${actualSHA256Checksum}))`); + throw new Error(`Checksum mismatch for ${ansi_colors_1.default.cyan(asset.url)} (expected ${options.checksumSha256}, actual ${actualSHA256Checksum}))`); } } - console.log(`Verified SHA256 checksums match for ${ansiColors.cyan(asset.url)}`); + console.log(`Verified SHA256 checksums match for ${ansi_colors_1.default.cyan(asset.url)}`); const tarCommand = `tar -xz -C ${options.dest}`; (0, child_process_1.execSync)(tarCommand, { input: assetContents }); console.log(`Fetch complete!`); return; } - throw new Error(`Request ${ansiColors.magenta(asset.url)} failed with status code: ${assetResponse.status}`); + throw new Error(`Request ${ansi_colors_1.default.magenta(asset.url)} failed with status code: ${assetResponse.status}`); } - throw new Error(`Request ${ansiColors.magenta('https://api.github.com')} failed with status code: ${response.status}`); + throw new 
Error(`Request ${ansi_colors_1.default.magenta('https://api.github.com')} failed with status code: ${response.status}`); } finally { clearTimeout(timeout); @@ -139,21 +142,21 @@ async function getVSCodeSysroot(arch) { if (!checksumSha256) { throw new Error(`Could not find checksum for ${expectedName}`); } - const sysroot = process.env['VSCODE_SYSROOT_DIR'] ?? path.join((0, os_1.tmpdir)(), `vscode-${arch}-sysroot`); - const stamp = path.join(sysroot, '.stamp'); + const sysroot = process.env['VSCODE_SYSROOT_DIR'] ?? path_1.default.join((0, os_1.tmpdir)(), `vscode-${arch}-sysroot`); + const stamp = path_1.default.join(sysroot, '.stamp'); const result = `${sysroot}/${triple}/${triple}/sysroot`; - if (fs.existsSync(stamp) && fs.readFileSync(stamp).toString() === expectedName) { + if (fs_1.default.existsSync(stamp) && fs_1.default.readFileSync(stamp).toString() === expectedName) { return result; } console.log(`Installing ${arch} root image: ${sysroot}`); - fs.rmSync(sysroot, { recursive: true, force: true }); - fs.mkdirSync(sysroot); + fs_1.default.rmSync(sysroot, { recursive: true, force: true }); + fs_1.default.mkdirSync(sysroot); await fetchUrl({ checksumSha256, assetName: expectedName, dest: sysroot }); - fs.writeFileSync(stamp, expectedName); + fs_1.default.writeFileSync(stamp, expectedName); return result; } async function getChromiumSysroot(arch) { @@ -168,24 +171,24 @@ async function getChromiumSysroot(arch) { const sysrootDict = sysrootInfo[sysrootArch]; const tarballFilename = sysrootDict['Tarball']; const tarballSha = sysrootDict['Sha256Sum']; - const sysroot = path.join((0, os_1.tmpdir)(), sysrootDict['SysrootDir']); + const sysroot = path_1.default.join((0, os_1.tmpdir)(), sysrootDict['SysrootDir']); const url = [URL_PREFIX, URL_PATH, tarballSha].join('/'); - const stamp = path.join(sysroot, '.stamp'); - if (fs.existsSync(stamp) && fs.readFileSync(stamp).toString() === url) { + const stamp = path_1.default.join(sysroot, '.stamp'); + if (fs_1.default.existsSync(stamp) && fs_1.default.readFileSync(stamp).toString() === url) { return sysroot; } console.log(`Installing Debian ${arch} root image: ${sysroot}`); - fs.rmSync(sysroot, { recursive: true, force: true }); - fs.mkdirSync(sysroot); - const tarball = path.join(sysroot, tarballFilename); + fs_1.default.rmSync(sysroot, { recursive: true, force: true }); + fs_1.default.mkdirSync(sysroot); + const tarball = path_1.default.join(sysroot, tarballFilename); console.log(`Downloading ${url}`); let downloadSuccess = false; for (let i = 0; i < 3 && !downloadSuccess; i++) { - fs.writeFileSync(tarball, ''); + fs_1.default.writeFileSync(tarball, ''); await new Promise((c) => { - https.get(url, (res) => { + https_1.default.get(url, (res) => { res.on('data', (chunk) => { - fs.appendFileSync(tarball, chunk); + fs_1.default.appendFileSync(tarball, chunk); }); res.on('end', () => { downloadSuccess = true; @@ -198,7 +201,7 @@ async function getChromiumSysroot(arch) { }); } if (!downloadSuccess) { - fs.rmSync(tarball); + fs_1.default.rmSync(tarball); throw new Error('Failed to download ' + url); } const sha = getSha(tarball); @@ -209,8 +212,8 @@ async function getChromiumSysroot(arch) { if (proc.status) { throw new Error('Tarball extraction failed with code ' + proc.status); } - fs.rmSync(tarball); - fs.writeFileSync(stamp, url); + fs_1.default.rmSync(tarball); + fs_1.default.writeFileSync(stamp, url); return sysroot; } //# sourceMappingURL=install-sysroot.js.map \ No newline at end of file diff --git a/build/linux/debian/install-sysroot.ts 
b/build/linux/debian/install-sysroot.ts index 8ea43a523cf..aa10e39f95f 100644 --- a/build/linux/debian/install-sysroot.ts +++ b/build/linux/debian/install-sysroot.ts @@ -5,12 +5,12 @@ import { spawnSync, execSync } from 'child_process'; import { tmpdir } from 'os'; -import * as fs from 'fs'; -import * as https from 'https'; -import * as path from 'path'; +import fs from 'fs'; +import https from 'https'; +import path from 'path'; import { createHash } from 'crypto'; import { DebianArchString } from './types'; -import * as ansiColors from 'ansi-colors'; +import ansiColors from 'ansi-colors'; // Based on https://source.chromium.org/chromium/chromium/src/+/main:build/linux/sysroot_scripts/install-sysroot.py. const URL_PREFIX = 'https://msftelectronbuild.z5.web.core.windows.net'; diff --git a/build/linux/dependencies-generator.js b/build/linux/dependencies-generator.js index 80b11b3d5b7..38649559873 100644 --- a/build/linux/dependencies-generator.js +++ b/build/linux/dependencies-generator.js @@ -3,10 +3,13 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ 'use strict'; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.getDependencies = getDependencies; const child_process_1 = require("child_process"); -const path = require("path"); +const path_1 = __importDefault(require("path")); const install_sysroot_1 = require("./debian/install-sysroot"); const calculate_deps_1 = require("./debian/calculate-deps"); const calculate_deps_2 = require("./rpm/calculate-deps"); @@ -44,23 +47,23 @@ async function getDependencies(packageType, buildDir, applicationName, arch) { } // Get the files for which we want to find dependencies. const canAsar = false; // TODO@esm ASAR disabled in ESM - const nativeModulesPath = path.join(buildDir, 'resources', 'app', canAsar ? 'node_modules.asar.unpacked' : 'node_modules'); + const nativeModulesPath = path_1.default.join(buildDir, 'resources', 'app', canAsar ? 'node_modules.asar.unpacked' : 'node_modules'); const findResult = (0, child_process_1.spawnSync)('find', [nativeModulesPath, '-name', '*.node']); if (findResult.status) { console.error('Error finding files:'); console.error(findResult.stderr.toString()); return []; } - const appPath = path.join(buildDir, applicationName); + const appPath = path_1.default.join(buildDir, applicationName); // Add the native modules const files = findResult.stdout.toString().trimEnd().split('\n'); // Add the tunnel binary. - files.push(path.join(buildDir, 'bin', product.tunnelApplicationName)); + files.push(path_1.default.join(buildDir, 'bin', product.tunnelApplicationName)); // Add the main executable. files.push(appPath); // Add chrome sandbox and crashpad handler. - files.push(path.join(buildDir, 'chrome-sandbox')); - files.push(path.join(buildDir, 'chrome_crashpad_handler')); + files.push(path_1.default.join(buildDir, 'chrome-sandbox')); + files.push(path_1.default.join(buildDir, 'chrome_crashpad_handler')); // Generate the dependencies. 
let dependencies; if (packageType === 'deb') { diff --git a/build/linux/dependencies-generator.ts b/build/linux/dependencies-generator.ts index 3163aee5450..46be92eb847 100644 --- a/build/linux/dependencies-generator.ts +++ b/build/linux/dependencies-generator.ts @@ -6,7 +6,7 @@ 'use strict'; import { spawnSync } from 'child_process'; -import path = require('path'); +import path from 'path'; import { getChromiumSysroot, getVSCodeSysroot } from './debian/install-sysroot'; import { generatePackageDeps as generatePackageDepsDebian } from './debian/calculate-deps'; import { generatePackageDeps as generatePackageDepsRpm } from './rpm/calculate-deps'; diff --git a/build/linux/libcxx-fetcher.js b/build/linux/libcxx-fetcher.js index cfdc9498502..d6c998e5aea 100644 --- a/build/linux/libcxx-fetcher.js +++ b/build/linux/libcxx-fetcher.js @@ -3,23 +3,26 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.downloadLibcxxHeaders = downloadLibcxxHeaders; exports.downloadLibcxxObjects = downloadLibcxxObjects; // Can be removed once https://github.com/electron/electron-rebuild/pull/703 is available. -const fs = require("fs"); -const path = require("path"); -const debug = require("debug"); -const extract = require("extract-zip"); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const debug_1 = __importDefault(require("debug")); +const extract_zip_1 = __importDefault(require("extract-zip")); const get_1 = require("@electron/get"); -const root = path.dirname(path.dirname(__dirname)); -const d = debug('libcxx-fetcher'); +const root = path_1.default.dirname(path_1.default.dirname(__dirname)); +const d = (0, debug_1.default)('libcxx-fetcher'); async function downloadLibcxxHeaders(outDir, electronVersion, lib_name) { - if (await fs.existsSync(path.resolve(outDir, 'include'))) { + if (await fs_1.default.existsSync(path_1.default.resolve(outDir, 'include'))) { return; } - if (!await fs.existsSync(outDir)) { - await fs.mkdirSync(outDir, { recursive: true }); + if (!await fs_1.default.existsSync(outDir)) { + await fs_1.default.mkdirSync(outDir, { recursive: true }); } d(`downloading ${lib_name}_headers`); const headers = await (0, get_1.downloadArtifact)({ @@ -28,14 +31,14 @@ async function downloadLibcxxHeaders(outDir, electronVersion, lib_name) { artifactName: `${lib_name}_headers.zip`, }); d(`unpacking ${lib_name}_headers from ${headers}`); - await extract(headers, { dir: outDir }); + await (0, extract_zip_1.default)(headers, { dir: outDir }); } async function downloadLibcxxObjects(outDir, electronVersion, targetArch = 'x64') { - if (await fs.existsSync(path.resolve(outDir, 'libc++.a'))) { + if (await fs_1.default.existsSync(path_1.default.resolve(outDir, 'libc++.a'))) { return; } - if (!await fs.existsSync(outDir)) { - await fs.mkdirSync(outDir, { recursive: true }); + if (!await fs_1.default.existsSync(outDir)) { + await fs_1.default.mkdirSync(outDir, { recursive: true }); } d(`downloading libcxx-objects-linux-${targetArch}`); const objects = await (0, get_1.downloadArtifact)({ @@ -45,14 +48,14 @@ async function downloadLibcxxObjects(outDir, electronVersion, 
targetArch = 'x64' arch: targetArch, }); d(`unpacking libcxx-objects from ${objects}`); - await extract(objects, { dir: outDir }); + await (0, extract_zip_1.default)(objects, { dir: outDir }); } async function main() { const libcxxObjectsDirPath = process.env['VSCODE_LIBCXX_OBJECTS_DIR']; const libcxxHeadersDownloadDir = process.env['VSCODE_LIBCXX_HEADERS_DIR']; const libcxxabiHeadersDownloadDir = process.env['VSCODE_LIBCXXABI_HEADERS_DIR']; const arch = process.env['VSCODE_ARCH']; - const packageJSON = JSON.parse(fs.readFileSync(path.join(root, 'package.json'), 'utf8')); + const packageJSON = JSON.parse(fs_1.default.readFileSync(path_1.default.join(root, 'package.json'), 'utf8')); const electronVersion = packageJSON.devDependencies.electron; if (!libcxxObjectsDirPath || !libcxxHeadersDownloadDir || !libcxxabiHeadersDownloadDir) { throw new Error('Required build env not set'); diff --git a/build/linux/libcxx-fetcher.ts b/build/linux/libcxx-fetcher.ts index 6abb67faa76..6bdbd8a4f30 100644 --- a/build/linux/libcxx-fetcher.ts +++ b/build/linux/libcxx-fetcher.ts @@ -5,10 +5,10 @@ // Can be removed once https://github.com/electron/electron-rebuild/pull/703 is available. -import * as fs from 'fs'; -import * as path from 'path'; -import * as debug from 'debug'; -import * as extract from 'extract-zip'; +import fs from 'fs'; +import path from 'path'; +import debug from 'debug'; +import extract from 'extract-zip'; import { downloadArtifact } from '@electron/get'; const root = path.dirname(path.dirname(__dirname)); diff --git a/build/tsconfig.json b/build/tsconfig.json index ce7a493a7aa..f3ad981d62f 100644 --- a/build/tsconfig.json +++ b/build/tsconfig.json @@ -4,7 +4,7 @@ "lib": [ "ES2020" ], - "module": "commonjs", + "module": "nodenext", "alwaysStrict": true, "removeComments": false, "preserveConstEnums": true, diff --git a/build/win32/explorer-appx-fetcher.js b/build/win32/explorer-appx-fetcher.js index 554b449d872..78d2317147e 100644 --- a/build/win32/explorer-appx-fetcher.js +++ b/build/win32/explorer-appx-fetcher.js @@ -3,23 +3,26 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ 'use strict'; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.downloadExplorerAppx = downloadExplorerAppx; -const fs = require("fs"); -const debug = require("debug"); -const extract = require("extract-zip"); -const path = require("path"); +const fs_1 = __importDefault(require("fs")); +const debug_1 = __importDefault(require("debug")); +const extract_zip_1 = __importDefault(require("extract-zip")); +const path_1 = __importDefault(require("path")); const get_1 = require("@electron/get"); -const root = path.dirname(path.dirname(__dirname)); -const d = debug('explorer-appx-fetcher'); +const root = path_1.default.dirname(path_1.default.dirname(__dirname)); +const d = (0, debug_1.default)('explorer-appx-fetcher'); async function downloadExplorerAppx(outDir, quality = 'stable', targetArch = 'x64') { const fileNamePrefix = quality === 'insider' ? 
'code_insiders' : 'code'; const fileName = `${fileNamePrefix}_explorer_${targetArch}.zip`; - if (await fs.existsSync(path.resolve(outDir, 'resources.pri'))) { + if (await fs_1.default.existsSync(path_1.default.resolve(outDir, 'resources.pri'))) { return; } - if (!await fs.existsSync(outDir)) { - await fs.mkdirSync(outDir, { recursive: true }); + if (!await fs_1.default.existsSync(outDir)) { + await fs_1.default.mkdirSync(outDir, { recursive: true }); } d(`downloading ${fileName}`); const artifact = await (0, get_1.downloadArtifact)({ @@ -34,14 +37,14 @@ async function downloadExplorerAppx(outDir, quality = 'stable', targetArch = 'x6 } }); d(`unpacking from ${fileName}`); - await extract(artifact, { dir: fs.realpathSync(outDir) }); + await (0, extract_zip_1.default)(artifact, { dir: fs_1.default.realpathSync(outDir) }); } async function main(outputDir) { const arch = process.env['VSCODE_ARCH']; if (!outputDir) { throw new Error('Required build env not set'); } - const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8')); + const product = JSON.parse(fs_1.default.readFileSync(path_1.default.join(root, 'product.json'), 'utf8')); await downloadExplorerAppx(outputDir, product.quality, arch); } if (require.main === module) { diff --git a/build/win32/explorer-appx-fetcher.ts b/build/win32/explorer-appx-fetcher.ts index 89fbb57c064..95121cd6503 100644 --- a/build/win32/explorer-appx-fetcher.ts +++ b/build/win32/explorer-appx-fetcher.ts @@ -5,10 +5,10 @@ 'use strict'; -import * as fs from 'fs'; -import * as debug from 'debug'; -import * as extract from 'extract-zip'; -import * as path from 'path'; +import fs from 'fs'; +import debug from 'debug'; +import extract from 'extract-zip'; +import path from 'path'; import { downloadArtifact } from '@electron/get'; const root = path.dirname(path.dirname(__dirname));
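
Note on the pattern behind the rewritten imports above (a minimal illustrative sketch, not part of the generated diff; only the package names are taken from the patch, the surrounding snippet is an assumed example): with "module": "nodenext", TypeScript compiles these build scripts as CommonJS with esModuleInterop semantics, so a namespace-style import of a CommonJS package can no longer be called or constructed. Callable exports are therefore imported as defaults, and tsc emits the __importDefault wrapper visible in the regenerated .js files.

    // Before (module: commonjs): namespace import, later invoked as a function.
    //   import * as ternaryStream from 'ternary-stream';
    //   ternaryStream(predicate, onTrue, onFalse); // rejected under nodenext: a namespace import is not callable

    // After (module: nodenext): default import; the emitted .js wraps the
    // require() call with the __importDefault helper seen throughout this patch.
    import ternaryStream from 'ternary-stream';
    import es from 'event-stream';

    const onFalse = es.through();
    const piped = ternaryStream(() => true, es.through(), onFalse);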