diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index 1b25207fd35e0..d9e0a894b6b1e 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -63,11 +63,8 @@ jobs:
node-version: [22.x]
# Don't forget to update build-native-release
python-version: [3.11]
- transpile-worker-threads: [false, true]
fail-fast: false
- env:
- CUBEJS_TRANSPILATION_WORKER_THREADS: ${{ matrix.transpile-worker-threads }}
steps:
- id: get-tag-out
run: echo "$OUT"
@@ -121,16 +118,16 @@ jobs:
- name: Lerna test
run: yarn lerna run --concurrency 1 --stream --no-prefix unit
- name: Fix lcov paths
- if: (matrix.node-version == '22.x' && matrix.transpile-worker-threads == true)
+ if: (matrix.node-version == '22.x')
run: |
./.github/actions/codecov-fix.sh
- name: Combine all fixed LCOV files
- if: (matrix.node-version == '22.x' && matrix.transpile-worker-threads == true)
+ if: (matrix.node-version == '22.x')
run: |
echo "" > ./combined-unit.lcov
find ./packages -type f -name lcov.fixed.info -exec cat {} + >> ./combined-unit.lcov || true
- name: Upload coverage artifact
- if: (matrix.node-version == '22.x' && matrix.transpile-worker-threads == true)
+ if: (matrix.node-version == '22.x')
uses: actions/upload-artifact@v4
with:
name: coverage-unit
diff --git a/docs/pages/product/configuration/reference/environment-variables.mdx b/docs/pages/product/configuration/reference/environment-variables.mdx
index 0a1320c211afe..985e19b3aa160 100644
--- a/docs/pages/product/configuration/reference/environment-variables.mdx
+++ b/docs/pages/product/configuration/reference/environment-variables.mdx
@@ -1326,21 +1326,6 @@ learn more.
| --------------- | ---------------------- | --------------------- |
| A valid number | 86400 | 86400 |
-## `CUBEJS_TRANSPILATION_WORKER_THREADS`
-
-If `true`, optimizes data model compilation by running critical parts of the
-code in worker threads.
-
-| Possible Values | Default in Development | Default in Production |
-| --------------- | ---------------------- | --------------------- |
-| `true`, `false` | `false` | `false` |
-
-
-
-See [this issue](https://github.com/cube-js/cube/issues/9285) for details.
-
-
-
## `CUBEJS_WEB_SOCKETS`
If `true`, then use WebSocket for data fetching.
diff --git a/packages/cubejs-backend-shared/src/env.ts b/packages/cubejs-backend-shared/src/env.ts
index 9ce3789cbb409..b46413bf22ecc 100644
--- a/packages/cubejs-backend-shared/src/env.ts
+++ b/packages/cubejs-backend-shared/src/env.ts
@@ -223,16 +223,12 @@ const variables: Record<string, (...args: any[]) => any> = {
nativeOrchestrator: () => get('CUBEJS_TESSERACT_ORCHESTRATOR')
.default('true')
.asBoolStrict(),
- transpilationWorkerThreads: () => get('CUBEJS_TRANSPILATION_WORKER_THREADS')
- .default('false')
- .asBoolStrict(),
allowNonStrictDateRangeMatching: () => get('CUBEJS_PRE_AGGREGATIONS_ALLOW_NON_STRICT_DATE_RANGE_MATCH')
.default('true')
.asBoolStrict(),
transpilationWorkerThreadsCount: () => get('CUBEJS_TRANSPILATION_WORKER_THREADS_COUNT')
.default('0')
.asInt(),
- // This one takes precedence over CUBEJS_TRANSPILATION_WORKER_THREADS
transpilationNative: () => get('CUBEJS_TRANSPILATION_NATIVE')
.default('false')
.asBoolStrict(),
diff --git a/packages/cubejs-schema-compiler/src/compiler/DataSchemaCompiler.js b/packages/cubejs-schema-compiler/src/compiler/DataSchemaCompiler.js
index c198fa4b728d8..0b1e0ff104cd6 100644
--- a/packages/cubejs-schema-compiler/src/compiler/DataSchemaCompiler.js
+++ b/packages/cubejs-schema-compiler/src/compiler/DataSchemaCompiler.js
@@ -4,9 +4,6 @@ import fs from 'fs';
import os from 'os';
import path from 'path';
import syntaxCheck from 'syntax-error';
-import { parse } from '@babel/parser';
-import babelGenerator from '@babel/generator';
-import babelTraverse from '@babel/traverse';
import R from 'ramda';
import workerpool from 'workerpool';
@@ -114,12 +111,11 @@ export class DataSchemaCompiler {
const errorsReport = new ErrorReporter(null, [], this.errorReport);
this.errorsReport = errorsReport;
- const transpilationWorkerThreads = getEnv('transpilationWorkerThreads');
const transpilationNative = getEnv('transpilationNative');
const transpilationNativeThreadsCount = getThreadsCount();
const { compilerId } = this;
- if (!transpilationNative && transpilationWorkerThreads) {
+ if (!transpilationNative) {
const wc = getEnv('transpilationWorkerThreadsCount');
this.workerPool = workerpool.pool(
path.join(__dirname, 'transpilers/transpiler_worker'),
@@ -132,32 +128,27 @@ export class DataSchemaCompiler {
* @returns {Promise<*>}
*/
const transpile = async (stage) => {
- let cubeNames;
- let cubeSymbols;
- let transpilerNames;
let results;
- if (transpilationNative || transpilationWorkerThreads) {
- cubeNames = Object.keys(this.cubeDictionary.byId);
- // We need only cubes and all its member names for transpiling.
- // Cubes doesn't change during transpiling, but are changed during compilation phase,
- // so we can prepare them once for every phase.
- // Communication between main and worker threads uses
- // The structured clone algorithm (@see https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm)
- // which doesn't allow passing any function objects, so we need to sanitize the symbols.
- // Communication with native backend also involves deserialization.
- cubeSymbols = Object.fromEntries(
- Object.entries(this.cubeSymbols.symbols)
- .map(
- ([key, value]) => [key, Object.fromEntries(
- Object.keys(value).map((k) => [k, true]),
- )],
- ),
- );
+ const cubeNames = Object.keys(this.cubeDictionary.byId);
+  // We need only cubes and all their member names for transpiling.
+  // Cubes don't change during transpiling, but are changed during the compilation phase,
+  // so we can prepare them once for every phase.
+  // Communication between main and worker threads uses
+  // the structured clone algorithm (@see https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm)
+ // which doesn't allow passing any function objects, so we need to sanitize the symbols.
+ // Communication with native backend also involves deserialization.
+ const cubeSymbols = Object.fromEntries(
+ Object.entries(this.cubeSymbols.symbols)
+ .map(
+ ([key, value]) => [key, Object.fromEntries(
+ Object.keys(value).map((k) => [k, true]),
+ )],
+ ),
+ );
- // Transpilers are the same for all files within phase.
- transpilerNames = this.transpilers.map(t => t.constructor.name);
- }
+ // Transpilers are the same for all files within phase.
+ const transpilerNames = this.transpilers.map(t => t.constructor.name);
if (transpilationNative) {
// Warming up swc compiler cache
@@ -192,10 +183,8 @@ export class DataSchemaCompiler {
}
results = (await Promise.all([...nonJsFilesTasks, ...JsFilesTasks])).flat();
- } else if (transpilationWorkerThreads) {
- results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
} else {
- results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, {})));
+ results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
}
return results.filter(f => !!f);
@@ -225,7 +214,7 @@ export class DataSchemaCompiler {
errorsReport,
{ cubeNames: [], cubeSymbols: {}, transpilerNames: [], contextSymbols: {}, compilerId: this.compilerId, stage: 0 }
);
- } else if (transpilationWorkerThreads && this.workerPool) {
+ } else if (this.workerPool) {
this.workerPool.terminate();
}
@@ -334,7 +323,7 @@ export class DataSchemaCompiler {
errorsReport.exitFile();
return { ...file, content: res[0].code };
- } else if (getEnv('transpilationWorkerThreads')) {
+ } else {
const data = {
fileName: file.fileName,
content: file.content,
@@ -348,24 +337,6 @@ export class DataSchemaCompiler {
errorsReport.addWarnings(res.warnings);
return { ...file, content: res.content };
- } else {
- const ast = parse(
- file.content,
- {
- sourceFilename: file.fileName,
- sourceType: 'module',
- plugins: ['objectRestSpread'],
- },
- );
-
- errorsReport.inFile(file);
- this.transpilers.forEach((t) => {
- babelTraverse(ast, t.traverseObject(errorsReport));
- });
- errorsReport.exitFile();
-
- const content = babelGenerator(ast, {}, file.content).code;
- return { ...file, content };
}
} catch (e) {
if (e.toString().indexOf('SyntaxError') !== -1) {
diff --git a/packages/cubejs-schema-compiler/test/integration/postgres/dataschema-compiler.test.ts b/packages/cubejs-schema-compiler/test/integration/postgres/dataschema-compiler.test.ts
index faf68af2a3008..dca8f8f27be55 100644
--- a/packages/cubejs-schema-compiler/test/integration/postgres/dataschema-compiler.test.ts
+++ b/packages/cubejs-schema-compiler/test/integration/postgres/dataschema-compiler.test.ts
@@ -186,7 +186,7 @@ describe('DataSchemaCompiler', () => {
})
`;
- it('Should compile 200 schemas in less than 2500ms * 10', async () => {
+ it('Should compile 200 schemas in less than 10000ms * 10', async () => {
const repeats = 200;
const compilerWith = prepareJsCompiler(schema, { allowJsDuplicatePropsInSchema: false });
@@ -198,7 +198,7 @@ describe('DataSchemaCompiler', () => {
const end = new Date().getTime();
const time = end - start;
- expect(time).toBeLessThan(2500 * 10);
+ expect(time).toBeLessThan(10000 * 10);
});
});
});