From 83d2179912608bac6be321bba1094b49ab79edbd Mon Sep 17 00:00:00 2001
From: Maggie Zhang
Date: Wed, 21 Jun 2023 16:41:42 +0800
Subject: [PATCH 01/21] Generate credentials via Azure service principal

---
 packages/cubejs-backend-shared/src/env.ts |  39 +++++++
 .../package.json                          |   1 +
 .../src/DatabricksDriver.ts               |  78 ++++++++++---
 yarn.lock                                 | 108 ++++++++++++++++++
 4 files changed, 209 insertions(+), 17 deletions(-)

diff --git a/packages/cubejs-backend-shared/src/env.ts b/packages/cubejs-backend-shared/src/env.ts
index 0d4b26b50d11e..f6644a49758f5 100644
--- a/packages/cubejs-backend-shared/src/env.ts
+++ b/packages/cubejs-backend-shared/src/env.ts
@@ -733,6 +733,45 @@ const variables: Record<string, (...args: any) => any> = {
     ]
   ),
 
+    dbExportBucketTenantId: ({
+      dataSource,
+    }: {
+      dataSource: string,
+    }) => (
+      process.env[
+        keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_TENANT_ID', dataSource)
+      ]
+    ),
+
+    /**
+     * Client ID for the Azure based export bucket storage.
+     */
+    dbExportBucketClientId: ({
+      dataSource,
+    }: {
+      dataSource: string,
+    }) => (
+      process.env[
+        keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID', dataSource)
+      ]
+    ),
+
+    /**
+     * Client Secret for the Azure based export bucket storage.
+     */
+    dbExportBucketClientSecret: ({
+      dataSource,
+    }: {
+      dataSource: string,
+    }) => (
+      process.env[
+        keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET', dataSource)
+      ]
+    ),
+
   /**
   * Tenant ID for the Azure based export bucket storage.
   */
   /**
    * Export bucket options for Integration based.
    */
diff --git a/packages/cubejs-databricks-jdbc-driver/package.json b/packages/cubejs-databricks-jdbc-driver/package.json
index 6d741b2498a30..cbb131672e7c0 100644
--- a/packages/cubejs-databricks-jdbc-driver/package.json
+++ b/packages/cubejs-databricks-jdbc-driver/package.json
@@ -31,6 +31,7 @@
     "@aws-sdk/client-s3": "^3.49.0",
     "@aws-sdk/s3-request-presigner": "^3.49.0",
     "@azure/storage-blob": "^12.9.0",
+    "@azure/identity": "^3.2.2",
     "@cubejs-backend/base-driver": "^0.33.29",
     "@cubejs-backend/jdbc-driver": "^0.33.29",
     "@cubejs-backend/schema-compiler": "^0.33.29",
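The three accessors added above follow the existing keyByDataSource convention, so each credential can also be set per data source with a CUBEJS_DS_<NAME>_ prefixed variable. A minimal sketch of how they resolve at runtime (the 'postgres' data source is illustrative, and the import assumes the usual @cubejs-backend/shared entry point):

    import { getEnv } from '@cubejs-backend/shared';

    // Resolves CUBEJS_DB_EXPORT_BUCKET_TENANT_ID for the default data source,
    // or CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_TENANT_ID when dataSource is 'postgres'.
    const tenantId = getEnv('dbExportBucketTenantId', { dataSource: 'postgres' });
    const clientId = getEnv('dbExportBucketClientId', { dataSource: 'postgres' });
    const clientSecret = getEnv('dbExportBucketClientSecret', { dataSource: 'postgres' });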
diff --git a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
index 90614f41ebfa1..592324d44e879 100644
--- a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
+++ b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
@@ -18,7 +18,9 @@ import {
   ContainerSASPermissions,
   SASProtocol,
   generateBlobSASQueryParameters,
+  BlobSASSignatureValues,
 } from '@azure/storage-blob';
+import { ClientSecretCredential } from '@azure/identity';
 import { DriverCapabilities, QueryOptions, UnloadOptions } from '@cubejs-backend/base-driver';
 import {
   JDBCDriver,
@@ -89,6 +91,21 @@ export type DatabricksDriverConfiguration = JDBCDriverConfiguration &
    * Databricks security token (PWD).
    */
   token?: string,
+
+  /**
+   * Azure tenant ID
+   */
+  tenantId?: string,
+
+  /**
+   * Azure service principal client ID
+   */
+  clientId?: string,
+
+  /**
+   * Azure service principal client secret
+   */
+  clientSecret?: string,
 };
 
 async function fileExistsOr(
@@ -244,6 +261,16 @@ export class DatabricksDriver extends JDBCDriver {
         getEnv('dbExportBucketAzureKey', { dataSource }),
       exportBucketCsvEscapeSymbol:
         getEnv('dbExportBucketCsvEscapeSymbol', { dataSource }),
+      // Azure service principal
+      tenantId:
+        conf?.tenantId ||
+        getEnv('dbExportBucketTenantId', { dataSource }),
+      clientId:
+        conf?.clientId ||
+        getEnv('dbExportBucketClientId', { dataSource }),
+      clientSecret:
+        conf?.clientSecret ||
+        getEnv('dbExportBucketClientSecret', { dataSource }),
     };
     super(config);
     this.config = config;
@@ -657,10 +684,16 @@ export class DatabricksDriver extends JDBCDriver {
       pathname.split(`${this.config.exportBucket}/`)[1];
     const expr = new RegExp(`${foldername}\\/.*\\.csv$`, 'i');
 
-    const credential = new StorageSharedKeyCredential(
-      account,
-      this.config.azureKey as string,
-    );
+    const credential = this.config.azureKey
+      ? new StorageSharedKeyCredential(
+        account,
+        this.config.azureKey as string,
+      )
+      : new ClientSecretCredential(
+        this.config.tenantId as string,
+        this.config.clientId as string,
+        this.config.clientSecret as string,
+      );
     const blobClient = new BlobServiceClient(
       `https://${account}.blob.core.windows.net`,
       credential,
     );
     const containerClient = blobClient.getContainerClient(container);
     const blobsList = containerClient.listBlobsFlat({ prefix: foldername });
     for await (const blob of blobsList) {
       if (blob.name && expr.test(blob.name)) {
-        const sas = generateBlobSASQueryParameters(
-          {
-            containerName: container,
-            blobName: blob.name,
-            permissions: ContainerSASPermissions.parse('r'),
-            startsOn: new Date(new Date().valueOf()),
-            expiresOn:
-              new Date(new Date().valueOf() + 1000 * 60 * 60),
-            protocol: SASProtocol.Https,
-            version: '2020-08-04',
-          },
-          credential,
-        ).toString();
+        const startsOnDate = new Date(new Date().valueOf());
+        const expiresOnDate = new Date(new Date().valueOf() + 1000 * 60 * 60);
+        const signatureValues: BlobSASSignatureValues = {
+          containerName: container,
+          blobName: blob.name,
+          permissions: ContainerSASPermissions.parse('r'),
+          startsOn: startsOnDate,
+          expiresOn: expiresOnDate,
+          protocol: SASProtocol.Https,
+          version: '2020-08-04',
+          };
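+        // A shared-key credential can sign the SAS token locally; a service
+        // principal has no account key, so that branch instead signs with a
+        // user delegation key requested from the service below.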
+        const sas = credential instanceof StorageSharedKeyCredential
+          ? generateBlobSASQueryParameters(
+            signatureValues,
+            credential,
+          ).toString()
+          : generateBlobSASQueryParameters(
+            signatureValues,
+            await blobClient.getUserDelegationKey(
+              startsOnDate,
+              expiresOnDate,
+            ),
+            account
+          ).toString();
         csvFile.push(`https://${
           account
         }.blob.core.windows.net/${
+ "@azure/core-client" "^1.4.0" + "@azure/core-rest-pipeline" "^1.1.0" + "@azure/core-tracing" "^1.0.0" + "@azure/core-util" "^1.0.0" + "@azure/logger" "^1.0.0" + "@azure/msal-browser" "^2.37.1" + "@azure/msal-common" "^13.1.0" + "@azure/msal-node" "^1.17.3" + events "^3.0.0" + jws "^4.0.0" + open "^8.0.0" + stoppable "^1.1.0" + tslib "^2.2.0" + uuid "^8.3.0" + "@azure/logger@^1.0.0": version "1.0.3" resolved "https://registry.yarnpkg.com/@azure/logger/-/logger-1.0.3.tgz#6e36704aa51be7d4a1bae24731ea580836293c96" @@ -2022,6 +2095,27 @@ "@azure/ms-rest-js" "^2.0.4" adal-node "^0.2.2" +"@azure/msal-browser@^2.37.1": + version "2.37.1" + resolved "https://registry.yarnpkg.com/@azure/msal-browser/-/msal-browser-2.37.1.tgz#7e3fa7df9c6e74eb1fb640e73ef3b5b4407ee98d" + integrity sha512-EoKQISEpIY39Ru1OpWkeFZBcwp6Y0bG81bVmdyy4QJebPPDdVzfm62PSU0XFIRc3bqjZ4PBKBLMYLuo9NZYAow== + dependencies: + "@azure/msal-common" "13.1.0" + +"@azure/msal-common@13.1.0", "@azure/msal-common@^13.1.0": + version "13.1.0" + resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-13.1.0.tgz#73a787ec1e7880c30b187cf451404af547d95339" + integrity sha512-wj+ULrRB0HTuMmtrMjg8j3guCx32GE2BCPbsMCZkHgL1BZetC3o/Su5UJEQMX1HNc9CrIaQNx5WaKWHygYDe0g== + +"@azure/msal-node@^1.17.3": + version "1.17.3" + resolved "https://registry.yarnpkg.com/@azure/msal-node/-/msal-node-1.17.3.tgz#1a0bbecb0b3e5e63d50ccb27c9cb3bae5a6c1b65" + integrity sha512-slsa+388bQQWnWH1V91KL+zV57rIp/0OQFfF0EmVMY8gnEIkAnpWWFUVBTTMbxEyjEFMk5ZW9xiHvHBcYFHzDw== + dependencies: + "@azure/msal-common" "13.1.0" + jsonwebtoken "^9.0.0" + uuid "^8.3.0" + "@azure/storage-blob@^12.11.0": version "12.12.0" resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.12.0.tgz#25e277c885692d5adcd8c2a949789b2837a74c59" @@ -21334,6 +21428,15 @@ open@^7.0.2, open@^7.3.1, open@^7.4.2: is-docker "^2.0.0" is-wsl "^2.1.1" +open@^8.0.0: + version "8.4.2" + resolved "https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9" + integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== + dependencies: + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" + opencollective-postinstall@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/opencollective-postinstall/-/opencollective-postinstall-2.0.3.tgz#7a0fff978f6dbfa4d006238fbac98ed4198c3259" @@ -26473,6 +26576,11 @@ stealthy-require@^1.1.1: resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b" integrity sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks= +stoppable@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/stoppable/-/stoppable-1.1.0.tgz#32da568e83ea488b08e4d7ea2c3bcc9d75015d5b" + integrity sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw== + stream-browserify@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" From 3f5a6fbbd533f7e4a5e553a643291435c2eb45fc Mon Sep 17 00:00:00 2001 From: Maggie Zhang Date: Wed, 21 Jun 2023 16:48:03 +0800 Subject: [PATCH 02/21] Fix lint issues --- packages/cubejs-backend-shared/src/env.ts | 66 +++++++++---------- .../src/DatabricksDriver.ts | 2 +- 2 files changed, 34 insertions(+), 34 deletions(-) diff --git a/packages/cubejs-backend-shared/src/env.ts b/packages/cubejs-backend-shared/src/env.ts index f6644a49758f5..ac90a8b0b3042 100644 --- 
From 3f5a6fbbd533f7e4a5e553a643291435c2eb45fc Mon Sep 17 00:00:00 2001
From: Maggie Zhang
Date: Wed, 21 Jun 2023 16:48:03 +0800
Subject: [PATCH 02/21] Fix lint issues

---
 packages/cubejs-backend-shared/src/env.ts | 66 +++++++++----------
 .../src/DatabricksDriver.ts               |  2 +-
 2 files changed, 34 insertions(+), 34 deletions(-)

diff --git a/packages/cubejs-backend-shared/src/env.ts b/packages/cubejs-backend-shared/src/env.ts
index f6644a49758f5..ac90a8b0b3042 100644
--- a/packages/cubejs-backend-shared/src/env.ts
+++ b/packages/cubejs-backend-shared/src/env.ts
@@ -736,41 +736,41 @@ const variables: Record<string, (...args: any) => any> = {
   /**
    * Tenant ID for the Azure based export bucket storage.
    */
-    dbExportBucketTenantId: ({
-      dataSource,
-    }: {
-      dataSource: string,
-    }) => (
-      process.env[
-        keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_TENANT_ID', dataSource)
-      ]
-    ),
+  dbExportBucketTenantId: ({
+    dataSource,
+  }: {
+    dataSource: string,
+  }) => (
+    process.env[
+      keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_TENANT_ID', dataSource)
+    ]
+  ),
 
-    /**
-     * Client ID for the Azure based export bucket storage.
-     */
-    dbExportBucketClientId: ({
-      dataSource,
-    }: {
-      dataSource: string,
-    }) => (
-      process.env[
-        keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID', dataSource)
-      ]
-    ),
+  /**
+   * Client ID for the Azure based export bucket storage.
+   */
+  dbExportBucketClientId: ({
+    dataSource,
+  }: {
+    dataSource: string,
+  }) => (
+    process.env[
+      keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID', dataSource)
+    ]
+  ),
 
-    /**
-     * Client Secret for the Azure based export bucket storage.
-     */
-    dbExportBucketClientSecret: ({
-      dataSource,
-    }: {
-      dataSource: string,
-    }) => (
-      process.env[
-        keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET', dataSource)
-      ]
-    ),
+  /**
+   * Client Secret for the Azure based export bucket storage.
+   */
+  dbExportBucketClientSecret: ({
+    dataSource,
+  }: {
+    dataSource: string,
+  }) => (
+    process.env[
+      keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET', dataSource)
+    ]
+  ),
 
   /**
    * Export bucket options for Integration based.
diff --git a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
index 592324d44e879..6ef982ae468ea 100644
--- a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
+++ b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
@@ -712,7 +712,7 @@ export class DatabricksDriver extends JDBCDriver {
           expiresOn: expiresOnDate,
           protocol: SASProtocol.Https,
           version: '2020-08-04',
-          };
+        };
         const sas = credential instanceof StorageSharedKeyCredential
           ? generateBlobSASQueryParameters(
             signatureValues,

From 7b9305f11a7a0fcbf4138d0b82d4831fed2b3ac8 Mon Sep 17 00:00:00 2001
From: Maggie Zhang
Date: Wed, 21 Jun 2023 16:48:40 +0800
Subject: [PATCH 03/21] Add unit tests for new env variables

---
 .../test/db_env_multi.test.ts  | 87 +++++++++++++++++++
 .../test/db_env_single.test.ts | 51 +++++++++++
 2 files changed, 138 insertions(+)

diff --git a/packages/cubejs-backend-shared/test/db_env_multi.test.ts b/packages/cubejs-backend-shared/test/db_env_multi.test.ts
index 1eb8596024f7e..56935857a3a78 100644
--- a/packages/cubejs-backend-shared/test/db_env_multi.test.ts
+++ b/packages/cubejs-backend-shared/test/db_env_multi.test.ts
@@ -956,6 +956,93 @@ describe('Multiple datasources', () => {
     );
   });
 
+  test('getEnv("dbExportBucketTenantId")', () => {
+    process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID = 'default1';
+    process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_TENANT_ID = 'postgres1';
+    process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_TENANT_ID = 'wrong1';
+    expect(getEnv('dbExportBucketTenantId', { dataSource: 'default' })).toEqual('default1');
+    expect(getEnv('dbExportBucketTenantId', { dataSource: 'postgres' })).toEqual('postgres1');
+    expect(() => getEnv('dbExportBucketTenantId', { dataSource: 'wrong' })).toThrow(
+      'The wrong data source is missing in the declared CUBEJS_DATASOURCES.'
+ ); + + process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID = 'default2'; + process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_TENANT_ID = 'postgres2'; + process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_TENANT_ID = 'wrong2'; + expect(getEnv('dbExportBucketTenantId', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('dbExportBucketTenantId', { dataSource: 'postgres' })).toEqual('postgres2'); + expect(() => getEnv('dbExportBucketTenantId', { dataSource: 'wrong' })).toThrow( + 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' + ); + + delete process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID; + delete process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_TENANT_ID; + delete process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_TENANT_ID; + expect(getEnv('dbExportBucketTenantId', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('dbExportBucketTenantId', { dataSource: 'postgres' })).toBeUndefined(); + expect(() => getEnv('dbExportBucketTenantId', { dataSource: 'wrong' })).toThrow( + 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' + ); + }); + + test('getEnv("dbExportBucketClientId")', () => { + process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID = 'default1'; + process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_CLIENT_ID = 'postgres1'; + process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_CLIENT_ID = 'wrong1'; + expect(getEnv('dbExportBucketClientId', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('dbExportBucketClientId', { dataSource: 'postgres' })).toEqual('postgres1'); + expect(() => getEnv('dbExportBucketClientId', { dataSource: 'wrong' })).toThrow( + 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' + ); + + process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID = 'default2'; + process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_CLIENT_ID = 'postgres2'; + process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_CLIENT_ID = 'wrong2'; + expect(getEnv('dbExportBucketClientId', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('dbExportBucketClientId', { dataSource: 'postgres' })).toEqual('postgres2'); + expect(() => getEnv('dbExportBucketClientId', { dataSource: 'wrong' })).toThrow( + 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' + ); + + delete process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID; + delete process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_CLIENT_ID; + delete process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_CLIENT_ID; + expect(getEnv('dbExportBucketClientId', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('dbExportBucketClientId', { dataSource: 'postgres' })).toBeUndefined(); + expect(() => getEnv('dbExportBucketClientId', { dataSource: 'wrong' })).toThrow( + 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' + ); + }); + + test('getEnv("dbExportBucketClientSecret")', () => { + process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET = 'default1'; + process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_CLIENT_SECRET = 'postgres1'; + process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_CLIENT_SECRET = 'wrong1'; + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'postgres' })).toEqual('postgres1'); + expect(() => getEnv('dbExportBucketClientSecret', { dataSource: 'wrong' })).toThrow( + 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' 
+ ); + + process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET = 'default2'; + process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_CLIENT_SECRET = 'postgres2'; + process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_CLIENT_SECRET = 'wrong2'; + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'postgres' })).toEqual('postgres2'); + expect(() => getEnv('dbExportBucketClientSecret', { dataSource: 'wrong' })).toThrow( + 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' + ); + + delete process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET; + delete process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_CLIENT_SECRET; + delete process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_CLIENT_SECRET; + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'postgres' })).toBeUndefined(); + expect(() => getEnv('dbExportBucketClientSecret', { dataSource: 'wrong' })).toThrow( + 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' + ); + }); + test('getEnv("dbExportIntegration")', () => { process.env.CUBEJS_DB_EXPORT_INTEGRATION = 'default1'; process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_INTEGRATION = 'postgres1'; diff --git a/packages/cubejs-backend-shared/test/db_env_single.test.ts b/packages/cubejs-backend-shared/test/db_env_single.test.ts index 4308174e1d4aa..374aea8bb1ecb 100644 --- a/packages/cubejs-backend-shared/test/db_env_single.test.ts +++ b/packages/cubejs-backend-shared/test/db_env_single.test.ts @@ -618,6 +618,57 @@ describe('Single datasources', () => { expect(getEnv('dbExportBucketAzureKey', { dataSource: 'wrong' })).toBeUndefined(); }); + test('getEnv("dbExportBucketTenantId")', () => { + process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID = 'default1'; + expect(getEnv('dbExportBucketTenantId', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('dbExportBucketTenantId', { dataSource: 'postgres' })).toEqual('default1'); + expect(getEnv('dbExportBucketTenantId', { dataSource: 'wrong' })).toEqual('default1'); + + process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID = 'default2'; + expect(getEnv('dbExportBucketTenantId', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('dbExportBucketTenantId', { dataSource: 'postgres' })).toEqual('default2'); + expect(getEnv('dbExportBucketTenantId', { dataSource: 'wrong' })).toEqual('default2'); + + delete process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID; + expect(getEnv('dbExportBucketTenantId', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('dbExportBucketTenantId', { dataSource: 'postgres' })).toBeUndefined(); + expect(getEnv('dbExportBucketTenantId', { dataSource: 'wrong' })).toBeUndefined(); + }); + + test('getEnv("dbExportBucketClientId")', () => { + process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID = 'default1'; + expect(getEnv('dbExportBucketClientId', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('dbExportBucketClientId', { dataSource: 'postgres' })).toEqual('default1'); + expect(getEnv('dbExportBucketClientId', { dataSource: 'wrong' })).toEqual('default1'); + + process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID = 'default2'; + expect(getEnv('dbExportBucketClientId', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('dbExportBucketClientId', { dataSource: 'postgres' })).toEqual('default2'); + expect(getEnv('dbExportBucketClientId', { dataSource: 'wrong' })).toEqual('default2'); + + delete 
process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID; + expect(getEnv('dbExportBucketClientId', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('dbExportBucketClientId', { dataSource: 'postgres' })).toBeUndefined(); + expect(getEnv('dbExportBucketClientId', { dataSource: 'wrong' })).toBeUndefined(); + }); + + test('getEnv("dbExportBucketClientSecret")', () => { + process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET = 'default1'; + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'postgres' })).toEqual('default1'); + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'wrong' })).toEqual('default1'); + + process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET = 'default2'; + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'postgres' })).toEqual('default2'); + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'wrong' })).toEqual('default2'); + + delete process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET; + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'postgres' })).toBeUndefined(); + expect(getEnv('dbExportBucketClientSecret', { dataSource: 'wrong' })).toBeUndefined(); + }); + test('getEnv("dbExportIntegration")', () => { process.env.CUBEJS_DB_EXPORT_INTEGRATION = 'default1'; expect(getEnv('dbExportIntegration', { dataSource: 'default' })).toEqual('default1'); From fa454f7fd3a68dc04682599a8dcb097d308d8184 Mon Sep 17 00:00:00 2001 From: Maggie Zhang Date: Mon, 26 Jun 2023 17:16:57 +0800 Subject: [PATCH 04/21] Add unit test for DatabricksDriver --- .../test/DatabricksDriver.test.ts | 65 +++++++++++++++++++ 1 file changed, 65 insertions(+) create mode 100644 packages/cubejs-databricks-jdbc-driver/test/DatabricksDriver.test.ts diff --git a/packages/cubejs-databricks-jdbc-driver/test/DatabricksDriver.test.ts b/packages/cubejs-databricks-jdbc-driver/test/DatabricksDriver.test.ts new file mode 100644 index 0000000000000..88cdd66726cea --- /dev/null +++ b/packages/cubejs-databricks-jdbc-driver/test/DatabricksDriver.test.ts @@ -0,0 +1,65 @@ +import { DatabricksDriver } from '../src/DatabricksDriver'; +import { UnloadOptions } from '@cubejs-backend/base-driver'; +import { ContainerClient, BlobServiceClient } from '@azure/storage-blob'; + +jest.mock('@azure/storage-blob', () => ({ + ...jest.requireActual('@azure/storage-blob'), + generateBlobSASQueryParameters: jest.fn().mockReturnValue('test') +})); + +jest.spyOn(ContainerClient.prototype, 'listBlobsFlat').mockImplementation( + jest.fn().mockReturnValue([{name: 'product.csv/test.csv'}]) +); +jest.spyOn(BlobServiceClient.prototype, 'getUserDelegationKey').mockImplementation( + jest.fn().mockReturnValue('mockKey') +); + +describe('DatabricksDriver', () => { + const mockTableName = 'product'; + const mockSql = 'SELECT * FROM ' + mockTableName; + const mockParams = [1] + const mockOptions: UnloadOptions = { + maxFileSize: 3, + query: { + sql: mockSql, + params: mockParams, + }, + }; + let databricksDriver: DatabricksDriver; + const mockUnloadWithSql = jest.fn().mockResolvedValue('mockType'); + + beforeAll(() => { + process.env.CUBEJS_DB_DATABRICKS_ACCEPT_POLICY='true'; + process.env.CUBEJS_DB_DATABRICKS_URL='jdbc:databricks://adb-123456789.10.azuredatabricks.net:443'; + process.env.CUBEJS_DB_EXPORT_BUCKET_TYPE='azure'; + 
process.env.CUBEJS_DB_EXPORT_BUCKET='wasbs://cube-export@mock.blob.core.windows.net'; + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY='azure-key'; + process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID='azure-tenant-id'; + process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID='azure-client-id'; + process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET='azure-client-sceret' + process.env.CUBEJS_DB_DATABRICKS_TOKEN='token'; + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + test('should get signed URLs of unloaded csv files by azure storage shared key', async () => { + databricksDriver = new DatabricksDriver(); + databricksDriver['unloadWithSql'] = mockUnloadWithSql; + + const result = await databricksDriver.unload(mockTableName, mockOptions); + expect(mockUnloadWithSql).toHaveBeenCalledWith(mockTableName, mockSql, mockParams); + expect(result.csvFile).toBeTruthy(); + }); + + test('should get signed URLs of unloaded csv files by azure client secret', async () => { + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY=''; + databricksDriver = new DatabricksDriver(); + databricksDriver['unloadWithSql'] = mockUnloadWithSql; + + const result = await databricksDriver.unload(mockTableName, mockOptions); + expect(mockUnloadWithSql).toHaveBeenCalledWith(mockTableName, mockSql, mockParams); + expect(result.csvFile).toBeTruthy(); + }); +}); From ad396baea7ebd7e0593ad17279f65ce7236d5e01 Mon Sep 17 00:00:00 2001 From: Maggie Zhang Date: Mon, 26 Jun 2023 23:16:38 +0800 Subject: [PATCH 05/21] Add test script for databricks driver --- packages/cubejs-databricks-jdbc-driver/package.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/cubejs-databricks-jdbc-driver/package.json b/packages/cubejs-databricks-jdbc-driver/package.json index cbb131672e7c0..fb509b870b6ec 100644 --- a/packages/cubejs-databricks-jdbc-driver/package.json +++ b/packages/cubejs-databricks-jdbc-driver/package.json @@ -18,6 +18,8 @@ "build": "rm -rf dist && npm run tsc", "tsc": "tsc", "watch": "tsc -w", + "test": "npm run unit", + "unit": "jest dist/test", "lint": "eslint src/* --ext .ts", "lint:fix": "eslint --fix src/* --ext .ts", "postinstall": "node bin/post-install" From 50a59a6837aa5fb4679906cd3ccd5dcc088f269a Mon Sep 17 00:00:00 2001 From: Maggie Zhang Date: Mon, 26 Jun 2023 23:58:39 +0800 Subject: [PATCH 06/21] Update azure identity to 3.2.3 --- .../package.json | 2 +- yarn.lock | 108 ------------------ 2 files changed, 1 insertion(+), 109 deletions(-) diff --git a/packages/cubejs-databricks-jdbc-driver/package.json b/packages/cubejs-databricks-jdbc-driver/package.json index fb509b870b6ec..50d96a59f2a0f 100644 --- a/packages/cubejs-databricks-jdbc-driver/package.json +++ b/packages/cubejs-databricks-jdbc-driver/package.json @@ -33,7 +33,7 @@ "@aws-sdk/client-s3": "^3.49.0", "@aws-sdk/s3-request-presigner": "^3.49.0", "@azure/storage-blob": "^12.9.0", - "@azure/identity": "^3.2.2", + "@azure/identity": "^3.2.3", "@cubejs-backend/base-driver": "^0.33.29", "@cubejs-backend/jdbc-driver": "^0.33.29", "@cubejs-backend/schema-compiler": "^0.33.29", diff --git a/yarn.lock b/yarn.lock index b7fc333688e43..9495430a10e45 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1939,27 +1939,6 @@ "@azure/abort-controller" "^1.0.0" tslib "^2.2.0" -"@azure/core-auth@^1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@azure/core-auth/-/core-auth-1.4.0.tgz#6fa9661c1705857820dbc216df5ba5665ac36a9e" - integrity sha512-HFrcTgmuSuukRf/EdPmqBrc5l6Q5Uu+2TbuhaKbgaCpP2TfAeiNaQPAadxO+CYBRHGUzIDteMAjFspFLDLnKVQ== - dependencies: - "@azure/abort-controller" 
"^1.0.0" - tslib "^2.2.0" - -"@azure/core-client@^1.4.0": - version "1.7.3" - resolved "https://registry.yarnpkg.com/@azure/core-client/-/core-client-1.7.3.tgz#f8cb2a1f91e8bc4921fa2e745cfdfda3e6e491a3" - integrity sha512-kleJ1iUTxcO32Y06dH9Pfi9K4U+Tlb111WXEnbt7R/ne+NLRwppZiTGJuTD5VVoxTMK5NTbEtm5t2vcdNCFe2g== - dependencies: - "@azure/abort-controller" "^1.0.0" - "@azure/core-auth" "^1.4.0" - "@azure/core-rest-pipeline" "^1.9.1" - "@azure/core-tracing" "^1.0.0" - "@azure/core-util" "^1.0.0" - "@azure/logger" "^1.0.0" - tslib "^2.2.0" - "@azure/core-http@^2.0.0": version "2.2.2" resolved "https://registry.yarnpkg.com/@azure/core-http/-/core-http-2.2.2.tgz#573798f087d808d39aa71fd7c52b8d7b89f440da" @@ -1999,21 +1978,6 @@ "@azure/core-asynciterator-polyfill" "^1.0.0" tslib "^2.2.0" -"@azure/core-rest-pipeline@^1.1.0", "@azure/core-rest-pipeline@^1.9.1": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@azure/core-rest-pipeline/-/core-rest-pipeline-1.11.0.tgz#fc0e8f56caac08a9d4ac91c07a6c5a360ea31c82" - integrity sha512-nB4KXl6qAyJmBVLWA7SakT4tzpYZTCk4pvRBeI+Ye0WYSOrlTqlMhc4MSS/8atD3ufeYWdkN380LLoXlUUzThw== - dependencies: - "@azure/abort-controller" "^1.0.0" - "@azure/core-auth" "^1.4.0" - "@azure/core-tracing" "^1.0.1" - "@azure/core-util" "^1.3.0" - "@azure/logger" "^1.0.0" - form-data "^4.0.0" - http-proxy-agent "^5.0.0" - https-proxy-agent "^5.0.0" - tslib "^2.2.0" - "@azure/core-tracing@1.0.0-preview.13": version "1.0.0-preview.13" resolved "https://registry.yarnpkg.com/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz#55883d40ae2042f6f1e12b17dd0c0d34c536d644" @@ -2022,43 +1986,6 @@ "@opentelemetry/api" "^1.0.1" tslib "^2.2.0" -"@azure/core-tracing@^1.0.0", "@azure/core-tracing@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@azure/core-tracing/-/core-tracing-1.0.1.tgz#352a38cbea438c4a83c86b314f48017d70ba9503" - integrity sha512-I5CGMoLtX+pI17ZdiFJZgxMJApsK6jjfm85hpgp3oazCdq5Wxgh4wMr7ge/TTWW1B5WBuvIOI1fMU/FrOAMKrw== - dependencies: - tslib "^2.2.0" - -"@azure/core-util@^1.0.0", "@azure/core-util@^1.3.0": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@azure/core-util/-/core-util-1.3.2.tgz#3f8cfda1e87fac0ce84f8c1a42fcd6d2a986632d" - integrity sha512-2bECOUh88RvL1pMZTcc6OzfobBeWDBf5oBbhjIhT1MV9otMVWCzpOJkkiKtrnO88y5GGBelgY8At73KGAdbkeQ== - dependencies: - "@azure/abort-controller" "^1.0.0" - tslib "^2.2.0" - -"@azure/identity@^3.2.2": - version "3.2.3" - resolved "https://registry.yarnpkg.com/@azure/identity/-/identity-3.2.3.tgz#dc6a4bc4c798c06dd8a3af2869f83ddadf332e26" - integrity sha512-knIbl7p2i8r3qPsLW2W84esmDPr36RqieLC72OeuqYk4+0TRNthUhWTs655P9S9Pm3TVVxcFsS3Le9SXIWBIFA== - dependencies: - "@azure/abort-controller" "^1.0.0" - "@azure/core-auth" "^1.3.0" - "@azure/core-client" "^1.4.0" - "@azure/core-rest-pipeline" "^1.1.0" - "@azure/core-tracing" "^1.0.0" - "@azure/core-util" "^1.0.0" - "@azure/logger" "^1.0.0" - "@azure/msal-browser" "^2.37.1" - "@azure/msal-common" "^13.1.0" - "@azure/msal-node" "^1.17.3" - events "^3.0.0" - jws "^4.0.0" - open "^8.0.0" - stoppable "^1.1.0" - tslib "^2.2.0" - uuid "^8.3.0" - "@azure/logger@^1.0.0": version "1.0.3" resolved "https://registry.yarnpkg.com/@azure/logger/-/logger-1.0.3.tgz#6e36704aa51be7d4a1bae24731ea580836293c96" @@ -2095,27 +2022,6 @@ "@azure/ms-rest-js" "^2.0.4" adal-node "^0.2.2" -"@azure/msal-browser@^2.37.1": - version "2.37.1" - resolved "https://registry.yarnpkg.com/@azure/msal-browser/-/msal-browser-2.37.1.tgz#7e3fa7df9c6e74eb1fb640e73ef3b5b4407ee98d" - integrity 
sha512-EoKQISEpIY39Ru1OpWkeFZBcwp6Y0bG81bVmdyy4QJebPPDdVzfm62PSU0XFIRc3bqjZ4PBKBLMYLuo9NZYAow== - dependencies: - "@azure/msal-common" "13.1.0" - -"@azure/msal-common@13.1.0", "@azure/msal-common@^13.1.0": - version "13.1.0" - resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-13.1.0.tgz#73a787ec1e7880c30b187cf451404af547d95339" - integrity sha512-wj+ULrRB0HTuMmtrMjg8j3guCx32GE2BCPbsMCZkHgL1BZetC3o/Su5UJEQMX1HNc9CrIaQNx5WaKWHygYDe0g== - -"@azure/msal-node@^1.17.3": - version "1.17.3" - resolved "https://registry.yarnpkg.com/@azure/msal-node/-/msal-node-1.17.3.tgz#1a0bbecb0b3e5e63d50ccb27c9cb3bae5a6c1b65" - integrity sha512-slsa+388bQQWnWH1V91KL+zV57rIp/0OQFfF0EmVMY8gnEIkAnpWWFUVBTTMbxEyjEFMk5ZW9xiHvHBcYFHzDw== - dependencies: - "@azure/msal-common" "13.1.0" - jsonwebtoken "^9.0.0" - uuid "^8.3.0" - "@azure/storage-blob@^12.11.0": version "12.12.0" resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.12.0.tgz#25e277c885692d5adcd8c2a949789b2837a74c59" @@ -21428,15 +21334,6 @@ open@^7.0.2, open@^7.3.1, open@^7.4.2: is-docker "^2.0.0" is-wsl "^2.1.1" -open@^8.0.0: - version "8.4.2" - resolved "https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9" - integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== - dependencies: - define-lazy-prop "^2.0.0" - is-docker "^2.1.1" - is-wsl "^2.2.0" - opencollective-postinstall@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/opencollective-postinstall/-/opencollective-postinstall-2.0.3.tgz#7a0fff978f6dbfa4d006238fbac98ed4198c3259" @@ -26576,11 +26473,6 @@ stealthy-require@^1.1.1: resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b" integrity sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks= -stoppable@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/stoppable/-/stoppable-1.1.0.tgz#32da568e83ea488b08e4d7ea2c3bcc9d75015d5b" - integrity sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw== - stream-browserify@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" From c450ef268f3a93a6d73f9cbf34a399dec56907d4 Mon Sep 17 00:00:00 2001 From: Maggie Zhang Date: Wed, 28 Jun 2023 09:56:09 +0800 Subject: [PATCH 07/21] Add yarn lock --- yarn.lock | 108 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 108 insertions(+) diff --git a/yarn.lock b/yarn.lock index 9495430a10e45..13ca3f53e0024 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1939,6 +1939,27 @@ "@azure/abort-controller" "^1.0.0" tslib "^2.2.0" +"@azure/core-auth@^1.4.0": + version "1.4.0" + resolved "https://registry.yarnpkg.com/@azure/core-auth/-/core-auth-1.4.0.tgz#6fa9661c1705857820dbc216df5ba5665ac36a9e" + integrity sha512-HFrcTgmuSuukRf/EdPmqBrc5l6Q5Uu+2TbuhaKbgaCpP2TfAeiNaQPAadxO+CYBRHGUzIDteMAjFspFLDLnKVQ== + dependencies: + "@azure/abort-controller" "^1.0.0" + tslib "^2.2.0" + +"@azure/core-client@^1.4.0": + version "1.7.3" + resolved "https://registry.yarnpkg.com/@azure/core-client/-/core-client-1.7.3.tgz#f8cb2a1f91e8bc4921fa2e745cfdfda3e6e491a3" + integrity sha512-kleJ1iUTxcO32Y06dH9Pfi9K4U+Tlb111WXEnbt7R/ne+NLRwppZiTGJuTD5VVoxTMK5NTbEtm5t2vcdNCFe2g== + dependencies: + "@azure/abort-controller" "^1.0.0" + "@azure/core-auth" "^1.4.0" + "@azure/core-rest-pipeline" "^1.9.1" + "@azure/core-tracing" "^1.0.0" + "@azure/core-util" 
"^1.0.0" + "@azure/logger" "^1.0.0" + tslib "^2.2.0" + "@azure/core-http@^2.0.0": version "2.2.2" resolved "https://registry.yarnpkg.com/@azure/core-http/-/core-http-2.2.2.tgz#573798f087d808d39aa71fd7c52b8d7b89f440da" @@ -1978,6 +1999,21 @@ "@azure/core-asynciterator-polyfill" "^1.0.0" tslib "^2.2.0" +"@azure/core-rest-pipeline@^1.1.0", "@azure/core-rest-pipeline@^1.9.1": + version "1.11.0" + resolved "https://registry.yarnpkg.com/@azure/core-rest-pipeline/-/core-rest-pipeline-1.11.0.tgz#fc0e8f56caac08a9d4ac91c07a6c5a360ea31c82" + integrity sha512-nB4KXl6qAyJmBVLWA7SakT4tzpYZTCk4pvRBeI+Ye0WYSOrlTqlMhc4MSS/8atD3ufeYWdkN380LLoXlUUzThw== + dependencies: + "@azure/abort-controller" "^1.0.0" + "@azure/core-auth" "^1.4.0" + "@azure/core-tracing" "^1.0.1" + "@azure/core-util" "^1.3.0" + "@azure/logger" "^1.0.0" + form-data "^4.0.0" + http-proxy-agent "^5.0.0" + https-proxy-agent "^5.0.0" + tslib "^2.2.0" + "@azure/core-tracing@1.0.0-preview.13": version "1.0.0-preview.13" resolved "https://registry.yarnpkg.com/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz#55883d40ae2042f6f1e12b17dd0c0d34c536d644" @@ -1986,6 +2022,43 @@ "@opentelemetry/api" "^1.0.1" tslib "^2.2.0" +"@azure/core-tracing@^1.0.0", "@azure/core-tracing@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@azure/core-tracing/-/core-tracing-1.0.1.tgz#352a38cbea438c4a83c86b314f48017d70ba9503" + integrity sha512-I5CGMoLtX+pI17ZdiFJZgxMJApsK6jjfm85hpgp3oazCdq5Wxgh4wMr7ge/TTWW1B5WBuvIOI1fMU/FrOAMKrw== + dependencies: + tslib "^2.2.0" + +"@azure/core-util@^1.0.0", "@azure/core-util@^1.3.0": + version "1.3.2" + resolved "https://registry.yarnpkg.com/@azure/core-util/-/core-util-1.3.2.tgz#3f8cfda1e87fac0ce84f8c1a42fcd6d2a986632d" + integrity sha512-2bECOUh88RvL1pMZTcc6OzfobBeWDBf5oBbhjIhT1MV9otMVWCzpOJkkiKtrnO88y5GGBelgY8At73KGAdbkeQ== + dependencies: + "@azure/abort-controller" "^1.0.0" + tslib "^2.2.0" + +"@azure/identity@^3.2.3": + version "3.2.3" + resolved "https://registry.yarnpkg.com/@azure/identity/-/identity-3.2.3.tgz#dc6a4bc4c798c06dd8a3af2869f83ddadf332e26" + integrity sha512-knIbl7p2i8r3qPsLW2W84esmDPr36RqieLC72OeuqYk4+0TRNthUhWTs655P9S9Pm3TVVxcFsS3Le9SXIWBIFA== + dependencies: + "@azure/abort-controller" "^1.0.0" + "@azure/core-auth" "^1.3.0" + "@azure/core-client" "^1.4.0" + "@azure/core-rest-pipeline" "^1.1.0" + "@azure/core-tracing" "^1.0.0" + "@azure/core-util" "^1.0.0" + "@azure/logger" "^1.0.0" + "@azure/msal-browser" "^2.37.1" + "@azure/msal-common" "^13.1.0" + "@azure/msal-node" "^1.17.3" + events "^3.0.0" + jws "^4.0.0" + open "^8.0.0" + stoppable "^1.1.0" + tslib "^2.2.0" + uuid "^8.3.0" + "@azure/logger@^1.0.0": version "1.0.3" resolved "https://registry.yarnpkg.com/@azure/logger/-/logger-1.0.3.tgz#6e36704aa51be7d4a1bae24731ea580836293c96" @@ -2022,6 +2095,27 @@ "@azure/ms-rest-js" "^2.0.4" adal-node "^0.2.2" +"@azure/msal-browser@^2.37.1": + version "2.37.1" + resolved "https://registry.yarnpkg.com/@azure/msal-browser/-/msal-browser-2.37.1.tgz#7e3fa7df9c6e74eb1fb640e73ef3b5b4407ee98d" + integrity sha512-EoKQISEpIY39Ru1OpWkeFZBcwp6Y0bG81bVmdyy4QJebPPDdVzfm62PSU0XFIRc3bqjZ4PBKBLMYLuo9NZYAow== + dependencies: + "@azure/msal-common" "13.1.0" + +"@azure/msal-common@13.1.0", "@azure/msal-common@^13.1.0": + version "13.1.0" + resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-13.1.0.tgz#73a787ec1e7880c30b187cf451404af547d95339" + integrity sha512-wj+ULrRB0HTuMmtrMjg8j3guCx32GE2BCPbsMCZkHgL1BZetC3o/Su5UJEQMX1HNc9CrIaQNx5WaKWHygYDe0g== + +"@azure/msal-node@^1.17.3": + version 
"1.17.3" + resolved "https://registry.yarnpkg.com/@azure/msal-node/-/msal-node-1.17.3.tgz#1a0bbecb0b3e5e63d50ccb27c9cb3bae5a6c1b65" + integrity sha512-slsa+388bQQWnWH1V91KL+zV57rIp/0OQFfF0EmVMY8gnEIkAnpWWFUVBTTMbxEyjEFMk5ZW9xiHvHBcYFHzDw== + dependencies: + "@azure/msal-common" "13.1.0" + jsonwebtoken "^9.0.0" + uuid "^8.3.0" + "@azure/storage-blob@^12.11.0": version "12.12.0" resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.12.0.tgz#25e277c885692d5adcd8c2a949789b2837a74c59" @@ -21334,6 +21428,15 @@ open@^7.0.2, open@^7.3.1, open@^7.4.2: is-docker "^2.0.0" is-wsl "^2.1.1" +open@^8.0.0: + version "8.4.2" + resolved "https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9" + integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== + dependencies: + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" + opencollective-postinstall@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/opencollective-postinstall/-/opencollective-postinstall-2.0.3.tgz#7a0fff978f6dbfa4d006238fbac98ed4198c3259" @@ -26473,6 +26576,11 @@ stealthy-require@^1.1.1: resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b" integrity sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks= +stoppable@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/stoppable/-/stoppable-1.1.0.tgz#32da568e83ea488b08e4d7ea2c3bcc9d75015d5b" + integrity sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw== + stream-browserify@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" From 34beaa6fd1a7a6982d9496dac805a2d8047a5991 Mon Sep 17 00:00:00 2001 From: Maggie Zhang Date: Wed, 12 Jul 2023 11:56:39 +0800 Subject: [PATCH 08/21] Fix jest async issue --- packages/cubejs-databricks-jdbc-driver/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/cubejs-databricks-jdbc-driver/package.json b/packages/cubejs-databricks-jdbc-driver/package.json index 6045c45fa334f..c8b1fef5ce8a7 100644 --- a/packages/cubejs-databricks-jdbc-driver/package.json +++ b/packages/cubejs-databricks-jdbc-driver/package.json @@ -19,7 +19,7 @@ "tsc": "tsc", "watch": "tsc -w", "test": "npm run unit", - "unit": "jest dist/test", + "unit": "jest dist/test --forceExit", "lint": "eslint src/* --ext .ts", "lint:fix": "eslint --fix src/* --ext .ts", "postinstall": "node bin/post-install" From 3439d85c8dce6557126a4ed1e3054ee3d012bbb3 Mon Sep 17 00:00:00 2001 From: Maggie Zhang Date: Fri, 1 Sep 2023 18:45:45 +0800 Subject: [PATCH 09/21] Add azure prefix for related env variables --- packages/cubejs-backend-shared/src/env.ts | 12 +- .../test/db_env_multi.test.ts | 114 +++++++++--------- .../test/db_env_single.test.ts | 98 +++++++-------- .../src/DatabricksDriver.ts | 30 ++--- .../test/DatabricksDriver.test.ts | 6 +- 5 files changed, 130 insertions(+), 130 deletions(-) diff --git a/packages/cubejs-backend-shared/src/env.ts b/packages/cubejs-backend-shared/src/env.ts index 2a3c8b7a9a637..278423fadc5b0 100644 --- a/packages/cubejs-backend-shared/src/env.ts +++ b/packages/cubejs-backend-shared/src/env.ts @@ -736,39 +736,39 @@ const variables: Record any> = { /** * Tenant ID for the Azure based export bucket srorage. 
*/ - dbExportBucketTenantId: ({ + dbExportBucketAzureTenantId: ({ dataSource, }: { dataSource: string, }) => ( process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_TENANT_ID', dataSource) + keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID', dataSource) ] ), /** * Client ID for the Azure based export bucket srorage. */ - dbExportBucketClientId: ({ + dbExportBucketAzureClientId: ({ dataSource, }: { dataSource: string, }) => ( process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID', dataSource) + keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID', dataSource) ] ), /** * Client Secret for the Azure based export bucket srorage. */ - dbExportBucketClientSecret: ({ + dbExportBucketAzureClientSecret: ({ dataSource, }: { dataSource: string, }) => ( process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET', dataSource) + keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET', dataSource) ] ), diff --git a/packages/cubejs-backend-shared/test/db_env_multi.test.ts b/packages/cubejs-backend-shared/test/db_env_multi.test.ts index 56935857a3a78..b37ce923c20c4 100644 --- a/packages/cubejs-backend-shared/test/db_env_multi.test.ts +++ b/packages/cubejs-backend-shared/test/db_env_multi.test.ts @@ -956,89 +956,89 @@ describe('Multiple datasources', () => { ); }); - test('getEnv("dbExportBucketTenantId")', () => { - process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID = 'default1'; - process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_TENANT_ID = 'postgres1'; - process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_TENANT_ID = 'wrong1'; - expect(getEnv('dbExportBucketTenantId', { dataSource: 'default' })).toEqual('default1'); - expect(getEnv('dbExportBucketTenantId', { dataSource: 'postgres' })).toEqual('postgres1'); - expect(() => getEnv('dbExportBucketTenantId', { dataSource: 'wrong' })).toThrow( + test('getEnv("dbExportBucketAzureTenantId")', () => { + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID = 'default1'; + process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_AZURE_TENANT_ID = 'postgres1'; + process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_AZURE_TENANT_ID = 'wrong1'; + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'postgres' })).toEqual('postgres1'); + expect(() => getEnv('dbExportBucketAzureTenantId', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' ); - process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID = 'default2'; - process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_TENANT_ID = 'postgres2'; - process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_TENANT_ID = 'wrong2'; - expect(getEnv('dbExportBucketTenantId', { dataSource: 'default' })).toEqual('default2'); - expect(getEnv('dbExportBucketTenantId', { dataSource: 'postgres' })).toEqual('postgres2'); - expect(() => getEnv('dbExportBucketTenantId', { dataSource: 'wrong' })).toThrow( + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID = 'default2'; + process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_AZURE_TENANT_ID = 'postgres2'; + process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_AZURE_TENANT_ID = 'wrong2'; + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'postgres' })).toEqual('postgres2'); + expect(() => getEnv('dbExportBucketAzureTenantId', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' 
); - delete process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID; - delete process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_TENANT_ID; - delete process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_TENANT_ID; - expect(getEnv('dbExportBucketTenantId', { dataSource: 'default' })).toBeUndefined(); - expect(getEnv('dbExportBucketTenantId', { dataSource: 'postgres' })).toBeUndefined(); - expect(() => getEnv('dbExportBucketTenantId', { dataSource: 'wrong' })).toThrow( + delete process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID; + delete process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_AZURE_TENANT_ID; + delete process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_AZURE_TENANT_ID; + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'postgres' })).toBeUndefined(); + expect(() => getEnv('dbExportBucketAzureTenantId', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' ); }); - test('getEnv("dbExportBucketClientId")', () => { - process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID = 'default1'; - process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_CLIENT_ID = 'postgres1'; - process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_CLIENT_ID = 'wrong1'; - expect(getEnv('dbExportBucketClientId', { dataSource: 'default' })).toEqual('default1'); - expect(getEnv('dbExportBucketClientId', { dataSource: 'postgres' })).toEqual('postgres1'); - expect(() => getEnv('dbExportBucketClientId', { dataSource: 'wrong' })).toThrow( + test('getEnv("dbExportBucketAzureClientId")', () => { + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID = 'default1'; + process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_AZURE_CLIENT_ID = 'postgres1'; + process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_AZURE_CLIENT_ID = 'wrong1'; + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'postgres' })).toEqual('postgres1'); + expect(() => getEnv('dbExportBucketAzureClientId', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' ); - process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID = 'default2'; - process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_CLIENT_ID = 'postgres2'; - process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_CLIENT_ID = 'wrong2'; - expect(getEnv('dbExportBucketClientId', { dataSource: 'default' })).toEqual('default2'); - expect(getEnv('dbExportBucketClientId', { dataSource: 'postgres' })).toEqual('postgres2'); - expect(() => getEnv('dbExportBucketClientId', { dataSource: 'wrong' })).toThrow( + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID = 'default2'; + process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_AZURE_CLIENT_ID = 'postgres2'; + process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_AZURE_CLIENT_ID = 'wrong2'; + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'postgres' })).toEqual('postgres2'); + expect(() => getEnv('dbExportBucketAzureClientId', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' 
); - delete process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID; - delete process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_CLIENT_ID; - delete process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_CLIENT_ID; - expect(getEnv('dbExportBucketClientId', { dataSource: 'default' })).toBeUndefined(); - expect(getEnv('dbExportBucketClientId', { dataSource: 'postgres' })).toBeUndefined(); - expect(() => getEnv('dbExportBucketClientId', { dataSource: 'wrong' })).toThrow( + delete process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID; + delete process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_AZURE_CLIENT_ID; + delete process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_AZURE_CLIENT_ID; + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'postgres' })).toBeUndefined(); + expect(() => getEnv('dbExportBucketAzureClientId', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' ); }); - test('getEnv("dbExportBucketClientSecret")', () => { - process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET = 'default1'; - process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_CLIENT_SECRET = 'postgres1'; - process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_CLIENT_SECRET = 'wrong1'; - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'default' })).toEqual('default1'); - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'postgres' })).toEqual('postgres1'); - expect(() => getEnv('dbExportBucketClientSecret', { dataSource: 'wrong' })).toThrow( + test('getEnv("dbExportBucketAzureClientSecret")', () => { + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET = 'default1'; + process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET = 'postgres1'; + process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET = 'wrong1'; + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'postgres' })).toEqual('postgres1'); + expect(() => getEnv('dbExportBucketAzureClientSecret', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' ); - process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET = 'default2'; - process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_CLIENT_SECRET = 'postgres2'; - process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_CLIENT_SECRET = 'wrong2'; - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'default' })).toEqual('default2'); - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'postgres' })).toEqual('postgres2'); - expect(() => getEnv('dbExportBucketClientSecret', { dataSource: 'wrong' })).toThrow( + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET = 'default2'; + process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET = 'postgres2'; + process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET = 'wrong2'; + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'postgres' })).toEqual('postgres2'); + expect(() => getEnv('dbExportBucketAzureClientSecret', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' 
); - delete process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET; - delete process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_CLIENT_SECRET; - delete process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_CLIENT_SECRET; - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'default' })).toBeUndefined(); - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'postgres' })).toBeUndefined(); - expect(() => getEnv('dbExportBucketClientSecret', { dataSource: 'wrong' })).toThrow( + delete process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET; + delete process.env.CUBEJS_DS_POSTGRES_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET; + delete process.env.CUBEJS_DS_WRONG_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET; + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'postgres' })).toBeUndefined(); + expect(() => getEnv('dbExportBucketAzureClientSecret', { dataSource: 'wrong' })).toThrow( 'The wrong data source is missing in the declared CUBEJS_DATASOURCES.' ); }); diff --git a/packages/cubejs-backend-shared/test/db_env_single.test.ts b/packages/cubejs-backend-shared/test/db_env_single.test.ts index 374aea8bb1ecb..7143600d1db23 100644 --- a/packages/cubejs-backend-shared/test/db_env_single.test.ts +++ b/packages/cubejs-backend-shared/test/db_env_single.test.ts @@ -618,55 +618,55 @@ describe('Single datasources', () => { expect(getEnv('dbExportBucketAzureKey', { dataSource: 'wrong' })).toBeUndefined(); }); - test('getEnv("dbExportBucketTenantId")', () => { - process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID = 'default1'; - expect(getEnv('dbExportBucketTenantId', { dataSource: 'default' })).toEqual('default1'); - expect(getEnv('dbExportBucketTenantId', { dataSource: 'postgres' })).toEqual('default1'); - expect(getEnv('dbExportBucketTenantId', { dataSource: 'wrong' })).toEqual('default1'); - - process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID = 'default2'; - expect(getEnv('dbExportBucketTenantId', { dataSource: 'default' })).toEqual('default2'); - expect(getEnv('dbExportBucketTenantId', { dataSource: 'postgres' })).toEqual('default2'); - expect(getEnv('dbExportBucketTenantId', { dataSource: 'wrong' })).toEqual('default2'); - - delete process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID; - expect(getEnv('dbExportBucketTenantId', { dataSource: 'default' })).toBeUndefined(); - expect(getEnv('dbExportBucketTenantId', { dataSource: 'postgres' })).toBeUndefined(); - expect(getEnv('dbExportBucketTenantId', { dataSource: 'wrong' })).toBeUndefined(); - }); - - test('getEnv("dbExportBucketClientId")', () => { - process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID = 'default1'; - expect(getEnv('dbExportBucketClientId', { dataSource: 'default' })).toEqual('default1'); - expect(getEnv('dbExportBucketClientId', { dataSource: 'postgres' })).toEqual('default1'); - expect(getEnv('dbExportBucketClientId', { dataSource: 'wrong' })).toEqual('default1'); - - process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID = 'default2'; - expect(getEnv('dbExportBucketClientId', { dataSource: 'default' })).toEqual('default2'); - expect(getEnv('dbExportBucketClientId', { dataSource: 'postgres' })).toEqual('default2'); - expect(getEnv('dbExportBucketClientId', { dataSource: 'wrong' })).toEqual('default2'); - - delete process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID; - expect(getEnv('dbExportBucketClientId', { dataSource: 'default' })).toBeUndefined(); - expect(getEnv('dbExportBucketClientId', { dataSource: 'postgres' })).toBeUndefined(); - expect(getEnv('dbExportBucketClientId', { 
dataSource: 'wrong' })).toBeUndefined(); - }); - - test('getEnv("dbExportBucketClientSecret")', () => { - process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET = 'default1'; - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'default' })).toEqual('default1'); - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'postgres' })).toEqual('default1'); - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'wrong' })).toEqual('default1'); - - process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET = 'default2'; - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'default' })).toEqual('default2'); - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'postgres' })).toEqual('default2'); - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'wrong' })).toEqual('default2'); - - delete process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET; - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'default' })).toBeUndefined(); - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'postgres' })).toBeUndefined(); - expect(getEnv('dbExportBucketClientSecret', { dataSource: 'wrong' })).toBeUndefined(); + test('getEnv("dbExportBucketAzureTenantId")', () => { + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID = 'default1'; + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'postgres' })).toEqual('default1'); + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'wrong' })).toEqual('default1'); + + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID = 'default2'; + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'postgres' })).toEqual('default2'); + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'wrong' })).toEqual('default2'); + + delete process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID; + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'postgres' })).toBeUndefined(); + expect(getEnv('dbExportBucketAzureTenantId', { dataSource: 'wrong' })).toBeUndefined(); + }); + + test('getEnv("dbExportBucketAzureClientId")', () => { + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID = 'default1'; + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'default' })).toEqual('default1'); + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'postgres' })).toEqual('default1'); + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'wrong' })).toEqual('default1'); + + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID = 'default2'; + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'postgres' })).toEqual('default2'); + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'wrong' })).toEqual('default2'); + + delete process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID; + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'postgres' })).toBeUndefined(); + expect(getEnv('dbExportBucketAzureClientId', { dataSource: 'wrong' })).toBeUndefined(); + }); + + test('getEnv("dbExportBucketAzureClientSecret")', () => { + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET = 'default1'; + expect(getEnv('dbExportBucketAzureClientSecret', { 
dataSource: 'default' })).toEqual('default1'); + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'postgres' })).toEqual('default1'); + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'wrong' })).toEqual('default1'); + + process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET = 'default2'; + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'default' })).toEqual('default2'); + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'postgres' })).toEqual('default2'); + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'wrong' })).toEqual('default2'); + + delete process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET; + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'default' })).toBeUndefined(); + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'postgres' })).toBeUndefined(); + expect(getEnv('dbExportBucketAzureClientSecret', { dataSource: 'wrong' })).toBeUndefined(); }); test('getEnv("dbExportIntegration")', () => { diff --git a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts index 6ef982ae468ea..d2e8bb0a8906a 100644 --- a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts +++ b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts @@ -95,17 +95,17 @@ export type DatabricksDriverConfiguration = JDBCDriverConfiguration & /** * Azure tenant id */ - tenantId?: string, + azureTenantId?: string, /** * Azure service principle client id */ - clientId?: string, + azureClientId?: string, /** * Azure service principle client sceret */ - clientSecret?: string, + azureClientSecret?: string, }; async function fileExistsOr( @@ -262,15 +262,15 @@ export class DatabricksDriver extends JDBCDriver { exportBucketCsvEscapeSymbol: getEnv('dbExportBucketCsvEscapeSymbol', { dataSource }), // Azure service principle - tenantId: - conf?.tenantId || - getEnv('dbExportBucketTenantId', { dataSource }), - clientId: - conf?.clientId || - getEnv('dbExportBucketClientId', { dataSource }), - clientSecret: - conf?.clientSecret || - getEnv('dbExportBucketClientSecret', { dataSource }), + azureTenantId: + conf?.azureTenantId || + getEnv('dbExportBucketAzureTenantId', { dataSource }), + azureClientId: + conf?.azureClientId || + getEnv('dbExportBucketAzureClientId', { dataSource }), + azureClientSecret: + conf?.azureClientSecret || + getEnv('dbExportBucketAzureClientSecret', { dataSource }), }; super(config); this.config = config; @@ -690,9 +690,9 @@ export class DatabricksDriver extends JDBCDriver { this.config.azureKey as string, ) : new ClientSecretCredential( - this.config.tenantId as string, - this.config.clientId as string, - this.config.clientSecret as string, + this.config.azureTenantId as string, + this.config.azureClientId as string, + this.config.azureClientSecret as string, ); const blobClient = new BlobServiceClient( `https://${account}.blob.core.windows.net`, diff --git a/packages/cubejs-databricks-jdbc-driver/test/DatabricksDriver.test.ts b/packages/cubejs-databricks-jdbc-driver/test/DatabricksDriver.test.ts index 88cdd66726cea..275924a184307 100644 --- a/packages/cubejs-databricks-jdbc-driver/test/DatabricksDriver.test.ts +++ b/packages/cubejs-databricks-jdbc-driver/test/DatabricksDriver.test.ts @@ -34,9 +34,9 @@ describe('DatabricksDriver', () => { process.env.CUBEJS_DB_EXPORT_BUCKET_TYPE='azure'; process.env.CUBEJS_DB_EXPORT_BUCKET='wasbs://cube-export@mock.blob.core.windows.net'; 
       process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY='azure-key';
-      process.env.CUBEJS_DB_EXPORT_BUCKET_TENANT_ID='azure-tenant-id';
-      process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_ID='azure-client-id';
-      process.env.CUBEJS_DB_EXPORT_BUCKET_CLIENT_SECRET='azure-client-sceret'
+      process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID='azure-tenant-id';
+      process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID='azure-client-id';
+      process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET='azure-client-secret';
       process.env.CUBEJS_DB_DATABRICKS_TOKEN='token';
     });

From e48c6b1880eec4720487f31842e3b473558dd855 Mon Sep 17 00:00:00 2001
From: Maggie Zhang
Date: Fri, 1 Sep 2023 20:03:20 +0800
Subject: [PATCH 10/21] Keep shared key credential as default

---
 .../src/DatabricksDriver.ts | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
index d2e8bb0a8906a..b54f088b688cb 100644
--- a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
+++ b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
@@ -684,16 +684,18 @@ export class DatabricksDriver extends JDBCDriver {
       pathname.split(`${this.config.exportBucket}/`)[1];
     const expr = new RegExp(`${foldername}\\/.*\\.csv$`, 'i');
 
-    const credential = this.config.azureKey
-      ? new StorageSharedKeyCredential(
-        account,
-        this.config.azureKey as string,
-      )
-      : new ClientSecretCredential(
+    let credential: StorageSharedKeyCredential | ClientSecretCredential;
+    credential = new StorageSharedKeyCredential(
+      account,
+      this.config.azureKey as string,
+    );
+    if (this.config.azureTenantId && this.config.azureClientId && this.config.azureClientSecret) {
+      credential = new ClientSecretCredential(
         this.config.azureTenantId as string,
         this.config.azureClientId as string,
         this.config.azureClientSecret as string,
       );
+    }
     const blobClient = new BlobServiceClient(
       `https://${account}.blob.core.windows.net`,
       credential,

From 9ac16ea46c0b62be8435da0344688ff1e834227a Mon Sep 17 00:00:00 2001
From: Maggie Zhang
Date: Wed, 6 Sep 2023 21:35:19 +0800
Subject: [PATCH 11/21] Make SAS code more readable

---
 .../src/DatabricksDriver.ts | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)

diff --git a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
index b54f088b688cb..d755e504f45e9 100644
--- a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
+++ b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
@@ -715,12 +715,15 @@ export class DatabricksDriver extends JDBCDriver {
           protocol: SASProtocol.Https,
           version: '2020-08-04',
         };
-        const sas = credential instanceof StorageSharedKeyCredential
-          ? generateBlobSASQueryParameters(
+
+        let sas: string;
+        if (credential instanceof StorageSharedKeyCredential) {
+          sas = generateBlobSASQueryParameters(
             signatureValues,
             credential,
-          ).toString()
-          : generateBlobSASQueryParameters(
+          ).toString();
+        } else if (credential instanceof ClientSecretCredential) {
+          sas = generateBlobSASQueryParameters(
             signatureValues,
             await blobClient.getUserDelegationKey(
               startsOnDate,
               expiresOnDate,
             ),
             account
           ).toString();
+        } else {
+          const credentialType = typeof credential;
+          throw new Error(`Unexpected credentials type: ${credentialType}`);
+        }
+
         csvFile.push(`https://${
           account
         }.blob.core.windows.net/${

From 94e4d8b9c3991a81f3cd1ebfd8c57ea2635a322a Mon Sep 17 00:00:00 2001
From: Maggie Zhang
Date: Wed, 6 Sep 2023 21:45:05 +0800
Subject: [PATCH 12/21] Fix typos in comments

---
 .../cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
index d755e504f45e9..400ef7dd832fe 100644
--- a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
+++ b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts
@@ -98,12 +98,12 @@ export type DatabricksDriverConfiguration = JDBCDriverConfiguration &
   azureTenantId?: string,
 
   /**
-   * Azure service principle client id
+   * Azure service principal client id
    */
   azureClientId?: string,
 
   /**
-   * Azure service principle client sceret
+   * Azure service principal client secret
    */
   azureClientSecret?: string,
 };
@@ -261,7 +261,7 @@ export class DatabricksDriver extends JDBCDriver {
         getEnv('dbExportBucketAzureKey', { dataSource }),
       exportBucketCsvEscapeSymbol:
         getEnv('dbExportBucketCsvEscapeSymbol', { dataSource }),
-      // Azure service principle
+      // Azure service principal
       azureTenantId:
         conf?.azureTenantId ||
         getEnv('dbExportBucketAzureTenantId', { dataSource }),

From 4568bdffc3bea7db4164a26230c5b547d5ec653e Mon Sep 17 00:00:00 2001
From: Maggie Zhang
Date: Mon, 23 Oct 2023 22:20:53 +0800
Subject: [PATCH 13/21] doc(@cubejs-backend/databricks-jdbc-driver): Add new env variables to doc

---
 .../Configuration/Databases/Databricks-JDBC.mdx | 15 +++++++++++++++
 .../data-sources/databricks-jdbc.mdx | 15 +++++++++++++++
 2 files changed, 30 insertions(+)

diff --git a/docs/content/Configuration/Databases/Databricks-JDBC.mdx b/docs/content/Configuration/Databases/Databricks-JDBC.mdx
index da932538b27db..1ac5a1b3e7bbe 100644
--- a/docs/content/Configuration/Databases/Databricks-JDBC.mdx
+++ b/docs/content/Configuration/Databases/Databricks-JDBC.mdx
@@ -127,6 +127,17 @@ CUBEJS_DB_EXPORT_BUCKET=wasbs://my-bucket@my-account.blob.core.windows.net
 CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY=
 ```
 
+An access key grants full access to the storage account's configuration and data.
+For fine-grained control over access to storage resources, follow [the Azure guide on authorizing with Azure Active Directory][authorize-with-azure-active-directory].
+
+[Create a service principal][azure-authentication-with-service-principal] and replace the access key as follows:
+
+```dotenv
+CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID=
+CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID=
+CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET=
+```
+
 ## SSL/TLS
 
 Cube does not require any additional configuration to enable SSL/TLS for
@@ -143,6 +154,10 @@ bucket][self-preaggs-export-bucket] **must be** configured.
 [azure-bs]: https://azure.microsoft.com/en-gb/services/storage/blobs/
 [azure-bs-docs-get-key]:
   https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys
+[authorize-with-azure-active-directory]:
+  https://learn.microsoft.com/en-us/rest/api/storageservices/authorize-with-azure-active-directory
+[azure-authentication-with-service-principal]:
+  https://learn.microsoft.com/en-us/azure/developer/java/sdk/identity-service-principal-auth
 [databricks]: https://databricks.com/
 [databricks-docs-dbfs]:
   https://docs.databricks.com/data/databricks-file-system.html#mount-object-storage-to-dbfs
diff --git a/docs/docs-new/pages/product/configuration/data-sources/databricks-jdbc.mdx b/docs/docs-new/pages/product/configuration/data-sources/databricks-jdbc.mdx
index 8dc2626d9cd9c..d05adb768ad10 100644
--- a/docs/docs-new/pages/product/configuration/data-sources/databricks-jdbc.mdx
+++ b/docs/docs-new/pages/product/configuration/data-sources/databricks-jdbc.mdx
@@ -129,6 +129,17 @@ CUBEJS_DB_EXPORT_BUCKET=wasbs://my-bucket@my-account.blob.core.windows.net
 CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY=
 ```
 
+An access key grants full access to the storage account's configuration and data.
+For fine-grained control over access to storage resources, follow [the Azure guide on authorizing with Azure Active Directory][authorize-with-azure-active-directory].
+
+[Create a service principal][azure-authentication-with-service-principal] and replace the access key as follows:
+
+```dotenv
+CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID=
+CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID=
+CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET=
+```
+
 ## SSL/TLS
 
 Cube does not require any additional configuration to enable SSL/TLS for
@@ -145,6 +156,10 @@ bucket][self-preaggs-export-bucket] **must be** configured.
 [azure-bs]: https://azure.microsoft.com/en-gb/services/storage/blobs/
 [azure-bs-docs-get-key]:
   https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys
+[authorize-with-azure-active-directory]:
+  https://learn.microsoft.com/en-us/rest/api/storageservices/authorize-with-azure-active-directory
+[azure-authentication-with-service-principal]:
+  https://learn.microsoft.com/en-us/azure/developer/java/sdk/identity-service-principal-auth
 [databricks]: https://databricks.com/
 [databricks-docs-dbfs]:
   https://docs.databricks.com/data/databricks-file-system.html#mount-object-storage-to-dbfs

From c43b1eb4d0cf2fac64bc02a8ec982b32727fe18e Mon Sep 17 00:00:00 2001
From: Maggie Zhang
Date: Tue, 21 Nov 2023 10:27:35 +0800
Subject: [PATCH 14/21] Remove new Databricks variables doc changes

---
 .../Configuration/Databases/Databricks-JDBC.mdx | 15 ---------------
 .../data-sources/databricks-jdbc.mdx | 15 ---------------
 2 files changed, 30 deletions(-)

diff --git a/docs/content/Configuration/Databases/Databricks-JDBC.mdx b/docs/content/Configuration/Databases/Databricks-JDBC.mdx
index 1ac5a1b3e7bbe..da932538b27db 100644
--- a/docs/content/Configuration/Databases/Databricks-JDBC.mdx
+++ b/docs/content/Configuration/Databases/Databricks-JDBC.mdx
@@ -127,17 +127,6 @@ CUBEJS_DB_EXPORT_BUCKET=wasbs://my-bucket@my-account.blob.core.windows.net
 CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY=
 ```
 
-An access key grants full access to the storage account's configuration and data.
-For fine-grained control over access to storage resources, follow [the Azure guide on authorizing with Azure Active Directory][authorize-with-azure-active-directory].
-
-[Create a service principal][azure-authentication-with-service-principal] and replace the access key as follows:
-
-```dotenv
-CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID=
-CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID=
-CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET=
-```
-
 ## SSL/TLS
 
 Cube does not require any additional configuration to enable SSL/TLS for
@@ -154,10 +143,6 @@ bucket][self-preaggs-export-bucket] **must be** configured.
 [azure-bs]: https://azure.microsoft.com/en-gb/services/storage/blobs/
 [azure-bs-docs-get-key]:
   https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys
-[authorize-with-azure-active-directory]:
-  https://learn.microsoft.com/en-us/rest/api/storageservices/authorize-with-azure-active-directory
-[azure-authentication-with-service-principal]:
-  https://learn.microsoft.com/en-us/azure/developer/java/sdk/identity-service-principal-auth
 [databricks]: https://databricks.com/
 [databricks-docs-dbfs]:
   https://docs.databricks.com/data/databricks-file-system.html#mount-object-storage-to-dbfs
diff --git a/docs/docs-new/pages/product/configuration/data-sources/databricks-jdbc.mdx b/docs/docs-new/pages/product/configuration/data-sources/databricks-jdbc.mdx
index d05adb768ad10..8dc2626d9cd9c 100644
--- a/docs/docs-new/pages/product/configuration/data-sources/databricks-jdbc.mdx
+++ b/docs/docs-new/pages/product/configuration/data-sources/databricks-jdbc.mdx
@@ -129,17 +129,6 @@ CUBEJS_DB_EXPORT_BUCKET=wasbs://my-bucket@my-account.blob.core.windows.net
 CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY=
 ```
 
-An access key grants full access to the storage account's configuration and data.
-For fine-grained control over access to storage resources, follow [the Azure guide on authorizing with Azure Active Directory][authorize-with-azure-active-directory].
-
-[Create a service principal][azure-authentication-with-service-principal] and replace the access key as follows:
-
-```dotenv
-CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID=
-CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID=
-CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET=
-```
-
 ## SSL/TLS
 
 Cube does not require any additional configuration to enable SSL/TLS for
@@ -156,10 +145,6 @@ bucket][self-preaggs-export-bucket] **must be** configured.
 [azure-bs]: https://azure.microsoft.com/en-gb/services/storage/blobs/
 [azure-bs-docs-get-key]:
   https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys
-[authorize-with-azure-active-directory]:
-  https://learn.microsoft.com/en-us/rest/api/storageservices/authorize-with-azure-active-directory
-[azure-authentication-with-service-principal]:
-  https://learn.microsoft.com/en-us/azure/developer/java/sdk/identity-service-principal-auth
 [databricks]: https://databricks.com/
 [databricks-docs-dbfs]:
   https://docs.databricks.com/data/databricks-file-system.html#mount-object-storage-to-dbfs

From 22d06a4858a8d038ff9d73948546ab907e292634 Mon Sep 17 00:00:00 2001
From: Maggie Zhang
Date: Tue, 21 Nov 2023 10:32:27 +0800
Subject: [PATCH 15/21] doc(@cubejs-backend/databricks-jdbc-driver): Add new env variables to latest doc

---
 .../data-sources/databricks-jdbc.mdx | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/docs/pages/product/configuration/data-sources/databricks-jdbc.mdx b/docs/pages/product/configuration/data-sources/databricks-jdbc.mdx
index 8dc2626d9cd9c..d05adb768ad10 100644
--- a/docs/pages/product/configuration/data-sources/databricks-jdbc.mdx
+++ b/docs/pages/product/configuration/data-sources/databricks-jdbc.mdx
@@ -129,6 +129,17 @@ CUBEJS_DB_EXPORT_BUCKET=wasbs://my-bucket@my-account.blob.core.windows.net
 CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY=
 ```
 
+An access key grants full access to the storage account's configuration and data.
+For fine-grained control over access to storage resources, follow [the Azure guide on authorizing with Azure Active Directory][authorize-with-azure-active-directory].
+
+[Create a service principal][azure-authentication-with-service-principal] and replace the access key as follows:
+
+```dotenv
+CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID=
+CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID=
+CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET=
+```
+
 ## SSL/TLS
 
 Cube does not require any additional configuration to enable SSL/TLS for
@@ -145,6 +156,10 @@ bucket][self-preaggs-export-bucket] **must be** configured.
 [azure-bs]: https://azure.microsoft.com/en-gb/services/storage/blobs/
 [azure-bs-docs-get-key]:
   https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys
+[authorize-with-azure-active-directory]:
+  https://learn.microsoft.com/en-us/rest/api/storageservices/authorize-with-azure-active-directory
+[azure-authentication-with-service-principal]:
+  https://learn.microsoft.com/en-us/azure/developer/java/sdk/identity-service-principal-auth
 [databricks]: https://databricks.com/
 [databricks-docs-dbfs]:
   https://docs.databricks.com/data/databricks-file-system.html#mount-object-storage-to-dbfs

From b3fa7cc9f3ca5ee4ee1c65e032d9b1fe08e96c03 Mon Sep 17 00:00:00 2001
From: Maggie
Date: Wed, 29 Nov 2023 10:06:10 +0800
Subject: [PATCH 16/21] Keep dependencies version consistent with master

---
 packages/cubejs-databricks-jdbc-driver/package.json | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/packages/cubejs-databricks-jdbc-driver/package.json b/packages/cubejs-databricks-jdbc-driver/package.json
index 9c3d4838576f5..a576252d76a16 100644
--- a/packages/cubejs-databricks-jdbc-driver/package.json
+++ b/packages/cubejs-databricks-jdbc-driver/package.json
@@ -34,9 +34,9 @@
     "@aws-sdk/s3-request-presigner": "^3.49.0",
     "@azure/storage-blob": "^12.9.0",
     "@azure/identity": "^3.2.3",
-    "@cubejs-backend/base-driver": "^0.34.25",
-    "@cubejs-backend/jdbc-driver": "^0.34.25",
-    "@cubejs-backend/schema-compiler": "^0.34.25",
+    "@cubejs-backend/base-driver": "^0.34.26",
+    "@cubejs-backend/jdbc-driver": "^0.34.26",
+    "@cubejs-backend/schema-compiler": "^0.34.26",
     "@cubejs-backend/shared": "^0.34.25",
     "inquirer": "^8.0.0",
     "node-fetch": "^2.6.1",

From 944ef372e19828e57a981180a03165ab54204452 Mon Sep 17 00:00:00 2001
From: Maggie Zhang
Date: Fri, 1 Dec 2023 16:17:32 +0800
Subject: [PATCH 17/21] Upgrade Azure identity version

---
 packages/cubejs-databricks-jdbc-driver/package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/cubejs-databricks-jdbc-driver/package.json b/packages/cubejs-databricks-jdbc-driver/package.json
index 09af38294ca0d..f7f2d9cb42e52 100644
--- a/packages/cubejs-databricks-jdbc-driver/package.json
+++ b/packages/cubejs-databricks-jdbc-driver/package.json
@@ -33,7 +33,7 @@
     "@aws-sdk/client-s3": "^3.49.0",
     "@aws-sdk/s3-request-presigner": "^3.49.0",
     "@azure/storage-blob": "^12.9.0",
-    "@azure/identity": "^3.2.3",
+    "@azure/identity": "3.3.1",
     "@cubejs-backend/base-driver": "^0.34.26",
     "@cubejs-backend/jdbc-driver": "^0.34.26",
     "@cubejs-backend/schema-compiler": "^0.34.27",

From 26db2a0ee7f325ccad91b285101f9e5aac605fcc Mon Sep 17 00:00:00 2001
From: Maggie Zhang
Date: Fri, 8 Dec 2023 10:02:18 +0800
Subject: [PATCH 18/21] Add the changes of yarn lock for upgrade

---
 yarn.lock | 51 ++++++++++++++++++++++++++++++++++-----------------
 1 file changed, 34 insertions(+), 17 deletions(-)

diff --git a/yarn.lock b/yarn.lock
index 03b8a21c64491..06fcbe3780607 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1952,6 +1952,15 @@
     "@azure/abort-controller" "^1.0.0"
     tslib "^2.2.0"
 
+"@azure/core-auth@^1.5.0":
+  version "1.5.0"
+  resolved "https://registry.yarnpkg.com/@azure/core-auth/-/core-auth-1.5.0.tgz#a41848c5c31cb3b7c84c409885267d55a2c92e44"
+  integrity sha512-udzoBuYG1VBoHVohDTrvKjyzel34zt77Bhp7dQntVGGD0ehVq48owENbBG8fIgkHRNUBQH5k1r0hpoMu5L8+kw==
+  dependencies:
+    "@azure/abort-controller" "^1.0.0"
+    "@azure/core-util" "^1.1.0"
+    tslib "^2.2.0"
+
"@azure/core-client@^1.4.0": version "1.7.3" resolved "https://registry.yarnpkg.com/@azure/core-client/-/core-client-1.7.3.tgz#f8cb2a1f91e8bc4921fa2e745cfdfda3e6e491a3" @@ -2042,13 +2051,21 @@ "@azure/abort-controller" "^1.0.0" tslib "^2.2.0" -"@azure/identity@^3.2.3": - version "3.2.3" - resolved "https://registry.yarnpkg.com/@azure/identity/-/identity-3.2.3.tgz#dc6a4bc4c798c06dd8a3af2869f83ddadf332e26" - integrity sha512-knIbl7p2i8r3qPsLW2W84esmDPr36RqieLC72OeuqYk4+0TRNthUhWTs655P9S9Pm3TVVxcFsS3Le9SXIWBIFA== +"@azure/core-util@^1.1.0": + version "1.6.1" + resolved "https://registry.yarnpkg.com/@azure/core-util/-/core-util-1.6.1.tgz#fea221c4fa43c26543bccf799beb30c1c7878f5a" + integrity sha512-h5taHeySlsV9qxuK64KZxy4iln1BtMYlNt5jbuEFN3UFSAd1EwKg/Gjl5a6tZ/W8t6li3xPnutOx7zbDyXnPmQ== dependencies: "@azure/abort-controller" "^1.0.0" - "@azure/core-auth" "^1.3.0" + tslib "^2.2.0" + +"@azure/identity@3.3.1": + version "3.3.1" + resolved "https://registry.yarnpkg.com/@azure/identity/-/identity-3.3.1.tgz#6d3b68b655c09ce02b3f9a6d879e59bd000da70e" + integrity sha512-96im0LrJt0kzsMqA8XjWxqbd2pPuEZHDlyLM4zdMv6nowLV/ul3dOW5X55OuLoFX+h22tYnMcGmQb3tlkdt/UA== + dependencies: + "@azure/abort-controller" "^1.0.0" + "@azure/core-auth" "^1.5.0" "@azure/core-client" "^1.4.0" "@azure/core-rest-pipeline" "^1.1.0" "@azure/core-tracing" "^1.0.0" @@ -2101,23 +2118,23 @@ adal-node "^0.2.2" "@azure/msal-browser@^2.37.1": - version "2.37.1" - resolved "https://registry.yarnpkg.com/@azure/msal-browser/-/msal-browser-2.37.1.tgz#7e3fa7df9c6e74eb1fb640e73ef3b5b4407ee98d" - integrity sha512-EoKQISEpIY39Ru1OpWkeFZBcwp6Y0bG81bVmdyy4QJebPPDdVzfm62PSU0XFIRc3bqjZ4PBKBLMYLuo9NZYAow== + version "2.38.3" + resolved "https://registry.yarnpkg.com/@azure/msal-browser/-/msal-browser-2.38.3.tgz#2f131fa9b7a8a9546fc8d34e5d99ce4c18b04147" + integrity sha512-2WuLFnWWPR1IdvhhysT18cBbkXx1z0YIchVss5AwVA95g7CU5CpT3d+5BcgVGNXDXbUU7/5p0xYHV99V5z8C/A== dependencies: - "@azure/msal-common" "13.1.0" + "@azure/msal-common" "13.3.1" -"@azure/msal-common@13.1.0", "@azure/msal-common@^13.1.0": - version "13.1.0" - resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-13.1.0.tgz#73a787ec1e7880c30b187cf451404af547d95339" - integrity sha512-wj+ULrRB0HTuMmtrMjg8j3guCx32GE2BCPbsMCZkHgL1BZetC3o/Su5UJEQMX1HNc9CrIaQNx5WaKWHygYDe0g== +"@azure/msal-common@13.3.1", "@azure/msal-common@^13.1.0": + version "13.3.1" + resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-13.3.1.tgz#012465bf940d12375dc47387b754ccf9d6b92180" + integrity sha512-Lrk1ozoAtaP/cp53May3v6HtcFSVxdFrg2Pa/1xu5oIvsIwhxW6zSPibKefCOVgd5osgykMi5jjcZHv8XkzZEQ== "@azure/msal-node@^1.17.3": - version "1.17.3" - resolved "https://registry.yarnpkg.com/@azure/msal-node/-/msal-node-1.17.3.tgz#1a0bbecb0b3e5e63d50ccb27c9cb3bae5a6c1b65" - integrity sha512-slsa+388bQQWnWH1V91KL+zV57rIp/0OQFfF0EmVMY8gnEIkAnpWWFUVBTTMbxEyjEFMk5ZW9xiHvHBcYFHzDw== + version "1.18.4" + resolved "https://registry.yarnpkg.com/@azure/msal-node/-/msal-node-1.18.4.tgz#c921b0447c92fb3b0cb1ebf5a9a76fcad2ec7c21" + integrity sha512-Kc/dRvhZ9Q4+1FSfsTFDME/v6+R2Y1fuMty/TfwqE5p9GTPw08BPbKgeWinE8JRHRp+LemjQbUZsn4Q4l6Lszg== dependencies: - "@azure/msal-common" "13.1.0" + "@azure/msal-common" "13.3.1" jsonwebtoken "^9.0.0" uuid "^8.3.0" From 7c18152a80735f284d232a69c5efe0fdba342ca3 Mon Sep 17 00:00:00 2001 From: Maggie Zhang Date: Fri, 8 Dec 2023 18:22:14 +0800 Subject: [PATCH 19/21] Add yarn lock for azure identity --- .../package.json | 8 +- yarn.lock | 100 +++++++++++++++++- 2 files changed, 103 
insertions(+), 5 deletions(-) diff --git a/packages/cubejs-databricks-jdbc-driver/package.json b/packages/cubejs-databricks-jdbc-driver/package.json index 7d909079d2e62..67b4ff8ff6327 100644 --- a/packages/cubejs-databricks-jdbc-driver/package.json +++ b/packages/cubejs-databricks-jdbc-driver/package.json @@ -2,7 +2,7 @@ "name": "@cubejs-backend/databricks-jdbc-driver", "description": "Cube.js Databricks database driver", "author": "Cube Dev, Inc.", - "version": "0.34.27", + "version": "0.34.32", "license": "Apache-2.0", "repository": { "type": "git", @@ -33,7 +33,7 @@ "@aws-sdk/client-s3": "^3.49.0", "@aws-sdk/s3-request-presigner": "^3.49.0", "@azure/storage-blob": "^12.9.0", - "@azure/identity": "3.3.1", + "@azure/identity": "^3.3.1", "@cubejs-backend/base-driver": "^0.34.32", "@cubejs-backend/jdbc-driver": "^0.34.32", "@cubejs-backend/schema-compiler": "^0.34.32", @@ -48,11 +48,11 @@ "@cubejs-backend/linter": "^0.34.25", "@types/generic-pool": "^3.1.9", "@types/inquirer": "^7.3.1", - "@types/jest": "^26.0.20", + "@types/jest": "^27", "@types/node": "^14", "@types/ramda": "^0.27.34", "@types/uuid": "^8.3.4", - "jest": "^26.6.3", + "jest": "^27", "typescript": "~5.2.2" }, "publishConfig": { diff --git a/yarn.lock b/yarn.lock index a5e1cf5ff1ff9..479adcbd58f4e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1961,6 +1961,28 @@ "@azure/abort-controller" "^1.0.0" tslib "^2.2.0" +"@azure/core-auth@^1.4.0", "@azure/core-auth@^1.5.0": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@azure/core-auth/-/core-auth-1.5.0.tgz#a41848c5c31cb3b7c84c409885267d55a2c92e44" + integrity sha512-udzoBuYG1VBoHVohDTrvKjyzel34zt77Bhp7dQntVGGD0ehVq48owENbBG8fIgkHRNUBQH5k1r0hpoMu5L8+kw== + dependencies: + "@azure/abort-controller" "^1.0.0" + "@azure/core-util" "^1.1.0" + tslib "^2.2.0" + +"@azure/core-client@^1.4.0": + version "1.7.3" + resolved "https://registry.yarnpkg.com/@azure/core-client/-/core-client-1.7.3.tgz#f8cb2a1f91e8bc4921fa2e745cfdfda3e6e491a3" + integrity sha512-kleJ1iUTxcO32Y06dH9Pfi9K4U+Tlb111WXEnbt7R/ne+NLRwppZiTGJuTD5VVoxTMK5NTbEtm5t2vcdNCFe2g== + dependencies: + "@azure/abort-controller" "^1.0.0" + "@azure/core-auth" "^1.4.0" + "@azure/core-rest-pipeline" "^1.9.1" + "@azure/core-tracing" "^1.0.0" + "@azure/core-util" "^1.0.0" + "@azure/logger" "^1.0.0" + tslib "^2.2.0" + "@azure/core-http@^2.0.0": version "2.2.2" resolved "https://registry.yarnpkg.com/@azure/core-http/-/core-http-2.2.2.tgz#573798f087d808d39aa71fd7c52b8d7b89f440da" @@ -2000,6 +2022,21 @@ "@azure/core-asynciterator-polyfill" "^1.0.0" tslib "^2.2.0" +"@azure/core-rest-pipeline@^1.1.0", "@azure/core-rest-pipeline@^1.9.1": + version "1.12.2" + resolved "https://registry.yarnpkg.com/@azure/core-rest-pipeline/-/core-rest-pipeline-1.12.2.tgz#a8952164f93b63ab15ae09aac416138da20daecd" + integrity sha512-wLLJQdL4v1yoqYtEtjKNjf8pJ/G/BqVomAWxcKOR1KbZJyCEnCv04yks7Y1NhJ3JzxbDs307W67uX0JzklFdCg== + dependencies: + "@azure/abort-controller" "^1.0.0" + "@azure/core-auth" "^1.4.0" + "@azure/core-tracing" "^1.0.1" + "@azure/core-util" "^1.3.0" + "@azure/logger" "^1.0.0" + form-data "^4.0.0" + http-proxy-agent "^5.0.0" + https-proxy-agent "^5.0.0" + tslib "^2.2.0" + "@azure/core-tracing@1.0.0-preview.13": version "1.0.0-preview.13" resolved "https://registry.yarnpkg.com/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz#55883d40ae2042f6f1e12b17dd0c0d34c536d644" @@ -2008,6 +2045,41 @@ "@opentelemetry/api" "^1.0.1" tslib "^2.2.0" +"@azure/core-tracing@^1.0.0", "@azure/core-tracing@^1.0.1": + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/@azure/core-tracing/-/core-tracing-1.0.1.tgz#352a38cbea438c4a83c86b314f48017d70ba9503" + integrity sha512-I5CGMoLtX+pI17ZdiFJZgxMJApsK6jjfm85hpgp3oazCdq5Wxgh4wMr7ge/TTWW1B5WBuvIOI1fMU/FrOAMKrw== + dependencies: + tslib "^2.2.0" + +"@azure/core-util@^1.0.0", "@azure/core-util@^1.1.0", "@azure/core-util@^1.3.0", "@azure/core-util@^1.6.1": + version "1.6.1" + resolved "https://registry.yarnpkg.com/@azure/core-util/-/core-util-1.6.1.tgz#fea221c4fa43c26543bccf799beb30c1c7878f5a" + integrity sha512-h5taHeySlsV9qxuK64KZxy4iln1BtMYlNt5jbuEFN3UFSAd1EwKg/Gjl5a6tZ/W8t6li3xPnutOx7zbDyXnPmQ== + dependencies: + "@azure/abort-controller" "^1.0.0" + tslib "^2.2.0" + +"@azure/identity@^3.3.1": + version "3.4.1" + resolved "https://registry.yarnpkg.com/@azure/identity/-/identity-3.4.1.tgz#18ba48b7421c818ef8116e8eec3c03ec1a62649a" + integrity sha512-oQ/r5MBdfZTMIUcY5Ch8G7Vv9aIXDkEYyU4Dfqjim4MQN+LY2uiQ57P1JDopMLeHCsZxM4yy8lEdne3tM9Xhzg== + dependencies: + "@azure/abort-controller" "^1.0.0" + "@azure/core-auth" "^1.5.0" + "@azure/core-client" "^1.4.0" + "@azure/core-rest-pipeline" "^1.1.0" + "@azure/core-tracing" "^1.0.0" + "@azure/core-util" "^1.6.1" + "@azure/logger" "^1.0.0" + "@azure/msal-browser" "^3.5.0" + "@azure/msal-node" "^2.5.1" + events "^3.0.0" + jws "^4.0.0" + open "^8.0.0" + stoppable "^1.1.0" + tslib "^2.2.0" + "@azure/logger@^1.0.0": version "1.0.3" resolved "https://registry.yarnpkg.com/@azure/logger/-/logger-1.0.3.tgz#6e36704aa51be7d4a1bae24731ea580836293c96" @@ -2044,6 +2116,27 @@ "@azure/ms-rest-js" "^2.0.4" adal-node "^0.2.2" +"@azure/msal-browser@^3.5.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@azure/msal-browser/-/msal-browser-3.6.0.tgz#06ad33c82af4f24f2fcf8a8a61a3a1cbe3dd7fad" + integrity sha512-FrFBJXRJMyWXjAjg4cUNZwEKktzfzD/YD9+S1kj2ors67hKoveam4aL0bZuCZU/jTiHTn0xDQGQh2ksCMXTXtA== + dependencies: + "@azure/msal-common" "14.5.0" + +"@azure/msal-common@14.5.0": + version "14.5.0" + resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-14.5.0.tgz#5a891e5f8eaf23f598bdb4e285e938fb606cd716" + integrity sha512-Gx5rZbiZV/HiZ2nEKfjfAF/qDdZ4/QWxMvMo2jhIFVz528dVKtaZyFAOtsX2Ak8+TQvRsGCaEfuwJFuXB6tu1A== + +"@azure/msal-node@^2.5.1": + version "2.6.0" + resolved "https://registry.yarnpkg.com/@azure/msal-node/-/msal-node-2.6.0.tgz#44bad20a1523c92d79de24fb225da86abaeedb6c" + integrity sha512-RWAWCYYrSldIYC47oWtofIun41e6SB9TBYgGYsezq6ednagwo9ZRFyRsvl1NabmdTkdDDXRAABIdveeN2Gtd8w== + dependencies: + "@azure/msal-common" "14.5.0" + jsonwebtoken "^9.0.0" + uuid "^8.3.0" + "@azure/storage-blob@^12.11.0": version "12.12.0" resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.12.0.tgz#25e277c885692d5adcd8c2a949789b2837a74c59" @@ -20238,7 +20331,7 @@ open@^7.3.1: is-docker "^2.0.0" is-wsl "^2.1.1" -open@^8.0.2, open@^8.0.9: +open@^8.0.0, open@^8.0.2, open@^8.0.9: version "8.4.2" resolved "https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9" integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== @@ -24789,6 +24882,11 @@ stealthy-require@^1.1.1: resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b" integrity sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks= +stoppable@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/stoppable/-/stoppable-1.1.0.tgz#32da568e83ea488b08e4d7ea2c3bcc9d75015d5b" + integrity 
sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw== + stream-events@^1.0.1, stream-events@^1.0.4, stream-events@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/stream-events/-/stream-events-1.0.5.tgz#bbc898ec4df33a4902d892333d47da9bf1c406d5" From 458bc7028892c84a78115ff5c89c4000dd0c11bc Mon Sep 17 00:00:00 2001 From: Maggie Zhang Date: Tue, 12 Dec 2023 19:09:13 +0800 Subject: [PATCH 20/21] Fix undefined azure key error with principal provided --- .../src/DatabricksDriver.ts | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts index befc52587e9be..374596b0bbea1 100644 --- a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts +++ b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts @@ -711,11 +711,12 @@ export class DatabricksDriver extends JDBCDriver { const expr = new RegExp(`${foldername}\\/.*\\.csv$`, 'i'); let credential: StorageSharedKeyCredential | ClientSecretCredential; - credential = new StorageSharedKeyCredential( - account, - this.config.azureKey as string, - ); - if (this.config.azureTenantId && this.config.azureClientId && this.config.azureClientSecret) { + if (this.config.azureKey !== undefined) { + credential = new StorageSharedKeyCredential( + account, + this.config.azureKey as string, + ); + } else { credential = new ClientSecretCredential( this.config.azureTenantId as string, this.config.azureClientId as string, From b9123f84a42b7d18762a3371a4af042eeec47f34 Mon Sep 17 00:00:00 2001 From: Maggie Zhang Date: Mon, 8 Jan 2024 11:06:55 +0800 Subject: [PATCH 21/21] Add azure export bucket env variables --- .../configuration/environment-variables.mdx | 60 +++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/docs/pages/reference/configuration/environment-variables.mdx b/docs/pages/reference/configuration/environment-variables.mdx index 95d7f1ea64145..40f9ac91a026e 100644 --- a/docs/pages/reference/configuration/environment-variables.mdx +++ b/docs/pages/reference/configuration/environment-variables.mdx @@ -398,6 +398,66 @@ with a data source][ref-config-multiple-ds-decorating-env]. | -------------------------------------- | ---------------------- | --------------------- | | [A valid AWS region][aws-docs-regions] | N/A | N/A | +## `CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY` + +The Azure Access Key to use for the export bucket. + + + +When using multiple data sources, this environment variable can be [decorated +with a data source][ref-config-multiple-ds-decorating-env]. + + + +| Possible Values | Default in Development | Default in Production | +| ------------------------ | ---------------------- | --------------------- | +| A valid Azure Access Key | N/A | N/A | + +## `CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID` + +The Azure tenant ID to use for the export bucket. + + + +When using multiple data sources, this environment variable can be [decorated +with a data source][ref-config-multiple-ds-decorating-env]. + + + +| Possible Values | Default in Development | Default in Production | +| ----------------------- | ---------------------- | --------------------- | +| A valid Azure Tenant ID | N/A | N/A | + +## `CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID` + +The Azure client ID to use for the export bucket. + + + +When using multiple data sources, this environment variable can be [decorated +with a data source][ref-config-multiple-ds-decorating-env]. 
+ + + +| Possible Values | Default in Development | Default in Production | +| ----------------------- | ---------------------- | --------------------- | +| A valid Azure Client ID | N/A | N/A | + +## `CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET` + +The Azure client secret to use for the export bucket. + + + +When using multiple data sources, this environment variable can be [decorated +with a data source][ref-config-multiple-ds-decorating-env]. + + + +| Possible Values | Default in Development | Default in Production | +| --------------------------- | ---------------------- | --------------------- | +| A valid Azure Client Secret | N/A | N/A | + ## `CUBEJS_DB_EXPORT_BUCKET_MOUNT_DIR` The mount path to use for a [Databricks DBFS mount][databricks-docs-dbfs].
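Taken together, patches 10 and 20 leave the driver with a simple rule: use the storage account key whenever `CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY` is set, and fall back to the Azure service principal otherwise. For reviewers who want to exercise that flow outside of Cube, here is a minimal, self-contained sketch; it is not the driver code itself, and the account, container, and blob names are placeholders chosen for illustration.

```typescript
import {
  BlobSASSignatureValues,
  BlobServiceClient,
  ContainerSASPermissions,
  SASProtocol,
  StorageSharedKeyCredential,
  generateBlobSASQueryParameters,
} from '@azure/storage-blob';
import { ClientSecretCredential } from '@azure/identity';

// Placeholders for illustration; in the driver these are parsed from the
// wasbs:// export bucket URL.
const account = 'my-account';
const container = 'cube-export';
const blobName = 'folder/part-0.csv';

async function signedCsvUrl(): Promise<string> {
  const azureKey = process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY;

  // Patch 20's rule: the shared key wins when it is configured; the
  // service principal is only consulted otherwise.
  const credential = azureKey !== undefined
    ? new StorageSharedKeyCredential(account, azureKey)
    : new ClientSecretCredential(
      process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID as string,
      process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID as string,
      process.env.CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET as string,
    );

  const blobClient = new BlobServiceClient(
    `https://${account}.blob.core.windows.net`,
    credential,
  );

  const startsOn = new Date();
  const expiresOn = new Date(Date.now() + 1000 * 60 * 60); // valid for one hour

  const signatureValues: BlobSASSignatureValues = {
    containerName: container,
    blobName,
    permissions: ContainerSASPermissions.parse('r'), // read-only
    startsOn,
    expiresOn,
    protocol: SASProtocol.Https,
  };

  // A shared key signs the SAS directly; a service principal must first
  // fetch a user delegation key and sign with that instead.
  const sas = credential instanceof StorageSharedKeyCredential
    ? generateBlobSASQueryParameters(signatureValues, credential).toString()
    : generateBlobSASQueryParameters(
      signatureValues,
      await blobClient.getUserDelegationKey(startsOn, expiresOn),
      account,
    ).toString();

  return `https://${account}.blob.core.windows.net/${container}/${blobName}?${sas}`;
}

signedCsvUrl().then(console.log).catch(console.error);
```

Note that the user-delegation path assumes the service principal holds an RBAC role on the storage account that permits `generateUserDelegationKey` (for example, Storage Blob Data Contributor); the shared-key path needs no extra role assignment.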