From 4b61f2531d180cc43d21ef4271b2878612b208fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?ch=CF=80?= Date: Tue, 26 Jul 2022 16:27:20 +0200 Subject: [PATCH] #7 Fix date conversion (#8) * Upgrade dependencies and references * Added integration test * Fixed bugs reported by V' / partner Co-authored-by: KK --- .github/workflows/broken_links_checker.yml | 17 +- .github/workflows/ci-build-next-java.yml | 27 +- .github/workflows/ci-build.yml | 40 ++- .github/workflows/dependencies_check.yml | 14 +- ...elease_droid_prepare_original_checksum.yml | 16 +- .../release_droid_print_quick_checksum.yml | 12 +- ...ase_droid_upload_github_release_assets.yml | 12 +- .gitignore | 6 +- .project-keeper.yml | 10 + .vscode/settings.json | 15 + README.md | 1 + dependencies.md | 143 ++++---- doc/changes/changelog.md | 1 + doc/changes/changes_2.0.2.md | 59 ++++ doc/developers_guide.md | 22 ++ doc/user_guide/bigquery_user_guide.md | 87 ++--- error_code_config.yml | 3 +- pk_generated_parent.pom | 275 +++++++++++++++ pom.xml | 312 ++++++------------ release_config.yml | 3 + .../bigquery/BigQueryQueryRewriter.java | 136 +++----- .../bigquery/BigQueryQueryRewriterTest.java | 118 +++++-- .../bigquery/BigQueryVirtualSchemaIT.java | 151 +++++++++ .../bigquery/util/BigQueryDatasetFixture.java | 95 ++++++ .../util/BigQueryEmulatorContainer.java | 75 +++++ .../bigquery/util/BigQueryTestSetup.java | 31 ++ .../util/GoogleCloudBigQuerySetup.java | 90 +++++ .../bigquery/util/IntegrationTestSetup.java | 176 ++++++++++ .../bigquery/util/JdbcDriverProvider.java | 96 ++++++ .../dialects/bigquery/util/TestConfig.java | 80 +++++ src/test/resources/bigquery-data.yaml | 16 + 31 files changed, 1658 insertions(+), 481 deletions(-) create mode 100644 .project-keeper.yml create mode 100644 .vscode/settings.json create mode 100644 doc/changes/changes_2.0.2.md create mode 100644 doc/developers_guide.md create mode 100644 pk_generated_parent.pom create mode 100644 release_config.yml create mode 100644 
src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryVirtualSchemaIT.java create mode 100644 src/test/java/com/exasol/adapter/dialects/bigquery/util/BigQueryDatasetFixture.java create mode 100644 src/test/java/com/exasol/adapter/dialects/bigquery/util/BigQueryEmulatorContainer.java create mode 100644 src/test/java/com/exasol/adapter/dialects/bigquery/util/BigQueryTestSetup.java create mode 100644 src/test/java/com/exasol/adapter/dialects/bigquery/util/GoogleCloudBigQuerySetup.java create mode 100644 src/test/java/com/exasol/adapter/dialects/bigquery/util/IntegrationTestSetup.java create mode 100644 src/test/java/com/exasol/adapter/dialects/bigquery/util/JdbcDriverProvider.java create mode 100644 src/test/java/com/exasol/adapter/dialects/bigquery/util/TestConfig.java create mode 100644 src/test/resources/bigquery-data.yaml diff --git a/.github/workflows/broken_links_checker.yml b/.github/workflows/broken_links_checker.yml index 6a69306..29071df 100644 --- a/.github/workflows/broken_links_checker.yml +++ b/.github/workflows/broken_links_checker.yml @@ -2,15 +2,26 @@ name: Broken Links Checker on: schedule: - - cron: "0 5 * * *" + - cron: "0 5 * * 0" push: + branches: + - main + pull_request: jobs: linkChecker: runs-on: ubuntu-latest + concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 + - name: Configure broken links checker + run: | + mkdir -p ./target + echo '{ "aliveStatusCodes": [429, 200], "ignorePatterns": [{"pattern": "^https?://(www.)?opensource.org"}] }' > ./target/broken_links_checker.json - uses: gaurav-nelson/github-action-markdown-link-check@v1 with: use-quiet-mode: 'yes' - use-verbose-mode: 'yes' \ No newline at end of file + use-verbose-mode: 'yes' + config-file: ./target/broken_links_checker.json \ No newline at end of file diff --git a/.github/workflows/ci-build-next-java.yml b/.github/workflows/ci-build-next-java.yml index 990977e..6a1006c 
100644 --- a/.github/workflows/ci-build-next-java.yml +++ b/.github/workflows/ci-build-next-java.yml @@ -1,32 +1,35 @@ name: CI Build next Java on: - - push + push: + branches: + - main + pull_request: jobs: java-17-compatibility: runs-on: ubuntu-latest + concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true steps: - name: Checkout the repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - name: Set up JDK 17 - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: distribution: 'temurin' java-version: 17 - - name: Cache local Maven repository - uses: actions/cache@v2 - with: - path: ~/.m2/repository - key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} - restore-keys: | - ${{ runner.os }}-maven- + cache: 'maven' - name: Run tests and build with Maven - run: mvn --batch-mode --update-snapshots clean package -DtrimStackTrace=false + run: | + mvn --batch-mode --update-snapshots clean package -DtrimStackTrace=false \ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn - name: Publish Test Report uses: scacap/action-surefire-report@v1 - if: ${{ always() }} + if: ${{ always() && github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]' }} with: github_token: ${{ secrets.GITHUB_TOKEN }} + fail_if_no_tests: false diff --git a/.github/workflows/ci-build.yml b/.github/workflows/ci-build.yml index 2fa9fc3..e520870 100644 --- a/.github/workflows/ci-build.yml +++ b/.github/workflows/ci-build.yml @@ -1,31 +1,51 @@ name: CI Build on: - - push + push: + branches: + - main + pull_request: jobs: build: runs-on: ubuntu-latest + concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true steps: - name: Checkout the repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - name: Set up JDK 11 - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 
with: distribution: 'temurin' java-version: 11 - - name: Cache local Maven repository - uses: actions/cache@v2 + cache: 'maven' + - name: Cache SonarCloud packages + uses: actions/cache@v3 with: - path: ~/.m2/repository - key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} - restore-keys: | - ${{ runner.os }}-maven- + path: ~/.sonar/cache + key: ${{ runner.os }}-sonar + restore-keys: ${{ runner.os }}-sonar + - name: Enable testcontainer reuse + run: echo 'testcontainers.reuse.enable=true' > "$HOME/.testcontainers.properties" - name: Run tests and build with Maven run: | - mvn --batch-mode --update-snapshots clean verify sonar:sonar \ + mvn --batch-mode clean verify \ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ + -DtrimStackTrace=false + - name: Publish Test Report + uses: scacap/action-surefire-report@v1 + if: ${{ always() && github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]' }} + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + - name: Sonar analysis + if: ${{ env.SONAR_TOKEN != null }} + run: | + mvn --batch-mode org.sonarsource.scanner.maven:sonar-maven-plugin:sonar \ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ -DtrimStackTrace=false \ -Dsonar.organization=exasol \ -Dsonar.host.url=https://sonarcloud.io \ diff --git a/.github/workflows/dependencies_check.yml b/.github/workflows/dependencies_check.yml index d28c0b4..b2ab231 100644 --- a/.github/workflows/dependencies_check.yml +++ b/.github/workflows/dependencies_check.yml @@ -9,18 +9,12 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up JDK 11 - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: distribution: 'temurin' java-version: 11 - - name: Cache local Maven repository - uses: actions/cache@v2 - with: - path: ~/.m2/repository - key: ${{ runner.os }}-maven-${{ 
hashFiles('**/pom.xml') }} - restore-keys: | - ${{ runner.os }}-maven- + cache: 'maven' - name: Checking dependencies for vulnerabilities - run: mvn org.sonatype.ossindex.maven:ossindex-maven-plugin:audit -f pom.xml \ No newline at end of file + run: mvn --batch-mode org.sonatype.ossindex.maven:ossindex-maven-plugin:audit -f pom.xml \ No newline at end of file diff --git a/.github/workflows/release_droid_prepare_original_checksum.yml b/.github/workflows/release_droid_prepare_original_checksum.yml index 650b120..4a980f8 100644 --- a/.github/workflows/release_droid_prepare_original_checksum.yml +++ b/.github/workflows/release_droid_prepare_original_checksum.yml @@ -8,27 +8,23 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout the repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - name: Set up JDK 11 - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: distribution: 'temurin' java-version: 11 - - name: Cache local Maven repository - uses: actions/cache@v2 - with: - path: ~/.m2/repository - key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} - restore-keys: | - ${{ runner.os }}-maven- + cache: 'maven' + - name: Enable testcontainer reuse + run: echo 'testcontainers.reuse.enable=true' > "$HOME/.testcontainers.properties" - name: Run tests and build with Maven run: mvn --batch-mode clean verify --file pom.xml - name: Prepare checksum run: find target -maxdepth 1 -name *.jar -exec sha256sum "{}" + > original_checksum - name: Upload checksum to the artifactory - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: original_checksum retention-days: 5 diff --git a/.github/workflows/release_droid_print_quick_checksum.yml b/.github/workflows/release_droid_print_quick_checksum.yml index 746fc43..8add957 100644 --- a/.github/workflows/release_droid_print_quick_checksum.yml +++ b/.github/workflows/release_droid_print_quick_checksum.yml @@ -8,21 +8,15 @@ jobs: runs-on: ubuntu-latest steps: - 
name: Checkout the repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - name: Set up JDK 11 - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: distribution: 'temurin' java-version: 11 - - name: Cache local Maven repository - uses: actions/cache@v2 - with: - path: ~/.m2/repository - key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} - restore-keys: | - ${{ runner.os }}-maven- + cache: 'maven' - name: Build with Maven skipping tests run: mvn --batch-mode clean verify -DskipTests - name: Print checksum diff --git a/.github/workflows/release_droid_upload_github_release_assets.yml b/.github/workflows/release_droid_upload_github_release_assets.yml index e2c761b..1fd0b60 100644 --- a/.github/workflows/release_droid_upload_github_release_assets.yml +++ b/.github/workflows/release_droid_upload_github_release_assets.yml @@ -12,21 +12,15 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout the repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - name: Set up JDK 11 - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: distribution: 'temurin' java-version: 11 - - name: Cache local Maven repository - uses: actions/cache@v2 - with: - path: ~/.m2/repository - key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} - restore-keys: | - ${{ runner.os }}-maven- + cache: 'maven' - name: Build with Maven skipping tests run: mvn --batch-mode clean verify -DskipTests - name: Generate sha256sum files diff --git a/.gitignore b/.gitignore index 4eebbcb..e1245da 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ pom.xml.versionsBackup # Eclipse and Maven .classpath .project +/.apt_generated/ # .settings : we need Eclipse settings for code formatter and clean-up rules target .cache @@ -29,4 +30,7 @@ venv/ *.bak *.orig *.old -*.md.html \ No newline at end of file +*.md.html +*.flattened-pom.xml + +test.properties diff --git a/.project-keeper.yml b/.project-keeper.yml new 
file mode 100644 index 0000000..b9f6024 --- /dev/null +++ b/.project-keeper.yml @@ -0,0 +1,10 @@ +sources: + - type: maven + path: pom.xml + modules: + - integration_tests + - jar_artifact +linkReplacements: + - "https://netty.io/netty-common/|https://netty.io" +excludes: + diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..b9916c9 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,15 @@ +{ + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": true, + "source.generate.finalModifiers": true + }, + "java.saveActions.organizeImports": true, + "java.sources.organizeImports.starThreshold": 3, + "java.sources.organizeImports.staticStarThreshold": 3, + "java.test.config": { + "vmArgs": [ + "-Djava.util.logging.config.file=src/test/resources/logging.properties" + ] + }, +} \ No newline at end of file diff --git a/README.md b/README.md index 4c85e1c..e48ec34 100644 --- a/README.md +++ b/README.md @@ -38,6 +38,7 @@ Find all the documentation in the [Virtual Schemas project][vs-doc]. 
## Information for Developers * [Virtual Schema API Documentation][vs-api] +* [Developers Guide](doc/developers_guide.md) [virtual-schemas-user-guide]: https://docs.exasol.com/database_concepts/virtual_schemas.htm [virtual-schemas]: https://github.com/exasol/virtual-schemas diff --git a/dependencies.md b/dependencies.md index 1664064..790b9e6 100644 --- a/dependencies.md +++ b/dependencies.md @@ -3,68 +3,99 @@ ## Compile Dependencies -| Dependency | License | -| ------------------------------- | -------- | -| [Virtual Schema Common JDBC][0] | [MIT][1] | -| [error-reporting-java][2] | [MIT][1] | +| Dependency | License | +| ------------------------------- | ---------------- | +| [Virtual Schema Common JDBC][0] | [MIT License][1] | +| [error-reporting-java][2] | [MIT][3] | ## Test Dependencies -| Dependency | License | -| ------------------------------- | -------------------------------- | -| [Virtual Schema Common JDBC][0] | [MIT][1] | -| [Hamcrest][6] | [BSD License 3][7] | -| [JUnit Jupiter (Aggregator)][8] | [Eclipse Public License v2.0][9] | -| [mockito-junit-jupiter][10] | [The MIT License][11] | +| Dependency | License | +| ----------------------------------------------- | --------------------------------- | +| [Virtual Schema Common JDBC][0] | [MIT License][1] | +| [Hamcrest][4] | [BSD License 3][5] | +| [JUnit Jupiter (Aggregator)][6] | [Eclipse Public License v2.0][7] | +| [mockito-junit-jupiter][8] | [The MIT License][9] | +| [Testcontainers :: JUnit Jupiter Extension][10] | [MIT][11] | +| [Testcontainers :: JDBC][10] | [MIT][11] | +| [exasol-test-setup-abstraction-java][12] | [MIT License][13] | +| [Test Database Builder for Java][14] | [MIT License][15] | +| [udf-debugging-java][16] | [MIT][3] | +| [Matcher for SQL Result Sets][17] | [MIT][3] | +| [BigQuery][18] | [Apache-2.0][19] | +| [Netty/Common][20] | [Apache License, Version 2.0][21] | +| [io.grpc:grpc-core][22] | [Apache 2.0][23] | ## Plugin Dependencies -| Dependency | License | -| 
------------------------------------------------------- | --------------------------------- | -| [JaCoCo :: Maven Plugin][12] | [Eclipse Public License 2.0][13] | -| [Maven Surefire Plugin][14] | [Apache License, Version 2.0][15] | -| [Apache Maven Compiler Plugin][16] | [Apache License, Version 2.0][15] | -| [Apache Maven JAR Plugin][18] | [Apache License, Version 2.0][15] | -| [Apache Maven Assembly Plugin][20] | [Apache License, Version 2.0][15] | -| [Apache Maven Enforcer Plugin][22] | [Apache License, Version 2.0][15] | -| [Versions Maven Plugin][24] | [Apache License, Version 2.0][15] | -| [Artifact reference checker and unifier][26] | [MIT][1] | -| [Project keeper maven plugin][28] | [MIT][1] | -| [org.sonatype.ossindex.maven:ossindex-maven-plugin][30] | [ASL2][31] | -| [error-code-crawler-maven-plugin][32] | [MIT][1] | -| [Reproducible Build Maven Plugin][34] | [Apache 2.0][31] | -| [Apache Maven Clean Plugin][36] | [Apache License, Version 2.0][15] | -| [Apache Maven Resources Plugin][38] | [Apache License, Version 2.0][15] | -| [Apache Maven Install Plugin][40] | [Apache License, Version 2.0][15] | -| [Apache Maven Deploy Plugin][42] | [Apache License, Version 2.0][15] | -| [Apache Maven Site Plugin][44] | [Apache License, Version 2.0][15] | +| Dependency | License | +| ------------------------------------------------------- | ---------------------------------------------- | +| [SonarQube Scanner for Maven][24] | [GNU LGPL 3][25] | +| [Apache Maven Compiler Plugin][26] | [Apache License, Version 2.0][19] | +| [Apache Maven Enforcer Plugin][27] | [Apache License, Version 2.0][19] | +| [Maven Flatten Plugin][28] | [Apache Software Licenese][29] | +| [org.sonatype.ossindex.maven:ossindex-maven-plugin][30] | [ASL2][29] | +| [Reproducible Build Maven Plugin][31] | [Apache 2.0][29] | +| [Maven Surefire Plugin][32] | [Apache License, Version 2.0][19] | +| [Versions Maven Plugin][33] | [Apache License, Version 2.0][19] | +| [Apache Maven Assembly Plugin][34] | 
[Apache License, Version 2.0][19] | +| [Apache Maven JAR Plugin][35] | [Apache License, Version 2.0][19] | +| [Artifact reference checker and unifier][36] | [MIT][3] | +| [Project keeper maven plugin][37] | [The MIT License][38] | +| [Maven Failsafe Plugin][39] | [Apache License, Version 2.0][19] | +| [JaCoCo :: Maven Plugin][40] | [Eclipse Public License 2.0][41] | +| [error-code-crawler-maven-plugin][42] | [MIT][3] | +| [Maven Clean Plugin][43] | [The Apache Software License, Version 2.0][29] | +| [Maven Resources Plugin][44] | [The Apache Software License, Version 2.0][29] | +| [Maven Install Plugin][45] | [The Apache Software License, Version 2.0][29] | +| [Maven Deploy Plugin][46] | [The Apache Software License, Version 2.0][29] | +| [Maven Site Plugin 3][47] | [The Apache Software License, Version 2.0][29] | -[12]: https://www.eclemma.org/jacoco/index.html -[28]: https://github.com/exasol/project-keeper-maven-plugin +[0]: https://github.com/exasol/virtual-schema-common-jdbc/ +[1]: https://github.com/exasol/virtual-schema-common-jdbc/blob/main/LICENSE [2]: https://github.com/exasol/error-reporting-java -[31]: http://www.apache.org/licenses/LICENSE-2.0.txt -[14]: https://maven.apache.org/surefire/maven-surefire-plugin/ -[1]: https://opensource.org/licenses/MIT -[10]: https://github.com/mockito/mockito -[24]: http://www.mojohaus.org/versions-maven-plugin/ -[7]: http://opensource.org/licenses/BSD-3-Clause -[16]: https://maven.apache.org/plugins/maven-compiler-plugin/ -[38]: https://maven.apache.org/plugins/maven-resources-plugin/ -[0]: https://github.com/exasol/virtual-schema-common-jdbc -[36]: https://maven.apache.org/plugins/maven-clean-plugin/ -[13]: https://www.eclipse.org/legal/epl-2.0/ -[42]: https://maven.apache.org/plugins/maven-deploy-plugin/ -[11]: https://github.com/mockito/mockito/blob/main/LICENSE -[34]: http://zlika.github.io/reproducible-build-maven-plugin -[44]: https://maven.apache.org/plugins/maven-site-plugin/ -[15]: 
https://www.apache.org/licenses/LICENSE-2.0.txt -[22]: https://maven.apache.org/enforcer/maven-enforcer-plugin/ -[9]: https://www.eclipse.org/legal/epl-v20.html -[40]: https://maven.apache.org/plugins/maven-install-plugin/ -[8]: https://junit.org/junit5/ +[3]: https://opensource.org/licenses/MIT +[4]: http://hamcrest.org/JavaHamcrest/ +[5]: http://opensource.org/licenses/BSD-3-Clause +[6]: https://junit.org/junit5/ +[7]: https://www.eclipse.org/legal/epl-v20.html +[8]: https://github.com/mockito/mockito +[9]: https://github.com/mockito/mockito/blob/main/LICENSE +[10]: https://testcontainers.org +[11]: http://opensource.org/licenses/MIT +[12]: https://github.com/exasol/exasol-test-setup-abstraction-java/ +[13]: https://github.com/exasol/exasol-test-setup-abstraction-java/blob/main/LICENSE +[14]: https://github.com/exasol/test-db-builder-java/ +[15]: https://github.com/exasol/test-db-builder-java/blob/main/LICENSE +[16]: https://github.com/exasol/udf-debugging-java/ +[17]: https://github.com/exasol/hamcrest-resultset-matcher +[18]: https://github.com/googleapis/java-bigquery +[19]: https://www.apache.org/licenses/LICENSE-2.0.txt +[20]: https://netty.io +[21]: https://www.apache.org/licenses/LICENSE-2.0 +[22]: https://github.com/grpc/grpc-java +[23]: https://opensource.org/licenses/Apache-2.0 +[24]: http://sonarsource.github.io/sonar-scanner-maven/ +[25]: http://www.gnu.org/licenses/lgpl.txt +[26]: https://maven.apache.org/plugins/maven-compiler-plugin/ +[27]: https://maven.apache.org/enforcer/maven-enforcer-plugin/ +[28]: https://www.mojohaus.org/flatten-maven-plugin/ +[29]: http://www.apache.org/licenses/LICENSE-2.0.txt [30]: https://sonatype.github.io/ossindex-maven/maven-plugin/ -[6]: http://hamcrest.org/JavaHamcrest/ -[26]: https://github.com/exasol/artifact-reference-checker-maven-plugin -[32]: https://github.com/exasol/error-code-crawler-maven-plugin -[18]: https://maven.apache.org/plugins/maven-jar-plugin/ -[20]: 
https://maven.apache.org/plugins/maven-assembly-plugin/ +[31]: http://zlika.github.io/reproducible-build-maven-plugin +[32]: https://maven.apache.org/surefire/maven-surefire-plugin/ +[33]: http://www.mojohaus.org/versions-maven-plugin/ +[34]: https://maven.apache.org/plugins/maven-assembly-plugin/ +[35]: https://maven.apache.org/plugins/maven-jar-plugin/ +[36]: https://github.com/exasol/artifact-reference-checker-maven-plugin +[37]: https://github.com/exasol/project-keeper/ +[38]: https://github.com/exasol/project-keeper/blob/main/LICENSE +[39]: https://maven.apache.org/surefire/maven-failsafe-plugin/ +[40]: https://www.jacoco.org/jacoco/trunk/doc/maven.html +[41]: https://www.eclipse.org/legal/epl-2.0/ +[42]: https://github.com/exasol/error-code-crawler-maven-plugin +[43]: http://maven.apache.org/plugins/maven-clean-plugin/ +[44]: http://maven.apache.org/plugins/maven-resources-plugin/ +[45]: http://maven.apache.org/plugins/maven-install-plugin/ +[46]: http://maven.apache.org/plugins/maven-deploy-plugin/ +[47]: http://maven.apache.org/plugins/maven-site-plugin/ diff --git a/doc/changes/changelog.md b/doc/changes/changelog.md index 07033bc..0aa0bf1 100644 --- a/doc/changes/changelog.md +++ b/doc/changes/changelog.md @@ -1,5 +1,6 @@ # Changes +* [2.0.2](changes_2.0.2.md) * [2.0.1](changes_2.0.1.md) * [2.0.0](changes_2.0.0.md) * [1.0.0](changes_1.0.0.md) diff --git a/doc/changes/changes_2.0.2.md b/doc/changes/changes_2.0.2.md new file mode 100644 index 0000000..b22a2ba --- /dev/null +++ b/doc/changes/changes_2.0.2.md @@ -0,0 +1,59 @@ +# Virtual Schema for BigQuery 2.0.2, released 2022-07-26 + +Code name: Fix data type conversion + +## Summary + +This release fixes conversions of the following BigQuery data types: DATE, TIMESTAMP, DATETIME and GEOGRAPHY. 
+ +It also fixes the following vulnerabilities by updating dependencies: + +* CVE-2022-24823 +* [sonatype-2021-0818](https://ossindex.sonatype.org/vulnerability/sonatype-2021-0818) + +## Bugfixes + +* #7: Fixed conversion of BigQuery data types DATE, TIMESTAMP, DATETIME and GEOGRAPHY. + +## Dependency Updates + +### Compile Dependency Updates + +* Updated `com.exasol:virtual-schema-common-jdbc:9.0.4` to `9.0.5` + +### Test Dependency Updates + +* Added `com.exasol:exasol-test-setup-abstraction-java:0.3.2` +* Added `com.exasol:hamcrest-resultset-matcher:1.5.1` +* Added `com.exasol:test-db-builder-java:3.3.3` +* Added `com.exasol:udf-debugging-java:0.6.4` +* Updated `com.exasol:virtual-schema-common-jdbc:9.0.4` to `9.0.5` +* Added `com.google.cloud:google-cloud-bigquery:2.14.0` +* Added `io.grpc:grpc-core:1.48.0` +* Added `io.netty:netty-common:4.1.79.Final` +* Updated `org.junit.jupiter:junit-jupiter:5.8.1` to `5.8.2` +* Updated `org.mockito:mockito-junit-jupiter:4.1.0` to `4.6.1` +* Added `org.testcontainers:jdbc:1.17.3` +* Added `org.testcontainers:junit-jupiter:1.17.3` + +### Plugin Dependency Updates + +* Updated `com.exasol:artifact-reference-checker-maven-plugin:0.3.1` to `0.4.0` +* Updated `com.exasol:error-code-crawler-maven-plugin:0.6.0` to `1.1.1` +* Updated `com.exasol:project-keeper-maven-plugin:1.3.2` to `2.5.0` +* Updated `io.github.zlika:reproducible-build-maven-plugin:0.13` to `0.15` +* Updated `org.apache.maven.plugins:maven-clean-plugin:3.1.0` to `2.5` +* Updated `org.apache.maven.plugins:maven-compiler-plugin:3.8.1` to `3.10.1` +* Updated `org.apache.maven.plugins:maven-deploy-plugin:3.0.0-M1` to `2.7` +* Updated `org.apache.maven.plugins:maven-enforcer-plugin:3.0.0-M3` to `3.0.0` +* Added `org.apache.maven.plugins:maven-failsafe-plugin:3.0.0-M5` +* Updated `org.apache.maven.plugins:maven-install-plugin:3.0.0-M1` to `2.4` +* Updated `org.apache.maven.plugins:maven-jar-plugin:3.2.0` to `3.2.2` +* Updated 
`org.apache.maven.plugins:maven-resources-plugin:3.2.0` to `2.6` +* Updated `org.apache.maven.plugins:maven-site-plugin:3.9.1` to `3.3` +* Updated `org.apache.maven.plugins:maven-surefire-plugin:3.0.0-M3` to `3.0.0-M5` +* Added `org.codehaus.mojo:flatten-maven-plugin:1.2.7` +* Updated `org.codehaus.mojo:versions-maven-plugin:2.7` to `2.10.0` +* Updated `org.jacoco:jacoco-maven-plugin:0.8.5` to `0.8.8` +* Added `org.sonarsource.scanner.maven:sonar-maven-plugin:3.9.1.2184` +* Updated `org.sonatype.ossindex.maven:ossindex-maven-plugin:3.1.0` to `3.2.0` diff --git a/doc/developers_guide.md b/doc/developers_guide.md new file mode 100644 index 0000000..9b2482a --- /dev/null +++ b/doc/developers_guide.md @@ -0,0 +1,22 @@ +# Developers Guide + +This guide contains information for developers. + +## Running Integration Tests Against Google Cloud + +Integration tests are prepared to use a local [bigquery-emulator](https://github.com/goccy/bigquery-emulator), but the emulator does not yet support all required features. Until this is finished it's only possible to run integration tests against BigQuery in Google Cloud: + +1. Login to Google Cloud, create a Service Account and download the private key as JSON file. +2. Create file `test.properties` with the following content: + + ```properties + googleProjectId = google-project-id + serviceAccountEmail = your.google.account@example.com + privateKeyPath = /path/to/private-key.json + + udfLoggingEnabled = true + ``` + +If file `test.properties` or one of `googleProjectId`, `serviceAccountEmail`, or `privateKeyPath` is missing, then integration tests will be skipped. + +When `udfLoggingEnabled` is set to `true`, UDF logs will be written to `target/udf-logs/*.txt`. 
diff --git a/doc/user_guide/bigquery_user_guide.md b/doc/user_guide/bigquery_user_guide.md index 284550a..248bca5 100644 --- a/doc/user_guide/bigquery_user_guide.md +++ b/doc/user_guide/bigquery_user_guide.md @@ -4,21 +4,22 @@ The Big Query SQL dialect allows you connecting to the [Google Big Query](https: ## JDBC Driver -Download the [Simba JDBC Driver for Google BigQuery](https://cloud.google.com/bigquery/providers/simba-drivers/). +Download the [Simba JDBC Driver for Google BigQuery](https://cloud.google.com/bigquery/docs/reference/odbc-jdbc-drivers#current_jdbc_driver). ## Uploading the JDBC Driver to BucketFS -1. [Create a bucket in BucketFS](https://docs.exasol.com/administration/on-premise/bucketfs/create_new_bucket_in_bucketfs_service.htm) +1. [Create a bucket in BucketFS](https://docs.exasol.com/administration/on-premise/bucketfs/create_new_bucket_in_bucketfs_service.htm) 1. [Upload the driver to BucketFS](https://docs.exasol.com/administration/on-premise/bucketfs/accessfiles.htm) -**Hint**: Magnitude Simba driver contains a lot of jar files, but you can upload all of them together as an archive (`.tar.gz`, for example). -The archive will be unpacked automatically in the bucket, and you can access the files using the following path pattern '//.jar' +When uploading the archive to the bucket then the Exasol database will automatically extract the contents of the archive and your UDF can access the files using the following path pattern `//.jar`. -Leave only `.jar` files in the archive. It will help you to generate a list for adapter script later. +See the [Exasol documentation](https://docs.exasol.com/db/latest/database_concepts/bucketfs/database_access.htm) for accessing BucketFS. + +Leave only `.jar` files in the archive. It will help you to generate a list for adapter script later. ## Installing the Adapter Script -Upload the latest available release of [Big Query Virtual Schema](https://github.com/exasol/bigquery-virtual-schema/releases) to Bucket FS. 
+Upload the latest available release of [Big Query Virtual Schema](https://github.com/exasol/bigquery-virtual-schema/releases) to BucketFS. Then create a schema to hold the adapter script. @@ -28,12 +29,12 @@ CREATE SCHEMA SCHEMA_FOR_VS_SCRIPT; The SQL statement below creates the adapter script, defines the Java class that serves as entry point and tells the UDF framework where to find the libraries (JAR files) for Virtual Schema and database driver. -List all the JAR files from Magnitude Simba JDBC driver. +List all the JAR files from the JDBC driver. ```sql CREATE JAVA ADAPTER SCRIPT SCHEMA_FOR_VS_SCRIPT.ADAPTER_SCRIPT_BIGQUERY AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtual-schema-dist-9.0.4-bigquery-2.0.1.jar; + %jar /buckets///virtual-schema-dist-9.0.5-bigquery-2.0.2.jar; %jar /buckets///GoogleBigQueryJDBC42.jar; ... ... @@ -42,28 +43,28 @@ CREATE JAVA ADAPTER SCRIPT SCHEMA_FOR_VS_SCRIPT.ADAPTER_SCRIPT_BIGQUERY AS ; ``` -**Hint**: to avoid filling the list by hands, use a convenience UDF script [bucketfs_ls](https://github.com/exasol/exa-toolbox/blob/master/utilities/bucketfs_ls.sql). -Create a script and run it as in the following example: +**Hint**: to avoid filling the list by hands, use a convenience UDF script [bucketfs_ls](https://github.com/exasol/exa-toolbox/blob/master/utilities/bucketfs_ls.sql). Create a script and run it as in the following example: ```sql -SELECT '%jar /buckets////'|| files || ';' FROM (SELECT EXA_toolbox.bucketfs_ls('/buckets////') files ); +SELECT '%jar /buckets////'|| files || ';' FROM (SELECT EXA_toolbox.bucketfs_ls('/buckets////') files ); ``` ## Defining a Named Connection Please follow the [Authenticating to a Cloud API Service article](https://cloud.google.com/docs/authentication/) to get Google service account credentials. 
-Upload the key to BucketFS, then create a named connection: +Upload the key as a JSON file to BucketFS, then create a named connection: ```sql CREATE OR REPLACE CONNECTION BIGQUERY_JDBC_CONNECTION TO 'jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;ProjectId=;OAuthType=0;OAuthServiceAcctEmail=;OAuthPvtKeyPath=//'; -``` -You can find additional information about the [JDBC connection string in the Big Query JDBC installation guide](https://www.simba.com/products/BigQuery/doc/JDBC_InstallGuide/content/jdbc/using/intro.htm); +``` + +You can find additional information about the JDBC connection URL [in the Big Query JDBC installation guide](https://storage.googleapis.com/simba-bq-release/jdbc/Simba%20Google%20BigQuery%20JDBC%20Connector%20Install%20and%20Configuration%20Guide.pdf). ## Creating a Virtual Schema -Below you see how a Big Query Virtual Schema is created. Please note that you have to provide the name of a catalog and the name of a schema. +Below you see how a Big Query Virtual Schema is created. Please note that you have to provide the name of a catalog (=the project name) and the name of a schema (=the dataset name). ```sql CREATE VIRTUAL SCHEMA @@ -76,33 +77,45 @@ CREATE VIRTUAL SCHEMA ## Data Types Conversion -BigQuery Data Type | Supported | Converted Exasol Data Type| Known limitations --------------------|-----------|---------------------------|------------------- -BOOLEAN | ✓ | BOOLEAN | -BYTES | × | | -DATE | ✓ | DATE | -DATETIME | ✓ | TIMESTAMP | -FLOAT | ✓ | DOUBLE | Expected range for correct mapping: -99999999.99999999 .. 99999999.99999999. -GEOGRAPHY | ✓ | VARCHAR(65535) | -INTEGER | ✓ | DECIMAL(19,0) | -NUMERIC | ✓ | VARCHAR(2000000) | -RECORD/STRUCT | × | | -STRING | ✓ | VARCHAR(65535) | -TIME | ✓ | VARCHAR(16) | -TIMESTAMP | ✓ | TIMESTAMP | Expected range for correct mapping: 1582-10-15 00:00:01 .. 9999-12-31 23:59:59.9999. JDBC driver maps dates before 1582-10-15 00:00:01 incorrectly. 
Example of incorrect mapping: 1582-10-14 22:00:01 -> 1582-10-04 22:00:01 - -## Performance +BigQuery Data Type | Supported | Converted Exasol Data Type | Known limitations +-------------------|-----------|----------------------------|------------------- +BOOL/ BOOLEAN | ✓ | BOOLEAN | +DATE | ✓ | DATE | +DATETIME | ✓ | TIMESTAMP | +FLOAT / FLOAT64 | ✓ | DOUBLE | Expected range for correct mapping: `-99999999.99999999` .. `99999999.99999999`. +GEOGRAPHY | ✓ | GEOMETRY | +INTEGER / INT64 | ✓ | DECIMAL | +BIGNUMERIC | ✓ | DOUBLE PRECISION | Expected range for correct mapping: `-99999999.99999999` .. `99999999.99999999`. +NUMERIC | ✓ | DOUBLE PRECISION | Expected range for correct mapping: `-99999999.99999999` .. `99999999.99999999`. +STRING | ✓ | VARCHAR(65535) | +TIME | ✓ | VARCHAR | +TIMESTAMP | ✓ | TIMESTAMP | Expected range for correct mapping: `1582-10-15 00:00:01` .. `9999-12-31 23:59:59.9999`. JDBC driver maps dates before `1582-10-15 00:00:01` incorrectly. Example of incorrect mapping: `1582-10-14 22:00:01` -> `1582-10-04 22:00:01` +BYTES | × | | +STRUCT | × | | +ARRAY | × | | +JSON | × | | +INTERVAL | × | | + +## Known Limitations + +### Performance Please be aware that the current implementation of the dialect can only handle result sets with limited size (a few thousand rows). -If you need to process a large amount of data, please contact our support team. Another implementation of the dialect with a performance improvement (using `IMPORT INTO`) is available, but not documented for self-service because of: + +If you need to process a large amount of data, please contact the Exasol support team. Another implementation of the dialect with a performance improvement (using `IMPORT INTO`) is available, but not documented for self-service because of: 1. the complex installation process -1. security risks (a user has to disable the drivers' security manager to use it) +1. 
security risks (a user has to disable the driver's security manager to use it) + +### Mapping of Empty Result + +If a query returns an empty result set, the Virtual Schema will map all columns to type `SMALLINT`. ## Testing information -In the following matrix you find combinations of JDBC driver and dialect version that we tested. +In the following matrix you find combinations of JDBC driver and dialect version that Exasol developer have tested successfully: -Virtual Schema Version| Big Query Version | Driver Name | Driver Version -----------------------|---------------------|---------------------------------------------|------------------------ - 3.0.2 | Google BigQuery 2.0 | Magnitude Simba JDBC driver for BigQuery | 1.2.2.1004 +Virtual Schema Version | Big Query Version | Driver Name | Driver Version +-----------------------|---------------------|------------------------------------------|----------------- + 1.0.0 | Google BigQuery 2.0 | Magnitude Simba JDBC driver for BigQuery | 1.2.2.1004 + 2.0.2 | Google BigQuery 2.0 | Magnitude Simba JDBC driver for BigQuery | 1.2.25.1029 diff --git a/error_code_config.yml b/error_code_config.yml index ed96efb..d169185 100644 --- a/error_code_config.yml +++ b/error_code_config.yml @@ -1,4 +1,5 @@ error-tags: VS-BIGQ: packages: - - com.exasol.adapter.document.files \ No newline at end of file + - com.exasol.adapter.document.files + highest-index: 1 diff --git a/pk_generated_parent.pom b/pk_generated_parent.pom new file mode 100644 index 0000000..c556dff --- /dev/null +++ b/pk_generated_parent.pom @@ -0,0 +1,275 @@ + + + 4.0.0 + com.exasol + bigquery-virtual-schema-generated-parent + 2.0.2 + pom + + UTF-8 + UTF-8 + 11 + + + + + MIT License + https://github.com/exasol/bigquery-virtual-schema/blob/main/LICENSE + repo + + + + + Exasol + opensource@exasol.com + Exasol AG + https://www.exasol.com/ + + + + scm:git:https://github.com/exasol/bigquery-virtual-schema.git + scm:git:https://github.com/exasol/bigquery-virtual-schema.git + 
https://github.com/exasol/bigquery-virtual-schema/ + + + + + + org.sonarsource.scanner.maven + sonar-maven-plugin + 3.9.1.2184 + + + org.apache.maven.plugins + maven-compiler-plugin + 3.10.1 + + ${java.version} + ${java.version} + + + + org.apache.maven.plugins + maven-enforcer-plugin + 3.0.0 + + + enforce-maven + + enforce + + + + + 3.6.3 + + + + + + + + org.codehaus.mojo + flatten-maven-plugin + 1.2.7 + + true + oss + + + + flatten + process-resources + + flatten + + + + flatten.clean + clean + + clean + + + + + + org.sonatype.ossindex.maven + ossindex-maven-plugin + 3.2.0 + + + audit + package + + audit + + + + + + io.github.zlika + reproducible-build-maven-plugin + 0.15 + + + strip-jar + package + + strip-jar + + + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.0.0-M5 + + + -Djava.util.logging.config.file=src/test/resources/logging.properties ${argLine} + ${test.excludeTags} + + + + org.codehaus.mojo + versions-maven-plugin + 2.10.0 + + + display-updates + package + + display-plugin-updates + display-dependency-updates + + + + + file:///${project.basedir}/versionsMavenPluginRules.xml + + + + org.apache.maven.plugins + maven-assembly-plugin + 3.3.0 + + + src/assembly/all-dependencies.xml + + NAME_OF_YOUR_JAR + false + + + + make-assembly + package + + single + + + + + + org.apache.maven.plugins + maven-jar-plugin + 3.2.2 + + + default-jar + none + + + + + com.exasol + artifact-reference-checker-maven-plugin + 0.4.0 + + + verify + + verify + + + + + + org.apache.maven.plugins + maven-failsafe-plugin + 3.0.0-M5 + + + -Djava.util.logging.config.file=src/test/resources/logging.properties ${argLine} + + ${test.excludeTags} + + + + verify + + integration-test + verify + + + + + + org.jacoco + jacoco-maven-plugin + 0.8.8 + + + prepare-agent + + prepare-agent + + + + merge-results + verify + + merge + + + + + ${project.build.directory}/ + + jacoco*.exec + + + + ${project.build.directory}/aggregate.exec + + + + report + verify + + report + + + 
${project.build.directory}/aggregate.exec + + + + + + com.exasol + error-code-crawler-maven-plugin + 1.1.1 + + + verify + + verify + + + + + + + diff --git a/pom.xml b/pom.xml index 347debd..e3ddea2 100644 --- a/pom.xml +++ b/pom.xml @@ -1,11 +1,11 @@ - + 4.0.0 com.exasol bigquery-virtual-schema - 2.0.1 + 2.0.2 Virtual Schema for BigQuery + Virtual Schema for connecting Big Query as a data source to Exasol maven.exasol.com @@ -17,14 +17,7 @@ - UTF-8 - UTF-8 - 11 - 3.0.0-M3 - 9.0.4 - 1.15.0 - target/site/jacoco/jacoco.xml,target/site/jacoco-it/jacoco.xml - + 9.0.5 @@ -53,7 +46,7 @@ error-reporting-java 0.4.1 - + com.exasol virtual-schema-common-jdbc @@ -70,164 +63,85 @@ org.junit.jupiter junit-jupiter - 5.8.1 + 5.8.2 test org.mockito mockito-junit-jupiter - 4.1.0 + 4.6.1 + test + + + + org.testcontainers + junit-jupiter + 1.17.3 + test + + + org.testcontainers + jdbc + 1.17.3 + test + + + com.exasol + exasol-test-setup-abstraction-java + 0.3.2 + test + + + com.exasol + test-db-builder-java + 3.3.3 + test + + + com.exasol + udf-debugging-java + 0.6.4 + test + + + com.exasol + hamcrest-resultset-matcher + 1.5.1 + test + + + com.google.cloud + google-cloud-bigquery + 2.14.0 + test + + + + io.netty + netty-common + 4.1.79.Final + test + + + + io.grpc + grpc-core + 1.48.0 test - - org.jacoco - jacoco-maven-plugin - 0.8.5 - - - prepare-agent - - prepare-agent - - - - prepare-agent-integration - - prepare-agent-integration - - - - merge-results - verify - - merge - - - - - ${project.build.directory}/ - - jacoco*.exec - - - - ${project.build.directory}/aggregate.exec - - - - report - verify - - report - - - ${project.build.directory}/aggregate.exec - - - - - - org.apache.maven.plugins - maven-surefire-plugin - ${surefire.and.failsafe.plugin.version} - - - -Djava.util.logging.config.file=src/test/resources/logging.properties ${argLine} - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.8.1 - - ${java.version} - ${java.version} - - - - maven-jar-plugin - 3.2.0 - - - 
default-jar - none - - - org.apache.maven.plugins - org.apache.maven.plugins maven-assembly-plugin - 3.3.0 - - src/assembly/all-dependencies.xml - - virtual-schema-dist-${vscjdbc.version}-bigquery-${version} - false - - - - make-assembly - package - - single - - - - - - org.apache.maven.plugins - maven-enforcer-plugin - ${surefire.and.failsafe.plugin.version} - - - enforce-maven - - enforce - - - - - 3.3.9 - - - - - - - - org.codehaus.mojo - versions-maven-plugin - 2.7 - - - package - - display-plugin-updates - display-dependency-updates - - - - - file:///${project.basedir}/versionsMavenPluginRules.xml + virtual-schema-dist-${vscjdbc.version}-bigquery-${project.version} com.exasol artifact-reference-checker-maven-plugin - 0.3.1 - - - - verify - - - /doc/changes/* @@ -237,7 +151,7 @@ com.exasol project-keeper-maven-plugin - 1.3.2 + 2.5.0 @@ -245,75 +159,45 @@ + + + org.apache.maven.plugins + maven-failsafe-plugin - - jar_artifact - + + false org.sonatype.ossindex.maven ossindex-maven-plugin - 3.1.0 - - - package - - audit - - - - - - com.exasol - error-code-crawler-maven-plugin - 0.6.0 - - - - verify - - - - - - io.github.zlika - reproducible-build-maven-plugin - 0.13 - - - strip-jar - package - - strip-jar - - - + + + + + sonatype-2020-0026 + + + + + sonatype-2020-0926 + + + + + sonatype-2021-0818 + + + - - - - maven-clean-plugin - 3.1.0 - - - maven-deploy-plugin - 3.0.0-M1 - - - maven-install-plugin - 3.0.0-M1 - - - maven-resources-plugin - 3.2.0 - - - maven-site-plugin - 3.9.1 - - - + https://github.com/exasol/bigquery-virtual-schema/ + + bigquery-virtual-schema-generated-parent + com.exasol + 2.0.2 + pk_generated_parent.pom + \ No newline at end of file diff --git a/release_config.yml b/release_config.yml new file mode 100644 index 0000000..10049a6 --- /dev/null +++ b/release_config.yml @@ -0,0 +1,3 @@ +release-platforms: + - GitHub + - Jira diff --git a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryQueryRewriter.java 
b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryQueryRewriter.java index 10a09a3..b9662b1 100644 --- a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryQueryRewriter.java +++ b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryQueryRewriter.java @@ -1,5 +1,13 @@ package com.exasol.adapter.dialects.bigquery; +import java.math.BigInteger; +import java.sql.*; +import java.sql.Date; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.util.*; +import java.util.logging.Logger; + import com.exasol.ExaMetadata; import com.exasol.adapter.AdapterException; import com.exasol.adapter.AdapterProperties; @@ -11,26 +19,21 @@ import com.exasol.adapter.jdbc.RemoteMetadataReader; import com.exasol.adapter.sql.SqlStatement; -import java.math.BigInteger; -import java.sql.*; -import java.util.StringJoiner; -import java.util.logging.Logger; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - /** * This class implements a BigQuery-specific query rewriter. 
*/ public class BigQueryQueryRewriter extends ImportIntoTemporaryTableQueryRewriter { private static final Logger LOGGER = Logger.getLogger(BigQueryQueryRewriter.class.getName()); - private static final double[] TEN_POWERS = {10d, 100d, 1000d, 10000d, 100000d, 1000000d}; - @SuppressWarnings("squid:S4784") // this pattern is secure - private static final Pattern DATE_PATTERN = Pattern.compile("(\\d{4})-(\\d{1,2})-(\\d{1,2})"); - @SuppressWarnings("squid:S4784") // this pattern is secure - private static final Pattern TIME_PATTERN = Pattern.compile("(\\d{1,2}):(\\d{1,2}):(\\d{1,2})(?:\\.(\\d{1,6}))?"); private static final String CAST = "CAST"; private static final String CAST_NULL_AS_VARCHAR_4 = CAST + " (NULL AS VARCHAR(4))"; + private static final ZoneId UTC_TIMEZONE_ID = ZoneId.of("UTC"); + private static final DateTimeFormatter TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS") + .withZone(UTC_TIMEZONE_ID); + private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd") + .withZone(UTC_TIMEZONE_ID); + private final Calendar utcCalendar = Calendar.getInstance(TimeZone.getTimeZone(UTC_TIMEZONE_ID)); + /** * Create a new instance of the {@link BigQueryQueryRewriter}. 
* @@ -39,7 +42,7 @@ public class BigQueryQueryRewriter extends ImportIntoTemporaryTableQueryRewriter * @param connectionFactory factory for the JDBC connection to remote data source */ public BigQueryQueryRewriter(final SqlDialect dialect, final RemoteMetadataReader remoteMetadataReader, - final ConnectionFactory connectionFactory) { + final ConnectionFactory connectionFactory) { super(dialect, remoteMetadataReader, connectionFactory); } @@ -89,26 +92,36 @@ private void appendQueryForEmptyTable(final StringBuilder builder, final ResultS builder.append(") WHERE false"); } - private void appendRow(final StringBuilder builder, final ResultSet resultSet, - final ResultSetMetaData resultSetMetadata) throws SQLException { - final int columnCount = resultSetMetadata.getColumnCount(); + private void appendRow(final StringBuilder builder, final ResultSet resultSet, final ResultSetMetaData metadata) + throws SQLException { + final int columnCount = metadata.getColumnCount(); builder.append(" ("); for (int i = 1; i <= columnCount; ++i) { - final String columnName = resultSetMetadata.getColumnName(i); + final String columnName = metadata.getColumnName(i); if (i > 1) { builder.append(", "); } - appendColumnValue(builder, resultSet, columnName, resultSetMetadata.getColumnType(i)); + appendColumnValue(builder, resultSet, columnName, metadata.getColumnType(i), metadata.getColumnTypeName(i)); } builder.append(")"); } private void appendColumnValue(final StringBuilder builder, final ResultSet resultSet, final String columnName, - final int type) throws SQLException { + final int type, final String typeName) throws SQLException { + LOGGER.fine(() -> "Mapping column " + columnName + " of type " + type + "/" + typeName); + + if ("GEOGRAPHY".equals(typeName)) { + appendGeometry(builder, resultSet, columnName); + return; + } + switch (type) { case Types.BIGINT: + case Types.INTEGER: appendBigInt(builder, resultSet, columnName); break; + case Types.DECIMAL: + case Types.NUMERIC: case 
Types.DOUBLE: appendDouble(builder, resultSet, columnName); break; @@ -122,12 +135,16 @@ private void appendColumnValue(final StringBuilder builder, final ResultSet resu appendTimestamp(builder, resultSet, columnName); break; case Types.VARCHAR: + case Types.CHAR: appendVarchar(builder, resultSet, columnName); break; case Types.TIME: + appendVarchar(builder, resultSet, columnName); + break; case Types.VARBINARY: - case Types.NUMERIC: default: + LOGGER.info( + () -> "Mapping unknown column " + columnName + " of type " + type + "/" + typeName + " to string"); appendString(builder, resultSet, columnName); break; } @@ -139,6 +156,13 @@ private void appendVarchar(final StringBuilder builder, final ResultSet resultSe builder.append(resultSet.wasNull() ? CAST_NULL_AS_VARCHAR_4 : stringLiteral); } + private void appendGeometry(final StringBuilder builder, final ResultSet resultSet, final String columnName) + throws SQLException { + String value = resultSet.getString(columnName); + value = resultSet.wasNull() ? 
"NULL" : "'" + value + "'"; + builder.append(CAST + " (" + value + " AS GEOMETRY)"); + } + private void appendBigInt(final StringBuilder builder, final ResultSet resultSet, final String columnName) throws SQLException { final String string = resultSet.getString(columnName); @@ -159,75 +183,23 @@ private void appendBoolean(final StringBuilder builder, final ResultSet resultSe private void appendDate(final StringBuilder builder, final ResultSet resultSet, final String columnName) throws SQLException { - final String value = resultSet.getString(columnName); - if (value == null) { - builder.append(CAST_NULL_AS_VARCHAR_4); - } else { - builder.append("'"); - builder.append(castDate(value)); - builder.append("'"); - } - } - - private String castDate(final String dateToCast) { - final Matcher matcher = DATE_PATTERN.matcher(dateToCast); - if (matcher.matches()) { - final int year = Integer.parseInt(matcher.group(1)); - final int month = Integer.parseInt(matcher.group(2)); - final int day = Integer.parseInt(matcher.group(3)); - return String.format("%02d.%02d.%04d", day, month, year); + final Date date = resultSet.getDate(columnName, utcCalendar); + if (date == null) { + builder.append(CAST + " (NULL AS DATE)"); } else { - throw new IllegalArgumentException( - "Date does not match required format: YYYY-[M]M-[D]D. 
Actual value was:" + dateToCast); + builder.append(CAST + "('" + DATE_FORMATTER.format(date.toLocalDate()) + "' AS DATE)"); } } private void appendTimestamp(final StringBuilder builder, final ResultSet resultSet, final String columnName) throws SQLException { - final String value = resultSet.getString(columnName); - if (value == null) { - builder.append(CAST_NULL_AS_VARCHAR_4); - } else { - builder.append("'"); - builder.append(castTimestamp(value)); - builder.append("'"); - } - } - - private String castTimestamp(final String timestampToCast) { - final StringBuilder builder = new StringBuilder(); - final String[] splitTimestamp = getSplitTimestamp(timestampToCast); - builder.append(castDate(splitTimestamp[0])); - builder.append(" "); - builder.append(castTime(splitTimestamp[1])); - return builder.toString(); - } - - private String[] getSplitTimestamp(final String timestampToCast) { - if (timestampToCast.contains("T")) { - return timestampToCast.split("T"); - } else { - return timestampToCast.split(" "); - } - } - - private String castTime(final String timeToCast) { - final Matcher matcher = TIME_PATTERN.matcher(timeToCast); - if (matcher.matches()) { - final int hour = Integer.parseInt(matcher.group(1)); - final int minute = Integer.parseInt(matcher.group(2)); - final int second = Integer.parseInt(matcher.group(3)); - final String fractionOfSecond = matcher.group(4); - if (fractionOfSecond != null) { - final int fractionOfSecondInt = Integer.parseInt(fractionOfSecond); - final int fractionOfSecondRounded = (int) Math - .round((fractionOfSecondInt / TEN_POWERS[fractionOfSecond.length() - 1]) * 1000); - return String.format("%02d:%02d:%02d.%03d", hour, minute, second, fractionOfSecondRounded); - } else { - return String.format("%02d:%02d:%02d", hour, minute, second); - } + final Timestamp timestamp = resultSet.getTimestamp(columnName, utcCalendar); + if (timestamp == null) { + builder.append(CAST + " (NULL AS TIMESTAMP)"); } else { - throw new 
IllegalArgumentException("Time does not match required format: [H]H:[M]M:[S]S[.DDDDDD]]"); + builder.append("CAST ('"); + builder.append(TIMESTAMP_FORMATTER.format(timestamp.toInstant())); + builder.append("' AS TIMESTAMP)"); } } diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryQueryRewriterTest.java b/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryQueryRewriterTest.java index 54e4b9c..bc2558c 100644 --- a/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryQueryRewriterTest.java +++ b/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryQueryRewriterTest.java @@ -3,9 +3,12 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.when; import java.sql.*; +import java.time.Instant; +import java.util.Calendar; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -64,8 +67,10 @@ void testRewriteWithJdbcConnectionEmptyTable() throws AdapterException, SQLExcep equalTo("SELECT * FROM VALUES(1, 1, 1, 1, 1) WHERE false")); } - @CsvSource({ "float_col, 8, 105.0", // - "float_col, 8, 99.4" // + @CsvSource({ "decimal_col, 3, 105.0", // + "numeric_col, 2, 105.0", // + "double_col, 8, 105.0", // + "double_col, 8, 99.4" // }) @ParameterizedTest void testRewriteWithFloatingValues(final String columnName, final int type, final double columnValue) @@ -92,12 +97,9 @@ void testRewriteWithBoolean() throws AdapterException, SQLException { @CsvSource({ "string_col, 12, hello, hello", // "string_col, 12, i'm, i\\'m", // + "char_col, 1, char value, char value", // "time_col, 92, 12:10:09.000, 12:10:09.000", // - "numeric_col, 2, 22222.2222, 22222.2222", // - "numeric_col, 2, 11.5, 11.5", // - "date_col, 91, 1111-01-01, 01.01.1111", // - "date_col, 91, 2019-12-3, 03.12.2019", // - "date_col, 91, 2019-5-02, 02.05.2019" // + "time_varbinary, -3, 
varbinary, varbinary" // }) @ParameterizedTest void testRewriteWithStringValues(final String columnName, final int type, final String columnValue, @@ -105,39 +107,94 @@ void testRewriteWithStringValues(final String columnName, final int type, final assertQueryWithOneStringValue(columnName, type, columnValue, "SELECT * FROM VALUES ('" + resultValue + "')"); } - private void assertQueryWithOneStringValue(final String columnName, final int type, final String columnValue, + @CsvSource({ "1111-01-01, 1111-01-01", // + "2019-12-3, 2019-12-03", // + "2019-5-02, 2019-05-02" // + }) + @ParameterizedTest + void testRewriteWithDateValues(final java.sql.Date columnValue, final String resultValue) + throws AdapterException, SQLException { + final String columnName = "col"; + final int type = Types.DATE; + when(this.mockResultSet.getDate(eq(columnName), any(Calendar.class))).thenReturn(columnValue); + assertQueryWithOneColumn(columnName, type, null, "SELECT * FROM VALUES (CAST('" + resultValue + "' AS DATE))"); + } + + @CsvSource({ "22222.2222, 22222.2222", // + "-99999999.99999999, -9.999999999999999E7", // + "99999999.99999999, 9.999999999999999E7", // + "99999999.99999999, 9.999999999999999E7", // + "11.5, 11.5", // + "42, 42.0" }) + @ParameterizedTest + void testRewriteWithNumeric(final Double columnValue, final String resultValue) + throws AdapterException, SQLException { + final String columnName = "col"; + final int type = Types.NUMERIC; + when(this.mockResultSet.getDouble(columnName)).thenReturn(columnValue); + assertQueryWithOneColumn(columnName, type, null, "SELECT * FROM VALUES (" + resultValue + ")"); + } + + private void assertQueryWithOneColumn(final String columnName, final int type, final String typeName, final String query) throws SQLException, AdapterException { when(this.mockResultSetMetaData.getColumnName(1)).thenReturn(columnName); - when(this.mockResultSet.getString(columnName)).thenReturn(columnValue); 
when(this.mockResultSetMetaData.getColumnType(1)).thenReturn(type); + when(this.mockResultSetMetaData.getColumnTypeName(1)).thenReturn(typeName); when(this.mockResultSetMetaData.getColumnCount()).thenReturn(1); when(this.mockResultSet.next()).thenReturn(true, false); assertThat(this.queryRewriter.rewrite(this.statement, this.exaMetadata, AdapterProperties.emptyProperties()), equalTo(query)); } + private void assertQueryWithOneStringValue(final String columnName, final int type, final String columnValue, + final String query) throws SQLException, AdapterException { + when(this.mockResultSet.getString(columnName)).thenReturn(columnValue); + assertQueryWithOneColumn(columnName, type, null, query); + } + @Test void testRewriteWithBigInt() throws AdapterException, SQLException { assertQueryWithOneStringValue("bigint_col", Types.BIGINT, "123456", "SELECT * FROM VALUES (123456)"); } - @CsvSource({ "1111-01-01 12:10:09.000000, 01.01.1111 12:10:09.000", // - "1111-1-1 12:10:9.000000, 01.01.1111 12:10:09.000", // - "1111-1-1 12:10:9, 01.01.1111 12:10:09", // - "1111-1-1 12:10:9.1, 01.01.1111 12:10:09.100", // - "1111-1-1 12:10:9.12, 01.01.1111 12:10:09.120", // - "1111-1-1 12:10:9.123, 01.01.1111 12:10:09.123", // - "1111-1-1 12:10:9.1234, 01.01.1111 12:10:09.123", // - "1111-1-1 12:10:9.1239, 01.01.1111 12:10:09.124", // - "1111-1-1 12:10:9.12345, 01.01.1111 12:10:09.123", // - "1111-1-1 12:10:9.123666, 01.01.1111 12:10:09.124", // - "1111-1-1T1:2:30, 01.01.1111 01:02:30" // + @Test + void testRewriteWithInteger() throws AdapterException, SQLException { + assertQueryWithOneStringValue("int_col", Types.INTEGER, "123456", "SELECT * FROM VALUES (123456)"); + } + + @CsvSource({ "1111-01-01T12:10:09.000000Z, 1111-01-01 12:10:09.000", // + "1111-01-01T12:10:09.000000Z, 1111-01-01 12:10:09.000", // + "1111-01-01T12:10:09Z, 1111-01-01 12:10:09.000", // + "1111-01-01T12:10:09.1Z, 1111-01-01 12:10:09.100", // + "1111-01-01T12:10:09.12Z, 1111-01-01 12:10:09.120", // + 
"1111-01-01T12:10:09.123Z, 1111-01-01 12:10:09.123", // + "1111-01-01T12:10:09.1234Z, 1111-01-01 12:10:09.123", // + "1111-01-01T12:10:09.1239Z, 1111-01-01 12:10:09.123", // + "1111-01-01T12:10:09.12345Z, 1111-01-01 12:10:09.123", // + "1111-01-01T12:10:09.123666Z, 1111-01-01 12:10:09.123", // + "1111-01-01T01:02:30Z, 1111-01-01 01:02:30.000" // }) @ParameterizedTest - void testRewriteWithDatetime(final String valueToConvert, final String expectedValue) + void testRewriteWithDatetime(final Instant valueToConvert, final String expectedValue) throws AdapterException, SQLException { - assertQueryWithOneStringValue("timestamp", 93, valueToConvert, - "SELECT * FROM VALUES ('" + expectedValue + "')"); + when(this.mockResultSet.getTimestamp(eq("col_timestamp"), any(Calendar.class))) + .thenReturn(new Timestamp(valueToConvert.toEpochMilli())); + assertQueryWithOneColumn("col_timestamp", Types.TIMESTAMP, null, + "SELECT * FROM VALUES (CAST ('" + expectedValue + "' AS TIMESTAMP))"); + } + + @Test + void testRewriteWithGeography() throws AdapterException, SQLException { + when(this.mockResultSet.getString("col")).thenReturn("POINT(1 2)"); + assertQueryWithOneColumn("col", Types.VARCHAR, "GEOGRAPHY", + "SELECT * FROM VALUES (CAST ('POINT(1 2)' AS GEOMETRY))"); + } + + @Test + void testRewriteWithNullGeography() throws AdapterException, SQLException { + when(this.mockResultSet.getString("col")).thenReturn(null); + when(this.mockResultSet.wasNull()).thenReturn(true); + assertQueryWithOneColumn("col", Types.VARCHAR, "GEOGRAPHY", "SELECT * FROM VALUES (CAST (NULL AS GEOMETRY))"); } @Test @@ -157,8 +214,8 @@ void testRewriteWithJdbcConnectionWithThreeRows() throws AdapterException, SQLEx equalTo("SELECT * FROM VALUES (1, 'foo', true), (2, 'bar', false), (3, 'cat', true)")); } - @ValueSource(ints = { Types.VARCHAR, Types.TIME, Types.VARBINARY, Types.NUMERIC }) - @ParameterizedTest + @ValueSource(ints = { Types.VARCHAR, Types.TIME, Types.VARBINARY }) + @ParameterizedTest(name = "Null 
value of type {0}") void testRewriteStringWithValueNull(final int type) throws AdapterException, SQLException { mockOneRowWithOneColumnOfType(type); assertThat(this.queryRewriter.rewrite(this.statement, this.exaMetadata, AdapterProperties.emptyProperties()), @@ -172,6 +229,13 @@ private void mockOneRowWithOneColumnOfType(final int type) throws SQLException { when(this.mockResultSet.wasNull()).thenReturn(true); } + @Test + void testRewriteNumericWithValueNull() throws AdapterException, SQLException { + mockOneRowWithOneColumnOfType(Types.NUMERIC); + assertThat(this.queryRewriter.rewrite(this.statement, this.exaMetadata, AdapterProperties.emptyProperties()), + equalTo("SELECT * FROM VALUES (CAST (NULL AS DOUBLE))")); + } + @Test void testRewriteBigIntWithValueNull() throws AdapterException, SQLException { mockOneRowWithOneColumnOfType(Types.BIGINT); @@ -183,14 +247,14 @@ void testRewriteBigIntWithValueNull() throws AdapterException, SQLException { void testRewriteTimestampWithValueNull() throws AdapterException, SQLException { mockOneRowWithOneColumnOfType(Types.TIMESTAMP); assertThat(this.queryRewriter.rewrite(this.statement, this.exaMetadata, AdapterProperties.emptyProperties()), - equalTo("SELECT * FROM VALUES (CAST (NULL AS VARCHAR(4)))")); + equalTo("SELECT * FROM VALUES (CAST (NULL AS TIMESTAMP))")); } @Test void testRewriteDateWithValueNull() throws AdapterException, SQLException { mockOneRowWithOneColumnOfType(Types.DATE); assertThat(this.queryRewriter.rewrite(this.statement, this.exaMetadata, AdapterProperties.emptyProperties()), - equalTo("SELECT * FROM VALUES (CAST (NULL AS VARCHAR(4)))")); + equalTo("SELECT * FROM VALUES (CAST (NULL AS DATE))")); } @Test diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryVirtualSchemaIT.java b/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryVirtualSchemaIT.java new file mode 100644 index 0000000..51603b8 --- /dev/null +++ 
b/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryVirtualSchemaIT.java @@ -0,0 +1,151 @@ +package com.exasol.adapter.dialects.bigquery; + +import static java.util.stream.Collectors.toList; +import static java.util.stream.Collectors.toMap; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + +import java.math.BigDecimal; +import java.sql.*; +import java.time.*; +import java.util.*; +import java.util.Date; +import java.util.stream.Stream; + +import org.junit.jupiter.api.*; + +import com.exasol.adapter.dialects.bigquery.util.BigQueryDatasetFixture.BigQueryTable; +import com.exasol.adapter.dialects.bigquery.util.IntegrationTestSetup; +import com.exasol.adapter.dialects.bigquery.util.TestConfig; +import com.exasol.dbbuilder.dialects.exasol.VirtualSchema; +import com.exasol.matcher.ResultSetStructureMatcher; +import com.google.cloud.bigquery.*; + +@Tag("integration") +class BigQueryVirtualSchemaIT { + private static final TestConfig CONFIG = TestConfig.read(); + private static IntegrationTestSetup setup; + + @BeforeAll + static void beforeAll() { + assumeTrue(CONFIG.hasGoogleCloudCredentials(), "Local bigquery emulator not yet supported"); + setup = IntegrationTestSetup.create(CONFIG); + } + + @AfterAll + static void afterAll() throws Exception { + if (setup != null) { + setup.close(); + } + } + + @AfterEach + void after() { + setup.dropCreatedObjects(); + } + + @Test + void emptyResultMapsColumnTypeToSmallInt() throws SQLException { + final BigQueryTable table = setup.bigQueryDataset().createTable( + Schema.of(Field.of("id", StandardSQLTypeName.INT64), Field.of("name", StandardSQLTypeName.STRING))); + final VirtualSchema virtualSchema = setup.createVirtualSchema("virtualSchema"); + final ResultSet result = setup.getStatement() + .executeQuery("SELECT * FROM " + table.getQualifiedName(virtualSchema)); + assertThat(result, ResultSetStructureMatcher.table("SMALLINT", "SMALLINT").matches()); + } + + 
List createDataTypes() { + return List.of(DataTypeTestCase.of(StandardSQLTypeName.STRING, "val", "VARCHAR", "val"), + DataTypeTestCase.of(StandardSQLTypeName.NUMERIC, 123.456, "DOUBLE PRECISION", 123.456D), + DataTypeTestCase.of(StandardSQLTypeName.INT64, 123456, "DECIMAL", BigDecimal.valueOf(123456)), + DataTypeTestCase.of(StandardSQLTypeName.BIGNUMERIC, 423450983425L, "DOUBLE PRECISION", + 4.23450983425E11D), + DataTypeTestCase.of(StandardSQLTypeName.BOOL, true, "BOOLEAN", true), + DataTypeTestCase.of(StandardSQLTypeName.DATE, "2022-07-25", "DATE", date("2022-07-25")), + DataTypeTestCase.of(StandardSQLTypeName.DATETIME, "2022-03-15 15:40:30.123", "TIMESTAMP", + timestamp("2022-03-15T15:40:30.123Z")), + DataTypeTestCase.of(StandardSQLTypeName.TIMESTAMP, "2022-03-15 15:40:30.123", "TIMESTAMP", + timestamp("2022-03-15T15:40:30.123Z")), + DataTypeTestCase.of(StandardSQLTypeName.FLOAT64, 3.14, "DOUBLE PRECISION", 3.14D), + DataTypeTestCase.of(StandardSQLTypeName.GEOGRAPHY, "POINT(1 4)", "GEOMETRY", "POINT (1 4)"), + DataTypeTestCase.of(StandardSQLTypeName.TIME, "14:15:16.123", "VARCHAR", "14:15:16.123")); + } + + private static Date date(final String date) { + return Date.from(LocalDate.parse(date).atStartOfDay(ZoneId.of("UTC")).toInstant()); + } + + private static Timestamp timestamp(final String timestamp) { + return new Timestamp(Instant.parse(timestamp).toEpochMilli()); + } + + @TestFactory + Stream dataTypeConversion() { + final List tests = createDataTypes(); + final BigQueryTable table = prepareTable(tests); + final VirtualSchema virtualSchema = setup.createVirtualSchema("virtualSchema"); + return tests.stream().map(test -> DynamicTest.dynamicTest(test.getTestName(), () -> { + final ResultSet result = setup.getStatement().executeQuery("SELECT \"" + test.getColumnName() + "\" FROM " + + table.getQualifiedName(virtualSchema) + " ORDER BY \"id\" ASC"); + assertThat(result, + ResultSetStructureMatcher.table(test.expectedExasolType) + 
.withCalendar(Calendar.getInstance(TimeZone.getTimeZone("UTC"))) + .row(test.expectedExasolValue).row((Object) null).matches()); + })); + } + + private BigQueryTable prepareTable(final List tests) { + final List fields = new ArrayList<>(); + fields.add(Field.of("id", StandardSQLTypeName.INT64)); + fields.addAll(tests.stream().map(DataTypeTestCase::getField).collect(toList())); + final BigQueryTable table = setup.bigQueryDataset().createTable(Schema.of(fields)); + insertTestData(tests, table); + return table; + } + + private void insertTestData(final List tests, final BigQueryTable table) { + final Map rowWithData = tests.stream() + .collect(toMap(DataTypeTestCase::getColumnName, DataTypeTestCase::getBigQueryValue)); + rowWithData.put("id", 1); + final Map rowWithNulls = Map.of("id", 2); + table.insertRows(List.of(rowWithData, rowWithNulls)); + } + + static class DataTypeTestCase { + final Field field; + final StandardSQLTypeName bigQueryType; + final Object bigQueryValue; + final String expectedExasolType; + final Object expectedExasolValue; + + private DataTypeTestCase(final StandardSQLTypeName bigQueryType, final Object bigQueryValue, + final String expectedExasolType, final Object expectedExasolValue) { + this.bigQueryType = Objects.requireNonNull(bigQueryType); + this.bigQueryValue = Objects.requireNonNull(bigQueryValue); + this.expectedExasolType = expectedExasolType; + this.expectedExasolValue = expectedExasolValue; + this.field = Field.of("col_" + bigQueryType, bigQueryType); + } + + static DataTypeTestCase of(final StandardSQLTypeName bigQueryType, final Object bigQueryValue, + final String expectedExasolType, final Object expectedExasolValue) { + return new DataTypeTestCase(bigQueryType, bigQueryValue, expectedExasolType, expectedExasolValue); + } + + public String getTestName() { + return "Type " + bigQueryType + " mapped to " + expectedExasolType; + } + + public Field getField() { + return this.field; + } + + String getColumnName() { + return 
this.field.getName(); + } + + public Object getBigQueryValue() { + return this.bigQueryValue; + } + } +} diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/util/BigQueryDatasetFixture.java b/src/test/java/com/exasol/adapter/dialects/bigquery/util/BigQueryDatasetFixture.java new file mode 100644 index 0000000..d396c7f --- /dev/null +++ b/src/test/java/com/exasol/adapter/dialects/bigquery/util/BigQueryDatasetFixture.java @@ -0,0 +1,95 @@ +package com.exasol.adapter.dialects.bigquery.util; + +import static java.util.stream.Collectors.toList; + +import java.util.*; +import java.util.logging.Logger; + +import com.exasol.dbbuilder.dialects.exasol.VirtualSchema; +import com.google.cloud.bigquery.*; +import com.google.cloud.bigquery.InsertAllRequest.RowToInsert; + +public class BigQueryDatasetFixture implements AutoCloseable { + private static final Logger LOGGER = Logger.getLogger(BigQueryDatasetFixture.class.getName()); + private final List tables = new ArrayList<>(); + private final BigQuery client; + private final DatasetId datasetId; + + private BigQueryDatasetFixture(final BigQuery client, final DatasetId datasetId) { + this.client = client; + this.datasetId = datasetId; + } + + public static BigQueryDatasetFixture create(final BigQuery client, final String projectId) { + final DatasetId datasetId = DatasetId.of(projectId, "bigqueryVirtualSchemaTest" + System.currentTimeMillis()); + client.create(DatasetInfo.newBuilder(datasetId).build()); + return new BigQueryDatasetFixture(client, datasetId); + } + + public DatasetId getDatasetId() { + return datasetId; + } + + @Override + public void close() throws Exception { + final boolean success = client.delete(datasetId); + if (!success) { + throw new IllegalStateException("Failed to delete dataset " + datasetId); + } + } + + public void dropCreatedObjects() { + for (final BigQueryTable table : tables) { + table.close(); + } + tables.clear(); + } + + public BigQueryTable createSingleColumnTable(final Field 
field) { + return createTable(Schema.of(field)); + } + + public BigQueryTable createTable(final Schema schema) { + final TableId tableId = TableId.of(datasetId.getDataset(), "table" + System.currentTimeMillis()); + final TableDefinition tableDefinition = StandardTableDefinition.of(schema); + final TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build(); + LOGGER.fine("Creating BigQuery table " + tableId); + client.create(tableInfo); + final BigQueryTable table = new BigQueryTable(client, tableInfo); + this.tables.add(table); + return table; + } + + public final class BigQueryTable implements AutoCloseable { + private final BigQuery client; + private final TableInfo tableInfo; + + public BigQueryTable(final BigQuery client, final TableInfo tableInfo) { + this.client = client; + this.tableInfo = tableInfo; + } + + @Override + public void close() { + LOGGER.fine("Deleting BigQuery table " + tableInfo.getTableId()); + final boolean success = client.delete(tableInfo.getTableId()); + if (!success) { + throw new IllegalStateException("Failed to delete table " + tableInfo.getTableId()); + } + } + + public void insertRows(final List> rows) { + final InsertAllResponse response = client + .insertAll(InsertAllRequest.of(tableInfo, rows.stream().map(RowToInsert::of).collect(toList()))); + final List insertErrors = response.getInsertErrors().values().stream().flatMap(List::stream) + .map(BigQueryError::toString).collect(toList()); + if (!insertErrors.isEmpty()) { + throw new IllegalStateException("Failed to insert row: " + insertErrors); + } + } + + public String getQualifiedName(final VirtualSchema virtualSchema) { + return "\"" + virtualSchema.getName() + "\".\"" + tableInfo.getTableId().getTable() + "\""; + } + } +} diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/util/BigQueryEmulatorContainer.java b/src/test/java/com/exasol/adapter/dialects/bigquery/util/BigQueryEmulatorContainer.java new file mode 100644 index 0000000..37c2697 --- 
/dev/null +++ b/src/test/java/com/exasol/adapter/dialects/bigquery/util/BigQueryEmulatorContainer.java @@ -0,0 +1,75 @@ +package com.exasol.adapter.dialects.bigquery.util; + +import java.nio.file.Path; +import java.time.Duration; +import java.util.logging.Logger; + +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.utility.DockerImageName; + +import com.exasol.bucketfs.Bucket; +import com.exasol.exasoltestsetup.ServiceAddress; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryOptions; + +class BigQueryEmulatorContainer extends GenericContainer implements BigQueryTestSetup { + private static final Logger LOGGER = Logger.getLogger(BigQueryEmulatorContainer.class.getName()); + private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("ghcr.io/goccy/bigquery-emulator"); + private static final int PORT = 9050; + private static final String PROJECT_ID = "test"; + private final Path dataYaml; + + BigQueryEmulatorContainer(final Path dataYaml) { + this(DEFAULT_IMAGE_NAME, dataYaml); + } + + BigQueryEmulatorContainer(final DockerImageName dockerImageName, final Path dataYaml) { + super(dockerImageName); + this.dataYaml = dataYaml; + dockerImageName.assertCompatibleWith(DEFAULT_IMAGE_NAME); + withExposedPorts(PORT); + final String dataOption = dataYaml == null ?
"" : " --data-from-yaml=" + dataYaml.toAbsolutePath().toString(); + withCommand("/bin/sh", "-c", "/bin/bigquery-emulator --project=" + PROJECT_ID + " --port=" + PORT + dataOption); + waitingFor(Wait.forLogMessage("^\\[bigquery-emulator\\] listening at 0\\.0\\.0\\.0:" + PORT + ".*", 1)); + withStartupTimeout(Duration.ofSeconds(10)); + withStartupAttempts(1); + withReuse(false); + } + + @Override + public String getProjectId() { + return PROJECT_ID; + } + + private String getUrl() { + return "http://" + getServiceAddress(); + } + + @Override + public BigQuery getClient() { + final String url = getUrl(); + final String projectId = getProjectId(); + LOGGER.fine("Connecting to bigquery at " + url + " with project id '" + projectId + "'"); + return BigQueryOptions.newBuilder().setHost(url).setProjectId(projectId).build().getService(); + } + + @Override + public ServiceAddress getServiceAddress() { + return new ServiceAddress(getHost(), getMappedPort(PORT)); + } + + @Override + public String getJdbcUrl(final Bucket bucket, final ServiceAddress serviceAddress) { + final String hostAndPort = serviceAddress.toString(); + final String url = "http://" + hostAndPort; + return "jdbc:bigquery://" + url + ";ProjectId=" + getProjectId() // + + ";RootURL=" + url // + + ";OAuthType=2;OAuthAccessToken=dummy-token"; + } + + @Override + public void close() { + this.stop(); + } +} \ No newline at end of file diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/util/BigQueryTestSetup.java b/src/test/java/com/exasol/adapter/dialects/bigquery/util/BigQueryTestSetup.java new file mode 100644 index 0000000..eb91391 --- /dev/null +++ b/src/test/java/com/exasol/adapter/dialects/bigquery/util/BigQueryTestSetup.java @@ -0,0 +1,31 @@ +package com.exasol.adapter.dialects.bigquery.util; + +import java.nio.file.Paths; + +import com.exasol.bucketfs.Bucket; +import com.exasol.exasoltestsetup.ServiceAddress; +import com.google.cloud.bigquery.BigQuery; + +public interface BigQueryTestSetup
extends AutoCloseable { + + static BigQueryTestSetup createLocalSetup() { + return new BigQueryEmulatorContainer(Paths.get("src/test/resources/bigquery-data.yaml")); + } + + static BigQueryTestSetup createGoogleCloudSetup(final TestConfig config) { + return new GoogleCloudBigQuerySetup(config); + } + + BigQuery getClient(); + + ServiceAddress getServiceAddress(); + + String getProjectId(); + + String getJdbcUrl(Bucket bucket, ServiceAddress serviceAddress); + + void start(); + + @Override + void close(); +} diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/util/GoogleCloudBigQuerySetup.java b/src/test/java/com/exasol/adapter/dialects/bigquery/util/GoogleCloudBigQuerySetup.java new file mode 100644 index 0000000..7962947 --- /dev/null +++ b/src/test/java/com/exasol/adapter/dialects/bigquery/util/GoogleCloudBigQuerySetup.java @@ -0,0 +1,90 @@ +package com.exasol.adapter.dialects.bigquery.util; + +import java.io.*; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.concurrent.TimeoutException; + +import com.exasol.bucketfs.Bucket; +import com.exasol.bucketfs.BucketAccessException; +import com.exasol.exasoltestsetup.ServiceAddress; +import com.google.auth.Credentials; +import com.google.auth.oauth2.GoogleCredentials; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryOptions; + +public class GoogleCloudBigQuerySetup implements BigQueryTestSetup { + private final TestConfig config; + + public GoogleCloudBigQuerySetup(final TestConfig config) { + this.config = config; + } + + @Override + public BigQuery getClient() { + return BigQueryOptions.newBuilder() // + .setProjectId(getProjectId()) // + .setCredentials(createGoogleCredentials()) // + .build().getService(); + } + + private Credentials createGoogleCredentials() { + final Path privateKey = config.getGoogleCloudCredentials().privateKey; + try { + return GoogleCredentials.fromStream(Files.newInputStream(privateKey)); + } catch (final IOException 
exception) { + throw new UncheckedIOException("Failed to load credentials from " + privateKey, exception); + } + } + + @Override + public ServiceAddress getServiceAddress() { + return new ServiceAddress("www.googleapis.com", 443); + } + + @Override + public String getProjectId() { + return config.getGoogleProjectId(); + } + + @Override + public String getJdbcUrl(final Bucket bucket, final ServiceAddress serviceAddress) { + final String url = "https://" + serviceAddress.getHostName() + ":" + serviceAddress.getPort(); + final String bucketFsCredentialsPath = uploadCredentials(bucket); + return "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;RootURL=" + url + ";ProjectId=" + + getProjectId() + ";OAuthType=0;OAuthServiceAcctEmail=" + + config.getGoogleCloudCredentials().serviceAccountEmail + ";OAuthPvtKeyPath=" + + bucketFsCredentialsPath; + } + + private String uploadCredentials(final Bucket bucket) { + final Path privateKey = config.getGoogleCloudCredentials().privateKey; + try { + final String filename = privateKey.getFileName().toString(); + bucket.uploadFile(privateKey, filename); + return IntegrationTestSetup.BUCKETFS_ROOT_PATH + filename; + } catch (FileNotFoundException | BucketAccessException | TimeoutException exception) { + throw new IllegalStateException("Failed to upload google cloud credentials", exception); + } + } + + public static class GoogleCloudCredentials { + private final String serviceAccountEmail; + private final Path privateKey; + + public GoogleCloudCredentials(final String serviceAccountEmail, final Path privateKey) { + this.serviceAccountEmail = serviceAccountEmail; + this.privateKey = privateKey; + } + } + + @Override + public void start() { + // ignore + } + + @Override + public void close() { + // nothing to do + } +} diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/util/IntegrationTestSetup.java b/src/test/java/com/exasol/adapter/dialects/bigquery/util/IntegrationTestSetup.java new file mode 100644 index 
0000000..568b652 --- /dev/null +++ b/src/test/java/com/exasol/adapter/dialects/bigquery/util/IntegrationTestSetup.java @@ -0,0 +1,176 @@ +package com.exasol.adapter.dialects.bigquery.util; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.net.URISyntaxException; +import java.nio.file.Path; +import java.sql.*; +import java.util.*; +import java.util.concurrent.TimeoutException; +import java.util.logging.Logger; + +import org.jetbrains.annotations.NotNull; + +import com.exasol.bucketfs.Bucket; +import com.exasol.bucketfs.BucketAccessException; +import com.exasol.dbbuilder.dialects.DatabaseObject; +import com.exasol.dbbuilder.dialects.exasol.*; +import com.exasol.exasoltestsetup.*; +import com.exasol.udfdebugging.UdfTestSetup; +import com.google.cloud.bigquery.BigQuery; + +public class IntegrationTestSetup implements AutoCloseable { + private static final Logger LOGGER = Logger.getLogger(IntegrationTestSetup.class.getName()); + private static final String ADAPTER_JAR = "virtual-schema-dist-9.0.5-bigquery-2.0.2.jar"; + public static final String BUCKETFS_ROOT_PATH = "/buckets/bfsdefault/default/"; + public static final Path ADAPTER_JAR_LOCAL_PATH = Path.of("target", ADAPTER_JAR); + + private final BigQueryTestSetup bigQueryTestSetup; + private final ExasolTestSetup exasolTestSetup; + private final Connection connection; + private final Statement statement; + private final ExasolObjectFactory exasolObjectFactory; + private final AdapterScript adapterScript; + private final ConnectionDefinition connectionDefinition; + private final List createdObjects = new LinkedList<>(); + private final UdfTestSetup udfTestSetup; + private final BigQueryDatasetFixture bigQueryDataset; + + private IntegrationTestSetup(final BigQueryTestSetup bigQueryTestSetup, final ExasolTestSetup exasolTestSetup) + throws SQLException, BucketAccessException, TimeoutException, IOException, URISyntaxException { + this.bigQueryTestSetup = bigQueryTestSetup; + 
this.bigQueryDataset = BigQueryDatasetFixture.create(bigQueryTestSetup.getClient(), + bigQueryTestSetup.getProjectId()); + this.exasolTestSetup = exasolTestSetup; + this.connection = this.exasolTestSetup.createConnection(); + this.statement = this.connection.createStatement(); + this.statement.executeUpdate("ALTER SESSION SET QUERY_CACHE = 'OFF';"); + this.udfTestSetup = new UdfTestSetup(this.exasolTestSetup, this.connection); + final List jvmOptions = new ArrayList<>(Arrays.asList(this.udfTestSetup.getJvmOptions())); + this.exasolObjectFactory = new ExasolObjectFactory(this.connection, + ExasolObjectConfiguration.builder().withJvmOptions(jvmOptions.toArray(String[]::new)).build()); + final ExasolSchema adapterSchema = this.exasolObjectFactory.createSchema("ADAPTER"); + this.connectionDefinition = createConnectionDefinition(); + this.adapterScript = createAdapterScript(adapterSchema); + } + + public static IntegrationTestSetup create(final TestConfig config) { + if (config.isUdfLoggingEnabled()) { + System.setProperty("test.udf-logs", "true"); + } + final BigQueryTestSetup bigQueryTestSetup = createBigQueryTestSetup(config); + bigQueryTestSetup.start(); + try { + final ExasolTestSetup exasolTestSetup = new ExasolTestSetupFactory( + Path.of("cloudSetup/generated/testConfig.json")).getTestSetup(); + final IntegrationTestSetup setup = new IntegrationTestSetup(bigQueryTestSetup, exasolTestSetup); + return setup; + } catch (SQLException | BucketAccessException | TimeoutException | IOException | URISyntaxException exception) { + throw new IllegalStateException("Failed to create test setup: " + exception.getMessage(), exception); + } + } + + private static BigQueryTestSetup createBigQueryTestSetup(final TestConfig config) { + if (config.hasGoogleCloudCredentials()) { + LOGGER.info("Using Google Cloud BigQuery setup"); + return BigQueryTestSetup.createGoogleCloudSetup(config); + } else { + LOGGER.info("Using local BigQuery setup"); + return 
BigQueryTestSetup.createLocalSetup(); + } + } + + public ConnectionDefinition createConnectionDefinition() { + final ServiceAddress bigQueryServiceAddress = this.exasolTestSetup + .makeTcpServiceAccessibleFromDatabase(bigQueryTestSetup.getServiceAddress()); + return this.exasolObjectFactory.createConnectionDefinition("BIGQUERY_CONNECTION", + bigQueryTestSetup.getJdbcUrl(getBucket(), bigQueryServiceAddress), "", ""); + } + + AdapterScript createAdapterScript(final ExasolSchema adapterSchema) + throws FileNotFoundException, BucketAccessException, TimeoutException { + getBucket().uploadFile(ADAPTER_JAR_LOCAL_PATH, ADAPTER_JAR); + return adapterSchema.createAdapterScriptBuilder("ADAPTER_SCRIPT_BIGQUERY") + .bucketFsContent("com.exasol.adapter.RequestDispatcher", getAdapterJarsInBucketFs()) + .language(AdapterScript.Language.JAVA).build(); + } + + @NotNull + private String[] getAdapterJarsInBucketFs() { + final JdbcDriverProvider uploader = new JdbcDriverProvider(getBucket()); + final List jarFiles = uploader.uploadJdbcDriverToBucketFs( + "https://storage.googleapis.com/simba-bq-release/jdbc/SimbaJDBCDriverforGoogleBigQuery42_1.2.25.1029.zip"); + final List jars = new ArrayList<>(); + jars.add(BUCKETFS_ROOT_PATH + ADAPTER_JAR); + jars.addAll(jarFiles); + return jars.toArray(new String[0]); + } + + @Override + public void close() throws Exception { + this.bigQueryDataset.close(); + this.bigQueryTestSetup.close(); + this.udfTestSetup.close(); + this.statement.close(); + this.connection.close(); + this.exasolTestSetup.close(); + } + + public VirtualSchema createVirtualSchema(final String schemaName) { + final VirtualSchema virtualSchema = this.exasolObjectFactory.createVirtualSchemaBuilder(schemaName) + .connectionDefinition(this.connectionDefinition) // + .adapterScript(this.adapterScript) // + .sourceSchemaName(this.bigQueryDataset.getDatasetId().getDataset()) // + .properties(getVirtualSchemaProperties()) // + .build(); + 
this.createdObjects.add(virtualSchema); + return virtualSchema; + } + + private Map getVirtualSchemaProperties() { + final Map properties = new HashMap<>(); + properties.put("CATALOG_NAME", this.bigQueryDataset.getDatasetId().getProject()); + properties.put("LOG_LEVEL", "ALL"); + final String debugProperty = System.getProperty("test.debug", ""); + final String profileProperty = System.getProperty("test.jprofiler", ""); + if (!debugProperty.isBlank() || !profileProperty.isBlank()) { + properties.put("MAX_PARALLEL_UDFS", "1"); + } + if (System.getProperty("test.vs-logs", "false").equals("true")) { + properties.put("DEBUG_ADDRESS", "127.0.0.1:3001"); + } + return properties; + } + + public void dropCreatedObjects() { + for (final DatabaseObject createdObject : this.createdObjects) { + createdObject.drop(); + } + this.createdObjects.clear(); + this.bigQueryDataset.dropCreatedObjects(); + } + + public Connection getConnection() { + return connection; + } + + public Statement getStatement() { + return statement; + } + + public Bucket getBucket() { + return this.exasolTestSetup.getDefaultBucket(); + } + + public BigQuery getBigQueryClient() { + return this.bigQueryTestSetup.getClient(); + } + + public ExasolObjectFactory getExasolObjectFactory() { + return this.exasolObjectFactory; + } + + public BigQueryDatasetFixture bigQueryDataset() { + return this.bigQueryDataset; + } +} diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/util/JdbcDriverProvider.java b/src/test/java/com/exasol/adapter/dialects/bigquery/util/JdbcDriverProvider.java new file mode 100644 index 0000000..b30d9f7 --- /dev/null +++ b/src/test/java/com/exasol/adapter/dialects/bigquery/util/JdbcDriverProvider.java @@ -0,0 +1,96 @@ +package com.exasol.adapter.dialects.bigquery.util; + +import static java.util.stream.Collectors.toList; + +import java.io.*; +import java.net.*; +import java.nio.file.*; +import java.util.*; +import java.util.concurrent.TimeoutException; +import 
java.util.logging.Logger; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.StreamSupport; + +import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; +import org.apache.commons.compress.archivers.zip.ZipFile; + +import com.exasol.bucketfs.Bucket; +import com.exasol.bucketfs.BucketAccessException; + +class JdbcDriverProvider { + private static final Logger LOGGER = Logger.getLogger(JdbcDriverProvider.class.getName()); + private static final Pattern FILENAME_WITH_EXTENSION = Pattern.compile("([^/]*)$"); + private final Bucket bucket; + + JdbcDriverProvider(final Bucket bucket) { + this.bucket = bucket; + } + + List uploadJdbcDriverToBucketFs(final String jdbcDriverUrl) { + final String zipFileName = getFileName(jdbcDriverUrl); + final Path localFile = Paths.get("target").resolve(zipFileName).toAbsolutePath(); + + download(jdbcDriverUrl, localFile); + + uploadToBucketFs(localFile, zipFileName); + + return listZipContent(localFile).stream() // + .filter(name -> name.toLowerCase().endsWith(".jar")) // + .map(name -> getUdfPath(getFileNameWithoutExtension(zipFileName), name)) // + .collect(toList()); + } + + private void uploadToBucketFs(final Path localFile, final String fileName) { + try { + if (!bucket.listContents().contains(fileName.toString())) { + bucket.uploadFile(localFile, "/"); + } + } catch (FileNotFoundException | BucketAccessException | TimeoutException exception) { + throw new IllegalStateException("Error uploading to bucketfs", exception); + } + } + + private String getUdfPath(final String folder, final String fileName) { + return "/buckets/" + bucket.getFullyQualifiedBucketName() + "/" + folder + "/" + fileName; + } + + private List listZipContent(final Path localFile) { + try (ZipFile zip = new ZipFile(localFile.toFile())) { + return StreamSupport + .stream(Spliterators.spliteratorUnknownSize(zip.getEntries().asIterator(), Spliterator.ORDERED), + false) + 
.map(ZipArchiveEntry::getName).collect(toList()); + } catch (final IOException exception) { + throw new UncheckedIOException(exception); + } + } + + private String getFileName(final String jdbcDriverUrl) { + final Matcher matcher = FILENAME_WITH_EXTENSION.matcher(jdbcDriverUrl); + if (!matcher.find()) { + throw new IllegalArgumentException("Could not find filename in URL '" + jdbcDriverUrl + "'"); + } + return matcher.group(1); + } + + private String getFileNameWithoutExtension(final String name) { + return name.substring(0, name.lastIndexOf(".")); + } + + private void download(final String downloadUrl, final Path localCopy) { + if (Files.exists(localCopy)) { + LOGGER.info("File " + localCopy + " already exists, no need to download it"); + return; + } + try { + final URL remote = new URI(downloadUrl).toURL(); + LOGGER.info("Download " + remote + " to " + localCopy); + try (InputStream input = remote.openStream()) { + Files.copy(input, localCopy); + } + } catch (URISyntaxException | IOException exception) { + throw new IllegalStateException("Error downloading file from " + downloadUrl, exception); + } + } +} diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/util/TestConfig.java b/src/test/java/com/exasol/adapter/dialects/bigquery/util/TestConfig.java new file mode 100644 index 0000000..feb73a3 --- /dev/null +++ b/src/test/java/com/exasol/adapter/dialects/bigquery/util/TestConfig.java @@ -0,0 +1,80 @@ +package com.exasol.adapter.dialects.bigquery.util; + +import java.io.*; +import java.nio.file.*; +import java.util.Optional; +import java.util.Properties; +import java.util.logging.Logger; + +import com.exasol.adapter.dialects.bigquery.util.GoogleCloudBigQuerySetup.GoogleCloudCredentials; + +public class TestConfig { + private static final Logger LOG = Logger.getLogger(TestConfig.class.getName()); + private static final Path CONFIG_FILE = Paths.get("test.properties"); + + private final Properties properties; + + TestConfig(final Properties
properties) { + this.properties = properties; + } + + public static TestConfig read() { + if (Files.exists(CONFIG_FILE)) { + return read(CONFIG_FILE); + } else { + return new TestConfig(new Properties()); + } + } + + static TestConfig read(final Path configFile) { + final Path file = configFile.normalize(); + return new TestConfig(loadProperties(file)); + } + + private static Properties loadProperties(final Path configFile) { + if (!Files.exists(configFile)) { + throw new IllegalStateException("Config file not found at '" + configFile + "'"); + } + LOG.info("Reading config file from " + configFile); + try (InputStream stream = Files.newInputStream(configFile)) { + final Properties props = new Properties(); + props.load(stream); + return props; + } catch (final IOException e) { + throw new UncheckedIOException("Error reading config file " + configFile, e); + } + } + + public boolean hasGoogleCloudCredentials() { + return getOptionalValue("serviceAccountEmail").isPresent(); + } + + public GoogleCloudCredentials getGoogleCloudCredentials() { + final String serviceAccountEmail = getMandatoryValue("serviceAccountEmail"); + final Path privateKey = Paths.get(getMandatoryValue("privateKeyPath")).toAbsolutePath(); + if (!Files.exists(privateKey)) { + throw new IllegalArgumentException("Private key does not exist at " + privateKey); + } + return new GoogleCloudCredentials(serviceAccountEmail, privateKey); + } + + public String getGoogleProjectId() { + return getMandatoryValue("googleProjectId"); + } + + public boolean isUdfLoggingEnabled() { + return getOptionalValue("udfLoggingEnabled") // + .filter(v -> v.equalsIgnoreCase("true")) // + .isPresent(); + } + + public String getMandatoryValue(final String param) { + return getOptionalValue(param) + .orElseThrow(() -> new IllegalStateException("Property '" + param + "' not found in config file")); + } + + public Optional getOptionalValue(final String param) { + return Optional.ofNullable(this.properties.getProperty(param)); + } 
+ +} diff --git a/src/test/resources/bigquery-data.yaml b/src/test/resources/bigquery-data.yaml new file mode 100644 index 0000000..4babbb6 --- /dev/null +++ b/src/test/resources/bigquery-data.yaml @@ -0,0 +1,16 @@ +projects: +- id: test + datasets: + - id: dataset1 + tables: + - id: table_a + columns: + - name: id + type: INTEGER + - name: name + type: STRING + data: + - id: 1 + name: alice + - id: 2 + name: bob