diff --git a/.github/workflows/core.yml b/.github/workflows/core.yml
index 77fa3780e7f..09c7089cf02 100644
--- a/.github/workflows/core.yml
+++ b/.github/workflows/core.yml
@@ -40,7 +40,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        hadoop: [hadoop2, hadoop3]
+        hadoop: [hadoop3]
         java: [ 8, 11 ]
     steps:
       - name: Checkout
@@ -178,7 +178,7 @@ jobs:
           R -e "IRkernel::installspec()"
       - name: install environment
         run: |
-          ./mvnw install -DskipTests -pl python,rlang,zeppelin-jupyter-interpreter -am -Phadoop2 ${MAVEN_ARGS}
+          ./mvnw install -DskipTests -pl python,rlang,zeppelin-jupyter-interpreter -am -Phadoop3 ${MAVEN_ARGS}
       - name: run tests with ${{ matrix.python }}
         run: |
           ./mvnw test -pl python,rlang,zeppelin-jupyter-interpreter -DfailIfNoTests=false ${MAVEN_ARGS}
@@ -216,7 +216,7 @@ jobs:
             ${{ runner.os }}-zeppelin-
       - name: install environment
         run: |
-          ./mvnw install -DskipTests -Phadoop2 -Pintegration -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown,flink-cmd,flink/flink-scala-2.11,flink/flink-scala-2.12,jdbc,shell -am -Pflink-114 ${MAVEN_ARGS}
+          ./mvnw install -DskipTests -Phadoop3 -Pintegration -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown,flink-cmd,flink/flink-scala-2.11,flink/flink-scala-2.12,jdbc,shell -am -Pflink-114 ${MAVEN_ARGS}
           ./mvnw package -pl zeppelin-plugins -amd -DskipTests ${MAVEN_ARGS}
       - name: Setup conda environment with python 3.7 and R
         uses: conda-incubator/setup-miniconda@v2
@@ -233,7 +233,7 @@ jobs:
         run: |
           R -e "IRkernel::installspec()"
       - name: run tests
-        run: ./mvnw test -pl zeppelin-interpreter-integration -Phadoop2 -Pintegration -DfailIfNoTests=false -Dtest=ZeppelinClientIntegrationTest,ZeppelinClientWithAuthIntegrationTest,ZSessionIntegrationTest,ShellIntegrationTest,JdbcIntegrationTest
+        run: ./mvnw test -pl zeppelin-interpreter-integration -Phadoop3 -Pintegration -DfailIfNoTests=false -Dtest=ZeppelinClientIntegrationTest,ZeppelinClientWithAuthIntegrationTest,ZSessionIntegrationTest,ShellIntegrationTest,JdbcIntegrationTest
       - name: Print zeppelin logs
         if: always()
         run: if [ -d "logs" ]; then cat logs/*; fi
@@ -268,12 +268,12 @@ jobs:
       - name: install environment for flink before 1.15 (exclusive)
         if: matrix.flink < '115'
         run: |
-          ./mvnw install -DskipTests -am -pl flink/flink-scala-2.11,flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration ${MAVEN_ARGS}
+          ./mvnw install -DskipTests -am -pl flink/flink-scala-2.11,flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop3 -Pintegration ${MAVEN_ARGS}
           ./mvnw clean package -pl zeppelin-plugins -amd -DskipTests ${MAVEN_ARGS}
       - name: install environment for flink after 1.15 (inclusive)
         if: matrix.flink >= '115'
         run: |
-          ./mvnw install -DskipTests -am -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration ${MAVEN_ARGS}
+          ./mvnw install -DskipTests -am -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop3 -Pintegration ${MAVEN_ARGS}
           ./mvnw clean package -pl zeppelin-plugins -amd -DskipTests ${MAVEN_ARGS}
       - name: Setup conda environment with python 3.7 and
         uses: conda-incubator/setup-miniconda@v2
@@ -288,10 +288,10 @@ jobs:
           use-mamba: true
      - name: run tests for flink before 1.15 (exclusive)
        if: matrix.flink < '115'
-      run: ./mvnw verify -pl 
flink/flink-scala-2.11,flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration -DfailIfNoTests=false -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }} ${MAVEN_ARGS} + run: ./mvnw verify -pl flink/flink-scala-2.11,flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop3 -Pintegration -DfailIfNoTests=false -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }} ${MAVEN_ARGS} - name: run tests for flink after 1.15 (inclusive) if: matrix.flink >= '115' - run: ./mvnw verify -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -am -Phadoop2 -Pintegration -DfailIfNoTests=false -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }} ${MAVEN_ARGS} + run: ./mvnw verify -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -am -Phadoop3 -Pintegration -DfailIfNoTests=false -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }} ${MAVEN_ARGS} - name: Print zeppelin logs if: always() run: if [ -d "logs" ]; then cat logs/*; fi @@ -327,7 +327,7 @@ jobs: ${{ runner.os }}-zeppelin- - name: install environment run: | - ./mvnw install -DskipTests -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown -am -Phadoop2 -Pintegration ${MAVEN_ARGS} + ./mvnw install -DskipTests -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown -am -Phadoop3 -Pintegration ${MAVEN_ARGS} ./mvnw clean package -pl zeppelin-plugins -amd -DskipTests ${MAVEN_ARGS} - name: Setup conda environment with python 3.7 and R uses: conda-incubator/setup-miniconda@v2 @@ -376,7 +376,7 @@ jobs: restore-keys: | ${{ runner.os }}-zeppelin- - name: install environment - run: ./mvnw install -DskipTests -pl spark-submit,spark/scala-2.12,spark/scala-2.13 -am -Phadoop2 ${MAVEN_ARGS} + run: ./mvnw install -DskipTests -pl spark-submit,spark/scala-2.12,spark/scala-2.13 -am -Phadoop3 ${MAVEN_ARGS} - name: Setup conda environment with python ${{ matrix.python }} and R uses: conda-incubator/setup-miniconda@v2 with: @@ -394,11 +394,11 @@ jobs: - name: run spark-3.2 tests with scala-2.12 and python-${{ matrix.python }} run: | rm -rf spark/interpreter/metastore_db - ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.2 -Pspark-scala-2.12 -Phadoop2 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS} + ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.2 -Pspark-scala-2.12 -Phadoop3 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS} - name: run spark-3.2 tests with scala-2.13 and python-${{ matrix.python }} run: | rm -rf spark/interpreter/metastore_db - ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.2 -Pspark-scala-2.13 -Phadoop2 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS} + ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.2 -Pspark-scala-2.13 -Phadoop3 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS} - name: run spark-3.3 tests with scala-2.12 and python-${{ matrix.python }} run: | rm -rf spark/interpreter/metastore_db diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml index 56a2378ffb0..ee20c1caa0b 100644 --- a/.github/workflows/frontend.yml +++ 
b/.github/workflows/frontend.yml @@ -53,9 +53,9 @@ jobs: restore-keys: | ${{ runner.os }}-zeppelin- - name: Install application - run: ./mvnw clean install -DskipTests -am -pl zeppelin-web -Pscala-2.11 -Pspark-scala-2.12 -Pspark-3.4 -Phadoop2 -Pweb-dist ${MAVEN_ARGS} + run: ./mvnw clean install -DskipTests -am -pl zeppelin-web -Pscala-2.11 -Pspark-scala-2.12 -Pspark-3.4 -Phadoop3 -Pweb-dist ${MAVEN_ARGS} - name: Run headless test - run: xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24" ./mvnw verify -pl zeppelin-web -Pscala-2.12 -Pspark-scala-2.12 -Pspark-3.4 -Phadoop2 -Pweb-dist -Pweb-e2e ${MAVEN_ARGS} + run: xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24" ./mvnw verify -pl zeppelin-web -Pscala-2.12 -Pspark-scala-2.12 -Pspark-3.4 -Phadoop3 -Pweb-dist -Pweb-e2e ${MAVEN_ARGS} - name: Print zeppelin logs if: always() run: if [ -d "logs" ]; then cat logs/*; fi @@ -128,10 +128,10 @@ jobs: R -e "IRkernel::installspec()" - name: Install Environment run: | - ./mvnw clean install -DskipTests -am -pl zeppelin-integration -Pintegration -Pspark-scala-2.12 -Pspark-3.4 -Phadoop2 -Pweb-dist ${MAVEN_ARGS} + ./mvnw clean install -DskipTests -am -pl zeppelin-integration -Pintegration -Pspark-scala-2.12 -Pspark-3.4 -Phadoop3 -Pweb-dist ${MAVEN_ARGS} - name: run tests run: | - source ./testing/downloadSpark.sh "3.4.1" "3" && echo "SPARK_HOME: ${SPARK_HOME}" && xvfb-run --auto-servernum --server-args="-screen 0 1600x1024x16" ./mvnw verify -DfailIfNoTests=false -pl zeppelin-integration -Pintegration -Pspark-scala-2.12 -Pspark-3.4 -Phadoop2 -Pweb-dist -Pusing-source-tree ${MAVEN_ARGS} + source ./testing/downloadSpark.sh "3.4.1" "3" && echo "SPARK_HOME: ${SPARK_HOME}" && xvfb-run --auto-servernum --server-args="-screen 0 1600x1024x16" ./mvnw verify -DfailIfNoTests=false -pl zeppelin-integration -Pintegration -Pspark-scala-2.12 -Pspark-3.4 -Phadoop3 -Pweb-dist -Pusing-source-tree ${MAVEN_ARGS} - name: Print zeppelin logs if: always() run: if [ -d "logs" ]; then cat logs/*; fi diff --git a/.github/workflows/quick.yml b/.github/workflows/quick.yml index 5bc8ac17356..1b01389e558 100644 --- a/.github/workflows/quick.yml +++ b/.github/workflows/quick.yml @@ -41,7 +41,7 @@ jobs: strategy: fail-fast: false matrix: - hadoop: [hadoop2, hadoop3] + hadoop: [hadoop3] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/.gitignore b/.gitignore index 2d58c179ade..82ad003b7dd 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,10 @@ /interpreter/* !/interpreter/lib +# metals +.bloop +.metals + # interpreter temp files derby.log spark/metastore_db diff --git a/alluxio/pom.xml b/alluxio/pom.xml index d2a35e8986f..9175d4638e6 100644 --- a/alluxio/pom.xml +++ b/alluxio/pom.xml @@ -70,13 +70,6 @@ test - - org.apache.hadoop - hadoop-common - 3.2.4 - test - - diff --git a/docs/setup/basics/how_to_build.md b/docs/setup/basics/how_to_build.md index 32f9918c1a0..2fa61c5ffb5 100644 --- a/docs/setup/basics/how_to_build.md +++ b/docs/setup/basics/how_to_build.md @@ -134,7 +134,6 @@ Set hadoop major version (default hadoop3). 
 Available profiles are
 
 ```
--Phadoop2
 -Phadoop3
 ```
 
diff --git a/docs/setup/deployment/flink_and_spark_cluster.md b/docs/setup/deployment/flink_and_spark_cluster.md
index d395ccab67f..76f9063cf13 100644
--- a/docs/setup/deployment/flink_and_spark_cluster.md
+++ b/docs/setup/deployment/flink_and_spark_cluster.md
@@ -225,16 +225,16 @@ Building from source is recommended where possible, for simplicity in this tuto
 To download the Flink Binary use `wget`
 
 ```bash
-wget "http://mirror.cogentco.com/pub/apache/flink/flink-1.1.3/flink-1.1.3-bin-hadoop24-scala_2.10.tgz"
-tar -xzvf flink-1.1.3-bin-hadoop24-scala_2.10.tgz
+wget "http://mirror.cogentco.com/pub/apache/flink/flink-1.16.2/flink-1.16.2-bin-scala_2.12.tgz"
+tar -xzvf flink-1.16.2-bin-scala_2.12.tgz
 ```
 
-This will download Flink 1.1.3, compatible with Hadoop 2.4. You do not have to install Hadoop for this binary to work, but if you are using Hadoop, please change `24` to your appropriate version.
+This will download Flink 1.16.2.
 
 Start the Flink Cluster.
 
 ```bash
-flink-1.1.3/bin/start-cluster.sh
+flink-1.16.2/bin/start-cluster.sh
 ```
 
 ###### Building From source
@@ -295,12 +295,12 @@ Using binaries is also
 To download the Spark Binary use `wget`
 
 ```bash
-wget "http://d3kbcqa49mib13.cloudfront.net/spark-1.6.3-bin-hadoop2.6.tgz"
-tar -xzvf spark-1.6.3-bin-hadoop2.6.tgz
-mv spark-1.6.3-bin-hadoop2.6 spark
+wget "https://dlcdn.apache.org/spark/spark-3.4.1/spark-3.4.1-bin-hadoop3.tgz"
+tar -xzvf spark-3.4.1-bin-hadoop3.tgz
+mv spark-3.4.1-bin-hadoop3 spark
 ```
 
-This will download Spark 1.6.3, compatible with Hadoop 2.6. You do not have to install Hadoop for this binary to work, but if you are using Hadoop, please change `2.6` to your appropriate version.
+This will download Spark 3.4.1, compatible with Hadoop 3. You do not have to install Hadoop for this binary to work, but if you are using Hadoop, please change `3` to your appropriate version.
###### Building From source diff --git a/flink-cmd/pom.xml b/flink-cmd/pom.xml index 1a772face21..b5bf468a6f5 100644 --- a/flink-cmd/pom.xml +++ b/flink-cmd/pom.xml @@ -44,14 +44,7 @@ org.apache.hadoop - hadoop-common - ${hadoop.version} - provided - - - - org.apache.hadoop - hadoop-yarn-client + hadoop-client-runtime ${hadoop.version} provided diff --git a/flink/flink-scala-2.11/pom.xml b/flink/flink-scala-2.11/pom.xml index 33c40a3b733..289f8087682 100644 --- a/flink/flink-scala-2.11/pom.xml +++ b/flink/flink-scala-2.11/pom.xml @@ -25,9 +25,7 @@ 4.0.0 - org.apache.zeppelin flink-scala-2.11 - 0.11.0-SNAPSHOT jar Zeppelin: Flink Interpreter Scala_2.11 diff --git a/flink/flink-scala-2.12/pom.xml b/flink/flink-scala-2.12/pom.xml index b8e7cc9232b..f71ae8248df 100644 --- a/flink/flink-scala-2.12/pom.xml +++ b/flink/flink-scala-2.12/pom.xml @@ -25,9 +25,7 @@ 4.0.0 - org.apache.zeppelin flink-scala-2.12 - 0.11.0-SNAPSHOT jar Zeppelin: Flink Interpreter Scala_2.12 diff --git a/flink/flink-scala-parent/pom.xml b/flink/flink-scala-parent/pom.xml index 135fdd8617d..1878fb39336 100644 --- a/flink/flink-scala-parent/pom.xml +++ b/flink/flink-scala-parent/pom.xml @@ -36,17 +36,73 @@ flink ${flink1.13.version} - ${hadoop2.7.version} - 2.3.4 - 4.0.0 + ${hadoop3.2.version} + 3.1.2 + 6.1.0 1.15.0 _${flink.scala.binary.version} https://archive.apache.org/dist/flink/flink-${flink.version}/flink-${flink.version}-bin-scala_${flink.scala.binary.version}.tgz + + 9.4.52.v20230823 - + + + + org.eclipse.jetty + jetty-http + ${jetty.version} + + + org.eclipse.jetty + jetty-util + ${jetty.version} + + + org.eclipse.jetty + jetty-util-ajax + ${jetty.version} + + + org.eclipse.jetty + jetty-webapp + ${jetty.version} + + + org.eclipse.jetty + jetty-security + ${jetty.version} + + + org.eclipse.jetty + jetty-server + ${jetty.version} + + + org.eclipse.jetty + jetty-servlet + ${jetty.version} + + + org.eclipse.jetty + jetty-rewrite + ${jetty.version} + + + org.eclipse.jetty + jetty-runner + ${jetty.version} + + + org.eclipse.jetty + jetty-io + ${jetty.version} + + + + @@ -166,10 +222,6 @@ org.apache.hadoop * - - org.eclipse.jetty - * - @@ -298,35 +350,7 @@ org.apache.hadoop - hadoop-common - ${flink.hadoop.version} - provided - - - - org.apache.hadoop - hadoop-hdfs - ${flink.hadoop.version} - provided - - - - org.apache.hadoop - hadoop-yarn-common - ${flink.hadoop.version} - provided - - - - org.apache.hadoop - hadoop-yarn-client - ${flink.hadoop.version} - provided - - - - org.apache.hadoop - hadoop-mapreduce-client-core + hadoop-client-runtime ${flink.hadoop.version} provided @@ -351,14 +375,6 @@ com.google.guava guava - - io.netty - netty - - - io.netty - netty-all - com.google.protobuf protobuf-java @@ -440,14 +456,6 @@ com.google.guava guava - - io.netty - netty - - - io.netty - netty-all - com.google.protobuf protobuf-java @@ -473,10 +481,6 @@ com.google.guava guava - - io.netty - netty - javax.jms jms @@ -512,14 +516,6 @@ com.google.guava guava - - io.netty - netty - - - io.netty - netty-all - org.apache.logging.log4j log4j-slf4j-impl @@ -539,7 +535,7 @@ - com.klarna + io.github.hiverunner hiverunner ${hiverunner.version} test @@ -552,6 +548,16 @@ org.pentaho pentaho-aggdesigner-algorithm + + + io.netty + netty + + + + io.netty + netty-all + @@ -843,10 +849,10 @@ - + com.google org.apache.zeppelin.shaded.com.google @@ -1008,29 +1014,14 @@ - hive2 + hive3 true - 2.3.4 - 4.0.0 + 3.1.2 + 6.1.0 - - hive1 - - 1.2.1 - 3.2.1 - - - - org.apache.hadoop - hadoop-common - 2.7.5 - provided - - - diff --git 
a/flink/flink1.13-shims/pom.xml b/flink/flink1.13-shims/pom.xml index 8f4765ed8cc..1ad00463c3b 100644 --- a/flink/flink1.13-shims/pom.xml +++ b/flink/flink1.13-shims/pom.xml @@ -26,9 +26,7 @@ 4.0.0 - org.apache.zeppelin flink1.13-shims - 0.11.0-SNAPSHOT jar Zeppelin: Flink1.13 Shims diff --git a/flink/flink1.14-shims/pom.xml b/flink/flink1.14-shims/pom.xml index 8e7246f3d71..510f6e26363 100644 --- a/flink/flink1.14-shims/pom.xml +++ b/flink/flink1.14-shims/pom.xml @@ -26,9 +26,7 @@ 4.0.0 - org.apache.zeppelin flink1.14-shims - 0.11.0-SNAPSHOT jar Zeppelin: Flink1.14 Shims diff --git a/flink/flink1.15-shims/pom.xml b/flink/flink1.15-shims/pom.xml index 053c969c182..29d9adc618a 100644 --- a/flink/flink1.15-shims/pom.xml +++ b/flink/flink1.15-shims/pom.xml @@ -26,9 +26,7 @@ 4.0.0 - org.apache.zeppelin flink1.15-shims - 0.11.0-SNAPSHOT jar Zeppelin: Flink1.15 Shims diff --git a/flink/flink1.16-shims/pom.xml b/flink/flink1.16-shims/pom.xml index a94acd4d1e4..b92d76e08ae 100644 --- a/flink/flink1.16-shims/pom.xml +++ b/flink/flink1.16-shims/pom.xml @@ -26,9 +26,7 @@ 4.0.0 - org.apache.zeppelin flink1.16-shims - 0.11.0-SNAPSHOT jar Zeppelin: Flink1.16 Shims diff --git a/hbase/pom.xml b/hbase/pom.xml index c3b998a87f7..cdd07545f97 100644 --- a/hbase/pom.xml +++ b/hbase/pom.xml @@ -34,7 +34,7 @@ hbase 2.4.12 - ${hadoop2.7.version} + ${hadoop3.2.version} 1.6.8 2.5.0 2.12.1 diff --git a/jdbc/pom.xml b/jdbc/pom.xml index 4e9c4365adc..2e0d6d7087a 100644 --- a/jdbc/pom.xml +++ b/jdbc/pom.xml @@ -35,7 +35,7 @@ jdbc 42.4.3 - ${hadoop3.1.version} + ${hadoop3.2.version} 2.2.220 2.0.1 3.1.3 @@ -85,65 +85,11 @@ org.apache.hadoop - hadoop-client + hadoop-client-runtime ${hadoop.version} provided - - org.apache.hadoop - hadoop-common - ${hadoop.version} - provided - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-server - - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.apache.zookeeper - zookeeper - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - - org.apache.hive hive-jdbc @@ -173,18 +119,6 @@ - - org.apache.httpcomponents - httpcore - 4.4.1 - provided - - - org.apache.httpcomponents - httpclient - 4.5.13 - - net.jodah concurrentunit @@ -233,128 +167,6 @@ - - - jdbc-hadoop2 - - ${hadoop2.7.version} - - - - org.apache.hadoop - hadoop-common - ${hadoop-common.version} - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-server - - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.apache.zookeeper - zookeeper - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - - - - - - jdbc-hadoop3 - - ${hadoop3.0.version} - - - - org.apache.hadoop - hadoop-common - ${hadoop-common.version} - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-server - - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.apache.zookeeper - zookeeper - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - - - diff --git a/livy/pom.xml b/livy/pom.xml index 70743f438b4..ede0a103c00 100644 --- a/livy/pom.xml +++ b/livy/pom.xml @@ -41,7 +41,7 
@@ 0.7.1-incubating 2.4.8 - ${hadoop2.7.version} + ${hadoop3.2.version} @@ -79,30 +79,6 @@ org.apache.spark spark-yarn_${scala.binary.version} - - org.apache.hadoop - hadoop-auth - - - org.apache.hadoop - hadoop-common - - - org.apache.hadoop - hadoop-hdfs - - - org.apache.hadoop - hadoop-yarn-client - - - org.apache.hadoop - hadoop-client - - - org.apache.hadoop - hadoop-yarn-server-tests - @@ -177,125 +153,11 @@ org.apache.hadoop - hadoop-auth - ${hadoop.version} - test - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - test - - - com.google.guava - guava - - - - - - org.apache.hadoop - hadoop-common - tests - ${hadoop.version} - test - - - com.google.guava - guava - - - - - - org.apache.hadoop - hadoop-hdfs + hadoop-client-runtime ${hadoop.version} test - - - io.netty - netty - - - com.google.guava - guava - - - - org.apache.hadoop - hadoop-hdfs - tests - ${hadoop.version} - test - - - io.netty - netty - - - com.google.guava - guava - - - - - - org.apache.hadoop - hadoop-client - ${hadoop.version} - test - - - com.google.guava - guava - - - - - - org.apache.hadoop - hadoop-yarn-client - ${hadoop.version} - test - - - com.google.guava - guava - - - - - - org.apache.hadoop - hadoop-yarn-api - ${hadoop.version} - test - - - com.google.guava - guava - - - - - - org.apache.hadoop - hadoop-yarn-server-tests - tests - ${hadoop.version} - test - - - com.google.guava - guava - - - diff --git a/pom.xml b/pom.xml index 5991a316f4b..e0a96ec21a4 100644 --- a/pom.xml +++ b/pom.xml @@ -102,8 +102,8 @@ ${java.version} ${java.version} - ${scala.2.11.version} - 2.11 + ${scala.2.12.version} + 2.12 2.11.12 2.12.17 3.0.7 @@ -139,17 +139,12 @@ 3.6.3 4.1.14 1.6.0 + 3.0.2 - 2.7.7 - 3.0.3 - 3.1.3 3.2.4 3.3.6 - ${hadoop2.7.version} + ${hadoop3.2.version} provided - hadoop-client - hadoop-yarn-api - hadoop-client 2.3.2 1.5.4 @@ -327,14 +322,6 @@ ${commons.configuration2.version} - - - commons-lang - commons-lang - 2.6 - - commons-codec commons-codec @@ -359,6 +346,13 @@ ${commons.cli.version} + + + com.google.code.findbugs + jsr305 + ${findbugs.jsr305.version} + + org.apache.shiro @@ -390,144 +384,9 @@ org.apache.hadoop - ${hadoop-client-api.artifact} + hadoop-client-api ${hadoop.version} ${hadoop.deps.scope} - - - org.apache.zookeeper - zookeeper - - - org.apache.hadoop - hadoop-common - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-client - - - com.sun.jersey - jersey-server - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - io.netty - netty-all - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - com.google.guava - guava - - - com.google.code.findbugs - jsr305 - - - org.apache.commons - commons-math3 - - - com.fasterxml.jackson.core - jackson-annotations - - - com.nimbusds - nimbus-jose-jwt - - - org.eclipse.jetty - jetty-xml - - - org.eclipse.jetty - jetty-servlet - - - org.eclipse.jetty - jetty-util - - - commons-beanutils - commons-beanutils - - - org.apache.commons - commons-configuration2 - - - commons-beanutils - commons-beanutils-core - - - org.eclipse.jetty - jetty-webapp - - - com.fasterxml.jackson.module - jackson-module-jaxb-annotations - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind - - - - commons-logging - commons-logging - - @@ -535,48 
+394,6 @@ hadoop-yarn-common ${hadoop.version} ${hadoop.deps.scope} - - - asm - asm - - - org.ow2.asm - asm - - - org.jboss.netty - netty - - - javax.servlet - servlet-api - - - commons-logging - commons-logging - - - com.sun.jersey - * - - - com.sun.jersey.jersey-test-framework - * - - - com.sun.jersey.contribs - * - - - com.google.guava - guava - - - org.apache.commons - commons-compress - - @@ -584,73 +401,6 @@ hadoop-yarn-client ${hadoop.version} ${hadoop.deps.scope} - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-core-asl - - - com.google.guava - guava - - - com.google.code.findbugs - jsr305 - - - org.apache.commons - commons-math3 - - - - commons-logging - commons-logging - - @@ -658,73 +408,6 @@ hadoop-yarn-api ${hadoop.version} ${hadoop.deps.scope} - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-core-asl - - - com.google.guava - guava - - - com.google.code.findbugs - jsr305 - - - org.apache.commons - commons-math3 - - - - commons-logging - commons-logging - - @@ -735,125 +418,6 @@ ${hadoop.version} tests test - - - org.apache.hadoop - hadoop-yarn-common - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-client - - - com.sun.jersey - jersey-server - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - org.codehaus.jackson - jackson-core-asl - - - org.codehaus.jackson - jackson-jaxrs - - - org.codehaus.jackson - jackson-xc - - - org.codehaus.jackson - jackson-mapper-asl - - - com.google.guava - guava - - - javax.xml.bind - jaxb-api - - - com.fasterxml.jackson.core - jackson-core - - - org.eclipse.jetty - jetty-util - - - com.zaxxer - HikariCP-java7 - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.module - jackson-module-jaxb-annotations - - - - commons-logging - commons-logging - - - io.dropwizard.metrics - metrics-core - - - com.google.guava - guava - - - com.fasterxml.jackson.core - jackson-databind - - @@ -861,137 +425,6 @@ hadoop-common ${hadoop.version} ${hadoop.deps.scope} - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-client - - - com.sun.jersey - jersey-server - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - org.codehaus.jackson - jackson-mapper-asl - - 
- org.codehaus.jackson - jackson-core-asl - - - com.google.guava - guava - - - com.google.code.findbugs - jsr305 - - - org.apache.commons - commons-math3 - - - commons-beanutils - commons-beanutils - - - commons-beanutils - commons-beanutils-core - - - org.apache.commons - commons-configuration2 - - - org.apache.zookeeper - zookeeper - - - org.eclipse.jetty - jetty-servlet - - - org.eclipse.jetty - jetty-util - - - org.eclipse.jetty - jetty-webapp - - - org.eclipse.jetty - jetty-server - - - com.nimbusds - nimbus-jose-jwt - - - com.fasterxml.jackson.core - jackson-databind - - - - commons-logging - commons-logging - - - org.ow2.asm - asm - - - com.jamesmurty.utils - java-xmlbuilder - - @@ -1000,129 +433,6 @@ ${hadoop.version} tests test - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-client - - - com.sun.jersey - jersey-server - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-core-asl - - - com.google.guava - guava - - - com.google.code.findbugs - jsr305 - - - org.apache.commons - commons-math3 - - - commons-beanutils - commons-beanutils - - - org.apache.commons - commons-configuration2 - - - org.apache.zookeeper - zookeeper - - - org.eclipse.jetty - jetty-servlet - - - org.eclipse.jetty - jetty-util - - - org.eclipse.jetty - jetty-webapp - - - org.eclipse.jetty - jetty-server - - - com.nimbusds - nimbus-jose-jwt - - - com.fasterxml.jackson.core - jackson-databind - - - - commons-logging - commons-logging - - - org.ow2.asm - asm - - @@ -1177,192 +487,22 @@ org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - test - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-client - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - com.google.guava - guava - - - io.netty - netty-all - - - org.eclipse.jetty - jetty-util - - - com.fasterxml.jackson.core - jackson-annotations - - - - commons-logging - commons-logging - - - com.fasterxml.jackson.core - jackson-databind - - - - - - org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - tests - test - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-client - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - com.google.guava - guava - - - io.netty - netty-all - - - org.eclipse.jetty - jetty-util - - - com.fasterxml.jackson.core - jackson-annotations - - - - commons-logging - commons-logging - - - com.fasterxml.jackson.core - jackson-databind - - - - - - org.apache.hadoop - ${hadoop-client-runtime.artifact} + hadoop-client-runtime ${hadoop.version} ${hadoop.deps.scope} - commons-logging commons-logging - - - 
com.google.code.findbugs - jsr305 + commons-logging org.apache.hadoop - ${hadoop-client-minicluster.artifact} + hadoop-client-minicluster ${hadoop.version} test - @@ -1557,7 +697,7 @@ org.apache.maven.plugins maven-surefire-plugin ${plugin.surefire.version} - + -Xmx2g -Xms1g -Dfile.encoding=UTF-8 true diff --git a/rlang/pom.xml b/rlang/pom.xml index a291c63b51d..f9c819dcd91 100644 --- a/rlang/pom.xml +++ b/rlang/pom.xml @@ -116,18 +116,10 @@ org.apache.hadoop - hadoop-client + hadoop-client-runtime ${hadoop.version} compile - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - compile - - com.mashape.unirest unirest-java diff --git a/spark/interpreter/pom.xml b/spark/interpreter/pom.xml index f3686a6238e..a45b8a1a866 100644 --- a/spark/interpreter/pom.xml +++ b/spark/interpreter/pom.xml @@ -179,36 +179,6 @@ spark-core_${spark.scala.binary.version} ${spark.version} provided - - - org.apache.hadoop - hadoop-client - - - - - - org.apache.hadoop - hadoop-client - ${hadoop.version} - provided - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - provided - - - com.google.protobuf - protobuf-java - - - commons-lang - commons-lang - - diff --git a/spark/spark-scala-parent/pom.xml b/spark/spark-scala-parent/pom.xml index 8675d9db01a..a1424de37fe 100644 --- a/spark/spark-scala-parent/pom.xml +++ b/spark/spark-scala-parent/pom.xml @@ -80,21 +80,6 @@ provided - - - org.apache.hadoop - hadoop-client - ${hadoop.version} - provided - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - provided - - org.scala-lang scala-compiler diff --git a/submarine/pom.xml b/submarine/pom.xml index 55e6638af38..4d6ede76370 100644 --- a/submarine/pom.xml +++ b/submarine/pom.xml @@ -33,7 +33,7 @@ submarine - ${hadoop2.7.version} + ${hadoop3.2.version} 2.5.4 0.3.8 24.1.1-jre @@ -79,49 +79,8 @@ org.apache.hadoop - hadoop-common + hadoop-client-runtime ${hadoop.version} - - - org.apache.commons - commons-compress - - - com.google.guava - guava - - - org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-xc - - - org.codehaus.jackson - jackson-jaxrs - - - org.codehaus.jackson - jackson-core-asl - - - - - org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - - - com.google.guava - guava - - - io.netty - netty - - com.hubspot.jinjava diff --git a/zeppelin-interpreter-integration/pom.xml b/zeppelin-interpreter-integration/pom.xml index 1eb815d59cd..0fed1688235 100644 --- a/zeppelin-interpreter-integration/pom.xml +++ b/zeppelin-interpreter-integration/pom.xml @@ -27,7 +27,6 @@ zeppelin-interpreter-integration - 0.11.0-SNAPSHOT jar Zeppelin: Interpreter Integration Test @@ -58,10 +57,6 @@ zeppelin-zengine ${project.version} - - com.google.guava - guava - org.ow2.asm asm @@ -69,12 +64,6 @@ - - com.google.guava - guava - 20.0 - - org.apache.zeppelin zeppelin-server @@ -99,12 +88,6 @@ ${project.version} tests test - - - com.google.guava - guava - - @@ -178,21 +161,19 @@ ${hadoop3.3.version} - hadoop-client-runtime - hadoop-client-minicluster org.apache.hadoop - ${hadoop-client-runtime.artifact} + hadoop-client-runtime test org.apache.hadoop - ${hadoop-client-minicluster.artifact} + hadoop-client-minicluster test diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/FlinkIntegrationTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/FlinkIntegrationTest.java index 72dff00c9e4..343487c0cf6 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/FlinkIntegrationTest.java 
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/FlinkIntegrationTest.java @@ -18,6 +18,7 @@ package org.apache.zeppelin.integration; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse; import org.apache.hadoop.yarn.api.records.YarnApplicationState; @@ -55,7 +56,6 @@ public abstract class FlinkIntegrationTest { private static InterpreterSettingManager interpreterSettingManager; private String flinkVersion; - private String scalaVersion; private String hadoopHome; private String flinkHome; @@ -63,15 +63,15 @@ public void download(String flinkVersion, String scalaVersion) throws IOExceptio LOGGER.info("Testing FlinkVersion: " + flinkVersion); LOGGER.info("Testing ScalaVersion: " + scalaVersion); this.flinkVersion = flinkVersion; - this.scalaVersion = scalaVersion; this.flinkHome = DownloadUtils.downloadFlink(flinkVersion, scalaVersion); - this.hadoopHome = DownloadUtils.downloadHadoop("2.7.7"); + this.hadoopHome = DownloadUtils.downloadHadoop("3.2.4"); } @BeforeAll public static void setUp() throws IOException { Configuration conf = new Configuration(); conf.setBoolean(YarnConfiguration.YARN_MINICLUSTER_FIXED_PORTS, true); + conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, "target/hadoop-minicluster"); hadoopCluster = new MiniHadoopCluster(conf); hadoopCluster.start(); @@ -141,7 +141,6 @@ public void testLocalMode() throws IOException, YarnException, InterpreterExcept interpreterSettingManager.close(); } - // TODO(zjffdu) enable it when make yarn integration test work @Test public void testYarnMode() throws IOException, InterpreterException, YarnException { InterpreterSetting flinkInterpreterSetting = interpreterSettingManager.getInterpreterSettingByName("flink"); diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest32.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest32.java index dfd9be366da..27c511e64e1 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest32.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest32.java @@ -17,7 +17,6 @@ package org.apache.zeppelin.integration; -import org.apache.zeppelin.interpreter.InterpreterSetting; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; @@ -25,16 +24,6 @@ public class SparkIntegrationTest32 { - @Nested - @DisplayName("Hadoop2") - public class Hadoop2 extends SparkIntegrationTest { - - @BeforeEach - public void downloadSpark() throws IOException { - prepareSpark("3.2.0", "2.7"); - } - } - @Nested @DisplayName("Hadoop3") public class Hadoop3 extends SparkIntegrationTest { diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest33.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest33.java index 73846fcf647..9183257184c 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest33.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest33.java @@ -24,16 +24,6 @@ public class SparkIntegrationTest33 { - @Nested - 
@DisplayName("Hadoop2") - public class Hadoop2 extends SparkIntegrationTest { - - @BeforeEach - public void downloadSpark() throws IOException { - prepareSpark("3.3.0", "2"); - } - } - @Nested @DisplayName("Hadoop3") public class Hadoop3 extends SparkIntegrationTest { diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest34.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest34.java index 8af3ee6d5e7..d66bdad0536 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest34.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest34.java @@ -22,17 +22,7 @@ import org.junit.jupiter.api.Nested; import java.io.IOException; -public class SparkIntegrationTest34 extends SparkIntegrationTest { - - @Nested - @DisplayName("Hadoop2") - public class Hadoop2 extends SparkIntegrationTest { - - @BeforeEach - public void downloadSpark() throws IOException { - prepareSpark("3.4.0", "2"); - } - } +public class SparkIntegrationTest34 { @Nested @DisplayName("Hadoop3") diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinFlinkClusterTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinFlinkClusterTest.java index ac1952494d4..c14e002650d 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinFlinkClusterTest.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinFlinkClusterTest.java @@ -45,11 +45,9 @@ public abstract class ZeppelinFlinkClusterTest extends AbstractTestRestApi { private static final Logger LOGGER = LoggerFactory.getLogger(ZeppelinFlinkClusterTest.class); - private String flinkVersion; private String flinkHome; public void download(String flinkVersion, String scalaVersion) { - this.flinkVersion = flinkVersion; LOGGER.info("Testing FlinkVersion: " + flinkVersion); LOGGER.info("Testing ScalaVersion: " + scalaVersion); this.flinkHome = DownloadUtils.downloadFlink(flinkVersion, scalaVersion); diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest32.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest32.java index 18d5b701777..1f1b7692450 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest32.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest32.java @@ -23,16 +23,6 @@ public class ZeppelinSparkClusterTest32 { - @Nested - @DisplayName("Hadoop2") - public class Hadoop2 extends ZeppelinSparkClusterTest { - - @BeforeEach - public void downloadSpark() throws Exception { - prepareSpark("3.2.0", "2.7"); - } - } - @Nested @DisplayName("Hadoop3") public class Hadoop3 extends ZeppelinSparkClusterTest { diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest33.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest33.java index dbfacd9e400..43eb620f2d6 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest33.java +++ 
b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest33.java @@ -23,16 +23,6 @@ public class ZeppelinSparkClusterTest33 { - @Nested - @DisplayName("Hadoop2") - public class Hadoop2 extends ZeppelinSparkClusterTest { - - @BeforeEach - public void downloadSpark() throws Exception { - prepareSpark("3.3.0", "2"); - } - } - @Nested @DisplayName("Hadoop3") public class Hadoop3 extends ZeppelinSparkClusterTest { diff --git a/zeppelin-interpreter/pom.xml b/zeppelin-interpreter/pom.xml index f9104498c42..19faaec0306 100644 --- a/zeppelin-interpreter/pom.xml +++ b/zeppelin-interpreter/pom.xml @@ -248,58 +248,19 @@ - hadoop2 - + hadoop3 true - - ${hadoop2.7.version} - hadoop-client - hadoop-yarn-api - hadoop-client - - - - - org.apache.hadoop - hadoop-common - - - - org.apache.hadoop - hadoop-yarn-client - - - - - - hadoop3 - ${hadoop3.2.version} - hadoop-client-api - hadoop-client-runtime - hadoop-client-minicluster org.apache.hadoop - ${hadoop-client-runtime.artifact} - - - - org.apache.hadoop - ${hadoop-client-minicluster.artifact} - test - - - junit - junit - - + hadoop-client-runtime diff --git a/zeppelin-plugins/launcher/yarn/pom.xml b/zeppelin-plugins/launcher/yarn/pom.xml index d3e3f8265d4..72504c96aff 100644 --- a/zeppelin-plugins/launcher/yarn/pom.xml +++ b/zeppelin-plugins/launcher/yarn/pom.xml @@ -73,38 +73,12 @@ - hadoop2 - + hadoop3 true - - - ${hadoop2.7.version} - - - - org.apache.hadoop - hadoop-common - provided - ${hadoop.version} - - - - org.apache.hadoop - hadoop-client - provided - ${hadoop.version} - - - - - - hadoop3 - ${hadoop3.2.version} - hadoop-client-runtime diff --git a/zeppelin-plugins/notebookrepo/filesystem/pom.xml b/zeppelin-plugins/notebookrepo/filesystem/pom.xml index baa7ae743f6..6a0711fc204 100644 --- a/zeppelin-plugins/notebookrepo/filesystem/pom.xml +++ b/zeppelin-plugins/notebookrepo/filesystem/pom.xml @@ -49,42 +49,15 @@ - hadoop2 + hadoop3 true - - ${hadoop2.7.version} - - - - org.apache.hadoop - hadoop-common - provided - ${hadoop.version} - - - org.apache.hadoop - hadoop-client - provided - ${hadoop.version} - - - - - - hadoop3 ${hadoop3.2.version} - - org.apache.hadoop - hadoop-client - provided - ${hadoop.version} - org.apache.hadoop hadoop-client-runtime @@ -100,85 +73,10 @@ - - hadoop2-azure - - ${hadoop2.7.version} - - - - org.apache.hadoop - hadoop-azure - ${hadoop.version} - - - com.fasterxml.jackson.core - jackson-core - - - com.google.guava - guava - - - org.apache.commons - commons-lang3 - - - com.jcraf - jsch - - - org.apache.commons - commons-compress - - - - - com.microsoft.azure - azure-data-lake-store-sdk - ${adl.sdk.version} - - - com.fasterxml.jackson.core - jackson-core - - - - - - - - hadoop2-aws - - ${hadoop2.7.version} - - - - org.apache.hadoop - hadoop-aws - ${hadoop.version} - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind - - - - - - hadoop3-azure - ${hadoop3.0.version} + ${hadoop3.2.version} @@ -257,7 +155,7 @@ hadoop3-aws - ${hadoop3.0.version} + ${hadoop3.2.version} diff --git a/zeppelin-plugins/notebookrepo/s3/pom.xml b/zeppelin-plugins/notebookrepo/s3/pom.xml index d85aa3fa118..6d1469693ae 100644 --- a/zeppelin-plugins/notebookrepo/s3/pom.xml +++ b/zeppelin-plugins/notebookrepo/s3/pom.xml @@ -66,30 +66,6 @@ - - - com.google.inject - guice - 5.0.1 - - - - org.apache.hadoop - hadoop-yarn-client - ${hadoop2.7.version} - - - javax.xml.bind - jaxb-api - - - diff --git 
a/zeppelin-server/pom.xml b/zeppelin-server/pom.xml index c39b1a2a4a9..e45de961d60 100644 --- a/zeppelin-server/pom.xml +++ b/zeppelin-server/pom.xml @@ -409,7 +409,7 @@ maven-surefire-plugin - + 1 false -Xmx3g -Xms1g -Dfile.encoding=UTF-8 diff --git a/zeppelin-zengine/pom.xml b/zeppelin-zengine/pom.xml index 9cd879c815b..d3257297dbe 100644 --- a/zeppelin-zengine/pom.xml +++ b/zeppelin-zengine/pom.xml @@ -38,10 +38,22 @@ 0.9.8 1.4.01 2.6.0 + + 2.9.8 4.5.4.201711221230-r 1.6 + + + + com.fasterxml.jackson.core + jackson-annotations + ${jackson.annocations.version} + + + + ${project.groupId} @@ -198,10 +210,7 @@ commons-vfs2 ${commons.vfs2.version} - - org.codehaus.plexus - plexus-utils - + org.apache.hadoop hadoop-hdfs-client @@ -308,56 +317,18 @@ - hadoop2 - + hadoop3 true - - - ${hadoop2.7.version} - - - - org.apache.hadoop - hadoop-common - - - - org.apache.hadoop - hadoop-yarn-client - - - - - - hadoop3 - ${hadoop3.2.version} - hadoop-client-api - hadoop-client-runtime - hadoop-client-minicluster - org.apache.hadoop - ${hadoop-client-runtime.artifact} - - - - org.apache.hadoop - ${hadoop-client-minicluster.artifact} - test - ${hadoop.version} - - - junit - junit - - + hadoop-client-runtime diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/integration/DownloadUtils.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/integration/DownloadUtils.java index bf07fcf28a8..9c6c4677fbd 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/integration/DownloadUtils.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/integration/DownloadUtils.java @@ -80,9 +80,6 @@ public static String downloadFlink(String flinkVersion, String scalaVersion) { runShellCommand(new String[]{"wget", "https://repo1.maven.org/maven2/org/apache/hive/hive-exec/2.3.4/hive-exec-2.3.4.jar", "-P", targetFlinkHomeFolder + "/lib"}); - runShellCommand(new String[]{"wget", - "https://repo1.maven.org/maven2/org/apache/flink/flink-shaded-hadoop2-uber/2.7.5-1.8.1/flink-shaded-hadoop2-uber-2.7.5-1.8.1.jar", - "-P", targetFlinkHomeFolder + "/lib"}); } catch (Exception e) { throw new RuntimeException("Fail to download jar", e); }