
Commit 50ca277

Bump Spark to 3.5.5
1 parent 9f9159f commit 50ca277

7 files changed (+114, −62 lines)

.github/workflows/util/install_spark_resources.sh

+10 −10
@@ -63,26 +63,26 @@ case "$1" in
   3.5)
     # Spark-3.5
     cd ${INSTALL_DIR} && \
-    wget -nv https://archive.apache.org/dist/spark/spark-3.5.2/spark-3.5.2-bin-hadoop3.tgz && \
-    tar --strip-components=1 -xf spark-3.5.2-bin-hadoop3.tgz spark-3.5.2-bin-hadoop3/jars/ && \
-    rm -rf spark-3.5.2-bin-hadoop3.tgz && \
+    wget -nv https://archive.apache.org/dist/spark/spark-3.5.5/spark-3.5.5-bin-hadoop3.tgz && \
+    tar --strip-components=1 -xf spark-3.5.5-bin-hadoop3.tgz spark-3.5.5-bin-hadoop3/jars/ && \
+    rm -rf spark-3.5.5-bin-hadoop3.tgz && \
     mkdir -p ${INSTALL_DIR}/shims/spark35/spark_home/assembly/target/scala-2.12 && \
     mv jars ${INSTALL_DIR}/shims/spark35/spark_home/assembly/target/scala-2.12 && \
-    wget -nv https://github.com/apache/spark/archive/refs/tags/v3.5.2.tar.gz && \
-    tar --strip-components=1 -xf v3.5.2.tar.gz spark-3.5.2/sql/core/src/test/resources/ && \
+    wget -nv https://github.com/apache/spark/archive/refs/tags/v3.5.5.tar.gz && \
+    tar --strip-components=1 -xf v3.5.5.tar.gz spark-3.5.5/sql/core/src/test/resources/ && \
     mkdir -p shims/spark35/spark_home/ && \
     mv sql shims/spark35/spark_home/
     ;;
   3.5-scala2.13)
     # Spark-3.5, scala 2.13
     cd ${INSTALL_DIR} && \
-    wget -nv https://archive.apache.org/dist/spark/spark-3.5.2/spark-3.5.2-bin-hadoop3.tgz && \
-    tar --strip-components=1 -xf spark-3.5.2-bin-hadoop3.tgz spark-3.5.2-bin-hadoop3/jars/ && \
-    rm -rf spark-3.5.2-bin-hadoop3.tgz && \
+    wget -nv https://archive.apache.org/dist/spark/spark-3.5.5/spark-3.5.5-bin-hadoop3.tgz && \
+    tar --strip-components=1 -xf spark-3.5.5-bin-hadoop3.tgz spark-3.5.5-bin-hadoop3/jars/ && \
+    rm -rf spark-3.5.5-bin-hadoop3.tgz && \
     mkdir -p ${INSTALL_DIR}/shims/spark35-scala2.13/spark_home/assembly/target/scala-2.13 && \
     mv jars ${INSTALL_DIR}/shims/spark35-scala2.13/spark_home/assembly/target/scala-2.13 && \
-    wget -nv https://github.com/apache/spark/archive/refs/tags/v3.5.2.tar.gz && \
-    tar --strip-components=1 -xf v3.5.2.tar.gz spark-3.5.2/sql/core/src/test/resources/ && \
+    wget -nv https://github.com/apache/spark/archive/refs/tags/v3.5.5.tar.gz && \
+    tar --strip-components=1 -xf v3.5.5.tar.gz spark-3.5.5/sql/core/src/test/resources/ && \
     mkdir -p shims/spark35-scala2.13/spark_home/ && \
     mv sql shims/spark35-scala2.13/spark_home/
     ;;
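Note: the CI jobs in velox_backend.yml below consume this helper directly. A minimal sketch of the invocation (the /opt install root is an assumption inferred from the /opt/shims/spark35 paths used by those jobs):

    # Re-provision the Spark 3.5.5 jars and test resources used by the spark-ut runs.
    rm -rf /opt/shims/spark35
    bash .github/workflows/util/install_spark_resources.sh 3.5
    # Jars land under ${INSTALL_DIR}/shims/spark35/spark_home/assembly/target/scala-2.12/jars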

.github/workflows/velox_backend.yml

+97 −45
@@ -616,7 +616,7 @@ jobs:
         with:
           name: arrow-jars-centos-7-${{github.sha}}
           path: /root/.m2/repository/org/apache/arrow/
-      - name: Prepare
+      - name: Prepare Python3.9 and PySpark 3.2.2
         run: |
           dnf module -y install python39 && \
           alternatives --set python3 /usr/bin/python3.9 && \
@@ -687,7 +687,7 @@ jobs:
         with:
           name: arrow-jars-centos-7-${{github.sha}}
           path: /root/.m2/repository/org/apache/arrow/
-      - name: Prepare
+      - name: Prepare Python3.9 and PySpark 3.3.1
         run: |
           dnf module -y install python39 && \
           alternatives --set python3 /usr/bin/python3.9 && \
@@ -823,7 +823,7 @@ jobs:
           ls -l /opt/shims/spark34/spark_home/
           $MVN_CMD clean test -Pspark-3.4 -Pjava-8 -Pbackends-velox -Pceleborn -Pdelta -Phudi -Pspark-ut \
             -DtagsToExclude=org.apache.spark.tags.ExtendedSQLTest,org.apache.gluten.tags.UDFTest,org.apache.gluten.tags.SkipTestTags \
-            -DargLine="-Dspark.test.home=/opt/shims/spark34/spark_home/"
+            -DargLine="-Dspark.test.home=/opt/shims/spark34/spark_home/"
       - name: Upload test report
         if: always()
         uses: actions/upload-artifact@v4
@@ -892,15 +892,15 @@ jobs:
           ls -l /opt/shims/spark34/spark_home/
           $MVN_CMD clean test -Pspark-3.4 -Pjava-8 -Pbackends-velox -Pceleborn -Pdelta -Pspark-ut -Phudi \
             -DtagsToInclude=org.apache.spark.tags.ExtendedSQLTest \
-            -DargLine="-Dspark.test.home=/opt/shims/spark34/spark_home/"
+            -DargLine="-Dspark.test.home=/opt/shims/spark34/spark_home/"
       - name: Upload test report
         if: always()
         uses: actions/upload-artifact@v4
         with:
           name: test-report-spark34-slow-jdk8
           path: '**/surefire-reports/TEST-*.xml'

-  run-spark-test-spark35:
+  run-spark-test-spark35-jdk8:
     needs: build-native-lib-centos-7
     runs-on: ubuntu-20.04
     container: apache/gluten:centos-8
@@ -916,34 +916,34 @@ jobs:
         with:
           name: arrow-jars-centos-7-${{github.sha}}
           path: /root/.m2/repository/org/apache/arrow/
-      - name: Prepare
+      - name: Prepare Python3.9 and PySpark 3.5.5
         run: |
           dnf module -y install python39 && \
           alternatives --set python3 /usr/bin/python3.9 && \
           pip3 install setuptools && \
-          pip3 install pyspark==3.5.2 cython && \
+          pip3 install pyspark==3.5.5 cython && \
           pip3 install pandas pyarrow
-      - name: Build and Run unit test for Spark 3.5.2 (other tests)
+      - name: Build and Run unit test for Spark 3.5.5 (other tests)
         run: |
           cd $GITHUB_WORKSPACE/
           export SPARK_SCALA_VERSION=2.12
-          $MVN_CMD clean test -Pspark-3.5 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Phudi -Pspark-ut \
+          $MVN_CMD clean test -Pspark-3.5 -Pbackends-velox -Pceleborn -Pdelta -Phudi -Pspark-ut \
             -DargLine="-Dspark.test.home=/opt/shims/spark35/spark_home/" \
             -DtagsToExclude=org.apache.spark.tags.ExtendedSQLTest,org.apache.gluten.tags.UDFTest,org.apache.gluten.tags.SkipTestTags
       - name: Upload test report
         if: always()
         uses: actions/upload-artifact@v4
         with:
-          name: test-report-spark35
+          name: test-report-spark35-jdk8
           path: '**/surefire-reports/TEST-*.xml'
       - name: Upload golden files
         if: failure()
         uses: actions/upload-artifact@v4
         with:
-          name: golden-files-spark35
+          name: golden-files-spark35-jdk8
           path: /tmp/tpch-approved-plan/**

-  run-spark-test-spark35-jdk17:
+  run-spark-test-spark35:
     needs: build-native-lib-centos-7
     runs-on: ubuntu-20.04
     container: apache/gluten:centos-8-jdk17
@@ -978,7 +978,7 @@ jobs:
         if: always()
         uses: actions/upload-artifact@v4
         with:
-          name: test-report-spark35-jdk17
+          name: test-report-spark35
           path: '**/surefire-reports/TEST-*.xml'
       - name: Upload golden files
         if: failure()
@@ -990,7 +990,7 @@ jobs:
   run-spark-test-spark35-scala213:
     needs: build-native-lib-centos-7
     runs-on: ubuntu-20.04
-    container: apache/gluten:centos-8
+    container: apache/gluten:centos-8-jdk17
     steps:
       - uses: actions/checkout@v2
       - name: Download All Artifacts
@@ -1003,19 +1003,20 @@ jobs:
         with:
           name: arrow-jars-centos-7-${{github.sha}}
           path: /root/.m2/repository/org/apache/arrow/
-      - name: Prepare
+      - name: Prepare Python3.9 and PySpark 3.5.5
         run: |
           dnf module -y install python39 && \
           alternatives --set python3 /usr/bin/python3.9 && \
           pip3 install setuptools && \
-          pip3 install pyspark==3.5.2 cython && \
+          pip3 install pyspark==3.5.5 cython && \
           pip3 install pandas pyarrow
-      - name: Build and Run unit test for Spark 3.5.2 with scala-2.13 (other tests)
+      - name: Build and Run unit test for Spark 3.5.5 with scala-2.13 (other tests)
         run: |
           cd $GITHUB_WORKSPACE/
           export SPARK_SCALA_VERSION=2.13
-          $MVN_CMD clean test -Pspark-3.5 -Pscala-2.13 -Pbackends-velox -Pceleborn -Piceberg \
-            -Pdelta -Pspark-ut -DargLine="-Dspark.test.home=/opt/shims/spark35-scala2.13/spark_home/" \
+          export JAVA_HOME=/usr/lib/jvm/java-17-openjdk
+          $MVN_CMD clean test -Pspark-3.5 -Pscala-2.13 -Pjava-17 -Pbackends-velox -Pceleborn -Piceberg \
+            -Pdelta -Pspark-ut -DargLine="-Dspark.test.home=/opt/shims/spark35-scala2.13/spark_home/ ${EXTRA_FLAGS}" \
             -DtagsToExclude=org.apache.spark.tags.ExtendedSQLTest,org.apache.gluten.tags.UDFTest,org.apache.gluten.tags.SkipTestTags
       - name: Upload test report
         if: always()
@@ -1024,7 +1025,7 @@ jobs:
           name: test-report-spark35-scala213
           path: '**/surefire-reports/TEST-*.xml'

-  run-spark-test-spark35-slow:
+  run-spark-test-spark35-slow-jdk8:
     needs: build-native-lib-centos-7
     runs-on: ubuntu-20.04
     container: apache/gluten:centos-8
@@ -1040,12 +1041,50 @@ jobs:
         with:
           name: arrow-jars-centos-7-${{github.sha}}
           path: /root/.m2/repository/org/apache/arrow/
-      - name: Build and Run unit test for Spark 3.5.2 (slow tests)
+      - name: Prepare Spark Resources for Spark 3.5.5
+        run: |
+          rm -rf /opt/shims/spark35
+          bash .github/workflows/util/install_spark_resources.sh 3.5
+      - name: Build and Run unit test for Spark 3.5.5 (slow tests)
         run: |
           cd $GITHUB_WORKSPACE/
-          $MVN_CMD clean test -Pspark-3.5 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Phudi -Pspark-ut \
+          $MVN_CMD clean test -Pspark-3.5 -Pbackends-velox -Pceleborn -Pdelta -Phudi -Pspark-ut \
             -DargLine="-Dspark.test.home=/opt/shims/spark35/spark_home/" \
             -DtagsToInclude=org.apache.spark.tags.ExtendedSQLTest
+      - name: Upload test report
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: test-report-spark35-slow-jdk8
+          path: '**/surefire-reports/TEST-*.xml'
+
+  run-spark-test-spark35-slow:
+    needs: build-native-lib-centos-7
+    runs-on: ubuntu-20.04
+    container: apache/gluten:centos-8-jdk17
+    steps:
+      - uses: actions/checkout@v2
+      - name: Download All Artifacts
+        uses: actions/download-artifact@v4
+        with:
+          name: velox-native-lib-centos-7-${{github.sha}}
+          path: ./cpp/build/releases
+      - name: Download Arrow Jars
+        uses: actions/download-artifact@v4
+        with:
+          name: arrow-jars-centos-7-${{github.sha}}
+          path: /root/.m2/repository/org/apache/arrow/
+      - name: Prepare Spark Resources for Spark 3.5.5
+        run: |
+          rm -rf /opt/shims/spark35
+          bash .github/workflows/util/install_spark_resources.sh 3.5
+      - name: Build and Run unit test for Spark 3.5.5 (slow tests)
+        run: |
+          cd $GITHUB_WORKSPACE/
+          export JAVA_HOME=/usr/lib/jvm/java-17-openjdk
+          $MVN_CMD clean test -Pspark-3.5 -Pjava-17 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Phudi -Pspark-ut \
+            -DargLine="-Dspark.test.home=/opt/shims/spark35/spark_home/ ${EXTRA_FLAGS}" \
+            -DtagsToInclude=org.apache.spark.tags.ExtendedSQLTest
       - name: Upload test report
         if: always()
         uses: actions/upload-artifact@v4
@@ -1056,7 +1095,7 @@ jobs:
   run-spark-test-spark35-ras:
     needs: build-native-lib-centos-7
     runs-on: ubuntu-20.04
-    container: apache/gluten:centos-8
+    container: apache/gluten:centos-8-jdk17
     steps:
       - uses: actions/checkout@v2
      - name: Download All Artifacts
@@ -1069,19 +1108,20 @@ jobs:
         with:
           name: arrow-jars-centos-7-${{github.sha}}
           path: /root/.m2/repository/org/apache/arrow/
-      - name: Prepare
+      - name: Prepare Python3.9 and PySpark 3.5.5
         run: |
           dnf module -y install python39 && \
           alternatives --set python3 /usr/bin/python3.9 && \
           pip3 install setuptools && \
-          pip3 install pyspark==3.5.2 cython && \
+          pip3 install pyspark==3.5.5 cython && \
           pip3 install pandas pyarrow
-      - name: Build and Run unit test for Spark 3.5.2 (other tests)
+      - name: Build and Run unit test for Spark 3.5.5 (other tests)
         run: |
           cd $GITHUB_WORKSPACE/
           export SPARK_SCALA_VERSION=2.12
-          $MVN_CMD clean test -Pspark-3.5 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Pspark-ut \
-            -DargLine="-Dspark.test.home=/opt/shims/spark35/spark_home/ -Dspark.gluten.ras.enabled=true" \
+          export JAVA_HOME=/usr/lib/jvm/java-17-openjdk
+          $MVN_CMD clean test -Pspark-3.5 -Pjava-17 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Pspark-ut \
+            -DargLine="-Dspark.test.home=/opt/shims/spark35/spark_home/ -Dspark.gluten.ras.enabled=true ${EXTRA_FLAGS}" \
             -DtagsToExclude=org.apache.spark.tags.ExtendedSQLTest,org.apache.gluten.tags.UDFTest,org.apache.gluten.tags.SkipTestTags
       - name: Upload test report
         uses: actions/upload-artifact@v4
@@ -1092,7 +1132,7 @@ jobs:
   run-spark-test-spark35-slow-ras:
     needs: build-native-lib-centos-7
     runs-on: ubuntu-20.04
-    container: apache/gluten:centos-8
+    container: apache/gluten:centos-8-jdk17
     steps:
       - uses: actions/checkout@v2
       - name: Download All Artifacts
@@ -1105,11 +1145,16 @@ jobs:
         with:
           name: arrow-jars-centos-7-${{github.sha}}
           path: /root/.m2/repository/org/apache/arrow/
-      - name: Build and Run unit test for Spark 3.5.2 (slow tests)
+      - name: Prepare Spark Resources for Spark 3.5.5
+        run: |
+          rm -rf /opt/shims/spark35
+          bash .github/workflows/util/install_spark_resources.sh 3.5
+      - name: Build and Run unit test for Spark 3.5.5 (slow tests)
         run: |
           cd $GITHUB_WORKSPACE/
-          $MVN_CMD clean test -Pspark-3.5 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Pspark-ut \
-            -DargLine="-Dspark.test.home=/opt/shims/spark35/spark_home/ -Dspark.gluten.ras.enabled=true" \
+          export JAVA_HOME=/usr/lib/jvm/java-17-openjdk
+          $MVN_CMD clean test -Pspark-3.5 -Pjava-17 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Pspark-ut \
+            -DargLine="-Dspark.test.home=/opt/shims/spark35/spark_home/ -Dspark.gluten.ras.enabled=true ${EXTRA_FLAGS}" \
             -DtagsToInclude=org.apache.spark.tags.ExtendedSQLTest
       - name: Upload test report
         uses: actions/upload-artifact@v4
@@ -1120,7 +1165,7 @@ jobs:
   run-spark-test-spark35-smj:
     needs: build-native-lib-centos-7
     runs-on: ubuntu-20.04
-    container: apache/gluten:centos-8
+    container: apache/gluten:centos-8-jdk17
     steps:
       - uses: actions/checkout@v2
       - name: Download All Artifacts
@@ -1133,19 +1178,20 @@ jobs:
         with:
           name: arrow-jars-centos-7-${{github.sha}}
           path: /root/.m2/repository/org/apache/arrow/
-      - name: Prepare
+      - name: Prepare Python3.9 and PySpark 3.5.5
         run: |
           dnf module -y install python39 && \
           alternatives --set python3 /usr/bin/python3.9 && \
           pip3 install setuptools && \
-          pip3 install pyspark==3.5.2 cython && \
+          pip3 install pyspark==3.5.5 cython && \
           pip3 install pandas pyarrow
-      - name: Build and Run unit test for Spark 3.5.2 (other tests)
+      - name: Build and Run unit test for Spark 3.5.5 (other tests)
         run: |
           cd $GITHUB_WORKSPACE/
           export SPARK_SCALA_VERSION=2.12
-          $MVN_CMD clean test -Pspark-3.5 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Pspark-ut \
-            -DargLine="-Dspark.test.home=/opt/shims/spark35/spark_home/ -Dspark.gluten.sql.columnar.forceShuffledHashJoin=false" \
+          export JAVA_HOME=/usr/lib/jvm/java-17-openjdk
+          $MVN_CMD clean test -Pspark-3.5 -Pjava-17 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Pspark-ut \
+            -DargLine="-Dspark.test.home=/opt/shims/spark35/spark_home/ -Dspark.gluten.sql.columnar.forceShuffledHashJoin=false ${EXTRA_FLAGS}" \
             -DtagsToExclude=org.apache.spark.tags.ExtendedSQLTest,org.apache.gluten.tags.UDFTest,org.apache.gluten.tags.SkipTestTags
       - name: Upload test report
         uses: actions/upload-artifact@v4
@@ -1156,7 +1202,7 @@ jobs:
   run-spark-test-spark35-slow-smj:
     needs: build-native-lib-centos-7
     runs-on: ubuntu-20.04
-    container: apache/gluten:centos-8
+    container: apache/gluten:centos-8-jdk17
     steps:
       - uses: actions/checkout@v2
       - name: Download All Artifacts
@@ -1169,11 +1215,16 @@ jobs:
         with:
           name: arrow-jars-centos-7-${{github.sha}}
           path: /root/.m2/repository/org/apache/arrow/
-      - name: Build and Run unit test for Spark 3.5.2 (slow tests)
+      - name: Prepare Spark Resources for Spark 3.5.5
+        run: |
+          rm -rf /opt/shims/spark35
+          bash .github/workflows/util/install_spark_resources.sh 3.5
+      - name: Build and Run unit test for Spark 3.5.5 (slow tests)
         run: |
           cd $GITHUB_WORKSPACE/
-          $MVN_CMD clean test -Pspark-3.5 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Pspark-ut \
-            -DargLine="-Dspark.test.home=/opt/shims/spark35/spark_home/ -Dspark.gluten.sql.columnar.forceShuffledHashJoin=false" \
+          export JAVA_HOME=/usr/lib/jvm/java-17-openjdk
+          $MVN_CMD clean test -Pspark-3.5 -Pjava-17 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Pspark-ut \
+            -DargLine="-Dspark.test.home=/opt/shims/spark35/spark_home/ -Dspark.gluten.sql.columnar.forceShuffledHashJoin=false ${EXTRA_FLAGS}" \
             -DtagsToInclude=org.apache.spark.tags.ExtendedSQLTest
       - name: Upload test report
         uses: actions/upload-artifact@v4
@@ -1183,7 +1234,7 @@ jobs:

   run-cpp-test-udf-test:
     runs-on: ubuntu-20.04
-    container: apache/gluten:centos-8
+    container: apache/gluten:centos-8-jdk17
     steps:
       - uses: actions/checkout@v2
       - name: Get Ccache
@@ -1219,5 +1270,6 @@ jobs:
       - name: Run UDF test
         run: |
           # Depends on --build_example=ON.
-          $MVN_CMD test -Pspark-3.5 -Pbackends-velox -Piceberg -Pdelta -DtagsToExclude=None \
-            -DtagsToInclude=org.apache.gluten.tags.UDFTest
+          export JAVA_HOME=/usr/lib/jvm/java-17-openjdk
+          $MVN_CMD test -Pspark-3.5 -Pjava-17 -Pbackends-velox -Piceberg -Pdelta -DtagsToExclude=None \
+            -DtagsToInclude=org.apache.gluten.tags.UDFTest -DargLine="${EXTRA_FLAGS}"
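Taken together, the renamed jobs split Spark 3.5 testing into JDK 8 (centos-8 image) and JDK 17 (centos-8-jdk17 image) variants. The JDK 17 jobs share one invocation pattern, sketched below; EXTRA_FLAGS is assumed to be supplied by the centos-8-jdk17 image environment (it is not defined anywhere in this diff):

    # Common shape of the JDK 17 test steps added in this commit (sketch only).
    export JAVA_HOME=/usr/lib/jvm/java-17-openjdk
    $MVN_CMD clean test -Pspark-3.5 -Pjava-17 -Pbackends-velox -Pspark-ut \
      -DargLine="-Dspark.test.home=/opt/shims/spark35/spark_home/ ${EXTRA_FLAGS}"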

docs/get-started/Velox.md

+2 −2
@@ -9,7 +9,7 @@ parent: Getting-Started

 | Type  | Version                      |
 |-------|------------------------------|
-| Spark | 3.2.2, 3.3.1, 3.4.4, 3.5.2   |
+| Spark | 3.2.2, 3.3.1, 3.4.4, 3.5.5   |
 | OS    | Ubuntu20.04/22.04, Centos7/8 |
 | jdk   | openjdk8/jdk17               |
 | scala | 2.12                         |
@@ -18,7 +18,7 @@ parent: Getting-Started

 Currently, with static build Gluten+Velox backend supports all the Linux OSes, but is only tested on **Ubuntu20.04/Ubuntu22.04/Centos7/Centos8**. With dynamic build, Gluten+Velox backend support **Ubuntu20.04/Ubuntu22.04/Centos7/Centos8** and their variants.

-Currently, the officially supported Spark versions are 3.2.2, 3.3.1, 3.4.4 and 3.5.2.
+Currently, the officially supported Spark versions are 3.2.2, 3.3.1, 3.4.4 and 3.5.5.

 We need to set up the `JAVA_HOME` env. Currently, Gluten supports **java 8** and **java 17**.

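For the `JAVA_HOME` setup the doc mentions, the CI jobs in this commit use the form below; the path is distro-specific and the PATH update is an assumption (it is implied but not shown in the diff):

    export JAVA_HOME=/usr/lib/jvm/java-17-openjdk  # as used in the centos-8-jdk17 container
    export PATH=$JAVA_HOME/bin:$PATH               # assumed; not part of this diff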
docs/get-started/build-guide.md

+1 −1
@@ -74,4 +74,4 @@ It's name pattern is `gluten-<backend_type>-bundle-spark<spark.bundle.version>_<
 | 3.2.2 | 3.2 | 2.12 |
 | 3.3.1 | 3.3 | 2.12 |
 | 3.4.4 | 3.4 | 2.12 |
-| 3.5.2 | 3.5 | 2.12 |
+| 3.5.5 | 3.5 | 2.12 |
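A quick local sanity check that mirrors the PySpark side of the bump in the CI "Prepare" steps (sketch; assumes pip3 and python3 are on PATH):

    pip3 install pyspark==3.5.5
    python3 -c "import pyspark; print(pyspark.__version__)"  # expect 3.5.5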
