Skip to content

Commit d11cf79

Browse files
committed
Support python3.11
1 parent c5ad27b commit d11cf79

File tree

3 files changed

+18
-12
lines changed

3 files changed

+18
-12
lines changed

.github/workflows/main.yml

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -53,22 +53,26 @@ jobs:
5353
standalone_spark3_with_S3:
5454
runs-on: ubuntu-latest
5555

56+
strategy:
57+
matrix:
58+
version: [ {python: "3.9", spark: "3.3.1"}, {python: "3.11", spark: "3.4.2"} ]
59+
5660
steps:
5761
- uses: actions/checkout@v2
5862

5963
- name: Build spark-docker
60-
run: docker build -t spark-docker ./examples/spark-with-S3 --build-arg SPARK_INPUT_VERSION=3.3.1 --build-arg PYTHON_VERSION=3.9.15
64+
run: docker build -t spark-docker ./examples/spark-with-S3 --build-arg SPARK_INPUT_VERSION=${{ matrix.version.spark }} --build-arg PYTHON_VERSION=${{ matrix.version.python }}
6165

6266
- name: Build the docker-compose stack
6367
run: |
64-
export PYTHON_VERSION=3.9
68+
export PYTHON_VERSION=${{ matrix.version.python }}
6569
docker compose -f ./examples/spark-with-S3/docker-compose.yml up -d
6670
6771
- name: Check running containers
6872
run: docker ps -a
6973

7074
- name: Run spark Job
71-
run: docker exec spark-master ./examples/spark-with-S3/scripts/run_spark_example.sh python3.9 3.3.1
75+
run: docker exec spark-master ./examples/spark-with-S3/scripts/run_spark_example.sh python${{ matrix.version.python }} ${{ matrix.version.spark }}
7276

7377
# hadoop_hdfs:
7478
# runs-on: ubuntu-latest

examples/spark-with-S3/Dockerfile

Lines changed: 10 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -4,19 +4,20 @@ FROM debian:bookworm
44

55
RUN apt-get update
66
ARG SPARK_INPUT_VERSION=3.3.1
7-
ARG PYTHON_VERSION=3.9.15
7+
ARG PYTHON_VERSION=3.9
88

99
# Python
1010
RUN apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \
1111
libreadline-dev libsqlite3-dev wget curl unzip llvm libncurses5-dev libncursesw5-dev \
12-
xz-utils tk-dev libffi-dev liblzma-dev
13-
RUN curl https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz --output /usr/python.tgz
14-
RUN tar xf /usr/python.tgz -C /usr && rm /usr/python.tgz
15-
RUN cd /usr/Python-${PYTHON_VERSION} \
16-
&& ./configure --enable-shared \
17-
&& make -j8 \
18-
&& make altinstall \
19-
&& cd -
12+
xz-utils tk-dev libffi-dev liblzma-dev git
13+
RUN curl https://pyenv.run | bash
14+
15+
ENV PATH=/root/.pyenv/bin:$PATH
16+
17+
RUN eval "$(pyenv init -)"
18+
19+
RUN pyenv install ${PYTHON_VERSION}
20+
RUN pyenv global ${PYTHON_VERSION}
2021

2122
# Java
2223
RUN apt-get install -y openjdk-17-jre \

examples/spark-with-S3/scripts/run_spark_example.sh

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
#!/bin/bash
2+
eval "$(pyenv init -)"
23
rm -rf /tmp/pyspark_env
34
$1 -m venv /tmp/pyspark_env
45
. /tmp/pyspark_env/bin/activate

0 commit comments

Comments
 (0)