docker.io/apache/spark:3.4.2-python3 linux/amd64

docker.io/apache/spark:3.4.2-python3 - 国内下载镜像源 · 浏览次数: 22 · 安全受验证的发布者: apache

Apache Spark 镜像

该镜像包含 Apache Spark 的预构建环境,可用于运行 Spark 作业和应用程序。它提供了 Spark 的核心组件,包括:

  • Spark Core
  • Spark SQL
  • Spark Streaming
  • Spark MLlib
  • Spark GraphX

使用该镜像可以快速开始使用 Spark,无需手动安装和配置。它是一个易于使用且可靠的解决方案,适用于各种 Spark 应用场景。

源镜像 docker.io/apache/spark:3.4.2-python3
国内镜像 swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/apache/spark:3.4.2-python3
镜像ID sha256:14003ecb13485e9135564d907e8cb6bd0bf2e5dcf55339bfd2a166d8079dba54
镜像TAG 3.4.2-python3
大小 973.14MB
镜像源 docker.io
项目信息: Docker Hub 主页 · 项目 TAG
CMD
启动入口 /opt/entrypoint.sh
工作目录 /opt/spark/work-dir
OS/平台 linux/amd64
浏览量 22 次
贡献者
镜像创建 2023-12-02T13:32:34.326926817Z
同步时间 2025-10-21 15:46
更新时间 2025-10-22 09:20
环境变量
PATH=/opt/java/openjdk/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin JAVA_HOME=/opt/java/openjdk LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8 JAVA_VERSION=jdk-11.0.21+9 SPARK_TGZ_URL=https://archive.apache.org/dist/spark/spark-3.4.2/spark-3.4.2-bin-hadoop3.tgz SPARK_TGZ_ASC_URL=https://archive.apache.org/dist/spark/spark-3.4.2/spark-3.4.2-bin-hadoop3.tgz.asc GPG_KEY=F28C9C925C188C35E345614DEDA00CE834F0FC5C SPARK_HOME=/opt/spark
镜像标签
org.opencontainers.image.ref.name: ubuntu  org.opencontainers.image.version: 20.04

Docker拉取命令

docker pull swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/apache/spark:3.4.2-python3
docker tag  swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/apache/spark:3.4.2-python3  docker.io/apache/spark:3.4.2-python3

Containerd拉取命令

ctr images pull swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/apache/spark:3.4.2-python3
ctr images tag  swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/apache/spark:3.4.2-python3  docker.io/apache/spark:3.4.2-python3

Shell快速替换命令

sed -i 's#apache/spark:3.4.2-python3#swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/apache/spark:3.4.2-python3#' deployment.yaml

Ansible快速分发-Docker

#ansible k8s -m shell -a 'docker pull swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/apache/spark:3.4.2-python3 && docker tag  swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/apache/spark:3.4.2-python3  docker.io/apache/spark:3.4.2-python3'

Ansible快速分发-Containerd

#ansible k8s -m shell -a 'ctr images pull swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/apache/spark:3.4.2-python3 && ctr images tag  swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/apache/spark:3.4.2-python3  docker.io/apache/spark:3.4.2-python3'

镜像构建历史


# 2023-12-02 21:32:34  0.00B 指定运行容器时使用的用户
USER spark
                        
# 2023-12-02 21:32:34  302.99MB 执行命令并创建新的镜像层
RUN /bin/sh -c set -ex;     apt-get update;     apt-get install -y python3 python3-pip;     rm -rf /var/lib/apt/lists/* # buildkit
                        
# 2023-12-02 21:32:34  0.00B 指定运行容器时使用的用户
USER root
                        
# 2023-12-02 21:19:19  0.00B 配置容器启动时运行的命令
ENTRYPOINT ["/opt/entrypoint.sh"]
                        
# 2023-12-02 21:19:19  0.00B 指定运行容器时使用的用户
USER spark
                        
# 2023-12-02 21:19:19  0.00B 设置工作目录为/opt/spark/work-dir
WORKDIR /opt/spark/work-dir
                        
# 2023-12-02 21:19:19  0.00B 设置环境变量 SPARK_HOME
ENV SPARK_HOME=/opt/spark
                        
# 2023-12-02 21:19:19  4.54KB 复制新文件或目录到容器中
COPY entrypoint.sh /opt/ # buildkit
                        
# 2023-12-02 21:19:19  353.46MB 执行命令并创建新的镜像层
RUN |1 spark_uid=185 /bin/sh -c set -ex;     export SPARK_TMP="$(mktemp -d)";     cd $SPARK_TMP;     wget -nv -O spark.tgz "$SPARK_TGZ_URL";     wget -nv -O spark.tgz.asc "$SPARK_TGZ_ASC_URL";     export GNUPGHOME="$(mktemp -d)";     gpg --batch --keyserver hkps://keys.openpgp.org --recv-key "$GPG_KEY" ||     gpg --batch --keyserver hkps://keyserver.ubuntu.com --recv-keys "$GPG_KEY";     gpg --batch --verify spark.tgz.asc spark.tgz;     gpgconf --kill all;     rm -rf "$GNUPGHOME" spark.tgz.asc;         tar -xf spark.tgz --strip-components=1;     chown -R spark:spark .;     mv jars /opt/spark/;     mv bin /opt/spark/;     mv sbin /opt/spark/;     mv kubernetes/dockerfiles/spark/decom.sh /opt/;     mv examples /opt/spark/;     mv kubernetes/tests /opt/spark/;     mv data /opt/spark/;     mv python/pyspark /opt/spark/python/pyspark/;     mv python/lib /opt/spark/python/lib/;     mv R /opt/spark/;     chmod a+x /opt/decom.sh;     cd ..;     rm -rf "$SPARK_TMP"; # buildkit
                        
# 2023-12-02 21:18:47  0.00B 设置环境变量 SPARK_TGZ_URL SPARK_TGZ_ASC_URL GPG_KEY
ENV SPARK_TGZ_URL=https://archive.apache.org/dist/spark/spark-3.4.2/spark-3.4.2-bin-hadoop3.tgz SPARK_TGZ_ASC_URL=https://archive.apache.org/dist/spark/spark-3.4.2/spark-3.4.2-bin-hadoop3.tgz.asc GPG_KEY=F28C9C925C188C35E345614DEDA00CE834F0FC5C
                        
# 2023-12-02 21:18:47  57.33MB 执行命令并创建新的镜像层
RUN |1 spark_uid=185 /bin/sh -c set -ex;     apt-get update;     apt-get install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu libnss-wrapper;     mkdir -p /opt/spark;     mkdir /opt/spark/python;     mkdir -p /opt/spark/examples;     mkdir -p /opt/spark/work-dir;     chmod g+w /opt/spark/work-dir;     touch /opt/spark/RELEASE;     chown -R spark:spark /opt/spark;     echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su;     rm -rf /var/lib/apt/lists/* # buildkit
                        
# 2023-12-02 21:18:37  64.84KB 执行命令并创建新的镜像层
RUN |1 spark_uid=185 /bin/sh -c groupadd --system --gid=${spark_uid} spark &&     useradd --system --uid=${spark_uid} --gid=spark spark # buildkit
                        
# 2023-12-02 21:18:37  0.00B 定义构建参数
ARG spark_uid=185
                        
# 2023-12-02 09:59:51  0.00B 
/bin/sh -c #(nop)  ENTRYPOINT ["/__cacert_entrypoint.sh"]
                        
# 2023-12-02 09:59:51  1.18KB 
/bin/sh -c #(nop) COPY file:8b8864b3e02a33a579dc216fd51b28a6047bc8eeaa03045b258980fe0cf7fcb3 in /__cacert_entrypoint.sh 
                        
# 2023-12-02 09:59:51  0.00B 
/bin/sh -c set -eux;     echo "Verifying install ...";     echo "java --version"; java --version;     echo "Complete."
                        
# 2023-12-02 09:59:50  140.70MB 
/bin/sh -c set -eux;     ARCH="$(dpkg --print-architecture)";     case "${ARCH}" in        aarch64|arm64)          ESUM='8dc527e5c5da62f80ad3b6a2cd7b1789f745b1d90d5e83faba45f7a1d0b6cab8';          BINARY_URL='https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.21%2B9/OpenJDK11U-jre_aarch64_linux_hotspot_11.0.21_9.tar.gz';          ;;        amd64|i386:x86-64)          ESUM='156861bb901ef18759e05f6f008595220c7d1318a46758531b957b0c950ef2c3';          BINARY_URL='https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.21%2B9/OpenJDK11U-jre_x64_linux_hotspot_11.0.21_9.tar.gz';          ;;        armhf|arm)          ESUM='7c12ca8f195bf719368016a1c3e7f06f8f06e4a573dc3dce0befbe30a388ffa3';          BINARY_URL='https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.21%2B9/OpenJDK11U-jre_arm_linux_hotspot_11.0.21_9.tar.gz';          ;;        ppc64el|powerpc:common64)          ESUM='286e37ce06316185377eea847d2aa9f1523b9f1428684e59e772f2f6055e89b9';          BINARY_URL='https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.21%2B9/OpenJDK11U-jre_ppc64le_linux_hotspot_11.0.21_9.tar.gz';          ;;        s390x|s390:64-bit)          ESUM='78f18503970715c03b8e6e70191d9001c883edab23d9f51ff434e4a03c6237bd';          BINARY_URL='https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.21%2B9/OpenJDK11U-jre_s390x_linux_hotspot_11.0.21_9.tar.gz';          ;;        *)          echo "Unsupported arch: ${ARCH}";          exit 1;          ;;     esac;     wget --progress=dot:giga -O /tmp/openjdk.tar.gz ${BINARY_URL};     echo "${ESUM} */tmp/openjdk.tar.gz" | sha256sum -c -;     mkdir -p "$JAVA_HOME";     tar --extract         --file /tmp/openjdk.tar.gz         --directory "$JAVA_HOME"         --strip-components 1         --no-same-owner     ;     rm -f /tmp/openjdk.tar.gz ${JAVA_HOME}/lib/src.zip;     find "$JAVA_HOME/lib" -name '*.so' -exec dirname '{}' ';' | sort -u > /etc/ld.so.conf.d/docker-openjdk.conf;     ldconfig;     java -Xshare:dump;
                        
# 2023-12-02 09:59:10  0.00B 
/bin/sh -c #(nop)  ENV JAVA_VERSION=jdk-11.0.21+9
                        
# 2023-12-02 09:57:57  45.78MB 
/bin/sh -c set -eux;     apt-get update;     DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends         curl         wget         fontconfig         ca-certificates p11-kit         tzdata         locales     ;     echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen;     locale-gen en_US.UTF-8;     rm -rf /var/lib/apt/lists/*
                        
# 2023-12-02 09:57:27  0.00B 
/bin/sh -c #(nop)  ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8
                        
# 2023-12-02 09:57:27  0.00B 
/bin/sh -c #(nop)  ENV PATH=/opt/java/openjdk/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
                        
# 2023-12-02 09:57:27  0.00B 
/bin/sh -c #(nop)  ENV JAVA_HOME=/opt/java/openjdk
                        
# 2023-11-28 13:17:41  0.00B 
/bin/sh -c #(nop)  CMD ["/bin/bash"]
                        
# 2023-11-28 13:17:41  72.81MB 
/bin/sh -c #(nop) ADD file:9169bb1d6ef21313aed17e924538fee03d858460ae6b05e01968457dfc043bd7 in / 
                        
# 2023-11-28 13:17:39  0.00B 
/bin/sh -c #(nop)  LABEL org.opencontainers.image.version=20.04
                        
# 2023-11-28 13:17:39  0.00B 
/bin/sh -c #(nop)  LABEL org.opencontainers.image.ref.name=ubuntu
                        
# 2023-11-28 13:17:39  0.00B 
/bin/sh -c #(nop)  ARG LAUNCHPAD_BUILD_ARCH
                        
# 2023-11-28 13:17:39  0.00B 
/bin/sh -c #(nop)  ARG RELEASE
                        
                    

镜像信息

{
    "Id": "sha256:14003ecb13485e9135564d907e8cb6bd0bf2e5dcf55339bfd2a166d8079dba54",
    "RepoTags": [
        "apache/spark:3.4.2-python3",
        "swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/apache/spark:3.4.2-python3"
    ],
    "RepoDigests": [
        "apache/spark@sha256:68424ef0ed1ddb41aa42e7b36bb2b17d3ab43a1330eb64de8de04e1e46c3ff52",
        "swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/apache/spark@sha256:92c19b6862df83941c15ad1f5e077af9184c3be319901bb8eb7559b0a2a94f1b"
    ],
    "Parent": "",
    "Comment": "buildkit.dockerfile.v0",
    "Created": "2023-12-02T13:32:34.326926817Z",
    "Container": "",
    "ContainerConfig": null,
    "DockerVersion": "",
    "Author": "",
    "Config": {
        "Hostname": "",
        "Domainname": "",
        "User": "spark",
        "AttachStdin": false,
        "AttachStdout": false,
        "AttachStderr": false,
        "Tty": false,
        "OpenStdin": false,
        "StdinOnce": false,
        "Env": [
            "PATH=/opt/java/openjdk/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
            "JAVA_HOME=/opt/java/openjdk",
            "LANG=en_US.UTF-8",
            "LANGUAGE=en_US:en",
            "LC_ALL=en_US.UTF-8",
            "JAVA_VERSION=jdk-11.0.21+9",
            "SPARK_TGZ_URL=https://archive.apache.org/dist/spark/spark-3.4.2/spark-3.4.2-bin-hadoop3.tgz",
            "SPARK_TGZ_ASC_URL=https://archive.apache.org/dist/spark/spark-3.4.2/spark-3.4.2-bin-hadoop3.tgz.asc",
            "GPG_KEY=F28C9C925C188C35E345614DEDA00CE834F0FC5C",
            "SPARK_HOME=/opt/spark"
        ],
        "Cmd": null,
        "Image": "",
        "Volumes": null,
        "WorkingDir": "/opt/spark/work-dir",
        "Entrypoint": [
            "/opt/entrypoint.sh"
        ],
        "OnBuild": null,
        "Labels": {
            "org.opencontainers.image.ref.name": "ubuntu",
            "org.opencontainers.image.version": "20.04"
        }
    },
    "Architecture": "amd64",
    "Os": "linux",
    "Size": 973135692,
    "GraphDriver": {
        "Data": {
            "LowerDir": "/var/lib/docker/overlay2/e79a608c845bb257de49b1c38411391a749aeb5058cdd11422738625bf637a06/diff:/var/lib/docker/overlay2/056a332e8c200f3b394b749be9c4bf26545658d17e135a4d925afa0459d8fe23/diff:/var/lib/docker/overlay2/ca1b45f723b5504f2410189f370fa800ee6bdd96cd86d9d3fd1d6ffe21914a91/diff:/var/lib/docker/overlay2/9a40e4865c4972fcf00f660768c3d328ea15ab001e4e3c4c93815ef16a6c8cc2/diff:/var/lib/docker/overlay2/c470cee4ec77f7e7c8080c3a67770bc93940ec22768c1e84ba179b82d012cb97/diff:/var/lib/docker/overlay2/6879bdcb2cac7b9e9619f19bec4a846b313d6c06fae90cf1d22bb553b41f233a/diff:/var/lib/docker/overlay2/b03353377e851ef4c68771c672a3f187baf7edafa87312dda79165502c92b009/diff:/var/lib/docker/overlay2/79bccc5f3c28783d0b28c59fe2fea0368a20c1108df21f9c153132a9f437aa7d/diff:/var/lib/docker/overlay2/39c5fdee6a963541897c14908f2a404d974d486d0f8ad75a621c783beee0a32a/diff:/var/lib/docker/overlay2/8742bbbb5fff455f8ed3f57d61863922cde1629b93c54ebd1323101fec21db04/diff",
            "MergedDir": "/var/lib/docker/overlay2/0e6b55fd66d47b23d2dad1239ab44871d5c4ae46e974a8bda01fe9cb00b41abd/merged",
            "UpperDir": "/var/lib/docker/overlay2/0e6b55fd66d47b23d2dad1239ab44871d5c4ae46e974a8bda01fe9cb00b41abd/diff",
            "WorkDir": "/var/lib/docker/overlay2/0e6b55fd66d47b23d2dad1239ab44871d5c4ae46e974a8bda01fe9cb00b41abd/work"
        },
        "Name": "overlay2"
    },
    "RootFS": {
        "Type": "layers",
        "Layers": [
            "sha256:d3fa9d362c05df229c3770dc43fd881038e35a594de5d2f0a02defd22b8c0f6d",
            "sha256:170651b9fa5f4ea04f0808a2b475f581f7b25ea684d87afaa2909ec73ada4801",
            "sha256:81fcf058e7ab984ec713ce2dac5435730798cb9557baf7d1be62b21c24ec7488",
            "sha256:42fa62ad97243fd3143ae13144c5096c7a6e47ddbdc9dcdd1aaebd7313f38f0c",
            "sha256:57e555be4ff86fdce7be4b61e8bb167465e27389c74dcab5b1dabde3723fe6ce",
            "sha256:c01da23f6b0e54c562a24b29dfe29859b3c3b702785635061f3c9e4d0559c9a1",
            "sha256:76bf81b5222d54cde70bde3be5d303746ed03f7b88b87d9e9c1f4558c4684b16",
            "sha256:8bd30819955c3ea8e479f3c62bffc764d795d9a9cc2ae2a0704bd768e3aaa756",
            "sha256:19681ab4591bdcbad0930d0228ffb3078d0a453c4571b2b40af99ebc1acc3f50",
            "sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef",
            "sha256:e794ee51f04a958da5ef86054535f2c835081aa5e77ee7495138d5d30cf6d2ef"
        ]
    },
    "Metadata": {
        "LastTagTime": "2025-10-21T15:45:39.554257828+08:00"
    }
}

更多版本

docker.io/apache/spark:3.4.4

linux/amd64 · docker.io · 974.48MB · 2024-10-29 01:26
573

docker.io/apache/spark:v3.2.3

linux/amd64 · docker.io · 612.37MB · 2024-11-11 16:11
366

docker.io/apache/spark:3.5.3

linux/amd64 · docker.io · 984.74MB · 2024-11-23 19:03
271

docker.io/apache/spark:3.5.3-java17

linux/amd64 · docker.io · 1.15GB · 2024-11-24 00:41
314

docker.io/apache/spark:3.5.3-scala2.12-java11-r-ubuntu

linux/amd64 · docker.io · 1.32GB · 2024-11-24 01:13
230

docker.io/apache/spark:3.5.3-scala2.12-java11-ubuntu

linux/amd64 · docker.io · 681.32MB · 2024-11-24 01:31
245

docker.io/apache/spark:3.5.3-scala2.12-java17-ubuntu

linux/amd64 · docker.io · 828.98MB · 2024-11-24 01:33
199

docker.io/apache/spark:3.3.3

linux/amd64 · docker.io · 939.31MB · 2024-12-03 11:56
335

docker.io/apache/spark-py:v3.1.3

linux/amd64 · docker.io · 886.30MB · 2024-12-11 19:44
253

docker.io/apache/spark:3.4.0-python3

linux/amd64 · docker.io · 963.13MB · 2025-02-26 17:21
334

docker.io/apache/spark:3.5.5-scala2.12-java17-python3-ubuntu

linux/amd64 · docker.io · 1.16GB · 2025-04-02 15:43
245

docker.io/apache/spark:3.5.5-scala2.12-java11-python3-ubuntu

linux/amd64 · docker.io · 996.92MB · 2025-04-02 15:47
264

docker.io/apache/spark:3.5.5-scala2.12-java17-ubuntu

linux/amd64 · docker.io · 842.80MB · 2025-04-02 15:49
196

docker.io/apache/spark-py:v3.4.0

linux/arm64 · docker.io · 986.50MB · 2025-07-16 14:37
88

docker.io/apache/spark:3.4.2-python3

linux/amd64 · docker.io · 973.14MB · 2025-10-21 15:46
21