spark 部署到k8s

Last updated on September 15, 2024 pm

🧙 Questions

☄️ Ideas

容器中打开的yml
apiVersion: v1
kind: Pod
metadata:
  annotations:
    cni.projectcalico.org/containerID: 25bbe0e379f50c75a3e27dc3e73ea005ad16878e4480c07939ab0301a3c5e4b7
    cni.projectcalico.org/podIP: ""
    cni.projectcalico.org/podIPs: ""
  creationTimestamp: "2023-07-22T07:15:17Z"
  labels:
    spark-app-name: zhiqingyun-job
    spark-app-selector: spark-b34b4e8b02c949fe8d664840f8552ec8
    spark-role: driver
    spark-version: 3.4.0
  name: zhiqingyun-job-8f864f897c73dba7-driver
  namespace: spark-yun
  resourceVersion: "3684331"
  uid: 506835de-54bc-4aeb-bbdd-32750174e748
spec:
  containers:
    - args:
        - driver
        - --properties-file
        - /opt/spark/conf/spark.properties
        - --class
        - com.isxcode.star.plugin.query.sql.Execute
        - local:///opt/spark/examples/jars/spark-query-sql-plugin.jar
        - eyJsaW1pdCI6MjAwLCJzcGFya0NvbmZpZyI6eyJzcGFyay5leGVjdXRvci5tZW1vcnkiOiIxZyIsInNwYXJrLmhhZG9vcC5oaXZlLmV4ZWMuZHluYW1pYy5wYXJ0aXRpb24ubW9kZSI6Im5vbnN0cmljdCIsInNwYXJrLmRyaXZlci5tZW1vcnkiOiIxZyIsInNwYXJrLnNxbC5sZWdhY3kudGltZVBhcnNlclBvbGljeSI6IkxFR0FDWSIsInNwYXJrLnNxbC5zdG9yZUFzc2lnbm1lbnRQb2xpY3kiOiJMRUdBQ1kiLCJzcGFyay5oYWRvb3AuaGl2ZS5leGVjLmR5bmFtaWMucGFydGl0aW9uIjoidHJ1ZSIsImhpdmUubWV0YXN0b3JlLnVyaXMiOiJ0aHJpZnQ6Ly9pc3hjb2RlOjkwODMifSwic3FsIjoic2hvdyBkYXRhYmFzZXM7In0=
      env:
        - name: SPARK_USER
          value: zhiqingyun
        - name: SPARK_APPLICATION_ID
          value: spark-b34b4e8b02c949fe8d664840f8552ec8
        - name: SPARK_DRIVER_BIND_ADDRESS
          valueFrom:
            fieldRef:
              apiVersion: v1
              fieldPath: status.podIP
        - name: HADOOP_CONF_DIR
          value: /opt/hadoop/conf
        - name: SPARK_LOCAL_DIRS
          value: /var/data/spark-fc076486-ccc6-4979-b144-dca226d1c339
        - name: SPARK_CONF_DIR
          value: /opt/spark/conf
      image: apache/spark:v3.1.3
      imagePullPolicy: IfNotPresent
      name: spark-kubernetes-driver
      ports:
        - containerPort: 7078
          name: driver-rpc-port
          protocol: TCP
        - containerPort: 7079
          name: blockmanager
          protocol: TCP
        - containerPort: 4040
          name: spark-ui
          protocol: TCP
      resources:
        limits:
          memory: 1408Mi
        requests:
          cpu: "1"
          memory: 1408Mi
      terminationMessagePath: /dev/termination-log
      terminationMessagePolicy: File
      volumeMounts:
        - mountPath: /etc/passwd
          name: users-volume
        - mountPath: /opt/spark/examples/jars/lib/fastjson2-extension-2.0.16.jar
          name: "7"
        - mountPath: /opt/spark/examples/jars/lib/ngdbc-2.15.12.jar
          name: "5"
        - mountPath: /opt/spark/examples/jars/lib/mysql-connector-j-8.0.32.jar
          name: "14"
        - mountPath: /opt/spark/examples/jars/lib/Dm8JdbcDriver18-8.1.1.49.jar
          name: "6"
        - mountPath: /opt/spark/examples/jars/lib/ojdbc8-23.2.0.0.jar
          name: "11"
        - mountPath: /opt/spark/examples/jars/lib/log4j-api-2.17.2.jar
          name: "0"
        - mountPath: /opt/spark/examples/jars/lib/fastjson2-2.0.16.jar
          name: "12"
        - mountPath: /opt/spark/examples/jars/spark-query-sql-plugin.jar
          name: jar
        - mountPath: /opt/spark/examples/jars/lib/spark-yun-api-0.0.7-plain.jar
          name: "10"
        - mountPath: /opt/spark/examples/jars/lib/mysql-connector-java-5.1.49.jar
          name: "9"
        - mountPath: /opt/spark/examples/jars/lib/oceanbase-client-2.4.3.jar
          name: "8"
        - mountPath: /opt/spark/examples/jars/lib/spark-yun-agent.jar
          name: "3"
        - mountPath: /opt/spark/examples/jars/lib/clickhouse-jdbc-0.4.6.jar
          name: "1"
        - mountPath: /opt/spark/examples/jars/lib/fastjson-2.0.16.jar
          name: "2"
        - mountPath: /opt/spark/examples/jars/lib/postgresql-42.6.0.jar
          name: "4"
        - mountPath: /opt/spark/examples/jars/lib/mssql-jdbc-12.2.0.jre8.jar
          name: "13"
        - mountPath: /opt/hadoop/conf
          name: hadoop-properties
        - mountPath: /opt/spark/pod-template
          name: pod-template-volume
        - mountPath: /var/data/spark-fc076486-ccc6-4979-b144-dca226d1c339
          name: spark-local-dir-1
        - mountPath: /opt/spark/conf
          name: spark-conf-volume-driver
        - mountPath: /var/run/secrets/kubernetes.io/serviceaccount
          name: kube-api-access-s2dx2
          readOnly: true
  dnsPolicy: ClusterFirst
  enableServiceLinks: true
  hostAliases:
    - hostnames:
        - isxcode
      ip: 172.16.215.83
  nodeName: 172.16.215.83
  preemptionPolicy: PreemptLowerPriority
  priority: 0
  restartPolicy: Never
  schedulerName: default-scheduler
  securityContext:
    runAsUser: 1000
  serviceAccount: zhiqingyun
  serviceAccountName: zhiqingyun
  terminationGracePeriodSeconds: 30
  tolerations:
    - effect: NoExecute
      key: node.kubernetes.io/not-ready
      operator: Exists
      tolerationSeconds: 300
    - effect: NoExecute
      key: node.kubernetes.io/unreachable
      operator: Exists
      tolerationSeconds: 300
  volumes:
    - hostPath:
        path: /etc/passwd
        type: ""
      name: users-volume
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/fastjson2-extension-2.0.16.jar
        type: ""
      name: "7"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/ngdbc-2.15.12.jar
        type: ""
      name: "5"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/mysql-connector-j-8.0.32.jar
        type: ""
      name: "14"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/Dm8JdbcDriver18-8.1.1.49.jar
        type: ""
      name: "6"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/ojdbc8-23.2.0.0.jar
        type: ""
      name: "11"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/log4j-api-2.17.2.jar
        type: ""
      name: "0"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/fastjson2-2.0.16.jar
        type: ""
      name: "12"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/plugins/spark-query-sql-plugin.jar
        type: ""
      name: jar
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/spark-yun-api-0.0.7-plain.jar
        type: ""
      name: "10"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/mysql-connector-java-5.1.49.jar
        type: ""
      name: "9"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/oceanbase-client-2.4.3.jar
        type: ""
      name: "8"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/spark-yun-agent.jar
        type: ""
      name: "3"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/clickhouse-jdbc-0.4.6.jar
        type: ""
      name: "1"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/fastjson-2.0.16.jar
        type: ""
      name: "2"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/postgresql-42.6.0.jar
        type: ""
      name: "4"
    - hostPath:
        path: /home/zhiqingyun/spark-yun-node/lib/mssql-jdbc-12.2.0.jre8.jar
        type: ""
      name: "13"
    - configMap:
        defaultMode: 420
        items:
          - key: hadoop-policy.xml
            path: hadoop-policy.xml
          - key: mapred-env.sh
            path: mapred-env.sh
          - key: mapred-env.cmd
            path: mapred-env.cmd
          - key: httpfs-site.xml
            path: httpfs-site.xml
          - key: yarn-env.sh
            path: yarn-env.sh
          - key: httpfs-log4j.properties
            path: httpfs-log4j.properties
          - key: yarn-site.xml
            path: yarn-site.xml
          - key: hadoop-user-functions.sh.example
            path: hadoop-user-functions.sh.example
          - key: hdfs-rbf-site.xml
            path: hdfs-rbf-site.xml
          - key: workers
            path: workers
          - key: yarn-env.cmd
            path: yarn-env.cmd
          - key: capacity-scheduler.xml
            path: capacity-scheduler.xml
          - key: core-site.xml
            path: core-site.xml
          - key: hadoop-env.sh
            path: hadoop-env.sh
          - key: ssl-server.xml.example
            path: ssl-server.xml.example
          - key: kms-env.sh
            path: kms-env.sh
          - key: hadoop-metrics2.properties
            path: hadoop-metrics2.properties
          - key: kms-acls.xml
            path: kms-acls.xml
          - key: user_ec_policies.xml.template
            path: user_ec_policies.xml.template
          - key: ssl-client.xml.example
            path: ssl-client.xml.example
          - key: yarnservice-log4j.properties
            path: yarnservice-log4j.properties
          - key: configuration.xsl
            path: configuration.xsl
          - key: log4j.properties
            path: log4j.properties
          - key: mapred-site.xml
            path: mapred-site.xml
          - key: hdfs-site.xml
            path: hdfs-site.xml
          - key: container-executor.cfg
            path: container-executor.cfg
          - key: httpfs-env.sh
            path: httpfs-env.sh
          - key: mapred-queues.xml.template
            path: mapred-queues.xml.template
          - key: kms-site.xml
            path: kms-site.xml
          - key: kms-log4j.properties
            path: kms-log4j.properties
          - key: hadoop-env.cmd
            path: hadoop-env.cmd
        name: zhiqingyun-job-8f864f897c73dba7-hadoop-config
      name: hadoop-properties
    - configMap:
        defaultMode: 420
        items:
          - key: podspec-configmap-key
            path: pod-spec-template.yml
        name: zhiqingyun-job-8f864f897c73dba7-driver-podspec-conf-map
      name: pod-template-volume
    - emptyDir: {}
      name: spark-local-dir-1
    - configMap:
        defaultMode: 420
        items:
          - key: spark.properties
            mode: 420
            path: spark.properties
        name: spark-drv-80a9bc897c73e366-conf-map
      name: spark-conf-volume-driver
    - name: kube-api-access-s2dx2
      projected:
        defaultMode: 420
        sources:
          - serviceAccountToken:
              expirationSeconds: 3607
              path: token
          - configMap:
              items:
                - key: ca.crt
                  path: ca.crt
              name: kube-root-ca.crt
          - downwardAPI:
              items:
                - fieldRef:
                    apiVersion: v1
                    fieldPath: metadata.namespace
                  path: namespace
status:
  conditions:
    - lastProbeTime: null
      lastTransitionTime: "2023-07-22T07:15:17Z"
      reason: PodCompleted
      status: "True"
      type: Initialized
    - lastProbeTime: null
      lastTransitionTime: "2023-07-22T07:15:40Z"
      reason: PodCompleted
      status: "False"
      type: Ready
    - lastProbeTime: null
      lastTransitionTime: "2023-07-22T07:15:40Z"
      reason: PodCompleted
      status: "False"
      type: ContainersReady
    - lastProbeTime: null
      lastTransitionTime: "2023-07-22T07:15:17Z"
      status: "True"
      type: PodScheduled
  containerStatuses:
    - containerID: docker://bf0358b4408ccdc2058d91d1dda4b9e5809cddc0ba4fbfe2303123704771f8e6
      image: apache/spark:v3.1.3
      imageID: docker-pullable://apache/spark@sha256:1e3f2bfea55ba1e1281a96f338050bf95b1b4e29d14cfb320e7a21f204245cdb
      lastState: {}
      name: spark-kubernetes-driver
      ready: false
      restartCount: 0
      started: false
      state:
        terminated:
          containerID: docker://bf0358b4408ccdc2058d91d1dda4b9e5809cddc0ba4fbfe2303123704771f8e6
          exitCode: 0
          finishedAt: "2023-07-22T07:15:39Z"
          reason: Completed
          startedAt: "2023-07-22T07:15:18Z"
  hostIP: 172.16.215.83
  phase: Succeeded
  podIP: 10.42.0.215
  podIPs:
    - ip: 10.42.0.215
  qosClass: Burstable
  startTime: "2023-07-22T07:15:17Z"
vim podTemplate.yml
# Pod template that injects a host mapping into Spark driver/executor pods.
# Wired into spark-submit via:
# "spark.kubernetes.driver.podTemplateFile":"/Users/ispong/zhiqingyun-agent/conf/podTemplate.yml",
# "spark.kubernetes.executor.podTemplateFile":"/Users/ispong/zhiqingyun-agent/conf/podTemplate.yml"
apiVersion: v1
kind: Pod
metadata:
  name: host-pod
spec:
  # Valid values are Always, OnFailure, Never (case-sensitive);
  # lowercase "never" is rejected by the API server.
  restartPolicy: Never
#  securityContext:
#    privileged: true
#    runAsUser: 1000
#    runAsGroup: 1001
  # Resolve "localhost" entries to the agent host inside the pod.
  hostAliases:
    - ip: "192.168.18.146"
      hostnames:
        - "localhost"
#  volumes:
#    - name: users
#      hostPath:
#        path: /etc/passwd
# 查看所有pods
kubectl get pods -n zhiqingyun-space

# 查看pods日志
kubectl logs -f zhiqingyun-spark-container-sy-1815692501588373504-2f557190e3a0ffe5-driver -n zhiqingyun-space 

# 删除pods
kubectl delete pod zhiqingyun-data-sync-jdbc-sy-1813828728925601792-sy-1813889095713140736-ae77aa90c5773210-driver -n zhiqingyun-space --force
kubectl delete pod $(kubectl get pods -n zhiqingyun-space --no-headers | grep 'zhiqingyun-' | awk '{print $1}') -n zhiqingyun-space --force

# 查看pod
kubectl describe pod zhiqingyun-spark-sql-sy-1823644530935844864-sy-1823667389099917312-d8e14491506c5e78-driver -n zhiqingyun-space

# 查看地址
kubectl port-forward zhiqingyun-spark-container-sy-1815692501588373504-7a53cc90df3fd2cb-driver 34624:34624 -n zhiqingyun-space &

#查看pod
kubectl get pod zhiqingyun-spark-sql-sy-1823644530935844864-sy-1823667389099917312-d8e14491506c5e78-driver  -n zhiqingyun-space -o yaml

# 查看镜像dockerfile
docker history --no-trunc 51ac287c39fd

# 停止
kubectl delete pod zhiqingyun-spark-container-sy-1815692501588373504-e582a490df343738-exec-1  -n zhiqingyun-space --force
# Minimal pod template: add a /etc/hosts entry so pods can resolve
# the QA node by name without relying on cluster DNS.
apiVersion: v1
kind: Pod
metadata:
  name: host-pod
spec:
  hostAliases:
    - ip: "172.23.39.236"
      hostnames:
        - "ispong-qa"
# Executor pod template: host mapping plus a hostPath mount of the
# node's /etc/passwd into the executor container.
# NOTE(review): presumably so the container's runAsUser id resolves to a
# named user (the captured driver pod mounts /etc/passwd the same way) —
# confirm against the agent's submit configuration.
apiVersion: v1
kind: Pod
metadata:
  name: host-pod
spec:
  containers:
  - name: spark-kubernetes-executor
    volumeMounts:
      - mountPath: /etc/passwd
        name: users-volume
  volumes:
    - name: users-volume
      hostPath:
        path: /etc/passwd
  hostAliases:
    - ip: "172.23.39.236"
      hostnames:
        - "ispong-qa"

spark 部署到k8s
https://ispong.isxcode.com/hadoop/spark/spark 部署到k8s/
Author
ispong
Posted on
August 7, 2023
Licensed under