Update spark operator version

parent 0a2956d81f
commit fa90a9dbe0
@@ -0,0 +1,7 @@
+# docker build -t spark-operator:2.0.2 . && kind load docker-image -n dnet-data-platform spark-operator:2.0.2
+FROM kubeflow/spark-operator:2.0.2
+
+ENV SPARK_HOME /opt/spark
+USER root
+RUN curl https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.4/hadoop-aws-3.3.4.jar -o ${SPARK_HOME}/jars/hadoop-aws-3.3.4.jar
+RUN curl https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.12.262/aws-java-sdk-bundle-1.12.262.jar -o ${SPARK_HOME}/jars/aws-java-sdk-bundle-1.12.262.jar
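Note: the two jars add S3A filesystem support to the operator image. hadoop-aws 3.3.4 matches the Hadoop client libraries bundled with Spark 3.5, and aws-java-sdk-bundle 1.12.262 is the SDK version hadoop-aws 3.3.4 was built against, so the pair should only move in lockstep. (Adding -f to the curl calls would also make the build fail on a 404 instead of writing an error page into the jars directory.) A quick sanity check after building, using the tag from the comment above:

    docker run --rm --entrypoint ls spark-operator:2.0.2 /opt/spark/jars | grep -E 'hadoop-aws|aws-java-sdk'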
@@ -22,14 +22,16 @@ resource "kubernetes_role" "airflow_spark_role" {
 
   rule {
     api_groups = ["sparkoperator.k8s.io"]
-    resources = ["sparkapplications", "sparkapplications/status",
-      "scheduledsparkapplications", "scheduledsparkapplications/status"]
+    resources = [
+      "sparkapplications", "sparkapplications/status",
+      "scheduledsparkapplications", "scheduledsparkapplications/status"
+    ]
     verbs = ["*"]
   }
 
   rule {
     api_groups = [""]
-    resources = ["pods/log"]
+    resources = ["pods", "pods/log"]
     verbs = ["*"]
   }
 }
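Note: after this change the role grants full control over (scheduled) SparkApplications plus read access to pods as well as pod logs, which is what lets Airflow list Spark driver pods and stream their logs. A hedged check sketch; the namespace and service-account names below are illustrative, not from this commit, so substitute whatever the role binding actually targets:

    # Illustrative names: adjust namespace prefix and service account.
    kubectl auth can-i create sparkapplications.sparkoperator.k8s.io \
      -n dnet-spark-jobs --as=system:serviceaccount:dnet-airflow:airflow-worker
    kubectl auth can-i get pods/log \
      -n dnet-spark-jobs --as=system:serviceaccount:dnet-airflow:airflow-worker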
@@ -73,28 +75,6 @@ resource "kubernetes_role_binding_v1" "airflow_spark_role_binding2" {
     name      = "spark-role"
   }
 }
-#
-#
-# resource "kubernetes_role_binding_v1" "spark_role_binding" {
-#   depends_on = [kubernetes_namespace.spark_jobs_namespace]
-#   metadata {
-#     name      = "spark-role-binding"
-#     namespace = "${var.namespace_prefix}spark-jobs"
-#   }
-#
-#   subject {
-#     kind      = "ServiceAccount"
-#     name      = "spark"
-#     namespace = "${var.namespace_prefix}spark-jobs"
-#   }
-#
-#   role_ref {
-#     api_group = "rbac.authorization.k8s.io"
-#     kind      = "Role"
-#     name      = "spark-role"
-#   }
-# }
-#
 
 resource "helm_release" "gcp_spark_operator" {
   depends_on = [kubernetes_namespace.spark_jobs_namespace]
@@ -106,23 +86,38 @@ resource "helm_release" "gcp_spark_operator" {
   dependency_update = "true"
+  version           = "2.0.2"
 
-  # set {
-  #   name  = "image.repository"
-  #   value = "kubeflow/spark-operator"
-  # }
-
-  # set {
-  #   name  = "image.tag"
-  #   value = "v1beta2-1.4.5-3.5.0"
-  # }
-
   set {
-    name  = "sparkJobNamespaces"
+    name  = "image.repository"
+    value = "spark-operator"
+  }
+
+  set {
+    name  = "image.tag"
+    value = "2.0.2"
+  }
+
+  set {
+    name  = "spark.jobNamespaces"
     value = "{${var.namespace_prefix}spark-jobs}"
   }
 
   set {
-    name  = "serviceAccounts.spark.name"
+    name  = "spark.serviceAccount.create"
+    value = "true"
+  }
+
+  set {
+    name  = "spark.serviceAccount.name"
     value = "spark"
   }
 
+  set {
+    name  = "controller.serviceAccount.create"
+    value = "true"
+  }
+
+  set {
+    name  = "controller.serviceAccount.name"
+    value = "spark"
+  }
 
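Note: the rewritten set blocks track the values layout of the 2.x chart: the old top-level sparkJobNamespaces key became spark.jobNamespaces, and the single serviceAccounts.spark.name was split into separate spark.serviceAccount.* and controller.serviceAccount.* keys. When in doubt, the key names can be checked against the chart itself; a sketch, assuming the Kubeflow chart repository URL:

    helm show values spark-operator --repo https://kubeflow.github.io/spark-operator --version 2.0.2 \
      | grep -nE 'jobNamespaces|serviceAccount'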
@@ -171,7 +166,6 @@ EOT
 }
 
 
-
 resource "helm_release" "airflow" {
   depends_on = [kubernetes_secret.s3_conn_secrets]
 
@@ -230,10 +224,10 @@ resource "helm_release" "airflow" {
   #   value = "gbloisi/airflow"
   # }
 
-  # set {
-  #   name  = "images.airflow.tag"
-  #   value = "2.8.3rc1-python3.11"
-  # }
+  set {
+    name  = "images.airflow.tag"
+    value = "2.9.3-python3.11"
+  }
 
   set {
     name = "ingress.web.host"
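Note: the Airflow image tag moves from a commented-out 2.8.3rc1 override to an explicit 2.9.3-python3.11. Pulling the tag up front is a cheap way to confirm it exists before the Helm upgrade, assuming the default apache/airflow repository on Docker Hub:

    docker pull apache/airflow:2.9.3-python3.11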
@@ -1,12 +1,9 @@
-provider "helm" {
-  # Several Kubernetes authentication methods are possible: https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs#authentication
-  kubernetes {
-    config_path    = pathexpand(var.kube_config)
-    config_context = var.kube_context
-  }
-}
-
-provider "kubernetes" {
-  config_path    = pathexpand(var.kube_config)
-  config_context = var.kube_context
-}
+terraform {
+  required_providers {
+    helm = {
+    }
+
+    kubernetes = {
+    }
+  }
+}
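Note: the provider blocks move out of this file (see the new providers file below), leaving a terraform/required_providers skeleton. The helm and kubernetes entries appear empty in this diff, though they would normally pin source and version constraints. Either way, the resolved requirements can be inspected after the refactor:

    terraform init -upgrade   # re-resolve provider plugins after the move
    terraform providers       # print the provider requirement tree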
@@ -0,0 +1,12 @@
+provider "helm" {
+  # Several Kubernetes authentication methods are possible: https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs#authentication
+  kubernetes {
+    config_path    = pathexpand(var.kube_config)
+    config_context = var.kube_context
+  }
+}
+
+provider "kubernetes" {
+  config_path    = pathexpand(var.kube_config)
+  config_context = var.kube_context
+}
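Note: both providers authenticate through the local kubeconfig, driven by the kube_config and kube_context variables (pathexpand resolves a leading ~). For a kind cluster named dnet-data-platform, as in the kind load docker-image commands above, the context would plausibly be kind-dnet-data-platform:

    terraform plan -var 'kube_config=~/.kube/config' -var 'kube_context=kind-dnet-data-platform'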
@@ -1,6 +1,7 @@
-FROM spark:3.5.1-scala2.12-java17-ubuntu
+# docker build -t dnet-spark:1.0.0 . && kind load docker-image -n dnet-data-platform dnet-spark:1.0.0
+FROM spark:3.5.3-scala2.12-java17-ubuntu
 
-user root
+USER root
 RUN curl https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.4/hadoop-aws-3.3.4.jar -o ${SPARK_HOME}/jars/hadoop-aws-3.3.4.jar
 RUN curl https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.12.262/aws-java-sdk-bundle-1.12.262.jar -o ${SPARK_HOME}/jars/aws-java-sdk-bundle-1.12.262.jar
 
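Note: same jar pairing as in the spark-operator image above; SPARK_HOME is already set to /opt/spark by the upstream spark base image, so the RUN lines resolve without an ENV here. The user-to-USER change is purely stylistic, since Dockerfile instructions are case-insensitive. To verify the built image, something like:

    docker run --rm --entrypoint ls dnet-spark:1.0.0 /opt/spark/jars | grep -E 'hadoop-aws|aws-java-sdk'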