diff --git a/CHANGELOG.md b/CHANGELOG.md index e57b2b1e..5cb6c48c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,8 +5,16 @@ All notable changes to this project will be documented in this file. ## [Unreleased] - Document Helm deployed RBAC permissions and remove unnecessary permissions ([#674]). +- BREAKING: Each custom resource now accepts only the known config files in `configOverrides`: + - `SparkApplication`: `spark-env.sh` and `security.properties` + - `SparkHistoryServer`: `spark-defaults.conf`, `spark-env.sh` and `security.properties` + - `SparkConnectServer`: `spark-defaults.conf`, `metrics.properties` and `security.properties` + + Previously, arbitrary file names were silently accepted and ignored ([#679]). +- Bump `stackable-operator` to 0.110.1 ([#679]). [#674]: https://github.com/stackabletech/spark-k8s-operator/pull/674 +[#679]: https://github.com/stackabletech/spark-k8s-operator/pull/679 ## [26.3.0] - 2026-03-16 diff --git a/Cargo.lock b/Cargo.lock index 11a643fd..81925f25 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1123,6 +1123,12 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" +[[package]] +name = "humantime" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424" + [[package]] name = "hyper" version = "1.8.1" @@ -1511,17 +1517,18 @@ dependencies = [ [[package]] name = "k8s-version" version = "0.1.3" -source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#7486017f60827d1d769d7bf17bf56adb21f8bb02" +source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#96f42571ea185a3cd76fedde351fcabbeefcae16" dependencies = [ "darling", "regex", - "snafu 0.8.9", + "snafu 0.9.0", ] [[package]] name = "kube" -version = "3.0.1" -source = 
"git+https://github.com/kube-rs/kube-rs?rev=fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5#fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acc5a6a69da2975ed9925d56b5dcfc9cc739b66f37add06785b7c9f6d1e88741" dependencies = [ "k8s-openapi", "kube-client", @@ -1532,8 +1539,9 @@ dependencies = [ [[package]] name = "kube-client" -version = "3.0.1" -source = "git+https://github.com/kube-rs/kube-rs?rev=fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5#fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fcaf2d1f1a91e1805d4cd82e8333c022767ae8ffd65909bbef6802733a7dd40" dependencies = [ "base64", "bytes", @@ -1566,8 +1574,9 @@ dependencies = [ [[package]] name = "kube-core" -version = "3.0.1" -source = "git+https://github.com/kube-rs/kube-rs?rev=fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5#fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f126d2db7a8b532ec1d839ece2a71e2485dc3bbca6cc3c3f929becaa810e719e" dependencies = [ "derive_more", "form_urlencoded", @@ -1584,8 +1593,9 @@ dependencies = [ [[package]] name = "kube-derive" -version = "3.0.1" -source = "git+https://github.com/kube-rs/kube-rs?rev=fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5#fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6b9b97e121fce957f9cafc6da534abc4276983ab03190b76c09361e2df849fa" dependencies = [ "darling", "proc-macro2", @@ -1597,8 +1607,9 @@ dependencies = [ [[package]] name = "kube-runtime" -version = "3.0.1" -source = "git+https://github.com/kube-rs/kube-rs?rev=fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5#fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c072737075826ee74d3e615e80334e41e617ca3d14fb46ef7cdfda822d6f15f2" dependencies = [ "ahash", "async-broadcast", @@ -2482,9 +2493,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.9" +version = "0.103.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53" +checksum = "61c429a8649f110dddef65e2a5ad240f747e85f7758a6bccc7e5777bd33f756e" dependencies = [ "ring", "rustls-pki-types", @@ -2869,7 +2880,7 @@ checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "stackable-certs" version = "0.4.0" -source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#7486017f60827d1d769d7bf17bf56adb21f8bb02" +source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#96f42571ea185a3cd76fedde351fcabbeefcae16" dependencies = [ "const-oid", "ecdsa", @@ -2881,7 +2892,7 @@ dependencies = [ "rsa", "sha2", "signature", - "snafu 0.8.9", + "snafu 0.9.0", "stackable-shared", "tokio", "tokio-rustls", @@ -2892,9 +2903,10 @@ dependencies = [ [[package]] name = "stackable-operator" -version = "0.108.0" -source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#7486017f60827d1d769d7bf17bf56adb21f8bb02" +version = "0.110.1" +source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#96f42571ea185a3cd76fedde351fcabbeefcae16" dependencies = [ + "base64", "clap", "const_format", "delegate", @@ -2909,13 +2921,14 @@ dependencies = [ "k8s-openapi", "kube", "product-config", + "rand 0.9.2", "regex", "schemars", "semver", "serde", "serde_json", "serde_yaml", - "snafu 0.8.9", + "snafu 0.9.0", "stackable-operator-derive", "stackable-shared", "stackable-telemetry", @@ -2932,7 +2945,7 @@ dependencies = [ [[package]] name = "stackable-operator-derive" version = "0.3.1" -source = 
"git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#7486017f60827d1d769d7bf17bf56adb21f8bb02" +source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#96f42571ea185a3cd76fedde351fcabbeefcae16" dependencies = [ "darling", "proc-macro2", @@ -2943,7 +2956,7 @@ dependencies = [ [[package]] name = "stackable-shared" version = "0.1.0" -source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#7486017f60827d1d769d7bf17bf56adb21f8bb02" +source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#96f42571ea185a3cd76fedde351fcabbeefcae16" dependencies = [ "jiff", "k8s-openapi", @@ -2952,7 +2965,7 @@ dependencies = [ "semver", "serde", "serde_yaml", - "snafu 0.8.9", + "snafu 0.9.0", "strum", "time", ] @@ -2984,8 +2997,8 @@ dependencies = [ [[package]] name = "stackable-telemetry" -version = "0.6.2" -source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#7486017f60827d1d769d7bf17bf56adb21f8bb02" +version = "0.6.3" +source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#96f42571ea185a3cd76fedde351fcabbeefcae16" dependencies = [ "axum", "clap", @@ -2996,7 +3009,7 @@ dependencies = [ "opentelemetry-semantic-conventions", "opentelemetry_sdk", "pin-project", - "snafu 0.8.9", + "snafu 0.9.0", "strum", "tokio", "tower", @@ -3008,21 +3021,21 @@ dependencies = [ [[package]] name = "stackable-versioned" -version = "0.8.3" -source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#7486017f60827d1d769d7bf17bf56adb21f8bb02" +version = "0.9.0" +source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#96f42571ea185a3cd76fedde351fcabbeefcae16" dependencies = [ "schemars", "serde", "serde_json", "serde_yaml", - "snafu 0.8.9", + "snafu 0.9.0", "stackable-versioned-macros", ] [[package]] name = 
"stackable-versioned-macros" -version = "0.8.3" -source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#7486017f60827d1d769d7bf17bf56adb21f8bb02" +version = "0.9.0" +source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#96f42571ea185a3cd76fedde351fcabbeefcae16" dependencies = [ "convert_case", "convert_case_extras", @@ -3039,13 +3052,14 @@ dependencies = [ [[package]] name = "stackable-webhook" -version = "0.9.0" -source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#7486017f60827d1d769d7bf17bf56adb21f8bb02" +version = "0.9.1" +source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#96f42571ea185a3cd76fedde351fcabbeefcae16" dependencies = [ "arc-swap", "async-trait", "axum", "futures-util", + "humantime", "hyper", "hyper-util", "k8s-openapi", @@ -3055,7 +3069,7 @@ dependencies = [ "rand 0.9.2", "serde", "serde_json", - "snafu 0.8.9", + "snafu 0.9.0", "stackable-certs", "stackable-shared", "stackable-telemetry", diff --git a/Cargo.nix b/Cargo.nix index acc27537..675635de 100644 --- a/Cargo.nix +++ b/Cargo.nix @@ -3492,6 +3492,14 @@ rec { ]; }; + "humantime" = rec { + crateName = "humantime"; + version = "2.3.0"; + edition = "2021"; + sha256 = "092lpipp32ayz4kyyn4k3vz59j9blng36wprm5by0g2ykqr14nqk"; + features = { + }; + }; "hyper" = rec { crateName = "hyper"; version = "1.8.1"; @@ -4814,8 +4822,8 @@ rec { workspace_member = null; src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; - rev = "7486017f60827d1d769d7bf17bf56adb21f8bb02"; - sha256 = "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2"; + rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "k8s_version"; authors = [ @@ -4833,7 +4841,7 @@ rec { } { name = "snafu"; - packageId = "snafu 0.8.9"; + packageId = "snafu 0.9.0"; } ]; features = { @@ 
-4844,14 +4852,9 @@ rec { }; "kube" = rec { crateName = "kube"; - version = "3.0.1"; + version = "3.1.0"; edition = "2024"; - workspace_member = null; - src = pkgs.fetchgit { - url = "https://github.com/kube-rs/kube-rs"; - rev = "fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5"; - sha256 = "1irm4g79crlxjm3iqrgvx0f6wxdcj394ky84q89pk9i36y2mlw3n"; - }; + sha256 = "0hc7x38zdjdphmkx1b9pdyv3kiwwzkfbamjxjbcmx5x2knkadidc"; authors = [ "clux " "Natalie Klestrup Röijezon " @@ -4922,14 +4925,9 @@ rec { }; "kube-client" = rec { crateName = "kube-client"; - version = "3.0.1"; + version = "3.1.0"; edition = "2024"; - workspace_member = null; - src = pkgs.fetchgit { - url = "https://github.com/kube-rs/kube-rs"; - rev = "fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5"; - sha256 = "1irm4g79crlxjm3iqrgvx0f6wxdcj394ky84q89pk9i36y2mlw3n"; - }; + sha256 = "0h6xlwrjg07npsdr0rgxiyp6f9q27hryi0ndsh2ih7m9y78z5jhg"; libName = "kube_client"; authors = [ "clux " @@ -5055,7 +5053,7 @@ rec { name = "tokio"; packageId = "tokio"; optional = true; - features = [ "time" "signal" "sync" ]; + features = [ "time" "signal" "sync" "rt" ]; } { name = "tokio-util"; @@ -5155,14 +5153,9 @@ rec { }; "kube-core" = rec { crateName = "kube-core"; - version = "3.0.1"; + version = "3.1.0"; edition = "2024"; - workspace_member = null; - src = pkgs.fetchgit { - url = "https://github.com/kube-rs/kube-rs"; - rev = "fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5"; - sha256 = "1irm4g79crlxjm3iqrgvx0f6wxdcj394ky84q89pk9i36y2mlw3n"; - }; + sha256 = "17ki1s0smv4vj8zkrk56phxxr1943sky5v1rv30jwlwbgbdx49pi"; libName = "kube_core"; authors = [ "clux " @@ -5242,14 +5235,9 @@ rec { }; "kube-derive" = rec { crateName = "kube-derive"; - version = "3.0.1"; + version = "3.1.0"; edition = "2024"; - workspace_member = null; - src = pkgs.fetchgit { - url = "https://github.com/kube-rs/kube-rs"; - rev = "fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5"; - sha256 = "1irm4g79crlxjm3iqrgvx0f6wxdcj394ky84q89pk9i36y2mlw3n"; - }; + sha256 = 
"1yj9z0niwdh9djvr0cdh7ac7chmw999xmimgkizrbkhz29zbkffn"; procMacro = true; libName = "kube_derive"; authors = [ @@ -5296,14 +5284,9 @@ rec { }; "kube-runtime" = rec { crateName = "kube-runtime"; - version = "3.0.1"; + version = "3.1.0"; edition = "2024"; - workspace_member = null; - src = pkgs.fetchgit { - url = "https://github.com/kube-rs/kube-rs"; - rev = "fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5"; - sha256 = "1irm4g79crlxjm3iqrgvx0f6wxdcj394ky84q89pk9i36y2mlw3n"; - }; + sha256 = "1whmdwnq5nnzgkpldyql7p51grj19qrq0pk17r6yfvl2fmq76wn0"; libName = "kube_runtime"; authors = [ "clux " @@ -8318,9 +8301,9 @@ rec { }; "rustls-webpki" = rec { crateName = "rustls-webpki"; - version = "0.103.9"; + version = "0.103.13"; edition = "2021"; - sha256 = "0lwg1nnyv7pp2lfwwjhy81bxm233am99jnsp3iymdhd6k8827pyp"; + sha256 = "0vkm7z9pnxz5qz66p2kmyy2pwx0g4jnsbqk5xzfhs4czcjl2ki31"; libName = "webpki"; dependencies = [ { @@ -9487,8 +9470,8 @@ rec { workspace_member = null; src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; - rev = "7486017f60827d1d769d7bf17bf56adb21f8bb02"; - sha256 = "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2"; + rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "stackable_certs"; authors = [ @@ -9546,7 +9529,7 @@ rec { } { name = "snafu"; - packageId = "snafu 0.8.9"; + packageId = "snafu 0.9.0"; } { name = "stackable-shared"; @@ -9585,19 +9568,23 @@ rec { }; "stackable-operator" = rec { crateName = "stackable-operator"; - version = "0.108.0"; + version = "0.110.1"; edition = "2024"; workspace_member = null; src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; - rev = "7486017f60827d1d769d7bf17bf56adb21f8bb02"; - sha256 = "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2"; + rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "stackable_operator"; 
authors = [ "Stackable GmbH " ]; dependencies = [ + { + name = "base64"; + packageId = "base64"; + } { name = "clap"; packageId = "clap"; @@ -9661,6 +9648,10 @@ rec { name = "product-config"; packageId = "product-config"; } + { + name = "rand"; + packageId = "rand 0.9.2"; + } { name = "regex"; packageId = "regex"; @@ -9689,7 +9680,7 @@ rec { } { name = "snafu"; - packageId = "snafu 0.8.9"; + packageId = "snafu 0.9.0"; } { name = "stackable-operator-derive"; @@ -9762,8 +9753,8 @@ rec { workspace_member = null; src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; - rev = "7486017f60827d1d769d7bf17bf56adb21f8bb02"; - sha256 = "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2"; + rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; procMacro = true; libName = "stackable_operator_derive"; @@ -9797,8 +9788,8 @@ rec { workspace_member = null; src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; - rev = "7486017f60827d1d769d7bf17bf56adb21f8bb02"; - sha256 = "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2"; + rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "stackable_shared"; authors = [ @@ -9842,7 +9833,7 @@ rec { } { name = "snafu"; - packageId = "snafu 0.8.9"; + packageId = "snafu 0.9.0"; } { name = "strum"; @@ -9983,13 +9974,13 @@ rec { }; "stackable-telemetry" = rec { crateName = "stackable-telemetry"; - version = "0.6.2"; + version = "0.6.3"; edition = "2024"; workspace_member = null; src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; - rev = "7486017f60827d1d769d7bf17bf56adb21f8bb02"; - sha256 = "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2"; + rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "stackable_telemetry"; authors = [ @@ -10040,7 
+10031,7 @@ rec { } { name = "snafu"; - packageId = "snafu 0.8.9"; + packageId = "snafu 0.9.0"; } { name = "strum"; @@ -10093,13 +10084,13 @@ rec { }; "stackable-versioned" = rec { crateName = "stackable-versioned"; - version = "0.8.3"; + version = "0.9.0"; edition = "2024"; workspace_member = null; src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; - rev = "7486017f60827d1d769d7bf17bf56adb21f8bb02"; - sha256 = "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2"; + rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "stackable_versioned"; authors = [ @@ -10126,7 +10117,7 @@ rec { } { name = "snafu"; - packageId = "snafu 0.8.9"; + packageId = "snafu 0.9.0"; } { name = "stackable-versioned-macros"; @@ -10137,13 +10128,13 @@ rec { }; "stackable-versioned-macros" = rec { crateName = "stackable-versioned-macros"; - version = "0.8.3"; + version = "0.9.0"; edition = "2024"; workspace_member = null; src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; - rev = "7486017f60827d1d769d7bf17bf56adb21f8bb02"; - sha256 = "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2"; + rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; procMacro = true; libName = "stackable_versioned_macros"; @@ -10205,13 +10196,13 @@ rec { }; "stackable-webhook" = rec { crateName = "stackable-webhook"; - version = "0.9.0"; + version = "0.9.1"; edition = "2024"; workspace_member = null; src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; - rev = "7486017f60827d1d769d7bf17bf56adb21f8bb02"; - sha256 = "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2"; + rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "stackable_webhook"; authors = [ @@ -10235,6 +10226,10 @@ rec { name = "futures-util"; 
packageId = "futures-util"; } + { + name = "humantime"; + packageId = "humantime"; + } { name = "hyper"; packageId = "hyper"; @@ -10279,7 +10274,7 @@ rec { } { name = "snafu"; - packageId = "snafu 0.8.9"; + packageId = "snafu 0.9.0"; } { name = "stackable-certs"; diff --git a/Cargo.toml b/Cargo.toml index 984ddff5..274d1352 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,7 +11,7 @@ repository = "https://github.com/stackabletech/spark-k8s-operator" [workspace.dependencies] product-config = { git = "https://github.com/stackabletech/product-config.git", tag = "0.8.0" } -stackable-operator = { git = "https://github.com/stackabletech/operator-rs.git", tag = "stackable-operator-0.108.0", features = ["webhook"] } +stackable-operator = { git = "https://github.com/stackabletech/operator-rs.git", tag = "stackable-operator-0.110.1", features = ["webhook"] } anyhow = "1.0" built = { version = "0.8", features = ["chrono", "git2"] } diff --git a/crate-hashes.json b/crate-hashes.json index 2148b36f..e19b553d 100644 --- a/crate-hashes.json +++ b/crate-hashes.json @@ -1,17 +1,12 @@ { - "git+https://github.com/kube-rs/kube-rs?rev=fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5#kube-client@3.0.1": "1irm4g79crlxjm3iqrgvx0f6wxdcj394ky84q89pk9i36y2mlw3n", - "git+https://github.com/kube-rs/kube-rs?rev=fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5#kube-core@3.0.1": "1irm4g79crlxjm3iqrgvx0f6wxdcj394ky84q89pk9i36y2mlw3n", - "git+https://github.com/kube-rs/kube-rs?rev=fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5#kube-derive@3.0.1": "1irm4g79crlxjm3iqrgvx0f6wxdcj394ky84q89pk9i36y2mlw3n", - "git+https://github.com/kube-rs/kube-rs?rev=fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5#kube-runtime@3.0.1": "1irm4g79crlxjm3iqrgvx0f6wxdcj394ky84q89pk9i36y2mlw3n", - "git+https://github.com/kube-rs/kube-rs?rev=fe69cc486ff8e62a7da61d64ec3ebbd9e64c43b5#kube@3.0.1": "1irm4g79crlxjm3iqrgvx0f6wxdcj394ky84q89pk9i36y2mlw3n", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#k8s-version@0.1.3": 
"1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#stackable-certs@0.4.0": "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#stackable-operator-derive@0.3.1": "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#stackable-operator@0.108.0": "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#stackable-shared@0.1.0": "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#stackable-telemetry@0.6.2": "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#stackable-versioned-macros@0.8.3": "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#stackable-versioned@0.8.3": "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.108.0#stackable-webhook@0.9.0": "1fgc7i8rhq1nl9m4s69sbfiywy2jx4narpynvm3g54vd5yd4c6m2", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#k8s-version@0.1.3": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-certs@0.4.0": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-operator-derive@0.3.1": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + 
"git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-operator@0.110.1": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-shared@0.1.0": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-telemetry@0.6.3": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-versioned-macros@0.9.0": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-versioned@0.9.0": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-webhook@0.9.1": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", "git+https://github.com/stackabletech/product-config.git?tag=0.8.0#product-config@0.8.0": "1dz70kapm2wdqcr7ndyjji0lhsl98bsq95gnb2lw487wf6yr7987" } \ No newline at end of file diff --git a/docs/modules/spark-k8s/partials/nav.adoc b/docs/modules/spark-k8s/partials/nav.adoc index f28352ae..71f2278e 100644 --- a/docs/modules/spark-k8s/partials/nav.adoc +++ b/docs/modules/spark-k8s/partials/nav.adoc @@ -21,6 +21,7 @@ * xref:spark-k8s:reference/index.adoc[] ** xref:spark-k8s:reference/crds.adoc[] *** {crd-docs}/spark.stackable.tech/sparkapplication/v1alpha1/[SparkApplication {external-link-icon}^] +*** {crd-docs}/spark.stackable.tech/sparkconnectserver/v1alpha1/[SparkConnectServer {external-link-icon}^] *** {crd-docs}/spark.stackable.tech/sparkhistoryserver/v1alpha1/[SparkHistoryServer {external-link-icon}^] ** xref:spark-k8s:reference/commandline-parameters.adoc[] ** xref:spark-k8s:reference/environment-variables.adoc[] diff --git a/extra/crds.yaml 
b/extra/crds.yaml index 3299fab7..5f832882 100644 --- a/extra/crds.yaml +++ b/extra/crds.yaml @@ -157,7 +157,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -237,7 +237,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -317,7 +317,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -397,7 +397,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -477,7 +477,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -557,7 +557,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -610,7 +610,7 @@ spec: type: object type: object enableVectorAgent: - description: Wether or not to deploy a container with the Vector log agent. + description: Whether or not to deploy a container with the Vector log agent. 
nullable: true type: boolean type: object @@ -682,17 +682,33 @@ spec: type: array type: object configOverrides: - additionalProperties: - additionalProperties: - type: string - type: object - default: {} description: |- The `configOverrides` can be used to configure properties in product config files that are not exposed in the CRD. Read the [config overrides documentation](https://docs.stackable.tech/home/nightly/concepts/overrides#config-overrides) and consult the operator specific usage guide documentation for details on the available config files and settings for the specific product. + properties: + security.properties: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object + spark-env.sh: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. 
+ nullable: true + type: object type: object envOverrides: additionalProperties: @@ -938,7 +954,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -1018,7 +1034,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -1098,7 +1114,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -1178,7 +1194,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -1258,7 +1274,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -1338,7 +1354,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -1391,7 +1407,7 @@ spec: type: object type: object enableVectorAgent: - description: Wether or not to deploy a container with the Vector log agent. + description: Whether or not to deploy a container with the Vector log agent. 
nullable: true type: boolean type: object @@ -1463,17 +1479,33 @@ spec: type: array type: object configOverrides: - additionalProperties: - additionalProperties: - type: string - type: object - default: {} description: |- The `configOverrides` can be used to configure properties in product config files that are not exposed in the CRD. Read the [config overrides documentation](https://docs.stackable.tech/home/nightly/concepts/overrides#config-overrides) and consult the operator specific usage guide documentation for details on the available config files and settings for the specific product. + properties: + security.properties: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object + spark-env.sh: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object type: object envOverrides: additionalProperties: @@ -1663,17 +1695,33 @@ spec: type: array type: object configOverrides: - additionalProperties: - additionalProperties: - type: string - type: object - default: {} description: |- The `configOverrides` can be used to configure properties in product config files that are not exposed in the CRD. Read the [config overrides documentation](https://docs.stackable.tech/home/nightly/concepts/overrides#config-overrides) and consult the operator specific usage guide documentation for details on the available config files and settings for the specific product. + properties: + security.properties: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. 
+ + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object + spark-env.sh: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object type: object envOverrides: additionalProperties: @@ -2555,7 +2603,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -2635,7 +2683,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -2688,7 +2736,7 @@ spec: type: object type: object enableVectorAgent: - description: Wether or not to deploy a container with the Vector log agent. + description: Whether or not to deploy a container with the Vector log agent. nullable: true type: boolean type: object @@ -2754,17 +2802,43 @@ spec: type: object type: object configOverrides: - additionalProperties: - additionalProperties: - type: string - type: object - default: {} description: |- The `configOverrides` can be used to configure properties in product config files that are not exposed in the CRD. Read the [config overrides documentation](https://docs.stackable.tech/home/nightly/concepts/overrides#config-overrides) and consult the operator specific usage guide documentation for details on the available config files and settings for the specific product. + properties: + security.properties: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. 
+ + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object + spark-defaults.conf: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object + spark-env.sh: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object type: object envOverrides: additionalProperties: @@ -2948,7 +3022,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -3028,7 +3102,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -3081,7 +3155,7 @@ spec: type: object type: object enableVectorAgent: - description: Wether or not to deploy a container with the Vector log agent. + description: Whether or not to deploy a container with the Vector log agent. nullable: true type: boolean type: object @@ -3147,17 +3221,43 @@ spec: type: object type: object configOverrides: - additionalProperties: - additionalProperties: - type: string - type: object - default: {} description: |- The `configOverrides` can be used to configure properties in product config files that are not exposed in the CRD. 
Read the [config overrides documentation](https://docs.stackable.tech/home/nightly/concepts/overrides#config-overrides) and consult the operator specific usage guide documentation for details on the available config files and settings for the specific product. + properties: + security.properties: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object + spark-defaults.conf: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object + spark-env.sh: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object type: object envOverrides: additionalProperties: @@ -3731,7 +3831,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -3811,7 +3911,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -3864,7 +3964,7 @@ spec: type: object type: object enableVectorAgent: - description: Wether or not to deploy a container with the Vector log agent. + description: Whether or not to deploy a container with the Vector log agent. 
nullable: true type: boolean type: object @@ -3930,17 +4030,43 @@ spec: type: object type: object configOverrides: - additionalProperties: - additionalProperties: - type: string - type: object - default: {} description: |- The `configOverrides` can be used to configure properties in product config files that are not exposed in the CRD. Read the [config overrides documentation](https://docs.stackable.tech/home/nightly/concepts/overrides#config-overrides) and consult the operator specific usage guide documentation for details on the available config files and settings for the specific product. + properties: + metrics.properties: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object + security.properties: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object + spark-defaults.conf: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. 
+ nullable: true + type: object type: object envOverrides: additionalProperties: @@ -4117,7 +4243,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -4197,7 +4323,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -4250,7 +4376,7 @@ spec: type: object type: object enableVectorAgent: - description: Wether or not to deploy a container with the Vector log agent. + description: Whether or not to deploy a container with the Vector log agent. nullable: true type: boolean type: object @@ -4316,17 +4442,43 @@ spec: type: object type: object configOverrides: - additionalProperties: - additionalProperties: - type: string - type: object - default: {} description: |- The `configOverrides` can be used to configure properties in product config files that are not exposed in the CRD. Read the [config overrides documentation](https://docs.stackable.tech/home/nightly/concepts/overrides#config-overrides) and consult the operator specific usage guide documentation for details on the available config files and settings for the specific product. + properties: + metrics.properties: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object + security.properties: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. 
+ nullable: true + type: object + spark-defaults.conf: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object type: object envOverrides: additionalProperties: @@ -4608,7 +4760,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -4688,7 +4840,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -4768,7 +4920,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -4848,7 +5000,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -4928,7 +5080,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -5008,7 +5160,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -5061,7 +5213,7 @@ spec: type: 
object type: object enableVectorAgent: - description: Wether or not to deploy a container with the Vector log agent. + description: Whether or not to deploy a container with the Vector log agent. nullable: true type: boolean type: object @@ -5133,17 +5285,33 @@ spec: type: array type: object configOverrides: - additionalProperties: - additionalProperties: - type: string - type: object - default: {} description: |- The `configOverrides` can be used to configure properties in product config files that are not exposed in the CRD. Read the [config overrides documentation](https://docs.stackable.tech/home/nightly/concepts/overrides#config-overrides) and consult the operator specific usage guide documentation for details on the available config files and settings for the specific product. + properties: + security.properties: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object + spark-env.sh: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. 
+ nullable: true + type: object type: object envOverrides: additionalProperties: @@ -5389,7 +5557,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -5469,7 +5637,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -5549,7 +5717,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -5629,7 +5797,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -5709,7 +5877,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -5789,7 +5957,7 @@ spec: type: string type: object custom: - description: Custom log configuration provided in a ConfigMap + description: Log configuration provided in a ConfigMap properties: configMap: description: ConfigMap containing the log configuration files @@ -5842,7 +6010,7 @@ spec: type: object type: object enableVectorAgent: - description: Wether or not to deploy a container with the Vector log agent. + description: Whether or not to deploy a container with the Vector log agent. 
nullable: true type: boolean type: object @@ -5914,17 +6082,33 @@ spec: type: array type: object configOverrides: - additionalProperties: - additionalProperties: - type: string - type: object - default: {} description: |- The `configOverrides` can be used to configure properties in product config files that are not exposed in the CRD. Read the [config overrides documentation](https://docs.stackable.tech/home/nightly/concepts/overrides#config-overrides) and consult the operator specific usage guide documentation for details on the available config files and settings for the specific product. + properties: + security.properties: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object + spark-env.sh: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object type: object envOverrides: additionalProperties: @@ -6114,17 +6298,33 @@ spec: type: array type: object configOverrides: - additionalProperties: - additionalProperties: - type: string - type: object - default: {} description: |- The `configOverrides` can be used to configure properties in product config files that are not exposed in the CRD. Read the [config overrides documentation](https://docs.stackable.tech/home/nightly/concepts/overrides#config-overrides) and consult the operator specific usage guide documentation for details on the available config files and settings for the specific product. + properties: + security.properties: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. 
+ + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object + spark-env.sh: + additionalProperties: + type: string + description: |- + Flat key-value overrides for `*.properties`, Hadoop XML, etc. + + This is backwards-compatible with the existing flat key-value YAML format + used by `HashMap`. + nullable: true + type: object type: object envOverrides: additionalProperties: diff --git a/rust/operator-binary/src/connect/common.rs b/rust/operator-binary/src/connect/common.rs index 06128e83..bc190fde 100644 --- a/rust/operator-binary/src/connect/common.rs +++ b/rust/operator-binary/src/connect/common.rs @@ -1,4 +1,4 @@ -use std::collections::{BTreeMap, HashMap}; +use std::collections::BTreeMap; use product_config::writer::to_java_properties_string; use snafu::{ResultExt, Snafu}; @@ -106,7 +106,7 @@ pub(crate) fn spark_properties( } pub(crate) fn security_properties( - config_overrides: Option<&HashMap>, + config_overrides: BTreeMap>, ) -> Result { let mut result: BTreeMap> = [ ( @@ -120,19 +120,13 @@ pub(crate) fn security_properties( ] .into(); - if let Some(user_config) = config_overrides { - result.extend( - user_config - .iter() - .map(|(k, v)| (k.clone(), Some(v.clone()))), - ); - } + result.extend(config_overrides); to_java_properties_string(result.iter()).context(JvmSecurityPropertiesSnafu) } pub(crate) fn metrics_properties( - config_overrides: Option<&HashMap>, + config_overrides: BTreeMap>, ) -> Result { let mut result: BTreeMap> = [ ( @@ -146,13 +140,7 @@ pub(crate) fn metrics_properties( ] .into(); - if let Some(user_config) = config_overrides { - result.extend( - user_config - .iter() - .map(|(k, v)| (k.clone(), Some(v.clone()))), - ); - } + result.extend(config_overrides); to_java_properties_string(result.iter()).context(MetricsPropertiesSnafu) } diff --git a/rust/operator-binary/src/connect/crd.rs b/rust/operator-binary/src/connect/crd.rs index 6fe36bc0..39653482 100644 --- 
a/rust/operator-binary/src/connect/crd.rs +++ b/rust/operator-binary/src/connect/crd.rs @@ -15,6 +15,7 @@ use stackable_operator::{ fragment::{self, Fragment, ValidationError}, merge::Merge, }, + config_overrides::KeyValueConfigOverrides, crd::s3, deep_merger::ObjectOverrides, k8s_openapi::{api::core::v1::PodAntiAffinity, apimachinery::pkg::api::resource::Quantity}, @@ -57,6 +58,18 @@ pub enum Error { FragmentValidationFailure { source: ValidationError }, } +type SparkConnectServerRoleType = CommonConfiguration< + v1alpha1::ServerConfigFragment, + JavaCommonConfig, + v1alpha1::ConfigOverrides, +>; + +type SparkConnectExecutorRoleType = CommonConfiguration< + v1alpha1::ExecutorConfigFragment, + JavaCommonConfig, + v1alpha1::ConfigOverrides, +>; + #[versioned( version(name = "v1alpha1"), crates( @@ -110,7 +123,7 @@ pub mod versioned { /// Spark Connect executor properties. #[serde(default, skip_serializing_if = "Option::is_none")] - pub executor: Option>, + pub executor: Option, } /// This struct is a wrapper for the `ServerConfig` in order to keep the `spec.server.roleConfig` setting consistent. 
@@ -119,7 +132,8 @@ pub mod versioned { #[serde(rename_all = "camelCase")] pub struct SparkConnectServerConfigWrapper { #[serde(flatten)] - pub config: Option>, + pub config: Option, + #[serde(default)] pub role_config: SparkConnectServerRoleConfig, } @@ -196,6 +210,30 @@ pub mod versioned { #[serde(default, skip_serializing_if = "Option::is_none")] pub s3connection: Option, } + + #[derive(Clone, Debug, Default, Deserialize, JsonSchema, PartialEq, Serialize)] + pub struct ConfigOverrides { + #[serde( + default, + rename = "spark-defaults.conf", + skip_serializing_if = "Option::is_none" + )] + pub spark_defaults_conf: Option, + + #[serde( + default, + rename = "metrics.properties", + skip_serializing_if = "Option::is_none" + )] + pub metrics_properties: Option, + + #[serde( + default, + rename = "security.properties", + skip_serializing_if = "Option::is_none" + )] + pub security_properties: Option, + } } #[allow(clippy::derive_partial_eq_without_eq)] diff --git a/rust/operator-binary/src/connect/executor.rs b/rust/operator-binary/src/connect/executor.rs index 087b7efd..fb4414a8 100644 --- a/rust/operator-binary/src/connect/executor.rs +++ b/rust/operator-binary/src/connect/executor.rs @@ -12,6 +12,7 @@ use stackable_operator::{ product_image_selection::ResolvedProductImage, resources::{CpuLimits, MemoryLimits, Resources}, }, + config_overrides::KeyValueConfigOverrides, k8s_openapi::{ DeepMerge, api::core::v1::{ConfigMap, EnvVar, PodSecurityContext, PodTemplateSpec}, @@ -29,9 +30,9 @@ use crate::{ connect::{common, crd::v1alpha1, s3}, crd::constants::{ JVM_SECURITY_PROPERTIES_FILE, LOG4J2_CONFIG_FILE, MAX_SPARK_LOG_FILES_SIZE, - METRICS_PROPERTIES_FILE, POD_TEMPLATE_FILE, SPARK_DEFAULTS_FILE_NAME, - VOLUME_MOUNT_NAME_CONFIG, VOLUME_MOUNT_NAME_LOG, VOLUME_MOUNT_NAME_LOG_CONFIG, - VOLUME_MOUNT_PATH_CONFIG, VOLUME_MOUNT_PATH_LOG, VOLUME_MOUNT_PATH_LOG_CONFIG, + METRICS_PROPERTIES_FILE, POD_TEMPLATE_FILE, VOLUME_MOUNT_NAME_CONFIG, + VOLUME_MOUNT_NAME_LOG, 
VOLUME_MOUNT_NAME_LOG_CONFIG, VOLUME_MOUNT_PATH_CONFIG, + VOLUME_MOUNT_PATH_LOG, VOLUME_MOUNT_PATH_LOG_CONFIG, }, product_logging, }; @@ -137,7 +138,7 @@ pub fn executor_pod_template( .context(AddVolumeMountSnafu)?; let metadata = ObjectMetaBuilder::new() - .with_recommended_labels(common::labels( + .with_recommended_labels(&common::labels( scs, &resolved_product_image.app_version_label_value, &SparkConnectRole::Executor.to_string(), @@ -296,15 +297,11 @@ pub(crate) fn executor_properties( .spec .executor .as_ref() - .and_then(|s| s.config_overrides.get(SPARK_DEFAULTS_FILE_NAME)); - - if let Some(user_config) = config_overrides { - result.extend( - user_config - .iter() - .map(|(k, v)| (k.clone(), Some(v.clone()))), - ); - } + .and_then(|s| s.config_overrides.spark_defaults_conf.as_ref()) + .map(KeyValueConfigOverrides::as_product_config_overrides) + .unwrap_or_default(); + + result.extend(config_overrides); Ok(result) } @@ -346,23 +343,27 @@ pub(crate) fn executor_config_map( resolved_product_image: &ResolvedProductImage, ) -> Result { let cm_name = object_name(&scs.name_any(), SparkConnectRole::Executor); - let jvm_sec_props = common::security_properties( - scs.spec - .executor - .as_ref() - .and_then(|s| s.config_overrides.get(JVM_SECURITY_PROPERTIES_FILE)), - ) - .context(ExecutorJvmSecurityPropertiesSnafu)?; - let metrics_props = common::metrics_properties( - scs.spec - .executor - .as_ref() - .and_then(|s| s.config_overrides.get(METRICS_PROPERTIES_FILE)), - ) - .context(MetricsPropertiesSnafu { - name: scs.name_unchecked(), - })?; + let config_overrides = scs.spec.executor.as_ref().map(|s| &s.config_overrides); + + let security_properties_overrides = config_overrides + .and_then(|config_overrides| config_overrides.security_properties.as_ref()) + .map(KeyValueConfigOverrides::as_product_config_overrides) + .unwrap_or_default(); + + let jvm_sec_props = common::security_properties(security_properties_overrides) + .context(ExecutorJvmSecurityPropertiesSnafu)?; + 
+ let metrics_properties_overrides = config_overrides + .and_then(|config_overrides| config_overrides.metrics_properties.as_ref()) + .map(KeyValueConfigOverrides::as_product_config_overrides) + .unwrap_or_default(); + + let metrics_props = common::metrics_properties(metrics_properties_overrides).context( + MetricsPropertiesSnafu { + name: scs.name_unchecked(), + }, + )?; let mut cm_builder = ConfigMapBuilder::new(); @@ -373,7 +374,7 @@ pub(crate) fn executor_config_map( .name(&cm_name) .ownerreference_from_resource(scs, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? - .with_recommended_labels(common::labels( + .with_recommended_labels(&common::labels( scs, &resolved_product_image.app_version_label_value, &SparkConnectRole::Executor.to_string(), diff --git a/rust/operator-binary/src/connect/server.rs b/rust/operator-binary/src/connect/server.rs index 0f8d8546..10a81e79 100644 --- a/rust/operator-binary/src/connect/server.rs +++ b/rust/operator-binary/src/connect/server.rs @@ -18,6 +18,7 @@ use stackable_operator::{ }, }, commons::product_image_selection::ResolvedProductImage, + config_overrides::KeyValueConfigOverrides, crd::listener, k8s_openapi::{ DeepMerge, @@ -154,27 +155,30 @@ pub(crate) fn server_config_map( executor_pod_template_spec: &str, ) -> Result { let cm_name = object_name(&scs.name_any(), SparkConnectRole::Server); - let jvm_sec_props = common::security_properties( - scs.spec - .server - .config - .as_ref() - .and_then(|s| s.config_overrides.get(JVM_SECURITY_PROPERTIES_FILE)), - ) - .context(ServerJvmSecurityPropertiesSnafu { - name: scs.name_unchecked(), - })?; - let metrics_props = common::metrics_properties( - scs.spec - .server - .config - .as_ref() - .and_then(|s| s.config_overrides.get(METRICS_PROPERTIES_FILE)), - ) - .context(MetricsPropertiesSnafu { - name: scs.name_unchecked(), - })?; + let config_overrides = scs.spec.server.config.as_ref().map(|s| &s.config_overrides); + + let security_properties_overrides = 
config_overrides + .and_then(|config_overrides| config_overrides.security_properties.as_ref()) + .map(KeyValueConfigOverrides::as_product_config_overrides) + .unwrap_or_default(); + + let jvm_sec_props = common::security_properties(security_properties_overrides).context( + ServerJvmSecurityPropertiesSnafu { + name: scs.name_unchecked(), + }, + )?; + + let metrics_properties_overrides = config_overrides + .and_then(|config_overrides| config_overrides.metrics_properties.as_ref()) + .map(KeyValueConfigOverrides::as_product_config_overrides) + .unwrap_or_default(); + + let metrics_props = common::metrics_properties(metrics_properties_overrides).context( + MetricsPropertiesSnafu { + name: scs.name_unchecked(), + }, + )?; let mut cm_builder = ConfigMapBuilder::new(); @@ -185,7 +189,7 @@ pub(crate) fn server_config_map( .name(&cm_name) .ownerreference_from_resource(scs, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? - .with_recommended_labels(common::labels( + .with_recommended_labels(&common::labels( scs, &resolved_product_image.app_version_label_value, &SparkConnectRole::Server.to_string(), @@ -234,10 +238,10 @@ pub(crate) fn build_stateful_set( ); let recommended_labels = - Labels::recommended(recommended_object_labels.clone()).context(LabelBuildSnafu)?; + Labels::recommended(&recommended_object_labels).context(LabelBuildSnafu)?; let metadata = ObjectMetaBuilder::new() - .with_recommended_labels(recommended_object_labels) + .with_recommended_labels(&recommended_object_labels) .context(MetadataBuildSnafu)? .with_label(Label::try_from(("prometheus.io/scrape", "true")).context(LabelBuildSnafu)?) .build(); @@ -396,7 +400,7 @@ pub(crate) fn build_stateful_set( .name(object_name(&scs.name_any(), SparkConnectRole::Server)) .ownerreference_from_resource(scs, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? 
- .with_recommended_labels(common::labels( + .with_recommended_labels(&common::labels( scs, &resolved_product_image.app_version_label_value, &SparkConnectRole::Server.to_string(), @@ -497,7 +501,9 @@ pub(crate) fn server_properties( .server .config .as_ref() - .and_then(|s| s.config_overrides.get(SPARK_DEFAULTS_FILE_NAME)); + .and_then(|s| s.config_overrides.spark_defaults_conf.as_ref()) + .map(KeyValueConfigOverrides::as_product_config_overrides) + .unwrap_or_default(); let mut result: BTreeMap> = [ // This needs to match the name of the headless service for the executors to be able @@ -543,13 +549,8 @@ pub(crate) fn server_properties( ] .into(); - if let Some(user_config) = config_overrides { - result.extend( - user_config - .iter() - .map(|(k, v)| (k.clone(), Some(v.clone()))), - ); - } + result.extend(config_overrides); + Ok(result) } diff --git a/rust/operator-binary/src/connect/service.rs b/rust/operator-binary/src/connect/service.rs index 408d28ad..f710f419 100644 --- a/rust/operator-binary/src/connect/service.rs +++ b/rust/operator-binary/src/connect/service.rs @@ -51,7 +51,7 @@ pub(crate) fn build_headless_service( .name(service_name) .ownerreference_from_resource(scs, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? - .with_recommended_labels(common::labels( + .with_recommended_labels(&common::labels( scs, app_version_label, &SparkConnectRole::Server.to_string(), @@ -106,7 +106,7 @@ pub(crate) fn build_metrics_service( .name(service_name) .ownerreference_from_resource(scs, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? 
- .with_recommended_labels(common::labels( + .with_recommended_labels(&common::labels( scs, app_version_label, &SparkConnectRole::Server.to_string(), diff --git a/rust/operator-binary/src/crd/history.rs b/rust/operator-binary/src/crd/history.rs index 45dbd77f..24459bee 100644 --- a/rust/operator-binary/src/crd/history.rs +++ b/rust/operator-binary/src/crd/history.rs @@ -16,6 +16,7 @@ use stackable_operator::{ fragment::{self, Fragment, ValidationError}, merge::Merge, }, + config_overrides::{KeyValueConfigOverrides, KeyValueOverridesProvider}, crd::s3, deep_merger::ObjectOverrides, k8s_openapi::{api::core::v1::EnvVar, apimachinery::pkg::api::resource::Quantity}, @@ -64,6 +65,13 @@ pub enum Error { }, } +pub type SparkHistoryRoleType = Role< + HistoryConfigFragment, + v1alpha1::ConfigOverrides, + SparkHistoryServerRoleConfig, + JavaCommonConfig, +>; + #[versioned( version(name = "v1alpha1"), crates( @@ -107,7 +115,7 @@ pub mod versioned { pub object_overrides: ObjectOverrides, /// A history server node role definition. - pub nodes: Role, + pub nodes: SparkHistoryRoleType, } // TODO: move generic version to op-rs? 
@@ -122,13 +130,49 @@ pub mod versioned { #[serde(default = "default_listener_class")] pub listener_class: String, } + + #[derive(Clone, Debug, Default, Deserialize, JsonSchema, PartialEq, Serialize)] + pub struct ConfigOverrides { + #[serde( + default, + rename = "spark-defaults.conf", + skip_serializing_if = "Option::is_none" + )] + pub spark_defaults_conf: Option, + + #[serde( + default, + rename = "spark-env.sh", + skip_serializing_if = "Option::is_none" + )] + pub spark_env_sh: Option, + + #[serde( + default, + rename = "security.properties", + skip_serializing_if = "Option::is_none" + )] + pub security_properties: Option, + } +} + +impl KeyValueOverridesProvider for v1alpha1::ConfigOverrides { + fn get_key_value_overrides(&self, file: &str) -> BTreeMap> { + let field = match file { + SPARK_DEFAULTS_FILE_NAME => self.spark_defaults_conf.as_ref(), + SPARK_ENV_SH_FILE_NAME => self.spark_env_sh.as_ref(), + JVM_SECURITY_PROPERTIES_FILE => self.security_properties.as_ref(), + _ => None, + }; + field + .map(KeyValueConfigOverrides::as_product_config_overrides) + .unwrap_or_default() + } } impl v1alpha1::SparkHistoryServer { /// Returns a reference to the role. Raises an error if the role is not defined. 
- pub fn role( - &self, - ) -> &Role { + pub fn role(&self) -> &SparkHistoryRoleType { &self.spec.nodes } @@ -136,7 +180,8 @@ impl v1alpha1::SparkHistoryServer { pub fn rolegroup( &self, rolegroup_ref: &RoleGroupRef, - ) -> Result, Error> { + ) -> Result, Error> + { self.spec .nodes .role_groups @@ -205,14 +250,7 @@ impl v1alpha1::SparkHistoryServer { resolved_product_image: &ResolvedProductImage, product_config: &ProductConfigManager, ) -> Result { - #[allow(clippy::type_complexity)] - let roles_to_validate: HashMap< - String, - ( - Vec, - Role, - ), - > = vec![( + let roles_to_validate = vec![( HISTORY_ROLE_NAME.to_string(), ( vec![ @@ -224,9 +262,9 @@ impl v1alpha1::SparkHistoryServer { ), )] .into_iter() - .collect(); + .collect::>(); - let role_config = transform_all_roles_to_config(self, roles_to_validate); + let role_config = transform_all_roles_to_config(self, &roles_to_validate); validate_all_roles_and_groups_config( &resolved_product_image.product_version, diff --git a/rust/operator-binary/src/crd/listener_ext.rs b/rust/operator-binary/src/crd/listener_ext.rs index f96786b0..0914d9f8 100644 --- a/rust/operator-binary/src/crd/listener_ext.rs +++ b/rust/operator-binary/src/crd/listener_ext.rs @@ -33,7 +33,7 @@ pub fn build_listener>( .name(listener_name) .ownerreference_from_resource(resource, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? - .with_recommended_labels(listener_labels) + .with_recommended_labels(&listener_labels) .context(ObjectMetaSnafu)? 
.build(), spec: listener::v1alpha1::ListenerSpec { diff --git a/rust/operator-binary/src/crd/logdir.rs b/rust/operator-binary/src/crd/logdir.rs index 13de8060..cff400ba 100644 --- a/rust/operator-binary/src/crd/logdir.rs +++ b/rust/operator-binary/src/crd/logdir.rs @@ -6,7 +6,7 @@ use stackable_operator::{ SecretFormat, SecretOperatorVolumeSourceBuilder, SecretOperatorVolumeSourceBuilderError, VolumeBuilder, }, - commons::secret_class::SecretClassVolume, + commons::secret_class::{SecretClassVolume, SecretClassVolumeProvisionParts}, crd::s3, k8s_openapi::api::core::v1::{Volume, VolumeMount}, shared::time::Duration, @@ -272,11 +272,14 @@ impl S3LogDir { volumes.push( VolumeBuilder::new(secret_name) .ephemeral( - SecretOperatorVolumeSourceBuilder::new(secret_name) - .with_format(SecretFormat::TlsPkcs12) - .with_auto_tls_cert_lifetime(*requested_secret_lifetime) - .build() - .context(TlsCertSecretClassVolumeBuildSnafu)?, + SecretOperatorVolumeSourceBuilder::new( + secret_name, + SecretClassVolumeProvisionParts::PublicPrivate, + ) + .with_format(SecretFormat::TlsPkcs12) + .with_auto_tls_cert_lifetime(*requested_secret_lifetime) + .build() + .context(TlsCertSecretClassVolumeBuildSnafu)?, ) .build(), ); @@ -304,7 +307,10 @@ impl S3LogDir { self.credentials() .map(|credentials| { credentials - .to_volume(credentials.secret_class.as_ref()) + .to_volume( + credentials.secret_class.as_ref(), + SecretClassVolumeProvisionParts::PublicPrivate, + ) .context(CredentialsVolumeBuildSnafu) }) .transpose() diff --git a/rust/operator-binary/src/crd/mod.rs b/rust/operator-binary/src/crd/mod.rs index e4721f5d..051d73cd 100644 --- a/rust/operator-binary/src/crd/mod.rs +++ b/rust/operator-binary/src/crd/mod.rs @@ -19,11 +19,13 @@ use stackable_operator::{ commons::{ product_image_selection::{ProductImage, ResolvedProductImage}, resources::{CpuLimits, MemoryLimits, Resources}, + secret_class::SecretClassVolumeProvisionParts, }, config::{ fragment::{self, ValidationError}, merge::Merge, }, + 
config_overrides::{KeyValueConfigOverrides, KeyValueOverridesProvider}, crd::s3, k8s_openapi::{ api::core::v1::{EmptyDirVolumeSource, EnvVar, PodTemplateSpec, Volume, VolumeMount}, @@ -130,6 +132,15 @@ pub enum Error { ConstructJvmArguments { source: crate::config::jvm::Error }, } +pub type SparkApplicationJobRoleType = + CommonConfiguration; + +pub type SparkApplicationDriverRoleType = + CommonConfiguration; + +pub type SparkApplicationExecutorRoleType = + RoleGroup; + #[versioned( version(name = "v1alpha1"), crates( @@ -206,19 +217,19 @@ pub mod versioned { // IMPORTANT: Please note that the jvmArgumentOverrides have no effect here! // However, due to product-config things I wasn't able to remove them. #[serde(default, skip_serializing_if = "Option::is_none")] - pub job: Option>, + pub job: Option, /// The driver role specifies the configuration that, together with the driver pod template, is used by /// Spark to create driver pods. #[serde(default, skip_serializing_if = "Option::is_none")] - pub driver: Option>, + pub driver: Option, /// The executor role specifies the configuration that, together with the driver pod template, is used by /// Spark to create the executor pods. /// This is RoleGroup instead of plain CommonConfiguration because it needs to allow for the number of replicas. /// to be specified. #[serde(default, skip_serializing_if = "Option::is_none")] - pub executor: Option>, + pub executor: Option, /// A map of key/value strings that will be passed directly to spark-submit. 
#[serde(default)] @@ -253,6 +264,36 @@ pub mod versioned { #[serde(default, skip_serializing_if = "Option::is_none")] pub log_file_directory: Option, } + + #[derive(Clone, Debug, Default, Deserialize, JsonSchema, PartialEq, Serialize)] + pub struct ConfigOverrides { + #[serde( + default, + rename = "spark-env.sh", + skip_serializing_if = "Option::is_none" + )] + pub spark_env_sh: Option, + + #[serde( + default, + rename = "security.properties", + skip_serializing_if = "Option::is_none" + )] + pub security_properties: Option, + } +} + +impl KeyValueOverridesProvider for v1alpha1::ConfigOverrides { + fn get_key_value_overrides(&self, file: &str) -> BTreeMap> { + let field = match file { + SPARK_ENV_SH_FILE_NAME => self.spark_env_sh.as_ref(), + JVM_SECURITY_PROPERTIES_FILE => self.security_properties.as_ref(), + _ => None, + }; + field + .map(KeyValueConfigOverrides::as_product_config_overrides) + .unwrap_or_default() + } } impl v1alpha1::SparkApplication { @@ -333,7 +374,7 @@ impl v1alpha1::SparkApplication { result.insert( volume_name.clone(), secret_class_volume - .to_volume(volume_name) + .to_volume(volume_name, SecretClassVolumeProvisionParts::PublicPrivate) .context(S3CredentialsVolumeBuildSnafu)?, ); } @@ -385,11 +426,14 @@ impl v1alpha1::SparkApplication { cert_secret.to_string(), VolumeBuilder::new(cert_secret) .ephemeral( - SecretOperatorVolumeSourceBuilder::new(cert_secret) - .with_format(SecretFormat::TlsPkcs12) - .with_auto_tls_cert_lifetime(*requested_secret_lifetime) - .build() - .context(TlsCertSecretClassVolumeBuildSnafu)?, + SecretOperatorVolumeSourceBuilder::new( + cert_secret, + SecretClassVolumeProvisionParts::PublicPrivate, + ) + .with_format(SecretFormat::TlsPkcs12) + .with_auto_tls_cert_lifetime(*requested_secret_lifetime) + .build() + .context(TlsCertSecretClassVolumeBuildSnafu)?, ) .build(), ); @@ -971,7 +1015,7 @@ impl v1alpha1::SparkApplication { ), ); - let role_config = transform_all_roles_to_config(self, roles_to_validate); + let 
role_config = transform_all_roles_to_config(self, &roles_to_validate); validate_all_roles_and_groups_config( &resolved_product_image.product_version, diff --git a/rust/operator-binary/src/crd/template_merger.rs b/rust/operator-binary/src/crd/template_merger.rs index 57650619..505247a6 100644 --- a/rust/operator-binary/src/crd/template_merger.rs +++ b/rust/operator-binary/src/crd/template_merger.rs @@ -3,7 +3,9 @@ use std::collections::HashMap; use stackable_operator::{ - config::merge::Merge, k8s_openapi::apimachinery::pkg::apis::meta::v1::ObjectMeta, + config::merge::Merge, + k8s_openapi::apimachinery::pkg::apis::meta::v1::ObjectMeta, + role_utils::{CommonConfiguration, RoleGroup}, }; use super::v1alpha1::SparkApplication; @@ -152,13 +154,14 @@ fn merge_vec(base: &[T], overlay: &[T]) -> Vec { } /// Merge CommonConfiguration using the Merge trait -fn merge_common_config( - base: Option<&stackable_operator::role_utils::CommonConfiguration>, - overlay: Option<&stackable_operator::role_utils::CommonConfiguration>, -) -> Option> +fn merge_common_config( + base: Option<&CommonConfiguration>, + overlay: Option<&CommonConfiguration>, +) -> Option> where - C: Clone + Merge, - R: Clone, + Config: Clone + Merge, + CommonConfig: Clone, + ConfigOverrides: Clone, { match (base, overlay) { (None, None) => None, @@ -174,13 +177,14 @@ where } /// Merge RoleGroup -fn merge_role_group( - base: Option<&stackable_operator::role_utils::RoleGroup>, - overlay: Option<&stackable_operator::role_utils::RoleGroup>, -) -> Option> +fn merge_role_group( + base: Option<&RoleGroup>, + overlay: Option<&RoleGroup>, +) -> Option> where - C: Clone + Merge, - R: Clone, + Config: Clone + Merge, + CommonConfig: Clone, + ConfigOverrides: Clone, { match (base, overlay) { (None, None) => None, diff --git a/rust/operator-binary/src/crd/template_spec.rs b/rust/operator-binary/src/crd/template_spec.rs index bc53ea47..d77f8257 100644 --- a/rust/operator-binary/src/crd/template_spec.rs +++ 
b/rust/operator-binary/src/crd/template_spec.rs @@ -10,7 +10,6 @@ use stackable_operator::{ crd::s3, k8s_openapi::api::core::v1::{EnvVar, Volume}, kube::{Api, CustomResource, ResourceExt, api::ListParams}, - role_utils::{CommonConfiguration, JavaCommonConfig, RoleGroup}, schemars::{self, JsonSchema}, utils::crds::raw_object_list_schema, versioned::versioned, @@ -18,9 +17,8 @@ use stackable_operator::{ use strum::{EnumDiscriminants, IntoStaticStr}; use super::{ - history::LogFileDirectorySpec, - job_dependencies::JobDependencies, - roles::{RoleConfigFragment, SparkMode, SubmitConfigFragment}, + SparkApplicationDriverRoleType, SparkApplicationExecutorRoleType, SparkApplicationJobRoleType, + history::LogFileDirectorySpec, job_dependencies::JobDependencies, roles::SparkMode, }; use crate::crd::template_merger::deep_merge; @@ -112,19 +110,19 @@ pub mod versioned { // IMPORTANT: Please note that the jvmArgumentOverrides have no effect here! // However, due to product-config things I wasn't able to remove them. #[serde(default, skip_serializing_if = "Option::is_none")] - pub job: Option>, + pub job: Option, /// The driver role specifies the configuration that, together with the driver pod template, is used by /// Spark to create driver pods. #[serde(default, skip_serializing_if = "Option::is_none")] - pub driver: Option>, + pub driver: Option, /// The executor role specifies the configuration that, together with the driver pod template, is used by /// Spark to create the executor pods. /// This is RoleGroup instead of plain CommonConfiguration because it needs to allow for the number of replicas. /// to be specified. #[serde(default, skip_serializing_if = "Option::is_none")] - pub executor: Option>, + pub executor: Option, /// A map of key/value strings that will be passed directly to spark-submit. 
#[serde(default)] diff --git a/rust/operator-binary/src/history/config/jvm.rs b/rust/operator-binary/src/history/config/jvm.rs index b650aedb..9773616e 100644 --- a/rust/operator-binary/src/history/config/jvm.rs +++ b/rust/operator-binary/src/history/config/jvm.rs @@ -1,5 +1,5 @@ use snafu::{ResultExt, Snafu}; -use stackable_operator::role_utils::{self, JavaCommonConfig, JvmArgumentOverrides, Role}; +use stackable_operator::role_utils::{self, JvmArgumentOverrides}; use crate::crd::{ constants::{ @@ -7,7 +7,7 @@ use crate::crd::{ STACKABLE_TLS_STORE_PASSWORD, STACKABLE_TRUST_STORE, VOLUME_MOUNT_PATH_CONFIG, VOLUME_MOUNT_PATH_LOG_CONFIG, }, - history::{HistoryConfigFragment, v1alpha1::SparkHistoryServerRoleConfig}, + history::SparkHistoryRoleType, logdir::ResolvedLogDir, }; @@ -19,7 +19,7 @@ pub enum Error { /// JVM arguments that go into `SPARK_HISTORY_OPTS` pub fn construct_history_jvm_args( - role: &Role, + role: &SparkHistoryRoleType, role_group: &str, logdir: &ResolvedLogDir, ) -> Result { diff --git a/rust/operator-binary/src/history/history_controller.rs b/rust/operator-binary/src/history/history_controller.rs index bb9d3e89..a50d75b7 100644 --- a/rust/operator-binary/src/history/history_controller.rs +++ b/rust/operator-binary/src/history/history_controller.rs @@ -464,7 +464,7 @@ fn build_config_map( .name(&cm_name) .ownerreference_from_resource(shs, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? 
- .with_recommended_labels(recommended_labels( + .with_recommended_labels(&recommended_labels( shs, app_version_label_value, &rolegroupref.role_group, @@ -536,10 +536,10 @@ fn build_stateful_set( rolegroupref.role_group.as_ref(), ); let recommended_labels = - Labels::recommended(recommended_object_labels.clone()).context(LabelBuildSnafu)?; + Labels::recommended(&recommended_object_labels).context(LabelBuildSnafu)?; let pb_metadata = ObjectMetaBuilder::new() - .with_recommended_labels(recommended_object_labels.clone()) + .with_recommended_labels(&recommended_object_labels) .context(MetadataBuildSnafu)? .build(); @@ -677,7 +677,7 @@ fn build_stateful_set( .name(rolegroupref.object_name()) .ownerreference_from_resource(shs, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? - .with_recommended_labels(recommended_object_labels) + .with_recommended_labels(&recommended_object_labels) .context(MetadataBuildSnafu)? .build(); diff --git a/rust/operator-binary/src/history/service.rs b/rust/operator-binary/src/history/service.rs index 063c4075..a9a3b640 100644 --- a/rust/operator-binary/src/history/service.rs +++ b/rust/operator-binary/src/history/service.rs @@ -44,7 +44,7 @@ pub fn build_rolegroup_metrics_service( .name(rolegroup_ref.rolegroup_metrics_service_name()) .ownerreference_from_resource(shs, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? - .with_recommended_labels(recommended_labels( + .with_recommended_labels(&recommended_labels( shs, &resolved_product_image.app_version_label_value, &rolegroup_ref.role_group, diff --git a/rust/operator-binary/src/main.rs b/rust/operator-binary/src/main.rs index 6d25370d..b24b8504 100644 --- a/rust/operator-binary/src/main.rs +++ b/rust/operator-binary/src/main.rs @@ -83,13 +83,13 @@ async fn main() -> anyhow::Result<()> { match opts.cmd { Command::Crd => { SparkApplication::merged_crd(crd::SparkApplicationVersion::V1Alpha1)? 
- .print_yaml_schema(built_info::PKG_VERSION, SerializeOptions::default())?; + .print_yaml_schema(built_info::PKG_VERSION, &SerializeOptions::default())?; SparkHistoryServer::merged_crd(crd::history::SparkHistoryServerVersion::V1Alpha1)? - .print_yaml_schema(built_info::PKG_VERSION, SerializeOptions::default())?; + .print_yaml_schema(built_info::PKG_VERSION, &SerializeOptions::default())?; SparkConnectServer::merged_crd(SparkConnectServerVersion::V1Alpha1)? - .print_yaml_schema(built_info::PKG_VERSION, SerializeOptions::default())?; + .print_yaml_schema(built_info::PKG_VERSION, &SerializeOptions::default())?; SparkApplicationTemplate::merged_crd(SparkApplicationTemplateVersion::V1Alpha1)? - .print_yaml_schema(built_info::PKG_VERSION, SerializeOptions::default())?; + .print_yaml_schema(built_info::PKG_VERSION, &SerializeOptions::default())?; } Command::Run(RunArguments { operator_environment, @@ -120,7 +120,7 @@ async fn main() -> anyhow::Result<()> { let sigterm_watcher = SignalWatcher::sigterm()?; let eos_checker = - EndOfSupportChecker::new(built_info::BUILT_TIME_UTC, maintenance.end_of_support)? + EndOfSupportChecker::new(built_info::BUILT_TIME_UTC, &maintenance.end_of_support)? .run(sigterm_watcher.handle()) .map(anyhow::Ok); diff --git a/rust/operator-binary/src/spark_k8s_controller.rs b/rust/operator-binary/src/spark_k8s_controller.rs index 6cf4eeaf..4769e22f 100644 --- a/rust/operator-binary/src/spark_k8s_controller.rs +++ b/rust/operator-binary/src/spark_k8s_controller.rs @@ -641,7 +641,7 @@ fn pod_template( .ownerreference_from_resource(spark_application, None, None) .context(ObjectMissingMetadataForOwnerRefSnafu)? 
.with_recommended_labels( - spark_application + &spark_application .build_recommended_labels(&spark_image.app_version_label_value, &container_name), ) .context(MetadataBuildSnafu)?; @@ -787,7 +787,7 @@ fn pod_template_config_map( .name(&cm_name) .ownerreference_from_resource(spark_application, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? - .with_recommended_labels(spark_application.build_recommended_labels( + .with_recommended_labels(&spark_application.build_recommended_labels( &spark_image.app_version_label_value, "pod-templates", )) @@ -859,7 +859,7 @@ fn submit_job_config_map( .ownerreference_from_resource(spark_application, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? .with_recommended_labels( - spark_application + &spark_application .build_recommended_labels(&spark_image.app_version_label_value, "spark-submit"), ) .context(MetadataBuildSnafu)? @@ -979,7 +979,7 @@ fn spark_job( metadata: Some( ObjectMetaBuilder::new() .name("spark-submit") - .with_recommended_labels(spark_application.build_recommended_labels( + .with_recommended_labels(&spark_application.build_recommended_labels( &spark_image.app_version_label_value, "spark-job-template", )) @@ -1014,7 +1014,7 @@ fn spark_job( .ownerreference_from_resource(spark_application, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? .with_recommended_labels( - spark_application + &spark_application .build_recommended_labels(&spark_image.app_version_label_value, "spark-job"), ) .context(MetadataBuildSnafu)? @@ -1048,7 +1048,7 @@ fn build_spark_role_serviceaccount( .name(&sa_name) .ownerreference_from_resource(spark_app, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? 
- .with_recommended_labels(spark_app.build_recommended_labels( + .with_recommended_labels(&spark_app.build_recommended_labels( &spark_image.app_version_label_value, "service-account", )) @@ -1064,7 +1064,7 @@ fn build_spark_role_serviceaccount( .ownerreference_from_resource(spark_app, None, Some(true)) .context(ObjectMissingMetadataForOwnerRefSnafu)? .with_recommended_labels( - spark_app + &spark_app .build_recommended_labels(&spark_image.app_version_label_value, "role-binding"), ) .context(MetadataBuildSnafu)?