I recently deployed my Spark, Airflow, and Hive Metastore setup to a new Kubernetes cluster and have been running into a java.lang.NoSuchFieldError: HIVE_LOCAL_TIME_ZONE when Spark tries to register a Delta table in the metastore, an error I never hit on the previous cluster.
Does anyone have any idea what could be causing this?
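For context, the failing step is essentially a Delta saveAsTable into the Hive-backed spark_catalog (that's what CreateDeltaTableCommand -> HiveExternalCatalog.createTable in the trace below corresponds to). A minimal sketch of the kind of job that hits this is shown here; the session settings, app name, and source path are illustrative, only the target table name comes from the log:

# Illustrative sketch only -- not the actual DAG code.
from pyspark.sql import SparkSession

spark = (
    SparkSession.builder
    .appName("staging_messages_load")  # hypothetical app name
    # Delta + Hive metastore catalog wiring (standard Delta Lake settings)
    .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
    .config("spark.sql.catalog.spark_catalog",
            "org.apache.spark.sql.delta.catalog.DeltaCatalog")
    .enableHiveSupport()
    .getOrCreate()
)

# Hypothetical source; the real job reads from wherever the DAG points it.
df = spark.read.parquet("s3a://some-bucket/messages/")

# This write is what reaches HiveExternalCatalog.createTable and then dies with
# java.lang.NoSuchFieldError: HIVE_LOCAL_TIME_ZONE on the new cluster.
(df.write
   .format("delta")
   .mode("overwrite")
   .saveAsTable("staging.messages"))

The full log from the Airflow task follows: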
[2024-02-09, 00:07:49 UTC] {spark_submit.py:536} INFO - 24/02/09 00:07:49 INFO CreateDeltaTableCommand: Table is path-based table: false. Update catalog with mode: CreateOrReplace
[2024-02-09, 00:07:49 UTC] {spark_submit.py:536} INFO - 24/02/09 00:07:49 WARN HiveExternalCatalog: Couldn't find corresponding Hive SerDe for data source provider delta. Persisting data source table `spark_catalog`.`staging`.`messages` into Hive metastore in Spark SQL specific format, which is NOT compatible with Hive.
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - 24/02/09 00:07:50 ERROR Utils: Aborting task
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - java.lang.NoSuchFieldError: HIVE_LOCAL_TIME_ZONE
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters.extractColumnInfo(LazySerDeParameters.java:166)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters.<init>(LazySerDeParameters.java:92)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.initialize(LazySimpleSerDe.java:116)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.hadoop.hive.serde2.AbstractSerDe.initialize(AbstractSerDe.java:54)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.hadoop.hive.serde2.SerDeUtils.initializeSerDe(SerDeUtils.java:540)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:453)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:440)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:281)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.hadoop.hive.ql.metadata.Table.checkValidity(Table.java:199)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:842)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:874)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.hive.client.Shim_v0_12.createTable(HiveShim.scala:614)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$createTable$1(HiveClientImpl.scala:573)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:303)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:234)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:233)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:283)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.hive.client.HiveClientImpl.createTable(HiveClientImpl.scala:571)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.hive.HiveExternalCatalog.saveTableIntoHive(HiveExternalCatalog.scala:526)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.hive.HiveExternalCatalog.createDataSourceTable(HiveExternalCatalog.scala:427)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$createTable$1(HiveExternalCatalog.scala:274)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.hive.HiveExternalCatalog.createTable(HiveExternalCatalog.scala:245)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.createTable(ExternalCatalogWithListener.scala:94)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.catalyst.catalog.SessionCatalog.createTable(SessionCatalog.scala:402)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.commands.CreateDeltaTableCommand.updateCatalog(CreateDeltaTableCommand.scala:540)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.commands.CreateDeltaTableCommand.runPostCommitUpdates(CreateDeltaTableCommand.scala:192)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.commands.CreateDeltaTableCommand.handleCommit(CreateDeltaTableCommand.scala:172)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.commands.CreateDeltaTableCommand.$anonfun$run$2(CreateDeltaTableCommand.scala:108)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile(DeltaLogging.scala:140)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile$(DeltaLogging.scala:138)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.commands.CreateDeltaTableCommand.recordFrameProfile(CreateDeltaTableCommand.scala:55)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:133)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.commands.CreateDeltaTableCommand.recordOperation(CreateDeltaTableCommand.scala:55)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:132)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:122)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:112)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.commands.CreateDeltaTableCommand.recordDeltaOperation(CreateDeltaTableCommand.scala:55)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.commands.CreateDeltaTableCommand.run(CreateDeltaTableCommand.scala:108)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.catalog.DeltaCatalog.$anonfun$createDeltaTable$1(DeltaCatalog.scala:178)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile(DeltaLogging.scala:140)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile$(DeltaLogging.scala:138)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.catalog.DeltaCatalog.recordFrameProfile(DeltaCatalog.scala:59)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.catalog.DeltaCatalog.org$apache$spark$sql$delta$catalog$DeltaCatalog$$createDeltaTable(DeltaCatalog.scala:89)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.catalog.DeltaCatalog$StagedDeltaTableV2.$anonfun$commitStagedChanges$1(DeltaCatalog.scala:506)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile(DeltaLogging.scala:140)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile$(DeltaLogging.scala:138)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.catalog.DeltaCatalog.recordFrameProfile(DeltaCatalog.scala:59)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.delta.catalog.DeltaCatalog$StagedDeltaTableV2.commitStagedChanges(DeltaCatalog.scala:468)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.datasources.v2.V2CreateTableAsSelectBaseExec.$anonfun$writeToTable$1(WriteToDataSourceV2Exec.scala:580)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1397)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.datasources.v2.V2CreateTableAsSelectBaseExec.writeToTable(WriteToDataSourceV2Exec.scala:573)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.datasources.v2.V2CreateTableAsSelectBaseExec.writeToTable$(WriteToDataSourceV2Exec.scala:567)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.datasources.v2.AtomicReplaceTableAsSelectExec.writeToTable(WriteToDataSourceV2Exec.scala:183)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.datasources.v2.AtomicReplaceTableAsSelectExec.run(WriteToDataSourceV2Exec.scala:216)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:107)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:125)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:201)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:108)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:66)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:107)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:461)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(origin.scala:76)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:461)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:32)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:437)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:98)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:85)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:83)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:142)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:859)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.DataFrameWriter.saveAsTable(DataFrameWriter.scala:634)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at org.apache.spark.sql.DataFrameWriter.saveAsTable(DataFrameWriter.scala:568)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at java.base/java.lang.reflect.Method.invoke(Method.java:568)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:374)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at py4j.Gateway.invoke(Gateway.java:282)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at py4j.commands.CallCommand.execute(CallCommand.java:79)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - at java.base/java.lang.Thread.run(Thread.java:840)
[2024-02-09, 00:07:50 UTC] {spark_submit.py:536} INFO - Traceback (most recent call last):