# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #
# Default system properties included when running spark-submit. # This is useful for setting default environmental settings.
## Spark DRA config spark.dynamicAllocation.enabled=true # false if you prefer shuffle tracking over ESS spark.shuffle.service.enabled=true # Ideally, the three values should satisfy: minExecutors <= initialExecutors < maxExecutors spark.dynamicAllocation.initialExecutors=10 spark.dynamicAllocation.minExecutors=10 spark.dynamicAllocation.maxExecutors=500 # adjust spark.dynamicAllocation.executorAllocationRatio a bit lower to reduce the number of executors w.r.t. full parallelism. spark.dynamicAllocation.executorAllocationRatio=0.5 # If one executor reached the maximum idle timeout, it will be removed. spark.dynamicAllocation.executorIdleTimeout=60s spark.dynamicAllocation.cachedExecutorIdleTimeout=30min # true if you prefer shuffle tracking over ESS spark.dynamicAllocation.shuffleTracking.enabled=false spark.dynamicAllocation.shuffleTracking.timeout=30min # If DRA finds a backlog of pending tasks exceeding the timeout below, it will request new executors, controlled by the following configs. spark.dynamicAllocation.schedulerBacklogTimeout=1s spark.dynamicAllocation.sustainedSchedulerBacklogTimeout=1s spark.cleaner.periodicGC.interval=5min
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #
# Details in https://kyuubi.apache.org/docs/latest/deployment/settings.html
# For a user named kent ___kent___.spark.master=yarn ___kent___.spark.sql.adaptive.enabled=false # hudi conf ___kent___.spark.serializer=org.apache.spark.serializer.KryoSerializer ___kent___.spark.sql.catalog.spark_catalog=org.apache.spark.sql.hudi.catalog.HoodieCatalog ___kent___.spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension
# For a user named flink ___flink___.kyuubi.engine.type=FLINK_SQL
# For a user named bob ___bob___.spark.master=spark://master:7077 ___bob___.spark.executor.memory=8g
# For a user named doris: doris conf ___doris___.kyuubi.engine.jdbc.connection.url=jdbc:mysql://xxx:xxx ___doris___.kyuubi.engine.jdbc.connection.user=*** ___doris___.kyuubi.engine.jdbc.connection.password=*** ___doris___.kyuubi.engine.jdbc.type=doris ___doris___.kyuubi.engine.jdbc.driver.class=com.mysql.cj.jdbc.Driver ___doris___.kyuubi.engine.type=jdbc