@@ -343,6 +343,14 @@ private void autoSetSparkConf(SparkConf sparkConf) throws Exception {
         helper.setConf(SparkConfHelper.COUNT_DISTICT, hasCountDistinct().toString());
+        if (Boolean.valueOf(configOverride.getOrDefault("spark.dynamicAllocation.enabled", "false"))) {
+            int maxExecutors = Integer.valueOf(configOverride.getOrDefault("spark.dynamicAllocation.maxExecutors",
+                    String.valueOf(Integer.MAX_VALUE)));
+            helper.setConf(SparkConfHelper.MAX_EXECUTORS, String.valueOf(maxExecutors));
+        } else {
+            helper.setConf(SparkConfHelper.MAX_EXECUTORS, String.valueOf(Integer.MAX_VALUE));
+        }
         helper.generateSparkConf();
         helper.applySparkConf(sparkConf);
     }
 
     protected String chooseContentSize(Path shareDir) throws IOException {
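A standalone sketch of the branch added above, using a plain Map in place of configOverride (the class and method names below are illustrative only, not part of this patch): when dynamic allocation is enabled, the job-level spark.dynamicAllocation.maxExecutors override becomes the cap handed to SparkConfHelper; otherwise Integer.MAX_VALUE effectively disables the cap.

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative only: mirrors the cap-selection logic from autoSetSparkConf.
    public class MaxExecutorsCapSketch {

        static int resolveMaxExecutors(Map<String, String> configOverride) {
            boolean dynamicAllocation = Boolean.parseBoolean(
                    configOverride.getOrDefault("spark.dynamicAllocation.enabled", "false"));
            if (dynamicAllocation) {
                // Honour an explicit user cap; fall back to "effectively unbounded" if unset.
                return Integer.parseInt(configOverride.getOrDefault(
                        "spark.dynamicAllocation.maxExecutors", String.valueOf(Integer.MAX_VALUE)));
            }
            // Dynamic allocation disabled: Integer.MAX_VALUE means the cap never binds.
            return Integer.MAX_VALUE;
        }

        public static void main(String[] args) {
            Map<String, String> override = new HashMap<>();
            override.put("spark.dynamicAllocation.enabled", "true");
            override.put("spark.dynamicAllocation.maxExecutors", "20");
            System.out.println(resolveMaxExecutors(override)); // prints 20
        }
    }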
@@ -63,6 +63,8 @@ public class SparkConfHelper {
     public static final String DRIVER_OVERHEAD = "spark.driver.memoryOverhead";
     public static final String DRIVER_CORES = "spark.driver.cores";
     public static final String COUNT_DISTICT = "count_distinct";
+    public static final String MAX_EXECUTORS = "spark.dynamicAllocation.maxExecutors";
+
 
     private static final List<SparkConfRule> EXECUTOR_RULES = ImmutableList.of(new ExecutorMemoryRule(),
             new ExecutorCoreRule(), new ExecutorOverheadRule(), new ExecutorInstancesRule(),
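The new MAX_EXECUTORS constant names the standard Spark property spark.dynamicAllocation.maxExecutors. For context, the override that autoSetSparkConf reads would typically be supplied through Kylin's Spark config override entries in kylin.properties; the exact prefix below is an assumption for illustration and is not part of this change.

    # Assumed kylin.properties entries (illustrative):
    kylin.engine.spark-conf.spark.dynamicAllocation.enabled=true
    kylin.engine.spark-conf.spark.dynamicAllocation.maxExecutors=20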
@@ -137,7 +137,8 @@ class ExecutorInstancesRule extends SparkConfRule {
 
     val needInstance = Math.max(calculateExecutorInsByLayoutSize.toLong, requiredCores.toInt / executorCore)
     val instance = Math.min(needInstance, queueAvailableInstance)
-    val executorInstance = Math.max(instance.toLong, baseExecutorInstances.toLong).toString
+    val dynamicMaxExecutors = helper.getConf(SparkConfHelper.MAX_EXECUTORS)
+    val executorInstance = Math.min(Math.max(instance.toLong, baseExecutorInstances.toLong), dynamicMaxExecutors.toLong).toString
     logInfo(s"Current queueAvailableInstance is $queueAvailableInstance, " +
       s"needInstance is $needInstance, instance is $instance")
     helper.setConf(SparkConfHelper.EXECUTOR_INSTANCES, executorInstance)
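A worked example of the new clamping order in ExecutorInstancesRule, with made-up numbers (written in Java purely for illustration): the estimated demand is first limited by the queue capacity, floored at baseExecutorInstances, and finally capped by the dynamic-allocation maximum.

    // Hypothetical figures, not taken from the patch.
    public class ExecutorInstancesClampExample {
        public static void main(String[] args) {
            long needInstance = 50;            // demand estimated from layout size / required cores
            long queueAvailableInstance = 30;  // capacity left in the YARN queue
            long baseExecutorInstances = 5;    // configured lower bound
            long dynamicMaxExecutors = 20;     // spark.dynamicAllocation.maxExecutors

            long instance = Math.min(needInstance, queueAvailableInstance);          // 30
            long executorInstance = Math.min(
                    Math.max(instance, baseExecutorInstances), dynamicMaxExecutors); // 20

            System.out.println(executorInstance); // the dynamic-allocation cap wins: 20
        }
    }

Note that the rule calls dynamicMaxExecutors.toLong unconditionally, so it relies on MAX_EXECUTORS always being set before generateSparkConf runs; both branches of the new if/else in autoSetSparkConf do set it, but any other caller of this rule would need to do the same to avoid a missing value failing at toLong.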