sahara/sahara/plugins/cdh/v5_5_0/resources/spark-gateway.json

[
    {
        "desc": "Whether to suppress configuration warnings produced by the built-in parameter validation for the Spark Data Serializer parameter.",
        "display_name": "Suppress Parameter Validation: Spark Data Serializer",
        "name": "role_config_suppression_spark_data_serializer",
        "value": "false"
    },
    {
        "desc": "Python library paths to add to PySpark applications.",
        "display_name": "Extra Python Path",
        "name": "spark_python_path",
        "value": ""
    },
    {
        "desc": "When dynamic allocation is enabled, time after which idle executors will be stopped.",
        "display_name": "Executor Idle Timeout",
        "name": "spark_dynamic_allocation_idle_timeout",
        "value": "60"
    },
    {
        "desc": "Enable Usage of External Shuffle Service. The External Shuffle Service is not robust to NodeManager restarts and so is not recommended for production use.",
        "display_name": "Enable Shuffle Service",
        "name": "spark_shuffle_service_enabled",
        "value": "true"
    },
    {
        "desc": "The directory where the client configs will be deployed.",
        "display_name": "Deploy Directory",
        "name": "client_config_root_dir",
        "value": "/etc/spark"
    },
    {
        "desc": "The priority level that the client configuration will have in the Alternatives system on the hosts. Higher priority levels will cause Alternatives to prefer this configuration over any others.",
        "display_name": "Alternatives Priority",
        "name": "client_config_priority",
        "value": "51"
    },
    {
        "desc": "Enable dynamic allocation of executors in Spark applications.",
        "display_name": "Enable Dynamic Allocation",
        "name": "spark_dynamic_allocation_enabled",
        "value": "true"
    },
    {
        "desc": "Which deploy mode to use by default. Can be overridden by users when launching applications.",
        "display_name": "Default Application Deploy Mode",
        "name": "spark_deploy_mode",
        "value": "client"
    },
    {
        "desc": "For advanced use only, a string to be inserted into the client configuration for <strong>spark-conf/spark-env.sh</strong>.",
        "display_name": "Spark Client Advanced Configuration Snippet (Safety Valve) for spark-conf/spark-env.sh",
        "name": "spark-conf/spark-env.sh_client_config_safety_valve",
        "value": null
    },
    {
        "desc": "Whether to suppress configuration warnings produced by the built-in parameter validation for the Deploy Directory parameter.",
        "display_name": "Suppress Parameter Validation: Deploy Directory",
        "name": "role_config_suppression_client_config_root_dir",
        "value": "false"
    },
    {
        "desc": "Whether to suppress configuration warnings produced by the CDH Version Validator configuration validator.",
        "display_name": "Suppress Configuration Validator: CDH Version Validator",
        "name": "role_config_suppression_cdh_version_validator",
        "value": "false"
    },
    {
        "desc": "When dynamic allocation is enabled, timeout before requesting new executors when there are backlogged tasks.",
        "display_name": "Scheduler Backlog Timeout",
        "name": "spark_dynamic_allocation_scheduler_backlog_timeout",
        "value": "1"
    },
    {
        "desc": "Whether to suppress configuration warnings produced by the built-in parameter validation for the Extra Python Path parameter.",
        "display_name": "Suppress Parameter Validation: Extra Python Path",
        "name": "role_config_suppression_spark_python_path",
        "value": "false"
    },
    {
        "desc": "For advanced use only, a string to be inserted into the client configuration for <strong>spark-conf/spark-defaults.conf</strong>.",
        "display_name": "Spark Client Advanced Configuration Snippet (Safety Valve) for spark-conf/spark-defaults.conf",
        "name": "spark-conf/spark-defaults.conf_client_config_safety_valve",
        "value": null
    },
    {
        "desc": "When dynamic allocation is enabled, timeout before requesting new executors after the initial backlog timeout has already expired. By default this is the same value as the initial backlog timeout.",
        "display_name": "Sustained Scheduler Backlog Timeout",
        "name": "spark_dynamic_allocation_sustained_scheduler_backlog_timeout",
        "value": null
    },
    {
        "desc": "When dynamic allocation is enabled, maximum number of executors to allocate. By default, Spark relies on YARN to control the maximum number of executors for the application.",
        "display_name": "Maximum Executor Count",
        "name": "spark_dynamic_allocation_max_executors",
        "value": null
    },
    {
        "desc": "When dynamic allocation is enabled, time after which idle executors with cached RDD blocks will be stopped. By default, they're never stopped. This configuration is only available starting in CDH 5.5.",
        "display_name": "Caching Executor Idle Timeout",
        "name": "spark_dynamic_allocation_cached_idle_timeout",
        "value": null
    },
    {
        "desc": "Write Spark application history logs to HDFS.",
        "display_name": "Enable History",
        "name": "spark_history_enabled",
        "value": "true"
    },
    {
        "desc": "When dynamic allocation is enabled, minimum number of executors to keep alive while the application is running.",
        "display_name": "Minimum Executor Count",
        "name": "spark_dynamic_allocation_min_executors",
        "value": "0"
    },
    {
        "desc": "When set, Cloudera Manager will send alerts when this entity's configuration changes.",
        "display_name": "Enable Configuration Change Alerts",
        "name": "enable_config_alerts",
        "value": "false"
    },
    {
        "desc": "Whether to suppress configuration warnings produced by the built-in parameter validation for the Spark Client Advanced Configuration Snippet (Safety Valve) for spark-conf/spark-defaults.conf parameter.",
        "display_name": "Suppress Parameter Validation: Spark Client Advanced Configuration Snippet (Safety Valve) for spark-conf/spark-defaults.conf",
        "name": "role_config_suppression_spark-conf/spark-defaults.conf_client_config_safety_valve",
        "value": "false"
    },
    {
        "desc": "Whether to suppress configuration warnings produced by the built-in parameter validation for the Spark Client Advanced Configuration Snippet (Safety Valve) for spark-conf/log4j.properties parameter.",
        "display_name": "Suppress Parameter Validation: Spark Client Advanced Configuration Snippet (Safety Valve) for spark-conf/log4j.properties",
        "name": "role_config_suppression_spark-conf/log4j.properties_client_config_safety_valve",
        "value": "false"
    },
    {
        "desc": "Name of class implementing org.apache.spark.serializer.Serializer to use in Spark applications.",
        "display_name": "Spark Data Serializer",
        "name": "spark_data_serializer",
        "value": "org.apache.spark.serializer.KryoSerializer"
    },
    {
        "desc": "For advanced use only, a string to be inserted into the client configuration for <strong>spark-conf/log4j.properties</strong>.",
        "display_name": "Spark Client Advanced Configuration Snippet (Safety Valve) for spark-conf/log4j.properties",
        "name": "spark-conf/log4j.properties_client_config_safety_valve",
        "value": null
    },
    {
        "desc": "When dynamic allocation is enabled, number of executors to allocate when the application starts. By default, this is the same value as the minimum number of executors.",
        "display_name": "Initial Executor Count",
        "name": "spark_dynamic_allocation_initial_executors",
        "value": null
    },
    {
        "desc": "Whether to suppress configuration warnings produced by the built-in parameter validation for the Spark Client Advanced Configuration Snippet (Safety Valve) for spark-conf/spark-env.sh parameter.",
        "display_name": "Suppress Parameter Validation: Spark Client Advanced Configuration Snippet (Safety Valve) for spark-conf/spark-env.sh",
        "name": "role_config_suppression_spark-conf/spark-env.sh_client_config_safety_valve",
        "value": "false"
    }
]
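
For context, a minimal sketch of how a consumer could load this resource and pick out the explicitly set defaults. This is illustrative only: the file path and the CM-parameter-to-Spark-property mapping in the comments are assumptions, not something encoded in this file.

```python
import json

# Assumed path; in the sahara tree this resource lives under
# sahara/plugins/cdh/v5_5_0/resources/spark-gateway.json.
with open("spark-gateway.json") as f:
    params = json.load(f)

# Entries whose value is null (the safety valves, the executor-count
# bounds, the sustained backlog and caching idle timeouts) defer to
# Cloudera Manager's own defaults, so only set values are collected.
defaults = {p["name"]: p["value"] for p in params if p["value"] is not None}

# Assumed correspondence to the Spark properties these gateway settings
# are ultimately rendered into (spark-conf/spark-defaults.conf):
#   spark_data_serializer                  -> spark.serializer
#   spark_shuffle_service_enabled          -> spark.shuffle.service.enabled
#   spark_dynamic_allocation_enabled       -> spark.dynamicAllocation.enabled
#   spark_dynamic_allocation_min_executors -> spark.dynamicAllocation.minExecutors

print(defaults["spark_data_serializer"])
# org.apache.spark.serializer.KryoSerializer
```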