monasca-transform/tests/unit/test_resources/config/test_config.conf

[DEFAULTS]
[repositories]
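# Classes that provide the offsets and data-driven specs repositories;
# the values here are placeholders used only by the unit tests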
offsets = test_offsets_repo_class
data_driven_specs = test_data_driven_specs_repo_class
[database]
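# Database connection settings; all values here are test placeholders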
server_type = test_server_type
host = test_host_name
database_name = test_database_name
username = test_database_user_name
password = test_database_password
use_ssl = True
ca_file = test_ca_file_path
[stage_processors]
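# Controls whether the pre-hourly processor stage runs; disabled for these tests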
enable_pre_hourly_processor = False
[pre_hourly_processor]
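# Pre-hourly processor settings: slack time to wait for late metrics
# (presumably in seconds), instance-usage DataFrame caching, and batch
# time filtering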
late_metric_slack_time = 600
enable_instance_usage_df_cache = False
instance_usage_df_cache_storage_level = MEMORY_ONLY_SER_2
enable_batch_time_filtering = True
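# Data provider used by the is-time-to-run unit test; points at a test
# helper class rather than a production provider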
data_provider = tests.unit.processor.test_is_time_to_run:TestProcessUtilDataProvider
[service]
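# Record store DataFrame caching for the transform service; kept disabled
# in the tests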
enable_record_store_df_cache = False
record_store_df_cache_storage_level = MEMORY_ONLY_SER_2
# the location for the transform-service log
service_log_path = /tmp/
# the filename for the transform-service log
service_log_filename = monasca-transform.log
# Whether debug-level log entries should be included in the application
# log. If this setting is false, info-level will be used for logging.
enable_debug_log_entries = true
# The address of the mechanism being used for election coordination
coordinator_address = kazoo://localhost:2181
# The name of the coordination/election group
coordinator_group = monasca-transform
# How long the candidate should sleep between election result
# queries (in seconds)
election_polling_frequency = 15
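# Comma-separated list of jars made available to Spark (Kafka streaming
# support, the Scala runtime, the Kafka client, metrics-core and the MySQL
# connector)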
spark_jars_list = /opt/spark/current/lib/spark-streaming-kafka.jar,/opt/spark/current/lib/scala-library-2.10.1.jar,/opt/spark/current/lib/kafka_2.10-0.8.1.1.jar,/opt/spark/current/lib/metrics-core-2.2.0.jar,/usr/share/java/mysql.jar
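# The Spark driver script that is submitted to Spark to read metrics from
# Kafka and run the transformations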
spark_driver = /opt/stack/monasca-transform/monasca_transform/driver/mon_metrics_kafka.py
# Whether Spark event logging should be enabled (true/false)
spark_event_logging_enabled = true
# A comma-separated list of the Spark master(s) the driver should connect to
spark_master_list = spark://192.168.10.4:7077,192.168.10.5:7077
# Path to the Spark installation (SPARK_HOME) for this environment
spark_home = /opt/spark/current