Merge "Lower Ceph PGs count in scenario004"

commit 07ab589a9c
Zuul, 2018-07-17 22:58:37 +00:00, committed by Gerrit Code Review
2 changed files with 8 additions and 2 deletions


@@ -101,7 +101,11 @@ parameter_defaults:
     journal_size: 512
     journal_collocation: true
     osd_scenario: collocated
-  CephPoolDefaultPgNum: 32
+  # Without MDS and RGW we create 5 pools, totalling 160 PGs at 32 PGs each
+  # With MDS and RGW instead we create 9 pools, so we lower the PG size
+  CephPoolDefaultPgNum: 16
+  ManilaCephFSDataPoolPGNum: 16
+  ManilaCephFSMetadataPoolPGNum: 16
   CephPoolDefaultSize: 1
   CephAnsibleExtraConfig:
     centos_package_dependencies: []

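The arithmetic behind that comment is worth making explicit: what matters to Ceph is the total number of PG replicas landing on each OSD, which is pools × PGs-per-pool × replica size, divided by the OSD count. A minimal sketch in Python, assuming the 200-PG-per-OSD cap that Luminous introduced as the mon_max_pg_per_osd default (the exact limit in this CI job is an assumption):

    # Hedged sketch of the PG-per-OSD arithmetic for this single-OSD CI job.
    MON_MAX_PG_PER_OSD = 200  # assumed Ceph Luminous default; may differ here
    NUM_OSDS = 1              # the scenario deploys a single OSD
    POOL_SIZE = 1             # CephPoolDefaultSize: 1 (no replication)

    def pgs_per_osd(num_pools: int, pg_num: int) -> float:
        """PG replicas carried by each OSD: every pool contributes pg_num
        PGs, replicated POOL_SIZE times, spread over NUM_OSDS OSDs."""
        return num_pools * pg_num * POOL_SIZE / NUM_OSDS

    assert pgs_per_osd(5, 32) == 160  # before: under the assumed 200 cap
    assert pgs_per_osd(9, 32) == 288  # with MDS+RGW pools: would exceed it
    assert pgs_per_osd(9, 16) == 144  # after this change: healthy again

So once the MDS and RGW pools push the count to nine, keeping 32 PGs per pool would overload the lone OSD; halving the per-pool PG number is what keeps the cluster at HEALTH_OK.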

@@ -20,6 +20,8 @@ parameter_defaults:
   # Override defaults to get HEALTH_OK with 1 OSD (for testing only)
   CephPoolDefaultSize: 1
-  CephPoolDefaultPgNum: 32
+  CephPoolDefaultPgNum: 16
+  ManilaCephFSDataPoolPGNum: 16
+  ManilaCephFSMetadataPoolPGNum: 16
   NovaReservedHostMemory: 512
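A hedged way to verify the result after deployment is to sum pg_num across pools from the ceph CLI's JSON output. The pg_num and size field names reflect Luminous-era `ceph osd pool ls detail -f json` output and should be treated as assumptions:

    # Hedged post-deploy check: total PG replicas across all pools.
    # Assumes the `ceph` CLI is available on a node with client access.
    import json
    import subprocess

    pools = json.loads(
        subprocess.check_output(
            ["ceph", "osd", "pool", "ls", "detail", "-f", "json"]
        )
    )
    total = sum(p["pg_num"] * p["size"] for p in pools)
    print(f"{len(pools)} pools, {total} PG replicas total")

With one OSD and a pool size of 1, that total is exactly the per-OSD PG count, so after this change it should report 144 rather than 288.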