summaryrefslogtreecommitdiffstats
path: root/site/intel-pod15/software/charts/ucp/ceph/ceph-client.yaml
diff options
context:
space:
mode:
authorSridhar K. N. Rao <sridhar.rao@spirent.com>2019-12-15 14:23:30 +0530
committerSridhar K. N. Rao <sridhar.rao@spirent.com>2019-12-17 10:16:25 +0530
commit8897346be53170aaac7d84bec4facd845bf09519 (patch)
treee09f5e39e76e9ea319b27591c0a3428dfeb34f71 /site/intel-pod15/software/charts/ucp/ceph/ceph-client.yaml
parent958e6dfa0c31a25425d1b86e7be3c52031ea67d9 (diff)
Addition of Intel Pod15 Site Manifest
This patch adds the site manifest for Intel Pod15. Update the vlan-IDs and interface-names. The NIC ens785f0 and ens785f1 have swapped roles; accordingly, the configuration is changed. Signed-off-by: Sridhar K. N. Rao <sridhar.rao@spirent.com> Change-Id: I20960e505361bc00d019ea3800814637b9ef4953
Diffstat (limited to 'site/intel-pod15/software/charts/ucp/ceph/ceph-client.yaml')
-rw-r--r--site/intel-pod15/software/charts/ucp/ceph/ceph-client.yaml100
1 file changed, 100 insertions, 0 deletions
diff --git a/site/intel-pod15/software/charts/ucp/ceph/ceph-client.yaml b/site/intel-pod15/software/charts/ucp/ceph/ceph-client.yaml
new file mode 100644
index 0000000..e1e8ecf
--- /dev/null
+++ b/site/intel-pod15/software/charts/ucp/ceph/ceph-client.yaml
@@ -0,0 +1,100 @@
+---
+# The purpose of this file is to define environment-specific parameters for the
+# ceph client
+schema: armada/Chart/v1
+metadata:
+ schema: metadata/Document/v1
+ name: ucp-ceph-client
+ layeringDefinition:
+ abstract: false
+ layer: site
+ parentSelector:
+ name: ucp-ceph-client-global
+ actions:
+ - method: merge
+ path: .
+ storagePolicy: cleartext
+data:
+ values:
+ conf:
+ pool:
+ target:
+ # NEWSITE-CHANGEME: The number of OSDs per ceph node. Does not need to
+ # change if your deployment HW matches this site's HW.
+ osd: 1
+ spec:
+ # RBD pool
+ - name: rbd
+ application: rbd
+ replication: 1
+ percent_total_data: 40
+ - name: cephfs_metadata
+ application: cephfs
+ replication: 1
+ percent_total_data: 5
+ - name: cephfs_data
+ application: cephfs
+ replication: 1
+ percent_total_data: 10
+ # RadosGW pools
+ - name: .rgw.root
+ application: rgw
+ replication: 1
+ percent_total_data: 0.1
+ - name: default.rgw.control
+ application: rgw
+ replication: 1
+ percent_total_data: 0.1
+ - name: default.rgw.data.root
+ application: rgw
+ replication: 1
+ percent_total_data: 0.1
+ - name: default.rgw.gc
+ application: rgw
+ replication: 1
+ percent_total_data: 0.1
+ - name: default.rgw.log
+ application: rgw
+ replication: 1
+ percent_total_data: 0.1
+ - name: default.rgw.intent-log
+ application: rgw
+ replication: 1
+ percent_total_data: 0.1
+ - name: default.rgw.meta
+ application: rgw
+ replication: 1
+ percent_total_data: 0.1
+ - name: default.rgw.usage
+ application: rgw
+ replication: 1
+ percent_total_data: 0.1
+ - name: default.rgw.users.keys
+ application: rgw
+ replication: 1
+ percent_total_data: 0.1
+ - name: default.rgw.users.email
+ application: rgw
+ replication: 1
+ percent_total_data: 0.1
+ - name: default.rgw.users.swift
+ application: rgw
+ replication: 1
+ percent_total_data: 0.1
+ - name: default.rgw.users.uid
+ application: rgw
+ replication: 1
+ percent_total_data: 0.1
+ - name: default.rgw.buckets.extra
+ application: rgw
+ replication: 1
+ percent_total_data: 0.1
+ - name: default.rgw.buckets.index
+ application: rgw
+ replication: 1
+ percent_total_data: 3
+ - name: default.rgw.buckets.data
+ application: rgw
+ replication: 1
+ percent_total_data: 34.8
+...