Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...


...
[ceph]
keyring_ext = .keyring
...
git clone -b <branch> https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/
cd openstack-helm/trilio-openstack/utils
./uninstall.sh



The 'triliovault_nfs_map_input.yml' file allows users to distribute/load balance NFS share endpoints across compute nodes in a given cloud.
workloadmgr license-create <license_file>
helm uninstall <dynamic-backup-target-name> -n trilio-openstack
kubectl get pods -n trilio-openstack
kubectl get jobs -n trilio-openstack
kubectl get pv -n trilio-openstack | grep nfs (Only if backup target was NFS)
kubectl get pvc -n trilio-openstack | grep nfs (Only if backup target was NFS)
openstack service list | grep -E 'TrilioVaultWLM|dmapi'
openstack endpoint list | grep -E 'TrilioVaultWLM|dmapi'
# Delete TrilioVault services from keystone catalog
openstack service delete TrilioVaultWLM
openstack service delete dmapi
# Verify TrilioVault services and endpoints got cleaned. Following command output should be empty.
openstack service list | grep -E 'TrilioVaultWLM|dmapi'
openstack endpoint list | grep -E 'TrilioVaultWLM|dmapi'
# Login to database and clean TrilioVault's databases and db users.
MYSQL_DBADMIN_PASSWORD=`kubectl -n openstack get secrets/mariadb-dbadmin-password --template={{.data.MYSQL_DBADMIN_PASSWORD}} | base64 -d`
kubectl -n openstack exec -it mariadb-server-0 -- bash
mysql -u root -p${MYSQL_DBADMIN_PASSWORD} -e "REVOKE ALL PRIVILEGES, GRANT OPTION FROM 'dmapi'@'%%';"
mysql -u root -p${MYSQL_DBADMIN_PASSWORD} -e "DROP USER 'dmapi'@'%%';"
mysql -u root -p${MYSQL_DBADMIN_PASSWORD} -e "DROP DATABASE dmapi;"
mysql -u root -p${MYSQL_DBADMIN_PASSWORD} -e "REVOKE ALL PRIVILEGES, GRANT OPTION FROM 'workloadmgr'@'%%';"
mysql -u root -p${MYSQL_DBADMIN_PASSWORD} -e "DROP USER 'workloadmgr'@'%%';"
mysql -u root -p${MYSQL_DBADMIN_PASSWORD} -e "DROP DATABASE workloadmgr;"
umount /var/lib/trilio/triliovault-mounts/<base64>
sudo systemctl stop tripleo_triliovault_wlm_api.service
sudo systemctl disable tripleo_triliovault_wlm_api.service
sudo systemctl stop tripleo_triliovault_wlm_scheduler.service
sudo systemctl disable tripleo_triliovault_wlm_scheduler.service
sudo systemctl stop tripleo_triliovault_wlm_workloads.service
sudo systemctl disable tripleo_triliovault_wlm_workloads.service
sudo systemctl stop tripleo_triliovault_datamover.service
sudo systemctl disable tripleo_triliovault_datamover.service
sudo umount /var/lib/nova/triliovault-mounts
cd /home/stack
mv triliovault-cfg-scripts/ triliovault-cfg-scripts-5.x
git clone -b {{ trilio_branch }} https://github.com/trilioData/triliovault-cfg-scripts.git
cp /home/stack/triliovault-cfg-scripts-5.x/redhat-director-scripts/rhosp17/environments/passwords.yaml /home/stack/triliovault-cfg-scripts/redhat-director-scripts/rhosp17/environments/trilio_passwords.yaml
cp /home/stack/triliovault-cfg-scripts-5.x/redhat-director-scripts/rhosp17/puppet/trilio/files/triliovault_wlm_ids.conf /home/stack/triliovault-cfg-scripts/redhat-director-scripts/rhosp17/puppet/trilio/files/
sudo pcs resource restart triliovault-wlm-cron-podman-0
sudo podman exec -it triliovault_wlm_api bash
source /etc/triliovault-wlm/cloud_admin_rc
workloadmgr backup-target-type-list
sudo podman exec -it triliovault_wlm_api bash
source /etc/triliovault-wlm/cloud_admin_rc
workloadmgr workload-importworkloads --inplace-upgrade
workloadmgr workload-get-importworkloads-list --source-bt {{ backup_target_id }}
workloadmgr workload-importworkloads --source-btt {{ backup_target_type_id }}
workloadmgr trust-list
workloadmgr trust-show <trust_id>
workloadmgr trust-create [--is_cloud_trust {True,False}] <role_name>
workloadmgr trust-delete <trust_id>
<channel type='unix'>
<target type='virtio' name='org.qemu.guest_agent.0'/>
<address type='virtio-serial' controller='0' bus='0' port='1'/>
</channel>
[trilio]
name=Trilio Repository
baseurl=https://yum.fury.io/trilio-6-0/
enabled=1
gpgcheck=0
[trilio]
name=Trilio Repository
baseurl=https://yum.fury.io/trilio-6-0/
enabled=1
gpgcheck=0
deb [trusted=yes] https://apt.fury.io/trilio-6-0/ /
./backing_file_update.sh /var/triliovault-mounts/<base64>/workload_<workload_id>
/tmp/backing_file_update.log
triliovault-cfg-scripts/common/triliovault_nfs_map_input.yml
192.168.1.33:/var/share1
192.168.1.34:/var/share1
192.168.1.35:/var/share1
prod-compute-1.trilio.demo
prod-compute-2.trilio.demo
prod-compute-3.trilio.demo
.
.
.
prod-compute-30.trilio.demo
compute_bare.trilio.demo
compute_virtual
multi_ip_nfs_shares:
- "192.168.1.34:/var/share1": ['prod-compute-[1:10].trilio.demo', 'compute_bare.trilio.demo']
"192.168.1.35:/var/share1": ['prod-compute-[11:20].trilio.demo', 'compute_virtual']
"192.168.1.33:/var/share1": ['prod-compute-[21:30].trilio.demo']
single_ip_nfs_shares: []
multi_ip_nfs_shares:
- "192.168.1.34:/var/share1": ['172.30.3.[11:20]', '172.30.4.40']
"192.168.1.35:/var/share1": ['172.30.3.[21:30]', '172.30.4.50']
"192.168.1.33:/var/share1": ['172.30.3.[31:40]']
single_ip_nfs_shares: []
(undercloud) [stack@ucqa161 ~]$ openstack server list
+--------------------------------------+-------------------------------+--------+----------------------+----------------+---------+
| ID | Name | Status | Networks | Image | Flavor |
+--------------------------------------+-------------------------------+--------+----------------------+----------------+---------+
| 8c3d04ae-fcdd-431c-afa6-9a50f3cb2c0d | overcloudtrain1-controller-2 | ACTIVE | ctlplane=172.30.5.18 | overcloud-full | control |
| 103dfd3e-d073-4123-9223-b8cf8c7398fe | overcloudtrain1-controller-0 | ACTIVE | ctlplane=172.30.5.11 | overcloud-full | control |
| a3541849-2e9b-4aa0-9fa9-91e7d24f0149 | overcloudtrain1-controller-1 | ACTIVE | ctlplane=172.30.5.25 | overcloud-full | control |
| 74a9f530-0c7b-49c4-9a1f-87e7eeda91c0 | overcloudtrain1-novacompute-0 | ACTIVE | ctlplane=172.30.5.30 | overcloud-full | compute |
| c1664ac3-7d9c-4a36-b375-0e4ee19e93e4 | overcloudtrain1-novacompute-1 | ACTIVE | ctlplane=172.30.5.15 | overcloud-full | compute |
+--------------------------------------+-------------------------------+--------+----------------------+----------------+---------+
<backup-target-type>.yaml file (nfs.yaml, other_s3.yaml or amazon_s3.yaml).
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
./uninstall_tvo_control_plane.sh




workloadmgr workload-reassign-workloads \
--old_tenant_ids 71637e3b98434ceeb5158087074af4aa \
--new_tenant_id 8e325e1056c94625854b3dffb4d1e64e \
--user_id 3a446f94fb57448a8c99c08238df0f13 \
--migrate_cloud True \
--source-btt 979a459d-26f9-4aa9-8f0d-21ca7a1f6b4f

workloadmgr workload-reassign-workloads \
--old_tenant_ids 71637e3b98434ceeb5158087074af4aa \
--new_tenant_id 8e325e1056c94625854b3dffb4d1e64e \
--user_id 3a446f94fb57448a8c99c08238df0f13 \
--migrate_cloud True \
--source-btt 979a459d-26f9-4aa9-8f0d-21ca7a1f6b4f

workloadmgr workload-reassign-workloads \
--old_tenant_ids 71637e3b98434ceeb5158087074af4aa \
--new_tenant_id 8e325e1056c94625854b3dffb4d1e64e \
--user_id 3a446f94fb57448a8c99c08238df0f13 \
--migrate_cloud True \
--source-btt 979a459d-26f9-4aa9-8f0d-21ca7a1f6b4f

workloadmgr workload-reassign-workloads \
--old_tenant_ids 71637e3b98434ceeb5158087074af4aa \
--new_tenant_id 8e325e1056c94625854b3dffb4d1e64e \
--user_id 3a446f94fb57448a8c99c08238df0f13 \
--migrate_cloud True \
--source-btt 979a459d-26f9-4aa9-8f0d-21ca7a1f6b4f

workloadmgr workload-reassign-workloads \
--old_tenant_ids 71637e3b98434ceeb5158087074af4aa \
--new_tenant_id 8e325e1056c94625854b3dffb4d1e64e \
--user_id 3a446f94fb57448a8c99c08238df0f13 \
--migrate_cloud True \
--source-btt 979a459d-26f9-4aa9-8f0d-21ca7a1f6b4f

workloadmgr workload-reassign-workloads \
--old_tenant_ids 71637e3b98434ceeb5158087074af4aa \
--new_tenant_id 8e325e1056c94625854b3dffb4d1e64e \
--user_id 3a446f94fb57448a8c99c08238df0f13 \
--migrate_cloud True \
--source-btt 979a459d-26f9-4aa9-8f0d-21ca7a1f6b4f

workloadmgr workload-reassign-workloads \
--old_tenant_ids 71637e3b98434ceeb5158087074af4aa \
--new_tenant_id 8e325e1056c94625854b3dffb4d1e64e \
--user_id 3a446f94fb57448a8c99c08238df0f13 \
--migrate_cloud True \
--source-btt 979a459d-26f9-4aa9-8f0d-21ca7a1f6b4f

[root@overcloudtrain5-controller-0 /]# podman ps | grep trilio-
76511a257278 undercloudqa.ctlplane.trilio.local:8787/trilio/trilio-horizon-plugin:<CONTAINER-TAG-VERSION>-rhosp17.1 kolla_start 12 days ago Up 12 days ago horizon
5c5acec33392 cluster.common.tag/trilio-wlm:pcmklatest /bin/bash /usr/lo... 7 days ago Up 7 days ago triliovault-wlm-cron-podman-0
8dc61a674a7f undercloudqa.ctlplane.trilio.local:8787/trilio/trilio-datamover-api:<CONTAINER-TAG-VERSION>-rhosp17.1 kolla_start 7 days ago Up 7 days ago triliovault_datamover_api
a945fbf80554 undercloudqa.ctlplane.trilio.local:8787/trilio/trilio-wlm:<CONTAINER-TAG-VERSION>-rhosp17.1 kolla_start 7 days ago Up 7 days ago triliovault_wlm_scheduler
402c9fdb3647 undercloudqa.ctlplane.trilio.local:8787/trilio/trilio-wlm:<CONTAINER-TAG-VERSION>-rhosp17.1 kolla_start 7 days ago Up 6 days ago triliovault_wlm_workloads
f9452e4b3d14 undercloudqa.ctlplane.trilio.local:8787/trilio/trilio-wlm:<CONTAINER-TAG-VERSION>-rhosp17.1 kolla_start 7 days ago Up 6 days ago triliovault_wlm_api
[root@overcloudtrain5-controller-0 /]# pcs status
Cluster name: tripleo_cluster
Cluster Summary:
* Stack: corosync
* Current DC: overcloudtrain5-controller-0 (version 2.0.5-9.el8_4.3-ba59be7122) - partition with quorum
* Last updated: Mon Jul 24 11:19:05 2023
* Last change: Mon Jul 17 10:38:45 2023 by root via cibadmin on overcloudtrain5-controller-0
* 4 nodes configured
* 14 resource instances configured
Node List:
* Online: [ overcloudtrain5-controller-0 ]
* GuestOnline: [ galera-bundle-0@overcloudtrain5-controller-0 rabbitmq-bundle-0@overcloudtrain5-controller-0 redis-bundle-0@overcloudtrain5-controller-0 ]
Full List of Resources:
* ip-172.30.6.27 (ocf::heartbeat:IPaddr2): Started overcloudtrain5-controller-0
* ip-172.30.6.16 (ocf::heartbeat:IPaddr2): Started overcloudtrain5-controller-0
* Container bundle: haproxy-bundle [cluster.common.tag/openstack-haproxy:pcmklatest]:
* haproxy-bundle-podman-0 (ocf::heartbeat:podman): Started overcloudtrain5-controller-0
* Container bundle: galera-bundle [cluster.common.tag/openstack-mariadb:pcmklatest]:
* galera-bundle-0 (ocf::heartbeat:galera): Master overcloudtrain5-controller-0
* Container bundle: rabbitmq-bundle [cluster.common.tag/openstack-rabbitmq:pcmklatest]:
* rabbitmq-bundle-0 (ocf::heartbeat:rabbitmq-cluster): Started overcloudtrain5-controller-0
* Container bundle: redis-bundle [cluster.common.tag/openstack-redis:pcmklatest]:
* redis-bundle-0 (ocf::heartbeat:redis): Master overcloudtrain5-controller-0
* Container bundle: openstack-cinder-volume [cluster.common.tag/openstack-cinder-volume:pcmklatest]:
* openstack-cinder-volume-podman-0 (ocf::heartbeat:podman): Started overcloudtrain5-controller-0
* Container bundle: triliovault-wlm-cron [cluster.common.tag/trilio-wlm:pcmklatest]:
* triliovault-wlm-cron-podman-0 (ocf::heartbeat:podman): Started overcloudtrain5-controller-0
Daemon Status:
corosync: active/enabled
pacemaker: active/enabled
pcsd: active/enabled
/var/lib/config-data/puppet-generated/haproxy/etc/haproxy/haproxy.cfg
[root@overcloudtrain5-novacompute-0 heat-admin]# podman ps | grep -i datamover
c750a8d0471f undercloudqa.ctlplane.trilio.local:8787/trilio/trilio-datamover:<CONTAINER-TAG-VERSION>-rhosp17.1 kolla_start 7 days ago Up 7 days ago triliovault_datamover
[root@overcloudtrain5-novacompute-0 heat-admin]# df -h | grep triliovault-mounts
172.30.1.9:/mnt/rhosptargetnfs 7.0T 5.1T 2.0T 72% /var/lib/nova/triliovault-mounts/L21udC9yaG9zcHRhcmdldG5mcw==
[root@overcloudtrain5-controller-0 heat-admin]# podman ps | grep horizon
76511a257278 undercloudqa.ctlplane.trilio.local:8787/trilio/trilio-horizon-plugin:<CONTAINER-TAG-VERSION>-rhosp17.1 kolla_start 12 days ago Up 12 days ago horizon
cd triliovault-cfg-scripts/openstack-helm/trilio-backup-targets/values_overrides
trilio_backup_target:
backup_target_name: 'NFS_BackupTarget'
backup_target_type: 'nfs'
is_default: true
nfs_server: '10.10.0.1'
nfs_shares: /home/openstack-helm
nfs_options: "nolock,soft,timeo=600,intr,lookupcache=none,nfsvers=3,retrans=10"
storage_size: 20Gi
storage_class_name: nfs
images:
trilio_backup_targets: docker.io/trilio/trilio-wlm-helm:<image-tag>
trilio_backup_target:
backup_target_name: 'S3_BackupTarget'
backup_target_type: 's3'
is_default: true
# S3 Configuration
s3_type: 'amazon_s3' # if not Amazon S3, use 'other_s3'
s3_access_key: 'ACCESSKEY1'
s3_secret_key: 'SECRETKEY1'
s3_region_name: 'REGION1'
s3_bucket: 'BUCKET1'
s3_endpoint_url: '' # required only for 'other_s3'
s3_signature_version: 'default'
s3_auth_version: 'DEFAULT'
s3_ssl_enabled: true
s3_ssl_verify: true
s3_ssl_ca_cert: '' # add CA cert for 'other_s3'
images:
trilio_backup_targets: docker.io/trilio/trilio-wlm-helm:<image-tag>
cd ../../
helm upgrade --install <release-name> ./trilio-backup-targets \
-n trilio-openstack \
-f ./trilio-backup-targets/values_overrides/nfs.yaml \
--wait \
--timeout 5m
kubectl get pods -n trilio-openstack -l component=nfs-mount
kubectl exec -n trilio-openstack -it <nfs-mount-pod-name> -- mount | grep trilio
Mount path like the one below should be visible
/var/lib/trilio/triliovault-mounts/<base64-nfs-path>
kubectl get pv | grep trilio-openstack
kubectl get pvc -n trilio-openstack | grep trilio
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
./uninstall_operator.sh <TVO_OPERATOR_CONTAINER_IMAGE_URL>
oc -n trilio-openstack get pvc
oc -n trilio-openstack delete pvc <pvc0> <pvc1> <pvc2>
oc get nodes -l trilio-control-plane=enabled
Example:-
[openstackdev@localhost dataplane-scripts]$ oc get nodes -l trilio-control-plane=enabled
NAME STATUS ROLES AGE VERSION
master1 Ready control-plane,master,worker 90d v1.31.6
master2 Ready control-plane,master,worker 90d v1.31.6
master3 Ready control-plane,master,worker 90d v1.31.6
oc debug node/master1
chroot /host
## List NFS backup target mounts
mount | grep nfs
## Unmount all shares listed in above command output
umount -l <NFS_Share>
## Verify that trilio nfs backup target is listed in command output
mount | grep nfs
## Unmount S3 backup target mounts
ls -ll /var/lib/trilio/triliovault-mounts/
## If there is any S3 backup target mounted inside the above directory, the above command will
## fail and will provide mount point information.
## You need to copy those mount points and do unmount
umount /var/lib/trilio/triliovault-mounts/<mount_point>
## After unmounting all backup targets, you will be able to run following command successfully
ls -ll /var/lib/trilio/triliovault-mounts/
## Remove triliovault-mounts directory
rm -rf /var/lib/trilio/triliovault-mounts/
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/dataplane-scripts
./uninstall.sh
ssh root@<compute_ip>
Example:-
systemctl list-units | grep trilio
edpm_triliovault-datamover.service loaded active running triliovault-datamover container
edpm_triliovault-object-store-BT1_S3.service loaded active running triliovault-object-store-BT1_S3 container
systemctl stop edpm_triliovault-datamover.service
systemctl disable edpm_triliovault-datamover.service
Removed "/etc/systemd/system/multi-user.target.wants/edpm_triliovault-datamover.service".
openstack service delete dmapi
openstack user delete dmapi
openstack service delete TrilioVaultWLM
openstack user delete triliovault
oc exec -it openstack-galera-0 -n openstack bash
mysql -u root -p<password>
## Clean database
DROP DATABASE dmapi;
## Clean dmapi user
=> List 'dmapi' user accounts
MariaDB [(none)]> select user, host from mysql.user where user='dmapi';
+-------+------+
| User | Host |
+-------+------+
| dmapi | % |
+-------+------+
1 row in set (0.001 sec)
=> Delete those user accounts
MariaDB [(none)]> DROP USER dmapi@'%';
Query OK, 0 rows affected (0.011 sec)
=> Verify that dmapi user got cleaned
MariaDB [(none)]> select user, host from mysql.user where user='dmapi';
Empty set (0.001 sec)
## Clean database
DROP DATABASE workloadmgr;
## Clean workloadmgr user
=> List 'workloadmgr' user accounts
MariaDB [(none)]> select user, host from mysql.user where user='workloadmgr';
+-------------+------+
| User | Host |
+-------------+------+
| workloadmgr | % |
+-------------+------+
1 row in set (0.001 sec)
=> Delete those user accounts
MariaDB [(none)]> DROP USER workloadmgr@'%';
Query OK, 0 rows affected (0.008 sec)
=> Verify that workloadmgr user got cleaned
MariaDB [(none)]> select user, host from mysql.user where user='workloadmgr';
Empty set (0.001 sec)
workloadmgr disable-scheduler --workloadids <workloadid>
workloadmgr enable-scheduler --workloadids <workloadid>
workloadmgr scheduler-trust-validate <workload_id>
qemu-img info 85b645c5-c1ea-4628-b5d8-1faea0e9d549
image: 85b645c5-c1ea-4628-b5d8-1faea0e9d549
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 21M
cluster_size: 65536
backing file: /var/triliovault-mounts/MTAuMTAuMi4yMDovdXBzdHJlYW0=/workload_3c2fbee5-ad90-4448-b009-5047bcffc2ea/snapshot_f4874ed7-fe85-4d7d-b22b-082a2e068010/vm_id_9894f013-77dd-4514-8e65-818f4ae91d1f/vm_res_id_9ae3a6e7-dffe-4424-badc-bc4de1a18b40_vda/a6289269-3e72-4085-adca-e228ba656984
Format specific information:
compat: 1.1
lazy refcounts: false
refcount bits: 16
corrupt: false
# echo -n 10.10.2.20:/upstream | base64
MTAuMTAuMi4yMDovdXBzdHJlYW0=
# mount --bind <mount-path1> <mount-path2>
# vi /etc/fstab
<mount-path1> <mount-path2> none bind 0 0
curl -i -X PUT \
-H "X-Auth-Token:gAAAAABh0ttjiKRPpVNPBjRjZywzsgVton2HbMHUFrbTXDhVL1w2zCHF61erouo4ZUjGyHVoIQMG-NyGLdR7nexmgOmG7ed66LJ3IMVul1LC6CPzqmIaEIM48H0kc-BGvhV0pvX8VMZiozgFdiFnqYHPDvnLRdh7cK6_X5dw4FHx_XPmkhx7PsQ" \
-H "Content-Type:application/json" \
-d \
'{
"metadata": {
"workload_id": "c13243a3-74c8-4f23-b3ac-771460d76130",
"workload_name": "workload-c13243a3-74c8-4f23-b3ac-771460d76130"
}
}' \
'https://kolla-victoria-ubuntu20-1.triliodata.demo:9311/v1/secrets/f3b2fce0-3c7b-4728-b178-7eb8b8ebc966/metadata'
curl -i -X GET \
-H "X-Auth-Token:gAAAAABh0ttjiKRPpVNPBjRjZywzsgVton2HbMHUFrbTXDhVL1w2zCHF61erouo4ZUjGyHVoIQMG-NyGLdR7nexmgOmG7ed66LJ3IMVul1LC6CPzqmIaEIM48H0kc-BGvhV0pvX8VMZiozgFdiFnqYHPDvnLRdh7cK6_X5dw4FHx_XPmkhx7PsQ" \
'https://kolla-victoria-ubuntu20-1.triliodata.demo:9311/v1/secrets/f3b2fce0-3c7b-4728-b178-7eb8b8ebc966/metadata'
workloadmgr filepath-search [--snapshotids <snapshotid>]
[--end_filter <end_filter>]
[--start_filter <start_filter>]
[--date_from <date_from>]
[--date_to <date_to>]
<vm_id> <file_path>
git clone -b 6.0.0 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp17/
https://pypi.fury.io/trilio-6-0/
trilio_branch : 6.0.0
workloadmgr project-quota-type-list
workloadmgr project-quota-type-show <quota_type_id>
workloadmgr project-allowed-quota-create --quota-type-id quota_type_id
--allowed-value allowed_value
--high-watermark high_watermark
--project-id project_id
workloadmgr project-allowed-quota-list <project_id>
workloadmgr project-allowed-quota-show <allowed_quota_id>
workloadmgr project-allowed-quota-update [--allowed-value <allowed_value>]
[--high-watermark <high_watermark>]
[--project-id <project_id>]
<allowed_quota_id>
workloadmgr project-allowed-quota-delete <allowed_quota_id>
trilio_branch : 6.1.1
cd /PATH/TO/triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts
--workload_ids <workload_id> ➡️ Specify workload_ids which need to be reassigned to the new tenant. If not provided, all the workloads from the old tenant will get reassigned to the new tenant. Specify multiple times for multiple workloads.
deb [trusted=yes] https://apt.fury.io/trilio-6-0/ /
https://yum.fury.io/trilio-6-0/
[trilio-fury]
name=Trilio Gemfury Private Repo
baseurl=https://yum.fury.io/trilio-6-0/
enabled=1
gpgcheck=0
juju export-bundle --filename openstack_base_file.yaml
git clone https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts
git checkout {{ trilio_branch }}
cd juju-charms/sample_overlay_bundles
juju deploy --dry-run ./openstack_base_file.yaml --overlay <Trilio bundle path>
juju deploy ./openstack_base_file.yaml --overlay <Trilio bundle path>
juju run --wait trilio-wlm/leader create-cloud-admin-trust password=<openstack admin password>
juju attach-resource trilio-wlm license=<Path to trilio license file>
juju run --wait trilio-wlm/leader create-license
juju run --wait trilio-wlm/leader create-cloud-admin-trust password=<openstack admin password>
juju attach-resource trilio-wlm license=<Path to trilio license file>
juju run-action --wait trilio-wlm/leader create-license
juju status | grep -i trilio
trilio-data-mover 5.2.8.14 active 3 trilio-charmers-trilio-data-mover latest/candidate 22 no Unit is ready
trilio-data-mover-mysql-router 8.0.39 active 3 mysql-router 8.0/stable 200 no Unit is ready
trilio-dm-api 5.2.8 active 1 trilio-charmers-trilio-dm-api latest/candidate 17 no Unit is ready
trilio-dm-api-mysql-router 8.0.39 active 1 mysql-router 8.0/stable 200 no Unit is ready
trilio-horizon-plugin 5.2.8.8 active 1 trilio-charmers-trilio-horizon-plugin latest/candidate 10 no Unit is ready
trilio-wlm 5.2.8.15 active 1 trilio-charmers-trilio-wlm latest/candidate 18 no Unit is ready
trilio-wlm-mysql-router 8.0.39 active 1 mysql-router 8.0/stable 200 no Unit is ready
trilio-data-mover-mysql-router/2 active idle 172.20.1.5 Unit is ready
trilio-data-mover/1 active idle 172.20.1.5 Unit is ready
trilio-data-mover-mysql-router/0* active idle 172.20.1.7 Unit is ready
trilio-data-mover/2 active idle 172.20.1.7 Unit is ready
trilio-data-mover-mysql-router/1 active idle 172.20.1.8 Unit is ready
trilio-data-mover/0* active idle 172.20.1.8 Unit is ready
trilio-horizon-plugin/0* active idle 172.20.1.27 Unit is ready
trilio-dm-api/0* active idle 1/lxd/2 172.20.1.29 8784/tcp Unit is ready
trilio-dm-api-mysql-router/0* active idle 172.20.1.29 Unit is ready
trilio-wlm/0* active idle 1 172.20.1.4 8780/tcp Unit is ready
trilio-wlm-mysql-router/0* active idle 172.20.1.4 Unit is ready
vi tvo-backup-target-cr-nfs.yaml
oc -n trilio-openstack apply -f tvo-backup-target-cr-nfs.yaml
cd /PATH/TO/triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts
cp trilio-s3-backup-target-secret.yaml trilio-s3-backup-target-secret-<BACKUP_TARGET_NAME>.yaml
Example:-
cp trilio-s3-backup-target-secret.yaml trilio-s3-backup-target-secret-s3-bt8.yaml
echo -n "s3_key_string" | base64
vi trilio-s3-backup-target-secret-<BACKUP_TARGET_NAME>.yaml
<BACKUP_TARGET_NAME>_s3_access_key: <base64_encoded_access_key>
<BACKUP_TARGET_NAME>_s3_secret_key: <base64_encoded_secret_key>
Example:-
s3_bt8_s3_access_key: ABDCD1234.......
s3_bt8_s3_secret_key: ABCD1234........
oc -n trilio-openstack apply -f trilio-s3-backup-target-secret-<BACKUP_TARGET_NAME>.yaml
Example:-
oc -n trilio-openstack apply -f trilio-s3-backup-target-secret-s3-bt8.yaml
vi tvo-backup-target-cr-amazon-s3.yaml
oc -n trilio-openstack apply -f tvo-backup-target-cr-amazon-s3.yaml
vi tvo-backup-target-cr-other-s3.yaml
oc -n trilio-openstack apply -f tvo-backup-target-cr-other-s3.yaml
oc get pods -n trilio-openstack
[root@localhost ctlplane-scripts]# oc get pods -n trilio-openstack
NAME READY STATUS RESTARTS AGE
job-triliovault-datamover-api-db-init-5fcvs 0/1 Completed 0 44h
job-triliovault-datamover-api-keystone-init-hf89t 0/1 Completed 0 44h
job-triliovault-datamover-api-rabbitmq-init-zd9lf 0/1 Completed 0 44h
job-triliovault-wlm-cloud-trust-mzj8b 0/1 Completed 0 44h
job-triliovault-wlm-db-init-nzqxd 0/1 Completed 0 44h
job-triliovault-wlm-keystone-init-s82ph 0/1 Completed 0 44h
job-triliovault-wlm-rabbitmq-init-xvhcf 0/1 Completed 0 44h
triliovault-datamover-api-789c55cb7c-4xkln 1/1 Running 0 44h
triliovault-datamover-api-789c55cb7c-799lr 1/1 Running 0 44h
triliovault-datamover-api-789c55cb7c-nv8wl 1/1 Running 0 44h
triliovault-object-store-bt1-s3-45sz5 1/1 Running 0 44h
triliovault-object-store-bt1-s3-fhmgz 1/1 Running 0 44h
triliovault-object-store-bt1-s3-xn78b 1/1 Running 0 44h
triliovault-object-store-s3-bt8-flhws 1/1 Running 0 55s
triliovault-object-store-s3-bt8-l2tbl 1/1 Running 0 55s
triliovault-object-store-s3-bt8-x758v 1/1 Running 0 55s
triliovault-wlm-api-5f9fccf467-djmn6 1/1 Running 0 44h
triliovault-wlm-api-5f9fccf467-mvxt4 1/1 Running 0 44h
triliovault-wlm-api-5f9fccf467-tmjr8 1/1 Running 0 44h
triliovault-wlm-cron-58b75c464-75wnh 1/1 Running 0 44h
triliovault-wlm-scheduler-74c9f6f86-5pt66 1/1 Running 0 44h
triliovault-wlm-scheduler-74c9f6f86-klpw2 1/1 Running 0 44h
triliovault-wlm-scheduler-74c9f6f86-p5frf 1/1 Running 0 44h
triliovault-wlm-workloads-cb5f48549-4nzr2 1/1 Running 0 44h
triliovault-wlm-workloads-cb5f48549-qp2b9 1/1 Running 0 44h
triliovault-wlm-workloads-cb5f48549-xgl6j 1/1 Running 0 44h
[root@localhost ctlplane-scripts]#
oc get daemonsets -n trilio-openstack
cd /PATH/TO/triliovault-cfg-scripts/redhat-director-scripts/rhosp18/dataplane-scripts/
./create-templates.sh <BACKUP_TARGET_NAME> <BACKUP_TARGET_TYPE>
./create-templates.sh s3-bt8 s3
cd <BACKUP_TARGET_NAME>/
ls -ll
cd <BACKUP_TARGET_NAME>/
vi cm-trilio-backup-target.yaml
oc -n openstack apply -f cm-trilio-backup-target.yaml
oc -n openstack apply -f ../../ctlplane-scripts/trilio-s3-backup-target-secret-<BACKUP_TARGET_NAME>.yaml
Example:-
oc -n openstack apply -f ../../ctlplane-scripts/trilio-s3-backup-target-secret-s3-bt8.yaml
vi trilio-add-backup-target-service.yaml
oc -n openstack apply -f trilio-add-backup-target-service.yaml
sleep 5s
oc -n openstack get OpenStackDataPlaneNodeSet
vi trilio-add-backup-target-deployment.yaml
oc -n openstack apply -f trilio-add-backup-target-deployment.yaml
# Get deployment pod name
oc -n openstack get pods -l openstackdataplanedeployment=<DEPLOYMENT_NAME>
# Example:
oc -n openstack get pods -l openstackdataplanedeployment=edpm-trilio-add-backup-target-s3-bt8
## Check logs
oc -n openstack logs <POD_NAME>
# Example:
oc -n openstack logs trilio-add-backup-target-s3-bt8-edpm-trilio-add-backup-tarkx5xx
oc -n openstack get openstackdataplanedeployment | grep trilio
Example:-
[openstackdev@localhost ~]$ oc -n openstack get openstackdataplanedeployment | grep trilio
edpm-trilio-add-backup-target-s3-bt8 ["openstack-data-plane"] True Setup complete
edpm-trilio-add-backup-target-s3-bt5 ["openstack-data-plane"] True Setup complete
edpm-trilio-add-backup-target-s3-bt6 ["openstack-data-plane"] True Setup complete
edpm-trilio-data-plane-deployment-1 ["openstack-data-plane"] True Setup complete
oc get pods -n trilio-openstack | grep wlm-api
oc exec -n trilio-openstack -it <trilio wlm api pod name> bash
source <admin rc file>
workloadmgr backup-target-create --type nfs --filesystem-export <filesystem_export> --btt-name <btt name>
workloadmgr backup-target-create --type nfs --filesystem-export 192.168.0.53:/home/rhosp2 --btt-name bt3-nfs
workloadmgr backup-target-create --type s3 --s3-endpoint-url <s3_endpoint_url> --s3-bucket <s3_bucket> --btt-name <btt name> --immutable --metadata object_lock=1 bucket=s3-object-lock
workloadmgr backup-target-create --type s3 --s3-endpoint-url https://s3.wasabisys.com --s3-bucket object-locked-s3-2 --btt-name s3-bt8 --immutable --metadata object_lock=1 bucket=s3-object-lock
workloadmgr backup-target-create --type s3 --s3-endpoint-url <s3_endpoint_url> --s3-bucket <s3_bucket> --btt-name <btt name>
workloadmgr backup-target-create --type s3 --s3-endpoint-url https://s3.wasabisys.com --s3-bucket qa-sachin --btt-name s3-bt8
workloadmgr workload-get-importworkloads-list [--project_id <project_id>]
workloadmgr workload-importworkloads [--workloadids <workloadid>]
workloadmgr workload-get-orphaned-workloads-list [--migrate_cloud {True,False}]
[--generate_yaml {True,False}]workloadmgr workload-reassign-workloads
[--old_tenant_ids <old_tenant_id>]
[--new_tenant_id <new_tenant_id>]
[--workload_ids <workload_id>]
[--user_id <user_id>]
[--migrate_cloud {True,False}]
[--map_file <map_file>]
[--source-btt <source-btt> [<source-btt> ...]]
[--source-btt-all]
reassign_mappings:
- old_tenant_ids: [] #user can provide list of old_tenant_ids or workload_ids
new_tenant_id: new_tenant_id
user_id: user_id
source_btt: source_btt # list of source_btt ID's where provided workload IDs will be searched
source_btt_all: True # searches all workloads in all available BTTs
workload_ids: [] #user can provide list of old_tenant_ids or workload_ids
migrate_cloud: True/False #Set to True if want to reassign workloads from
# other clouds as well. Default is False
- old_tenant_ids: [] #user can provide list of old_tenant_ids or workload_ids
new_tenant_id: new_tenant_id
user_id: user_id
source_btt: source_btt # list of source_btt ID's where provided workload IDs will be searched
source_btt_all: True # searches all workloads in all available BTTs
workload_ids: [] #user can provide list of old_tenant_ids or workload_ids
migrate_cloud: True/False #Set to True if want to reassign workloads from
# other clouds as well. Default is False
a. Using less command
i. less <file_system_mountpath>/workload_<workload_uuid>/workload_db
ii. Example
1. less /var/trilio/triliovault-mounts/L2hvbWUva29sbGEv/workload_385d0e94-d602-4963-96c2-28bebea352f1/workload_db
iii. search backup_target_types and backup_media_target in the file and note down their respective values
b. Using jq command
i. jq '.metadata[] | select(.key == "backup_media_target") | .value' <file_system_mountpath>/workload_<workload_uuid>/workload_db
ii. Example
jq '.metadata[] | select(.key == "backup_target_types") | .value' /var/trilio/triliovault-mounts/L2hvbWUva29sbGEv/workload_385d0e94-d602-4963-96c2-28bebea352f1_backup/workload_db
iii. search backup_target_types and backup_media_target and note down their respective values
a. grep -rl 'old_BTT' <file_system_mountpath>/workload_<workload_uuid> | xargs sed -i 's/<old_BTT>/<new_BTT>/g'
b. Here,
i. old_BTT ---> Old Backup Target Type name
ii. new_BTT ---> New Backup Target Type name
c. Example
i. grep -rl 'nfs_1' /var/trilio/triliovault-mounts/L2hvbWUva29sbGEv/workload_385d0e94-d602-4963-96c2-28bebea352f1 | xargs sed -i 's/nfs_1/nfs_2/g'
a. grep -rl '<old_filesystem_export>' <file_system_mountpath>/workload_<workload_uuid> | xargs sed -i 's/<old_filesystem_export>/<new_filesystem_export>/g'
b. Here,
i. old_filesystem_export ---> File system export path of old Backup Target
ii. new_filesystem_export ---> File system export path of new Backup Target
c. Note: Please make sure to add a \ (backslash) before every / character in the old_filesystem_export & new_filesystem_export values.
d. Example
i. grep -rl '192.168.0.51:/home/kolla/' /var/trilio/triliovault-mounts/L2hvbWUva29sbGEv/workload_385d0e94-d602-4963-96c2-28bebea352f1 | xargs sed -i 's/192.168.0.51:\/home\/kolla\//192.168.0.52:\/home\/kolla_new\//g'
ii. Here the original filesystem export is 192.168.0.51:/home/kolla/ which we have mentioned in the above command as 192.168.0.51:\/home\/kolla\/ by adding a \ before every / character. The same changes are expected for the new filesystem export path as well.
a. The following commands must show the files having the updated changes.
i. grep -rl 'new_BTT' <file_system_mountpath>/workload_<workload_uuid>
ii. grep -rl '<new_filesystem_export>' <file_system_mountpath>/workload_<workload_uuid>
registry.connect.redhat.com/trilio/trilio-datamover:6.0.0-rhosp17.1
registry.connect.redhat.com/trilio/trilio-datamover-api:6.0.0-rhosp17.1
registry.connect.redhat.com/trilio/trilio-horizon-plugin:6.0.0-rhosp17.1
registry.connect.redhat.com/trilio/trilio-wlm:6.0.0-rhosp17.1OS::TripleO::Services::TrilioDatamoverApi
OS::TripleO::Services::TrilioWlmApi
OS::TripleO::Services::TrilioWlmWorkloads
OS::TripleO::Services::TrilioWlmScheduler
OS::TripleO::Services::TrilioWlmCronpodman rm -f triliovault_datamover_api
podman rm -f triliovault_datamover_api_db_sync
podman rm -f triliovault_datamover_api_init_logrm -rf /var/lib/config-data/puppet-generated/triliovaultdmapi
rm /var/lib/config-data/puppet-generated/triliovaultdmapi.md5sum
rm -rf /var/lib/config-data/triliovaultdmapi*
rm -f /var/lib/config-data/triliovault_datamover_api*rm -rf /var/log/containers/triliovault-datamover-api/podman rm -f triliovault_wlm_api
podman rm -f triliovault_wlm_api_cloud_trust_init
podman rm -f triliovault_wlm_api_db_sync
podman rm -f triliovault_wlm_api_config_dynamic
podman rm -f triliovault_wlm_api_init_logrm -rf /var/lib/config-data/puppet-generated/triliovaultwlmapi
rm /var/lib/config-data/puppet-generated/triliovaultwlmapi.md5sum
rm -rf /var/lib/config-data/triliovaultwlmapi*
rm -f /var/lib/config-data/triliovault_wlm_api*rm -rf /var/log/containers/triliovault-wlm-api/podman rm -f triliovault_wlm_workloads
podman rm -f triliovault_wlm_workloads_config_dynamic
podman rm -f triliovault_wlm_workloads_init_logrm -rf /var/lib/config-data/puppet-generated/triliovaultwlmworkloads
rm /var/lib/config-data/puppet-generated/triliovaultwlmworkloads.md5sum
rm -rf /var/lib/config-data/triliovaultwlmworkloads*rm -rf /var/log/containers/triliovault-wlm-workloads/podman rm -f triliovault_wlm_scheduler
podman rm -f triliovault_wlm_scheduler_config_dynamic
podman rm -f triliovault_wlm_scheduler_init_logrm -rf /var/lib/config-data/puppet-generated/triliovaultwlmscheduler
rm /var/lib/config-data/puppet-generated/triliovaultwlmscheduler.md5sum
rm -rf /var/lib/config-data/triliovaultwlmscheduler*rm -rf /var/log/containers/triliovault-wlm-scheduler/podman rm -f triliovault-wlm-cron-podman-0
podman rm -f triliovault_wlm_cron_config_dynamic
podman rm -f triliovault_wlm_cron_init_logrm -rf /var/lib/config-data/puppet-generated/triliovaultwlmcron
rm /var/lib/config-data/puppet-generated/triliovaultwlmcron.md5sum
rm -rf /var/lib/config-data//triliovaultwlmcron*rm -rf /var/log/containers/triliovault-wlm-cron/podman rm -f triliovault_datamover## Following steps are applicable for all supported RHOSP releases.
# Check triliovault backup target mount point
mount | grep trilio
# Unmount it
-- If it's NFS (COPY UUID_DIR from your compute host using above command)
umount /var/lib/nova/triliovault-mounts/<UUID_DIR>
-- If it's S3
umount /var/lib/nova/triliovault-mounts
# Verify that it's unmounted
mount | grep trilio
df -h | grep trilio
# Remove mount point directory after verifying that backup target unmounted successfully.
# Otherwise actual data from backup target may get cleaned.
rm -rf /var/lib/nova/triliovault-mountsrm -rf /var/lib/config-data/puppet-generated/triliovaultdm/
rm /var/lib/config-data/puppet-generated/triliovaultdm.md5sum
rm -rf /var/lib/config-data/triliovaultdm*rm -rf /var/log/containers/triliovault-datamover/pcs resource delete triliovault-wlm-cronlisten triliovault_datamover_api
bind 172.30.5.23:13784 transparent ssl crt /etc/pki/tls/private/overcloud_endpoint.pem
bind 172.30.5.23:8784 transparent ssl crt /etc/pki/tls/certs/haproxy/overcloud-haproxy-internal_api.pem
balance roundrobin
http-request set-header X-Forwarded-Proto https if { ssl_fc }
http-request set-header X-Forwarded-Proto http if !{ ssl_fc }
http-request set-header X-Forwarded-Port %[dst_port]
maxconn 50000
option httpchk
option httplog
retries 5
timeout check 10m
timeout client 10m
timeout connect 10m
timeout http-request 10m
timeout queue 10m
timeout server 10m
server overcloudtrain1-controller-0.internalapi.trilio.local 172.30.5.28:8784 check fall 5 inter 2000 rise 2 verifyhost overcloudtrain1-controller-0.internalapi.trilio.local
listen triliovault_wlm_api
bind 172.30.5.23:13781 transparent ssl crt /etc/pki/tls/private/overcloud_endpoint.pem
bind 172.30.5.23:8781 transparent ssl crt /etc/pki/tls/certs/haproxy/overcloud-haproxy-internal_api.pem
balance roundrobin
http-request set-header X-Forwarded-Proto https if { ssl_fc }
http-request set-header X-Forwarded-Proto http if !{ ssl_fc }
http-request set-header X-Forwarded-Port %[dst_port]
maxconn 50000
option httpchk
option httplog
retries 5
timeout check 10m
timeout client 10m
timeout connect 10m
timeout http-request 10m
timeout queue 10m
timeout server 10m
server overcloudtrain1-controller-0.internalapi.trilio.local 172.30.5.28:8780 check fall 5 inter 2000 rise 2 verifyhost overcloudtrain1-controller-0.internalapi.trilio.localpodman restart haproxy-bundle-podman-0openstack service delete dmapi
openstack user delete dmapi
openstack service delete TrilioVaultWLM
openstack user delete triliovaultpodman exec -it galera-bundle-podman-0 mysql -u root## Clean database
DROP DATABASE dmapi;
## Clean dmapi user
=> List 'dmapi' user accounts
MariaDB [mysql]> select user, host from mysql.user where user='dmapi';
+-------+-----------------------------------------+
| user | host |
+-------+-----------------------------------------+
| dmapi | % |
| dmapi | 172.30.5.28 |
| dmapi | overcloudtrain1internalapi.trilio.local |
+-------+-----------------------------------------+
3 rows in set (0.000 sec)
=> Delete those user accounts
MariaDB [(none)]> DROP USER dmapi@'%';
Query OK, 0 rows affected (0.005 sec)
MariaDB [(none)]> DROP USER [email protected];
Query OK, 0 rows affected (0.006 sec)
MariaDB [(none)]> DROP USER [email protected];
Query OK, 0 rows affected (0.005 sec)
=> Verify that dmapi user got cleaned
MariaDB [mysql]> select user, host from mysql.user where user='dmapi';
Empty set (0.00 sec)
## Clean database
DROP DATABASE workloadmgr;
## Clean workloadmgr user
=> List 'workloadmgr' user accounts
MariaDB [(none)]> select user, host from mysql.user where user='workloadmgr';
+-------------+-----------------------------------------+
| user | host |
+-------------+-----------------------------------------+
| workloadmgr | % |
| workloadmgr | 172.30.5.28 |
| workloadmgr | overcloudtrain1internalapi.trilio.local |
+-------------+-----------------------------------------+
3 rows in set (0.000 sec)
=> Delete those user accounts
MariaDB [(none)]> DROP USER workloadmgr@'%';
Query OK, 0 rows affected (0.012 sec)
MariaDB [(none)]> DROP USER [email protected];
Query OK, 0 rows affected (0.006 sec)
MariaDB [(none)]> DROP USER [email protected];
Query OK, 0 rows affected (0.005 sec)
=> Verify that workloadmgr user got cleaned
MariaDB [(none)]> select user, host from mysql.user where user='workloadmgr';
Empty set (0.000 sec)deb [trusted=yes] https://apt.fury.io/trilio-6-1/ /https://yum.fury.io/trilio-6-1/[trilio-fury]
name=Trilio Gemfury Private Repo
baseurl=https://yum.fury.io/trilio-6-1/
enabled=1
gpgcheck=0git clone -b 6.1.1 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/git clone -b 6.1.1 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp17/git clone -b 6.1.1 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/https://pypi.fury.io/trilio-6-1/git clone -b 6.1.6 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/git clone -b 6.1.6 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/git clone -b 6.1.6 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/https://pypi.fury.io/trilio-6-1/git clone -b 6.1.3 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/git clone -b 6.1.3 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp17/git clone -b 6.1.3 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/git clone -b 6.1.3 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/https://pypi.fury.io/trilio-6-1/git clone -b 6.1.2 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/git clone -b 6.1.2 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp17/git clone -b 6.1.2 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/git clone -b 6.1.2 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/https://pypi.fury.io/trilio-6-1/git clone -b 6.1.0 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/git clone -b 6.1.0 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp17/https://pypi.fury.io/trilio-6-1/trilio_branch : 6.1.6trilio_branch : 6.1.3trilio_branch : 6.1.2trilio_branch : 6.1.0region=RegionOne
Network Subnet: 172.21.6.0/23
| neutron | network | RegionOne |
| | | public: https://172.21.6.20:9696 |
| | | RegionOne |
| | | internal: https://172.21.6.20:9696 |
| | | RegionOne |
| | | admin: https://172.21.6.20:9696 |
| | | | |
| | | |
| nova | compute | RegionOne |
| | | public: https://172.21.6.21:8774/v2.1 |
| | | RegionOne |
| | | admin: https://172.21.6.21:8774/v2.1 |
| | | RegionOne |
| | | internal: https://172.21.6.21:8774/v2.1 |
| | | |
+-------------+--------------+--------------------------------------------------------------------------+
region=RegionTwo
Network Subnet: 172.31.6.0/23
| neutron | network | RegionTwo |
| | | public: https://172.31.6.20:9696 |
| | | RegionTwo |
| | | internal: https://172.31.6.20:9696 |
| | | RegionTwo |
| | | admin: https://172.31.6.20:9696 |
| | | | |
| | | |
| nova | compute | RegionTwo |
| | | public: https://172.31.6.21:8774/v2.1 |
| | | RegionTwo |
| | | admin: https://172.31.6.21:8774/v2.1 |
| | | RegionTwo |
| | | internal: https://172.31.6.21:8774/v2.1 |
| | | |
+-------------+--------------+--------------------------------------------------------------------------+region=RegionOne
Network Subnet: 172.21.6.0/23
| neutron | network | RegionOne |
| | | public: https://172.21.6.20:9696 |
| | | RegionOne |
| | | internal: https://172.21.6.20:9696 |
| | | RegionOne |
| | | admin: https://172.21.6.20:9696 |
| | | |
| workloadmgr | workloads | RegionOne |
| | | internal: https://172.21.6.23:8780/v1/38bd7aa9b55944ebb3578c251a1b785b |
| | | RegionOne |
| | | public: https://172.21.6.23:8780/v1/38bd7aa9b55944ebb3578c251a1b785b |
| | | RegionOne |
| | | admin: https://172.21.6.23:8780/v1/38bd7aa9b55944ebb3578c251a1b785b |
| | | |
| dmapi | datamover | RegionOne |
| | | internal: https://172.21.6.22:8784/v2 |
| | | RegionOne |
| | | public: https://172.21.6.22:8784/v2 |
| | | RegionOne |
| | | admin: https://172.21.6.22:8784/v2 |
| | | |
| nova | compute | RegionOne |
| | | public: https://172.21.6.21:8774/v2.1 |
| | | RegionOne |
| | | admin: https://172.21.6.21:8774/v2.1 |
| | | RegionOne |
| | | internal: https://172.21.6.21:8774/v2.1 |
| | | |
+-------------+--------------+--------------------------------------------------------------------------+
region=RegionTwo
Network Subnet: 172.31.6.0/23
| neutron | network | RegionTwo |
| | | public: https://172.31.6.20:9696 |
| | | RegionTwo |
| | | internal: https://172.31.6.20:9696 |
| | | RegionTwo |
| | | admin: https://172.31.6.20:9696 |
| | | |
| workloadmgr | workloads | RegionTwo |
| | | internal: https://172.31.6.23:8780/v1/38bd7aa9b55944ebb3578c251a1b785b |
| | | RegionTwo |
| | | public: https://172.31.6.23:8780/v1/38bd7aa9b55944ebb3578c251a1b785b |
| | | RegionTwo |
| | | admin: https://172.31.6.23:8780/v1/38bd7aa9b55944ebb3578c251a1b785b |
| | | |
| dmapi | datamover | RegionTwo |
| | | internal: https://172.31.6.22:8784/v2 |
| | | RegionTwo |
| | | public: https://172.31.6.22:8784/v2 |
| | | RegionTwo |
| | | admin: https://172.31.6.22:8784/v2 |
| | | |
| nova | compute | RegionTwo |
| | | public: https://172.31.6.21:8774/v2.1 |
| | | RegionTwo |
| | | admin: https://172.31.6.21:8774/v2.1 |
| | | RegionTwo |
| | | internal: https://172.31.6.21:8774/v2.1 |
| | | |
+-------------+--------------+--------------------------------------------------------------------------+oc get pods -n trilio-openstack | grep wlm-apioc exec -n trilio-openstack -it <trilio-wlm-api-pod-name> bashoc -n openstack get osdpd | grep triliooc -n openstack delete osdpd <DEPLOYMENT_NAME>oc -n openstack get osdps | grep triliooc -n openstack delete osdps <OpenStackDataPlaneService_NAME>cd dataplane-scripts/
chmod +x create-templates-delete-backup-target.sh
./create-templates-delete-backup-target.sh <BACKUP_TARGET_NAME> <BACKUP_TARGET_TYPE>
cd <BACKUP_TARGET_NAME>/
ls -llvi trilio-delete-backup-target-service.yamlvi trilio-delete-backup-target-deployment.yamloc -n trilio-openstack get tvobackuptargetoc -n trilio-openstack get tvobackuptarget
NAME AGE
tvobackuptarget-s3-bt2 47d
tvobackuptarget-s3-bt6 43d
tvobackuptarget-s3-bt9 43doc delete tvobackuptarget <TVO_BACKUP_TARGET_NAME>oc delete tvobackuptarget tvobackuptarget-s3-bt2
tvobackuptarget.tvo.trilio.io "tvobackuptarget-s3-bt2" deletedoc -n trilio-openstack get tvobackuptarget | grep <TVO_BACKUP_TARGET_NAME>oc -n trilio-openstack get tvobackuptarget | grep tvobackuptarget-s3-bt2oc -n trilio-openstack get podsopenstack secret order create --name secret2 --algorithm aes --mode ctr --bit-length 256 --payload-content-type=application/octet-stream key
+----------------+------------------------------------------------------------------------+
| Field | Value |
+----------------+------------------------------------------------------------------------+
| Order href | http://172.30.0.75:9311/v1/orders/8b442c6e-73e0-40ed-9946-b986c8cc7e12 |
| Type | Key |
| Container href | N/A |
| Secret href | None |
| Created | None |
| Status | None |
| Error code | None |
| Error message | None |
+----------------+------------------------------------------------------------------------+openstack secret order get http://172.30.0.75:9311/v1/orders/8b442c6e-73e0-40ed-9946-b986c8cc7e12
+----------------+-------------------------------------------------------------------------+
| Field | Value |
+----------------+-------------------------------------------------------------------------+
| Order href | http://172.30.0.75:9311/v1/orders/8b442c6e-73e0-40ed-9946-b986c8cc7e12 |
| Type | Key |
| Container href | N/A |
| Secret href | http://172.30.0.75:9311/v1/secrets/b2d8a100-9ef9-4460-beff-bc5592f83746 |
| Created | 2025-09-18T03:28:30+00:00 |
| Status | ACTIVE |
| Error code | None |
| Error message | None |
+----------------+-------------------------------------------------------------------------+openstack secret get http://172.30.0.75:9311/v1/secrets/b14bc1ae-9251-4dd5-9fd4-18ef50bbb664
+---------------+-------------------------------------------------------------------------+
| Field | Value |
+---------------+-------------------------------------------------------------------------+
| Secret href | http://172.30.0.75:9311/v1/secrets/b14bc1ae-9251-4dd5-9fd4-18ef50bbb664 |
| Name | None |
| Created | 2025-09-17T10:46:16+00:00 |
| Status | ACTIVE |
| Content types | None |
| Algorithm | aes |
| Bit length | 256 |
| Secret type | opaque |
| Mode | cbc |
| Expiration | None |
+---------------+-------------------------------------------------------------------------+trilio_branch : 6.1.5trilio_branch : 6.1.4



--daily ➡️ Specify the following key-value pairs for the daily job schedule: backup_time='1:30 12:30 00:30',retention=<count>,snapshot_type=<full|incremental> For example --daily backup_time='01:00 02:00 11:00',retention='1',snapshot_type='incremental'HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Mon, 09 Nov 2020 13:23:25 GMT
Content-Type: application/json
Content-Length: 244
Connection: keep-alive
X-Compute-Request-Id: req-bdfd3fb8-5cbf-4108-885f-63160426b2fa
{
"file_search":{
"created_at":"2020-11-09T13:23:25.698534",
"updated_at":null,
"id":14,
"deleted_at":null,
"status":"executing",
"error_msg":null,
"filepath":"/etc/h*",
"json_resp":null,
"vm_id":"08dab61c-6efd-44d3-a9ed-8e789d338c1b"
}
}deb [trusted=yes] https://apt.fury.io/trilio-6-1/ /https://yum.fury.io/trilio-6-1/[trilio-fury]
name=Trilio Gemfury Private Repo
baseurl=https://yum.fury.io/trilio-6-1/
enabled=1
gpgcheck=0source <admin-rc-file>workloadmgr backup-target-list -c ID -c "Backend Endpoint"workloadmgr backup-target-delete <backup_target_id>workloadmgr backup-target-list -c ID -c "Backend Endpoint"oc get cm -n openstack | grep trilio | grep <BACKUP_TARGET_NAME>
oc get secrets -n openstack | grep trilio | grep <BACKUP_TARGET_NAME>oc -n openstack apply -f trilio-delete-backup-target-service.yamloc -n openstack get osdps | grep trilio-delete-backup-targetoc -n openstack apply -f trilio-delete-backup-target-deployment.yamloc -n openstack get osdpdoc -n openstack get pods | grep trilio-delete-backup-targetoc logs -f <ANSIBLE_RUNNER_POD_NAME> -n openstack workloadmgr snapshot-list [--workload_id <workload_id>]
[--tvault_node <host>]
[--date_from <date_from>]
[--date_to <date_to>]
[--all {True,False}]workloadmgr workload-snapshot [--full] [--display-name <display-name>]
[--display-description <display-description>]
<workload_id>workloadmgr snapshot-show [--output <output>] <snapshot_id>workloadmgr snapshot-delete <snapshot_id>workloadmgr snapshot-cancel <snapshot_id>workloadmgr policy-listworkloadmgr policy-show <policy_id>workloadmgr policy-create --policy-fields <key=key-name>
[--hourly interval=<n>,retention=<count>,snapshot_type=<incremental|full>]
[--daily backup_time=<time>,retention=<count>,snapshot_type=<incremental|full>]
[--weekly backup_day=<days>,retention=<count>,snapshot_type=<full>]
[--monthly month_backup_day=<date>,retention=<count>,snapshot_type=<full>]
[--yearly backup_month=<month>,retention=<count>,snapshot_type=<full>]
[--manual retention=<snapshots count>,retention_days_to_keep=<num of days>]
[--display-description <display_description>]
[--metadata <key=key-name>]
<display_name>workloadmgr policy-update [--display-name <display-name>]
[--display-description <display-description>]
[--policy-fields <key=key-name>]
[--hourly interval=<n>,retention=<count>,snapshot_type=<incremental|full>]
[--daily backup_time=<time>,retention=<count>,snapshot_type=<incremental|full>]
[--weekly backup_day=<days>,retention=<count>,snapshot_type=<full>]
[--monthly month_backup_day=<days>,retention=<count>,snapshot_type=<full>]
[--yearly backup_month=<months>,retention=<count>,snapshot_type=<full>]
[--manual retention=<snapshots count>,retention_days_to_keep=<num of days>]
[--metadata <key=key-name>]
<policy_id>workloadmgr policy-assign [--add_project <project_id>]
[--remove_project <project_id>]
<policy_id>workloadmgr policy-delete <policy_id>{
"file_search":{
"start":<Integer>,
"end":<Integer>,
"filepath":"<Reg-Ex String>",
"date_from":<Date Format: YYYY-MM-DDTHH:MM:SS>,
"date_to":<Date Format: YYYY-MM-DDTHH:MM:SS>,
"snapshot_ids":[
"<Snapshot-ID>"
],
"vm_id":"<VM-ID>"
}
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Mon, 09 Nov 2020 13:24:28 GMT
Content-Type: application/json
Content-Length: 819
Connection: keep-alive
X-Compute-Request-Id: req-d57bea9a-9968-4357-8743-e0b906466063
{
"file_search":{
"created_at":"2020-11-09T13:23:25.000000",
"updated_at":"2020-11-09T13:23:48.000000",
"id":14,
"deleted_at":null,
"status":"completed",
"error_msg":null,
"filepath":"/etc/h*",
"json_resp":"[
{
"ed4f29e8-7544-4e1c-af8a-a76031211926":[
{
"/dev/vda1":[
"/etc/hostname",
"/etc/hosts"
],
"/etc/hostname":{
"dev":"2049",
"ino":"32",
"mode":"33204",
"nlink":"1",
"uid":"0",
"gid":"0",
"rdev":"0",
"size":"1",
"blksize":"1024",
"blocks":"2",
"atime":"1603455255",
"mtime":"1603455255",
"ctime":"1603455255"
},
"/etc/hosts":{
"dev":"2049",
"ino":"127",
"mode":"33204",
"nlink":"1",
"uid":"0",
"gid":"0",
"rdev":"0",
"size":"37",
"blksize":"1024",
"blocks":"2",
"atime":"1603455257",
"mtime":"1431011050",
"ctime":"1431017172"
}
}
]
}
]",
"vm_id":"08dab61c-6efd-44d3-a9ed-8e789d338c1b"
}
}registry.connect.redhat.com/trilio/trilio-datamover-rhoso:6.1.1-rhoso18.0
registry.connect.redhat.com/trilio/trilio-datamover-api-rhoso:6.1.1-rhoso18.0
registry.connect.redhat.com/trilio/trilio-horizon-plugin-rhoso:6.1.1-rhoso18.0
registry.connect.redhat.com/trilio/trilio-wlm:6.1.1-rhoso18.0
registry.connect.redhat.com/trilio/trilio-openstack-operator-rhoso:6.1.1-rhoso18.0
registry.connect.redhat.com/trilio/trilio-ansible-runner-rhoso:6.1.1-rhoso18.0registry.connect.redhat.com/trilio/trilio-datamover:6.1.1-rhosp17.1
registry.connect.redhat.com/trilio/trilio-datamover-api:6.1.1-rhosp17.1
registry.connect.redhat.com/trilio/trilio-horizon-plugin:6.1.1-rhosp17.1
registry.connect.redhat.com/trilio/trilio-wlm:6.1.1-rhosp17.1docker.io/trilio/trilio-datamover-helm:6.1.1-2023.2
docker.io/trilio/trilio-datamover-api-helm:6.1.1-2023.2
docker.io/trilio/trilio-wlm-helm:6.1.1-2023.2
docker.io/trilio/trilio-horizon-plugin-helm:6.1.1-2023.2docker.io/trilio/trilio-datamover-helm:6.1.1-2023.1
docker.io/trilio/trilio-datamover-api-helm:6.1.1-2023.1
docker.io/trilio/trilio-wlm-helm:6.1.1-2023.1
docker.io/trilio/trilio-horizon-plugin-helm:6.1.1-2023.1docker.io/trilio/trilio-datamover-helm:6.1.6-2023.2
docker.io/trilio/trilio-datamover-api-helm:6.1.6-2023.2
docker.io/trilio/trilio-wlm-helm:6.1.6-2023.2
docker.io/trilio/trilio-horizon-plugin-helm:6.1.6-2023.2docker.io/trilio/trilio-datamover-helm:6.1.6-2023.1
docker.io/trilio/trilio-datamover-api-helm:6.1.6-2023.1
docker.io/trilio/trilio-wlm-helm:6.1.6-2023.1
docker.io/trilio/trilio-horizon-plugin-helm:6.1.6-2023.1docker.io/trilio/trilio-datamover-helm:6.1.6-mosk25.1
docker.io/trilio/trilio-datamover-api-helm:6.1.6-mosk25.1
docker.io/trilio/trilio-wlm-helm:6.1.6-mosk25.1
docker.io/trilio/trilio-horizon-plugin-helm:6.1.6-mosk25.1registry.connect.redhat.com/trilio/trilio-datamover-rhoso:6.1.6-rhoso18.0
registry.connect.redhat.com/trilio/trilio-datamover-api-rhoso:6.1.6-rhoso18.0
registry.connect.redhat.com/trilio/trilio-horizon-plugin-rhoso:6.1.6-rhoso18.0
registry.connect.redhat.com/trilio/trilio-wlm:6.1.6-rhoso18.0
registry.connect.redhat.com/trilio/trilio-openstack-operator-rhoso:6.1.6-rhoso18.0
registry.connect.redhat.com/trilio/trilio-ansible-runner-rhoso:6.1.6-rhoso18.0## Change name of old 'triliovault-cfg-scripts' repository directory.
mv triliovault-cfg-scripts triliovault-cfg-scripts-old
## Clone latest git repository 'triliovault-cfg-scripts'.
git clone -b {{ trilio_branch }} https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/trilio-openstack/
helm dep up
cd ../../../vi triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/2023.2.yaml---
images:
tags:
bootstrap: docker.io/openstackhelm/heat:2023.2-ubuntu_jammy
db_init: docker.io/openstackhelm/heat:2023.2-ubuntu_jammy
db_drop: docker.io/openstackhelm/heat:2023.2-ubuntu_jammy
rabbit_init: docker.io/rabbitmq:3.7-management
ks_user: docker.io/openstackhelm/heat:2023.2-ubuntu_jammy
ks_service: docker.io/openstackhelm/heat:2023.2-ubuntu_jammy
ks_endpoints: docker.io/openstackhelm/heat:2023.2-ubuntu_jammy
dep_check: quay.io/airshipit/kubernetes-entrypoint:v1.0.0
image_repo_sync: docker.io/docker:17.07.0
triliovault_wlm_cloud_trust: docker.io/trilio/trilio-wlm-helm:<image-tag>
triliovault_wlm_api: docker.io/trilio/trilio-wlm-helm:<image-tag>
triliovault_wlm_cron: docker.io/trilio/trilio-wlm-helm:<image-tag>
triliovault_wlm_scheduler: docker.io/trilio/trilio-wlm-helm:<image-tag>
triliovault_wlm_workloads: docker.io/trilio/trilio-wlm-helm:<image-tag>
triliovault_wlm_db_sync: docker.io/trilio/trilio-wlm-helm:<image-tag>
triliovault_datamover: docker.io/trilio/trilio-datamover-helm:<image-tag>
triliovault_datamover_api: docker.io/trilio/trilio-datamover-api-helm:<image-tag>
triliovault_datamover_db_sync: docker.io/trilio/trilio-datamover-api-helm:<image-tag>cp triliovault-cfg-scripts-old/openstack-helm/trilio-openstack/values_overrides/nfs.yaml triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/nfs.yamlcp triliovault-cfg-scripts-old/openstack-helm/trilio-openstack/files/s3-cert.pem triliovault-cfg-scripts/openstack-helm/trilio-openstack/files/s3-cert.pemvi triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/keystone.yaml
conf:
triliovault:
cloud_admin_user_name: "admin"
cloud_admin_project_name: "admin"
cloud_admin_domain_name: "default"
## Keystone endpoint interface that triliovault workloadmgr services will use to communicate to other openstack services
## Valid values: internal, public, admin
interface: "internal"
## Provide any role name as your preference.
## Significance: To run any backups in triliovault an openstack user needs to have this role on given project.
trustee_role: "creator"cd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils
./create_rabbitmq.sh./get_admin_creds.sh <internal_domain_name> <public_domain_name>
Example:
./get_admin_creds.sh cluster.local triliodata.democp triliovault-cfg-scripts-old/openstack-helm/trilio-openstack/values_overrides/admin_creds.yaml triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/admin_creds.yaml
cp triliovault-cfg-scripts-old/openstack-helm/trilio-openstack/templates/bin/_triliovault-nova-compute.conf.tpl triliovault-cfg-scripts/openstack-helm/trilio-openstack/templates/bin/_triliovault-nova-compute.conf.tplcd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils
./get_admin_creds_mosk.sh <internal_domain_name> <public_domain_name>
Example:
./get_admin_creds_mosk.sh cluster.local setup.triliodata.democp triliovault-cfg-scripts-old/openstack-helm/trilio-openstack/values_overrides/ceph.yaml triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/ceph.yaml
cp triliovault-cfg-scripts-old/openstack-helm/trilio-openstack/templates/bin/_triliovault-ceph.conf.tpl triliovault-cfg-scripts/openstack-helm/trilio-openstack/templates/bin/_triliovault-ceph.conf.tpl# kubectl get secrets | grep triliovault-image-registry
triliovault-image-registry kubernetes.io/dockerconfigjson 1 89dvi triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils/upgrade.sh#!/bin/bash -x
cd ../../
helm upgrade trilio-openstack ./trilio-openstack --namespace=trilio-openstack \
--values=./trilio-openstack/values_overrides/image_pull_secrets.yaml \
--values=./trilio-openstack/values_overrides/keystone.yaml \
--values=./trilio-openstack/values_overrides/nfs.yaml \
--values=./trilio-openstack/values_overrides/2023.2.yaml \
--values=./trilio-openstack/values_overrides/admin_creds.yaml \
--values=./trilio-openstack/values_overrides/tls_public_endpoint.yaml \
--values=./trilio-openstack/values_overrides/ceph.yaml \
--values=./trilio-openstack/values_overrides/ingress.yaml \
--values=./trilio-openstack/values_overrides/triliovault_passwords.yaml
echo -e "Waiting for triliovault pods to get into running state"
./trilio-openstack/utils/wait_for_pods.sh trilio-openstack
kubectl get pods -n trilio-openstackcd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils
./upgrade.sh## Check trilio-openstack helm chart version. It should be latest, mentioned in file 'triliovault-cfg-scripts/openstack-helm/trilio-openstack/Chart.yaml'
helm ls | grep trilio-openstack
## Check status of trilio-openstack helm chart release
helm status trilio-openstack
## Check if all pods are in running/Completed state.
kubectl get pods -n trilio-openstack
Sample Output:
----------------------
# kubectl get pods -n trilio-openstack
NAME READY STATUS RESTARTS AGE
triliovault-datamover-api-5877ff9b48-49wsq 1/1 Running 0 6d
triliovault-datamover-api-5877ff9b48-8zfmh 1/1 Running 0 6d
triliovault-datamover-api-5877ff9b48-xx4rm 1/1 Running 0 6d
triliovault-datamover-db-init-r4j9w 0/1 Completed 0 6d
triliovault-datamover-db-sync-c254n 0/1 Completed 0 6d
triliovault-datamover-ks-endpoints-8qnt8 0/3 Completed 0 6d
triliovault-datamover-ks-service-hps97 0/1 Completed 0 6d
triliovault-datamover-ks-user-sgrz6 0/1 Completed 0 6d
triliovault-datamover-openstack-compute-node-2s7k7 1/1 Running 0 6d
triliovault-datamover-openstack-compute-node-64sm5 1/1 Running 0 6d
triliovault-wlm-api-5bbc74cb4b-69b2b 1/1 Running 0 6d
triliovault-wlm-api-5bbc74cb4b-hbzfj 1/1 Running 0 6d
triliovault-wlm-api-5bbc74cb4b-zxlcl 1/1 Running 0 6d
triliovault-wlm-cloud-trust-hnxj8 0/1 Completed 0 6d
triliovault-wlm-cron-764658cb59-b8jx6 1/1 Running 0 6d
triliovault-wlm-db-init-rbpw2 0/1 Completed 0 6d
triliovault-wlm-db-sync-p97z8 0/1 Completed 0 6d
triliovault-wlm-ks-endpoints-tx65s 0/3 Completed 0 6d
triliovault-wlm-ks-service-9mkkt 0/1 Completed 0 6d
triliovault-wlm-ks-user-x5xrx 0/1 Completed 0 6d
triliovault-wlm-rabbit-init-qqzwr 0/1 Completed 0 6d
triliovault-wlm-scheduler-6c85688899-55tz7 1/1 Running 0 6d
triliovault-wlm-workloads-f9dc944db-5w7s9 1/1 Running 0 6d
triliovault-wlm-workloads-f9dc944db-84wj7 1/1 Running 0 6d
triliovault-wlm-workloads-f9dc944db-ccdvz 1/1 Running 0 6d
## Check if jobs finished well
kubectl get jobs -n trilio-openstack
Sample Output:
--------------
NAME COMPLETIONS DURATION AGE
triliovault-datamover-db-init 1/1 4s 6d
triliovault-datamover-db-sync 1/1 8s 6d
triliovault-datamover-ks-endpoints 1/1 19s 6d
triliovault-datamover-ks-service 1/1 8s 6d
triliovault-datamover-ks-user 1/1 21s 6d
triliovault-wlm-cloud-trust 1/1 3m10s 6d
triliovault-wlm-db-init 1/1 4s 6d
triliovault-wlm-db-sync 1/1 13s 6d
triliovault-wlm-ks-endpoints 1/1 20s 6d
triliovault-wlm-ks-service 1/1 7s 6d
triliovault-wlm-ks-user 1/1 20s 6d
triliovault-wlm-rabbit-init 1/1 9s 6d
## If you are using NFS backup target, check if nfs pvc got into Bound state
kubectl get pvc -n trilio-openstack
Sample Output:
-------------
NAME STATUS VOLUME CAPACITY ACCESS MODES STORAGECLASS AGE
triliovault-nfs-pvc-172-25-0-10-mnt-tvault-42424 Bound triliovault-nfs-pv-172-25-0-10-mnt-tvault-42424 20Gi RWX nfs 6ddeb [trusted=yes] https://apt.fury.io/trilio-6-1/ /https://yum.fury.io/trilio-6-1/[trilio-fury]
name=Trilio Gemfury Private Repo
baseurl=https://yum.fury.io/trilio-6-1/
enabled=1
gpgcheck=0deb [trusted=yes] https://apt.fury.io/trilio-6-1/ /https://yum.fury.io/trilio-6-1/[trilio-fury]
name=Trilio Gemfury Private Repo
baseurl=https://yum.fury.io/trilio-6-1/
enabled=1
gpgcheck=0deb [trusted=yes] https://apt.fury.io/trilio-6-1/ /https://yum.fury.io/trilio-6-1/[trilio-fury]
name=Trilio Gemfury Private Repo
baseurl=https://yum.fury.io/trilio-6-1/
enabled=1
gpgcheck=0deb [trusted=yes] https://apt.fury.io/trilio-6-1/ /https://yum.fury.io/trilio-6-1/[trilio-fury]
name=Trilio Gemfury Private Repo
baseurl=https://yum.fury.io/trilio-6-1/
enabled=1
gpgcheck=0git clone -b 6.1.5 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/git clone -b 6.1.5 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp17/git clone -b 6.1.5 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/git clone -b 6.1.5 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/https://pypi.fury.io/trilio-6-1/deb [trusted=yes] https://apt.fury.io/trilio-6-1/ /https://yum.fury.io/trilio-6-1/[trilio-fury]
name=Trilio Gemfury Private Repo
baseurl=https://yum.fury.io/trilio-6-1/
enabled=1
gpgcheck=0

git clone -b 6.1.4 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/

git clone -b 6.1.4 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp17/

git clone -b 6.1.4 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/

git clone -b 6.1.4 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/

https://pypi.fury.io/trilio-6-1/

git clone -b 6.1.7 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/

git clone -b 6.1.7 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp17/

git clone -b 6.1.7 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/

git clone -b 6.1.7 https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/

https://pypi.fury.io/trilio-6-1/

HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 11 Nov 2020 15:29:03 GMT
Content-Type: application/json
Content-Length: 0
Connection: keep-alive
X-Compute-Request-Id: req-9d779802-9c65-463a-973c-39cdffcba82e

registry.connect.redhat.com/trilio/trilio-datamover-rhoso:6.1.3-rhoso18.0
registry.connect.redhat.com/trilio/trilio-datamover-api-rhoso:6.1.3-rhoso18.0
registry.connect.redhat.com/trilio/trilio-horizon-plugin-rhoso:6.1.3-rhoso18.0
registry.connect.redhat.com/trilio/trilio-wlm:6.1.3-rhoso18.0
registry.connect.redhat.com/trilio/trilio-openstack-operator-rhoso:6.1.3-rhoso18.0
registry.connect.redhat.com/trilio/trilio-ansible-runner-rhoso:6.1.3-rhoso18.0registry.connect.redhat.com/trilio/trilio-datamover:6.1.3-rhosp17.1
registry.connect.redhat.com/trilio/trilio-datamover-api:6.1.3-rhosp17.1
registry.connect.redhat.com/trilio/trilio-horizon-plugin:6.1.3-rhosp17.1
registry.connect.redhat.com/trilio/trilio-wlm:6.1.3-rhosp17.1docker.io/trilio/trilio-datamover-helm:6.1.3-2023.2
docker.io/trilio/trilio-datamover-api-helm:6.1.3-2023.2
docker.io/trilio/trilio-wlm-helm:6.1.3-2023.2
docker.io/trilio/trilio-horizon-plugin-helm:6.1.3-2023.2docker.io/trilio/trilio-datamover-helm:6.1.3-2023.1
docker.io/trilio/trilio-datamover-api-helm:6.1.3-2023.1
docker.io/trilio/trilio-wlm-helm:6.1.3-2023.1
docker.io/trilio/trilio-horizon-plugin-helm:6.1.3-2023.1docker.io/trilio/trilio-datamover-helm:6.1.3-mosk25.1
docker.io/trilio/trilio-datamover-api-helm:6.1.3-mosk25.1
docker.io/trilio/trilio-wlm-helm:6.1.3-mosk25.1
docker.io/trilio/trilio-horizon-plugin-helm:6.1.3-mosk25.1registry.connect.redhat.com/trilio/trilio-datamover-rhoso:6.1.2-rhoso18.0
registry.connect.redhat.com/trilio/trilio-datamover-api-rhoso:6.1.2-rhoso18.0
registry.connect.redhat.com/trilio/trilio-horizon-plugin-rhoso:6.1.2-rhoso18.0
registry.connect.redhat.com/trilio/trilio-wlm:6.1.2-rhoso18.0
registry.connect.redhat.com/trilio/trilio-openstack-operator-rhoso:6.1.2-rhoso18.0
registry.connect.redhat.com/trilio/trilio-ansible-runner-rhoso:6.1.2-rhoso18.0registry.connect.redhat.com/trilio/trilio-datamover:6.1.2-rhosp17.1
registry.connect.redhat.com/trilio/trilio-datamover-api:6.1.2-rhosp17.1
registry.connect.redhat.com/trilio/trilio-horizon-plugin:6.1.2-rhosp17.1
registry.connect.redhat.com/trilio/trilio-wlm:6.1.2-rhosp17.1docker.io/trilio/trilio-datamover-helm:6.1.2-2023.2
docker.io/trilio/trilio-datamover-api-helm:6.1.2-2023.2
docker.io/trilio/trilio-wlm-helm:6.1.2-2023.2
docker.io/trilio/trilio-horizon-plugin-helm:6.1.2-2023.2docker.io/trilio/trilio-datamover-helm:6.1.2-2023.1
docker.io/trilio/trilio-datamover-api-helm:6.1.2-2023.1
docker.io/trilio/trilio-wlm-helm:6.1.2-2023.1
docker.io/trilio/trilio-horizon-plugin-helm:6.1.2-2023.1docker.io/trilio/trilio-datamover-helm:6.1.2-mosk25.1
docker.io/trilio/trilio-datamover-api-helm:6.1.2-mosk25.1
docker.io/trilio/trilio-wlm-helm:6.1.2-mosk25.1
docker.io/trilio/trilio-horizon-plugin-helm:6.1.2-mosk25.1registry.connect.redhat.com/trilio/trilio-datamover:6.1.0-rhosp17.1
registry.connect.redhat.com/trilio/trilio-datamover-api:6.1.0-rhosp17.1
registry.connect.redhat.com/trilio/trilio-horizon-plugin:6.1.0-rhosp17.1
registry.connect.redhat.com/trilio/trilio-wlm:6.1.0-rhosp17.1registry.connect.redhat.com/trilio/trilio-datamover-rhoso:6.1.0-rhoso18.0
registry.connect.redhat.com/trilio/trilio-datamover-api-rhoso:6.1.0-rhoso18.0
registry.connect.redhat.com/trilio/trilio-horizon-plugin-rhoso:6.1.0-rhoso18.0
registry.connect.redhat.com/trilio/trilio-wlm:6.1.0-rhoso18.0
registry.connect.redhat.com/trilio/trilio-openstack-operator-rhoso:6.1.0-rhoso18.0
registry.connect.redhat.com/trilio/trilio-ansible-runner-rhoso:6.1.0-rhoso18.0trilio_branch : 6.1.7openstack image create \
--file <File Manager Image Path> \
--container-format bare \
--disk-format qcow2 \
--public \
--property hw_qemu_guest_agent=yes \
--property tvault_recovery_manager=yes \
--property hw_disk_bus=virtio \
tvault-file-manageropenstack server set --os-compute-api-version 2.26 --tag "tvault_recovery_manager" <FRM_UUID>guest-file-read
guest-file-write
guest-file-open
guest-file-close

SELINUX=disabled

yum install python3 lvm2

apt-get update
apt-get install qemu-guest-agent
systemctl enable qemu-guest-agent

Loaded: loaded (/etc/init.d/qemu-guest-agent; generated)

DAEMON_ARGS="-F/etc/qemu/fsfreeze-hook"

Loaded: loaded (/usr/lib/systemd/system/qemu-guest-agent.service; disabled; vendor preset: enabled)

systemctl edit qemu-guest-agent

[Service]
ExecStart=
ExecStart=/usr/sbin/qemu-ga -F/etc/qemu/fsfreeze-hook

systemctl restart qemu-guest-agent

apt-get install python3

workloadmgr snapshot-mount <snapshot_id> <mount_vm_id>

workloadmgr snapshot-mounted-list [--workloadid <workloadid>]

workloadmgr snapshot-dismount <snapshot_id>

HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 11 Nov 2020 15:44:42 GMT
Content-Type: application/json
Content-Length: 228
Connection: keep-alive
X-Compute-Request-Id: req-04c6ef90-125c-4a36-9603-af1af001006a
{
"mounted_snapshots":[
{
"snapshot_id":"ed4f29e8-7544-4e1c-af8a-a76031211926",
"snapshot_name":"snapshot",
"workload_id":"4bafaa03-f69a-45d5-a6fc-ae0119c77974",
"mounturl":"[\"http://192.168.100.87\"]",
"status":"mounted"
}
]
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 11 Nov 2020 15:44:42 GMT
Content-Type: application/json
Content-Length: 228
Connection: keep-alive
X-Compute-Request-Id: req-04c6ef90-125c-4a36-9603-af1af001006a
{
"mounted_snapshots":[
{
"snapshot_id":"ed4f29e8-7544-4e1c-af8a-a76031211926",
"snapshot_name":"snapshot",
"workload_id":"4bafaa03-f69a-45d5-a6fc-ae0119c77974",
"mounturl":"[\"http://192.168.100.87\"]",
"status":"mounted"
}
]
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 11 Nov 2020 16:03:49 GMT
Content-Type: application/json
Content-Length: 0
Connection: keep-alive
X-Compute-Request-Id: req-abf69be3-474d-4cf3-ab41-caa56bb611e4

{
"mount":{
"mount_vm_id":"15185195-cd8d-4f6f-95ca-25983a34ed92",
"options":{
}
}
}{
"mount":
{
"options": null
}
}registry.connect.redhat.com/trilio/trilio-datamover-rhoso:6.1.5-rhoso18.0
registry.connect.redhat.com/trilio/trilio-datamover-api-rhoso:6.1.5-rhoso18.0
registry.connect.redhat.com/trilio/trilio-horizon-plugin-rhoso:6.1.5-rhoso18.0
registry.connect.redhat.com/trilio/trilio-wlm:6.1.5-rhoso18.0
registry.connect.redhat.com/trilio/trilio-openstack-operator-rhoso:6.1.5-rhoso18.0
registry.connect.redhat.com/trilio/trilio-ansible-runner-rhoso:6.1.5-rhoso18.0registry.connect.redhat.com/trilio/trilio-datamover:6.1.5-rhosp17.1
registry.connect.redhat.com/trilio/trilio-datamover-api:6.1.5-rhosp17.1
registry.connect.redhat.com/trilio/trilio-horizon-plugin:6.1.5-rhosp17.1
registry.connect.redhat.com/trilio/trilio-wlm:6.1.5-rhosp17.1docker.io/trilio/trilio-datamover-helm:6.1.5-2023.2
docker.io/trilio/trilio-datamover-api-helm:6.1.5-2023.2
docker.io/trilio/trilio-wlm-helm:6.1.5-2023.2
docker.io/trilio/trilio-horizon-plugin-helm:6.1.5-2023.2docker.io/trilio/trilio-datamover-helm:6.1.5-2023.1
docker.io/trilio/trilio-datamover-api-helm:6.1.5-2023.1
docker.io/trilio/trilio-wlm-helm:6.1.5-2023.1
docker.io/trilio/trilio-horizon-plugin-helm:6.1.5-2023.1docker.io/trilio/trilio-datamover-helm:6.1.5-mosk25.1
docker.io/trilio/trilio-datamover-api-helm:6.1.5-mosk25.1
docker.io/trilio/trilio-wlm-helm:6.1.5-mosk25.1
docker.io/trilio/trilio-horizon-plugin-helm:6.1.5-mosk25.1registry.connect.redhat.com/trilio/trilio-datamover-rhoso:6.1.4-rhoso18.0
registry.connect.redhat.com/trilio/trilio-datamover-api-rhoso:6.1.4-rhoso18.0
registry.connect.redhat.com/trilio/trilio-horizon-plugin-rhoso:6.1.4-rhoso18.0
registry.connect.redhat.com/trilio/trilio-wlm:6.1.4-rhoso18.0
registry.connect.redhat.com/trilio/trilio-openstack-operator-rhoso:6.1.4-rhoso18.0
registry.connect.redhat.com/trilio/trilio-ansible-runner-rhoso:6.1.4-rhoso18.0registry.connect.redhat.com/trilio/trilio-datamover:6.1.4-rhosp17.1
registry.connect.redhat.com/trilio/trilio-datamover-api:6.1.4-rhosp17.1
registry.connect.redhat.com/trilio/trilio-horizon-plugin:6.1.4-rhosp17.1
registry.connect.redhat.com/trilio/trilio-wlm:6.1.4-rhosp17.1docker.io/trilio/trilio-datamover-helm:6.1.4-2023.2
docker.io/trilio/trilio-datamover-api-helm:6.1.4-2023.2
docker.io/trilio/trilio-wlm-helm:6.1.4-2023.2
docker.io/trilio/trilio-horizon-plugin-helm:6.1.4-2023.2docker.io/trilio/trilio-datamover-helm:6.1.4-2023.1
docker.io/trilio/trilio-datamover-api-helm:6.1.4-2023.1
docker.io/trilio/trilio-wlm-helm:6.1.4-2023.1
docker.io/trilio/trilio-horizon-plugin-helm:6.1.4-2023.1docker.io/trilio/trilio-datamover-helm:6.1.4-mosk25.1
docker.io/trilio/trilio-datamover-api-helm:6.1.4-mosk25.1
docker.io/trilio/trilio-wlm-helm:6.1.4-mosk25.1
docker.io/trilio/trilio-horizon-plugin-helm:6.1.4-mosk25.1deb [trusted=yes] https://apt.fury.io/trilio-6-1/ /https://yum.fury.io/trilio-6-1/[trilio-fury]
name=Trilio Gemfury Private Repo
baseurl=https://yum.fury.io/trilio-6-1/
enabled=1
gpgcheck=0registry.connect.redhat.com/trilio/trilio-datamover-rhoso:6.1.7-rhoso18.0
registry.connect.redhat.com/trilio/trilio-datamover-api-rhoso:6.1.7-rhoso18.0
registry.connect.redhat.com/trilio/trilio-horizon-plugin-rhoso:6.1.7-rhoso18.0
registry.connect.redhat.com/trilio/trilio-wlm:6.1.7-rhoso18.0
registry.connect.redhat.com/trilio/trilio-openstack-operator-rhoso:6.1.7-rhoso18.0
registry.connect.redhat.com/trilio/trilio-ansible-runner-rhoso:6.1.7-rhoso18.0registry.connect.redhat.com/trilio/trilio-datamover:6.1.7-rhosp17.1
registry.connect.redhat.com/trilio/trilio-datamover-api:6.1.7-rhosp17.1
registry.connect.redhat.com/trilio/trilio-horizon-plugin:6.1.7-rhosp17.1
registry.connect.redhat.com/trilio/trilio-wlm:6.1.7-rhosp17.1docker.io/trilio/trilio-datamover-helm:6.1.7-2025.1
docker.io/trilio/trilio-datamover-api-helm:6.1.7-2025.1
docker.io/trilio/trilio-wlm-helm:6.1.7-2025.1
docker.io/trilio/trilio-horizon-plugin-helm:6.1.7-2025.1docker.io/trilio/trilio-datamover-helm:6.1.7-2023.2
docker.io/trilio/trilio-datamover-api-helm:6.1.7-2023.2
docker.io/trilio/trilio-wlm-helm:6.1.7-2023.2
docker.io/trilio/trilio-horizon-plugin-helm:6.1.7-2023.2docker.io/trilio/trilio-datamover-helm:6.1.7-mosk25.2
docker.io/trilio/trilio-datamover-api-helm:6.1.7-mosk25.2
docker.io/trilio/trilio-wlm-helm:6.1.7-mosk25.2
docker.io/trilio/trilio-horizon-plugin-helm:6.1.7-mosk25.2docker.io/trilio/trilio-datamover-helm:6.1.7-mosk25.1
docker.io/trilio/trilio-datamover-api-helm:6.1.7-mosk25.1
docker.io/trilio/trilio-wlm-helm:6.1.7-mosk25.1
docker.io/trilio/trilio-horizon-plugin-helm:6.1.7-mosk25.1workloadmgr workload-list [--all {True,False}] [--nfsshare <nfsshare>]workloadmgr workload-create [--display-name <display-name>]
[--display-description <display-description>]
[--source-platform <source-platform>]
[--jobschedule <key=key-name>]
[--hourly [<key=key-name> ...]]
[--daily [<key=key-name> ...]]
[--weekly [<key=key-name> ...]]
[--monthly [<key=key-name> ...]]
[--yearly [<key=key-name> ...]]
[--manual <key=key-name> [<key=key-name> ...]]
[--metadata <key=key-name>]
[--policy-id <policy_id>]
[--encryption <True/False>]
[--secret-uuid <secret_uuid>]
[--backup-target-type <backup_target_type>]
<instance-id=instance-uuid> [<instance-id=instance-uuid> ...]workloadmgr workload-show <workload_id> [--verbose <verbose>]usage: workloadmgr workload-modify [--display-name <display-name>]
[--display-description <display-description>]
[--instance <instance-id=instance-uuid>]
[--jobschedule <key=key-name>]
[--hourly [<key=key-name> ...]]
[--daily [<key=key-name> ...]]
[--weekly [<key=key-name> ...]]
[--monthly [<key=key-name> ...]]
[--yearly [<key=key-name> ...]]
[--manual [<key=key-name> ...]]
[--metadata <key=key-name>]
[--policy-id <policy_id>]
<workload_id>workloadmgr workload-delete [--database_only <True/False>] <workload_id>workloadmgr workload-unlock <workload_id>workloadmgr workload-reset <workload_id>workloadmgr workload-service-disable [--reason <reason>] <node_name>workloadmgr workload-service-enable <node_name>workloadmgr setting-create [--description <description>]
[--category <category>]
[--type <type>]
[--is-public {True,False}]
[--is-hidden {True,False}]
[--metadata <key=value>]
<name> <value>workloadmgr setting-update [--description <description>]
[--category <category>]
[--type <type>]
[--is-public {True,False}]
[--is-hidden {True,False}]
[--metadata <key=value>]
<name> <value>workloadmgr setting-show [--get_hidden {True,False}] <setting_name>workloadmgr setting-delete <setting_name>workloadmgr get-global-job-schedulerworkloadmgr disable-global-job-schedulerworkloadmgr enable-global-job-schedulerHTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Fri, 13 Nov 2020 11:52:56 GMT
Content-Type: application/json
Content-Length: 0
Connection: keep-alive
X-Compute-Request-Id: req-99f51825-9b47-41ea-814f-8f8141157fc7HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Fri, 13 Nov 2020 12:06:01 GMT
Content-Type: application/json
Content-Length: 0
Connection: keep-alive
X-Compute-Request-Id: req-4eb1863e-3afa-4a2c-b8e6-91a41fe37f78HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Fri, 13 Nov 2020 12:31:49 GMT
Content-Type: application/json
Content-Length: 1223
Connection: keep-alive
X-Compute-Request-Id: req-c6f826a9-fff7-442b-8886-0770bb97c491
{
"scheduler_enabled":true,
"trust":{
"created_at":"2020-10-23T14:35:11.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"name":"trust-002bcbaf-c16b-44e6-a9ef-9c1efbfa2e2c",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"user_id":"ccddc7e7a015487fa02920f4d4979779",
"value":"871ca24f38454b14b867338cb0e9b46c",
"description":"token id for user ccddc7e7a015487fa02920f4d4979779 project c76b3355a164498aa95ddbc960adc238",
"category":"identity",
"type":"trust_id",
"public":false,
"hidden":true,
"status":"available",
"metadata":[
{
"created_at":"2020-10-23T14:35:11.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"a3cc9a01-3d49-4ff8-ad8e-b12a7b3c68b0",
"settings_name":"trust-002bcbaf-c16b-44e6-a9ef-9c1efbfa2e2c",
"settings_project_id":"c76b3355a164498aa95ddbc960adc238",
"key":"role_name",
"value":"member"
}
]
},
"is_valid":true,
"scheduler_obj":{
"workload_id":"4bafaa03-f69a-45d5-a6fc-ae0119c77974",
"user_id":"ccddc7e7a015487fa02920f4d4979779",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"user_domain_id":"default",
"user":"ccddc7e7a015487fa02920f4d4979779",
"tenant":"c76b3355a164498aa95ddbc960adc238"
}
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Fri, 13 Nov 2020 12:45:27 GMT
Content-Type: application/json
Content-Length: 30
Connection: keep-alive
X-Compute-Request-Id: req-cd447ce0-7bd3-4a60-aa92-35fc43b4729b
{"global_job_scheduler": true}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Fri, 13 Nov 2020 12:49:29 GMT
Content-Type: application/json
Content-Length: 31
Connection: keep-alive
X-Compute-Request-Id: req-6f49179a-737a-48ab-91b7-7e7c460f5af0
{"global_job_scheduler": false}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Fri, 13 Nov 2020 12:50:11 GMT
Content-Type: application/json
Content-Length: 30
Connection: keep-alive
X-Compute-Request-Id: req-ed279acc-9805-4443-af91-44a4420559bc
{"global_job_scheduler": true}curl -O https://get.helm.sh/helm-v3.17.2-linux-amd64.tar.gz
tar -zxvf helm*.tar.gz
mv linux-amd64/helm /usr/local/bin/helm
rm -rf linux-amd64 helm*.tar.gz
kubectl get nodes --show-labels | grep openstack-control-plane
kubectl get nodes --show-labels | grep openstack-compute

apt-get install nfs-common -y

sudo apt update -y && sudo apt install make jq -y

git clone -b {{ trilio_branch }} https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/trilio-openstack/
helm dep up
cd ../../../vi triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/mosk25.1.yamlkubectl create namespace trilio-openstack
kubectl config set-context --current --namespace=trilio-openstackkubectl get nodes --show-labels | grep openstack-control-planekubectl label nodes <NODE_NAME_1> triliovault-control-plane=enabled
kubectl label nodes <NODE_NAME_2> triliovault-control-plane=enabled
kubectl label nodes <NODE_NAME_3> triliovault-control-plane=enabledkubectl get nodes --show-labels | grep triliovault-control-planevi triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/nfs.yamlvi triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/s3.yamlvi triliovault-cfg-scripts/openstack-helm/trilio-openstack/files/s3-cert.pemgit clone https://github.com/openstack/openstack-helm.git# If helm osh plugin doesn't exist, install it
helm plugin install https://opendev.org/openstack/openstack-helm-plugin.git
# Set OpenStack release (adjust as needed for the deployment version)
export OPENSTACK_RELEASE=2024.1
# Features enabled for the deployment. This is used to look up values overrides.
export FEATURES="${OPENSTACK_RELEASE} ubuntu_jammy"
# Directory where values overrides are looked up or downloaded to.
export OVERRIDES_DIR=$(pwd)/overrides
mkdir -p "${OVERRIDES_DIR}"
cd openstack-helm/rabbitmq
helm dependency build
cd ../..helm upgrade --install trilio-rabbitmq openstack-helm/rabbitmq \
--namespace=trilio-openstack \
--set pod.replicas.server=1 \
--set volume.class_name="general" \
--timeout=600s \
$(helm osh get-values-overrides -p ${OVERRIDES_DIR} -c rabbitmq ${FEATURES})
helm osh wait-for-pods trilio-openstackkubectl get osdpl -n openstackroot@ubuntu:~# kubectl get osdpl -n openstack
NAME OPENSTACK AGE DRAFT
osh-dev caracal 7d19h
root@ubuntu:~# kubectl describe osdpl osh-dev -n openstack | grep domain
internal_domain_name: cluster.local
public_domain_name: triliodata.demo

cd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils

./get_admin_creds_mosk.sh <internal_domain_name> <public_domain_name>
Example:
./get_admin_creds_mosk.sh cluster.local setup.triliodata.demo

cd ../../../../
cat triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/admin_creds.yaml
cat triliovault-cfg-scripts/openstack-helm/trilio-openstack/templates/bin/_triliovault-nova-compute.conf.tplvi triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/keystone.yaml$ kubectl describe osdpl osh-dev -n openstack | grep live_migration_interface
Example output:
live_migration_interface: mcc-lcmvi triliovault-cfg-scripts/openstack-helm/trilio-openstack/values.yamllibvirt:
images_rbd_ceph_conf: /etc/ceph/ceph.conf
live_migration_interface: "<output>" # Interface used for live migration traffic
hypervisor_host_interface: "<output>" # Interface used for live migration trafficcd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils
./get_ceph_mosk.shcat ../values_overrides/ceph.yaml
cat ../templates/bin/_triliovault-ceph.conf.tplvi ../values_overrides/ceph.yamlvi ../templates/bin/_triliovault-ceph.conf.tplcd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils./create_image_pull_secret.sh <TRILIO_REGISTRY_USERNAME> <TRILIO_REGISTRY_PASSWORD>kubectl describe secret triliovault-image-registry -n trilio-openstackcd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils/
vi install.shhelm upgrade --install trilio-openstack ./trilio-openstack --namespace=trilio-openstack \
--values=./trilio-openstack/values_overrides/image_pull_secrets.yaml \
--values=./trilio-openstack/values_overrides/keystone.yaml \
--values=./trilio-openstack/values_overrides/s3.yaml \
--values=./trilio-openstack/values_overrides/mosk25.1.yaml \
--values=./trilio-openstack/values_overrides/admin_creds.yaml \
--values=./trilio-openstack/values_overrides/tls_public_endpoint.yaml \
--values=./trilio-openstack/values_overrides/ceph.yaml \
--values=./trilio-openstack/values_overrides/db_drop.yaml \
--values=./trilio-openstack/values_overrides/ingress.yaml \
--values=./trilio-openstack/values_overrides/triliovault_passwords.yaml
echo -e "Waiting for TrilioVault pods to reach running state"
./trilio-openstack/utils/wait_for_pods.sh trilio-openstack
kubectl get podscd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils/
./install.shkubectl -n trilio-openstack get ingressroot@master:~# kubectl -n trilio-openstack get ingress
NAME CLASS HOSTS ADDRESS PORTS AGE
triliovault-datamover nginx triliovault-datamover,triliovault-datamover.trilio-openstack,triliovault-datamover.trilio-openstack.svc.cluster.local 192.168.2.5 80 14h
triliovault-datamover-cluster-fqdn nginx-cluster triliovault-datamover.triliodata.demo 80, 443 14h
triliovault-datamover-namespace-fqdn nginx triliovault-datamover.triliodata.demo 192.168.2.5 80, 443 14h
triliovault-wlm nginx triliovault-wlm,triliovault-wlm.trilio-openstack,triliovault-wlm.trilio-openstack.svc.cluster.local 192.168.2.5 80 14h
triliovault-wlm-cluster-fqdn nginx-cluster triliovault-wlm.triliodata.demo 80, 443 14h
triliovault-wlm-namespace-fqdn nginx triliovault-wlm.triliodata.demo 192.168.2.5 80, 443 14h
for i in $(kubectl get ingress -n trilio-openstack -o name); do
kubectl patch -n trilio-openstack "$i" --type='json' -p='[
{"op": "remove", "path": "/spec/ingressClassName"},
{"op": "add", "path": "/metadata/annotations/kubernetes.io~1ingress.class", "value": "openstack-ingress-nginx"}
]' || echo "Skipping $i (possibly missing ingressClassName)"
donehelm status trilio-openstackkubectl get pods -n trilio-openstackNAME READY STATUS RESTARTS AGE
triliovault-datamover-api-5c7fbb949c-2m8dc 1/1 Running 0 21h
triliovault-datamover-api-5c7fbb949c-kxspg 1/1 Running 0 21h
triliovault-datamover-api-5c7fbb949c-z4wkn 1/1 Running 0 21h
triliovault-datamover-db-init-7k7jg 0/1 Completed 0 21h
triliovault-datamover-db-sync-6jkgs 0/1 Completed 0 21h
triliovault-datamover-ks-endpoints-gcrht 0/3 Completed 0 21h
triliovault-datamover-ks-service-nnnvh 0/1 Completed 0 21h
triliovault-datamover-ks-user-td44v 0/1 Completed 0 20h
triliovault-datamover-openstack-compute-node-4gkv8 1/1 Running 0 21h
triliovault-datamover-openstack-compute-node-6lbc4 1/1 Running 0 21h
triliovault-datamover-openstack-compute-node-pqslx 1/1 Running 0 21h
triliovault-wlm-api-7647c4b45c-52449 1/1 Running 0 21h
triliovault-wlm-api-7647c4b45c-h47mw 1/1 Running 0 21h
triliovault-wlm-api-7647c4b45c-rjbvl 1/1 Running 0 21h
triliovault-wlm-cloud-trust-h8xgq 0/1 Completed 0 20h
triliovault-wlm-cron-574ff78486-54rqg 1/1 Running 0 21h
triliovault-wlm-db-init-hvk65 0/1 Completed 0 21h
triliovault-wlm-db-sync-hpl4c 0/1 Completed 0 21h
triliovault-wlm-ks-endpoints-4bsxl 0/3 Completed 0 21h
triliovault-wlm-ks-service-btcb4 0/1 Completed 0 21h
triliovault-wlm-ks-user-gnfdh 0/1 Completed 0 20h
triliovault-wlm-rabbit-init-ws262 0/1 Completed 0 21h
triliovault-wlm-scheduler-669f4758b4-ks7qr 1/1 Running 0 21h
triliovault-wlm-workloads-5ff86448c-mj8p2 1/1 Running 0 21h
triliovault-wlm-workloads-5ff86448c-th6f4 1/1 Running 0 21h
triliovault-wlm-workloads-5ff86448c-zhr4m 1/1 Running 0 21hkubectl get jobs -n trilio-openstackNAME COMPLETIONS DURATION AGE
triliovault-datamover-db-init 1/1 5s 21h
triliovault-datamover-db-sync 1/1 8s 21h
triliovault-datamover-ks-endpoints 1/1 17s 21h
triliovault-datamover-ks-service 1/1 18s 21h
triliovault-datamover-ks-user 1/1 19s 21h
triliovault-wlm-cloud-trust 1/1 2m10s 20h
triliovault-wlm-db-init 1/1 5s 21h
triliovault-wlm-db-sync 1/1 20s 21h
triliovault-wlm-ks-endpoints 1/1 17s 21h
triliovault-wlm-ks-service 1/1 17s 21h
triliovault-wlm-ks-user 1/1 19s 21h
triliovault-wlm-rabbit-init 1/1 4s 21hkubectl get pvc -n trilio-openstacktriliovault-nfs-pvc-172-25-0-10-mnt-tvault-42424 Bound triliovault-nfs-pv-172-25-0-10-mnt-tvault-42424 20Gi RWX nfs 6dkubectl get nodes -o wide --show-labels | grep openstack-control-plane=enabled | awk '{print $1, $6;}'
# Sample output:
-------------------
root@helm1# kubectl get nodes -o wide --show-labels | grep openstack-control-plane=enabled | awk '{print $1, $6;}'
helm2 172.25.10.203
helm3 172.25.10.204
helm4 172.25.10.205# ssh to horizon nodes
ssh <HORIZON_NODE_IP>
# Login to TrilioVault Docker Image Registry
docker login docker.io -u <TRILIO_DOCKER_REGISTRY_USERNAME> -p <TRILIO_DOCKER_REGISTRY_PASSWORD>
# Pull TrilioVault Horizon Plugin Image
docker pull docker.io/trilio/trilio-horizon-plugin-helm:<TRILIOVAULT_HORIZON_PLUGIN_IMAGE_TAG>## Get mosk openstack deployment resource name
kubectl -n openstack get osdpl
## Example: Here 'osh-dev' is mosk openstack deployment resource name
kubectl -n openstack get osdpl
NAME OPENSTACK AGE DRAFT
osh-dev victoria 243d
## Get its resource definition file in YAML format.
kubectl -n openstack get osdpl ${OSDPL_RESOURCE_NAME} -o yaml > openstackdeployment.yaml
## Example
kubectl -n openstack get osdpl osh-dev -o yaml > openstackdeployment.yaml services:
dashboard:
horizon:
values:
images:
tags:
horizon: docker.io/trilio/trilio-horizon-plugin-helm:<TRILIOVAULT_HORIZON_PLUGIN_IMAGE_TAG>kubectl apply -f openstackdeployment.yamlkubectl -n openstack get osdplst
##Sample output of update completed state:
--------------------------------------------
root@helm1:# kubectl -n openstack get osdplst
NAME OPENSTACK VERSION CONTROLLER VERSION STATE
osh-dev victoria 0.8.3 APPLIEDkubectl get pods -n openstack | grep horizon
## Sample output
root@helm1:# kubectl get pods -n openstack | grep horizon
horizon-f9d4c747d-8wmzt 1/1 Running 0 4d20h
horizon-f9d4c747d-lhplg 1/1 Running 0 4d19htrilio-openstack NamespaceHTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Thu, 21 Jan 2021 11:21:57 GMT
Content-Type: application/json
Content-Length: 868
Connection: keep-alive
X-Compute-Request-Id: req-fa48f0ad-aa76-42fa-85ea-1e5461889fb3
{
"trust":[
{
"created_at":"2020-11-26T13:10:53.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"name":"trust-6e290937-de9b-446a-a406-eb3944e5a034",
"project_id":"4dfe98a43bfa404785a812020066b4d6",
"user_id":"cloud_admin",
"value":"dbe2e160d4c44d7894836a6029644ea0",
"description":"token id for user adfa32d7746a4341b27377d6f7c61adb project 4dfe98a43bfa404785a812020066b4d6",
"category":"identity",
"type":"trust_id",
"public":false,
"hidden":true,
"status":"available",
"metadata":[
{
"created_at":"2020-11-26T13:10:54.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"e9ec386e-79cf-4f6b-8201-093315648afe",
"settings_name":"trust-6e290937-de9b-446a-a406-eb3944e5a034",
"settings_project_id":"4dfe98a43bfa404785a812020066b4d6",
"key":"role_name",
"value":"admin"
}
]
}
]
}curl -O https://get.helm.sh/helm-v3.17.2-linux-amd64.tar.gz
tar -zxvf helm*.tar.gz
mv linux-amd64/helm /usr/local/bin/helm
rm -rf linux-amd64 helm*.tar.gz
kubectl get nodes --show-labels | grep openstack-control-plane
kubectl get nodes --show-labels | grep openstack-compute

sudo apt-get install nfs-common -y

sudo apt update -y && sudo apt install make jq -y

git clone -b {{ trilio_branch }} https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/openstack-helm/trilio-openstack/
helm dep up
cd ../../../vi triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/2023.2.yamlkubectl create namespace trilio-openstack
kubectl config set-context --current --namespace=trilio-openstackkubectl get nodes --show-labels | grep openstack-control-planekubectl label nodes <NODE_NAME_1> triliovault-control-plane=enabled
kubectl label nodes <NODE_NAME_2> triliovault-control-plane=enabled
kubectl label nodes <NODE_NAME_3> triliovault-control-plane=enabledkubectl get nodes --show-labels | grep triliovault-control-planevi triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/nfs.yamlvi triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/s3.yamlvi triliovault-cfg-scripts/openstack-helm/trilio-openstack/files/s3-cert.pemvi triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/keystone.yamlcd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils
./create_rabbitmq.sh

cd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils

./get_admin_creds.sh <internal_domain_name> <public_domain_name>
Example:
./get_admin_creds.sh cluster.local triliodata.demo

cat ../values_overrides/admin_creds.yaml
cat ../templates/bin/_triliovault-nova-compute.conf.tpl
cat ../templates/bin/_triliovault-datamover-init.sh.tpl
cat ../templates/bin/_triliovault-datamover.sh.tplcd triliovault-cfg-scripts/openstack-helm/trilio-openstack/values_overrides/
## Provide rbd_user, keyring. This user needs to have read,write access on vms, volumes pool used for nova, cinder backend.
## By default 'nova' user generally has these permissions. But we recommend to verify and use it for triliovault.
vi ceph.yaml
## Copy your /etc/ceph/ceph.conf content to following file. Clean existing file content.
vi ../templates/bin/_triliovault-ceph.conf.tplvi ../templates/bin/_triliovault-ceph.conf.tplcd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils
./get_ceph.shcat ../values_overrides/ceph.yaml
cat ../templates/bin/_triliovault-ceph.conf.tplcd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils./create_image_pull_secret.sh <TRILIO_REGISTRY_USERNAME> <TRILIO_REGISTRY_PASSWORD>kubectl describe secret triliovault-image-registry -n trilio-openstackcd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils/
vi install.shhelm upgrade --install trilio-openstack ./trilio-openstack --namespace=trilio-openstack \
--values=./trilio-openstack/values_overrides/image_pull_secrets.yaml \
--values=./trilio-openstack/values_overrides/keystone.yaml \
--values=./trilio-openstack/values_overrides/s3.yaml \
--values=./trilio-openstack/values_overrides/2023.2.yaml \
--values=./trilio-openstack/values_overrides/admin_creds.yaml \
--values=./trilio-openstack/values_overrides/tls_public_endpoint.yaml \
--values=./trilio-openstack/values_overrides/ceph.yaml \
--values=./trilio-openstack/values_overrides/db_drop.yaml \
--values=./trilio-openstack/values_overrides/ingress.yaml \
--values=./trilio-openstack/values_overrides/triliovault_passwords.yaml
echo -e "Waiting for TrilioVault pods to reach running state"
./trilio-openstack/utils/wait_for_pods.sh trilio-openstack
kubectl get podscd triliovault-cfg-scripts/openstack-helm/trilio-openstack/utils/
./install.shkubectl get service -n openstack | grep LoadBalancerpublic-openstack LoadBalancer 10.105.43.185 192.168.2.50 80:30162/TCP,443:30829/TCPkubectl -n trilio-openstack get ingressroot@master:~# kubectl -n trilio-openstack get ingress
NAME CLASS HOSTS ADDRESS PORTS AGE
triliovault-datamover nginx triliovault-datamover,triliovault-datamover.trilio-openstack,triliovault-datamover.trilio-openstack.svc.cluster.local 192.168.2.5 80 14h
triliovault-datamover-cluster-fqdn nginx-cluster triliovault-datamover.triliodata.demo 80, 443 14h
triliovault-datamover-namespace-fqdn nginx triliovault-datamover.triliodata.demo 192.168.2.5 80, 443 14h
triliovault-wlm nginx triliovault-wlm,triliovault-wlm.trilio-openstack,triliovault-wlm.trilio-openstack.svc.cluster.local 192.168.2.5 80 14h
triliovault-wlm-cluster-fqdn nginx-cluster triliovault-wlm.triliodata.demo 80, 443 14h
triliovault-wlm-namespace-fqdn nginx triliovault-wlm.triliodata.demo 192.168.2.5 80, 443 14h
kubectl -n openstack get deployment ingress-nginx-controller -o yaml | grep watch-namespacekubectl edit deployment ingress-nginx-controller -n ingress-nginx--watch-namespace=trilio-openstack,openstackkubectl rollout restart deployment ingress-nginx-controller -n openstackhelm status trilio-openstackkubectl get pods -n trilio-openstackNAME READY STATUS RESTARTS AGE
rabbitmq-server-0 1/1 Running 0 13h
trilio-object-store-s3-bt2-lhdnx 1/1 Running 0 13h
trilio-object-store-s3-bt2-szwtg 1/1 Running 0 13h
trilio-object-store-s3-bt2-x9cnr 1/1 Running 0 13h
triliovault-datamover-api-7cdf475fdf-cz9nw 1/1 Running 0 13h
triliovault-datamover-api-7cdf475fdf-mq86z 1/1 Running 0 13h
triliovault-datamover-api-7cdf475fdf-tm9ww 1/1 Running 0 13h
triliovault-datamover-db-init-pdf58 0/1 Completed 0 13h
triliovault-datamover-db-sync-bln62 0/1 Completed 0 13h
triliovault-datamover-ks-endpoints-g744n 0/3 Completed 0 13h
triliovault-datamover-ks-service-gzdcw 0/1 Completed 0 13h
triliovault-datamover-ks-user-dvbtd 0/1 Completed 0 13h
triliovault-datamover-openstack-compute-node-g258v 1/1 Running 0 13h
triliovault-datamover-openstack-compute-node-k89b2 1/1 Running 0 13h
triliovault-datamover-openstack-compute-node-twz5t 1/1 Running 0 13h
triliovault-datamover-rabbit-init-cx72m 0/1 Completed 0 13h
triliovault-wlm-api-545cb949dc-7nlr2 1/1 Running 0 13h
triliovault-wlm-api-545cb949dc-bvzdb 1/1 Running 0 13h
triliovault-wlm-api-545cb949dc-rtwqm 1/1 Running 0 13h
triliovault-wlm-cloud-trust-jvv2c 0/1 Completed 0 13h
triliovault-wlm-cron-86946df7f5-mxs56 1/1 Running 0 13h
triliovault-wlm-db-init-7s7v2 0/1 Completed 0 13h
triliovault-wlm-db-sync-b88td 0/1 Completed 0 13h
triliovault-wlm-ks-endpoints-gkh2l 0/3 Completed 0 13h
triliovault-wlm-ks-service-74zj6 0/1 Completed 0 13h
triliovault-wlm-ks-user-fb4xs 0/1 Completed 0 13h
triliovault-wlm-rabbit-init-2dqq7 0/1 Completed 0 13h
triliovault-wlm-scheduler-7cf86b59f4-z6v78 1/1 Running 0 13h
triliovault-wlm-workloads-6455dc5c9f-8fzn8 1/1 Running 0 13h
triliovault-wlm-workloads-6455dc5c9f-mq45v 1/1 Running 0 13h
triliovault-wlm-workloads-6455dc5c9f-rjd6w 1/1 Running 0 13h
kubectl get jobs -n trilio-openstackNAME STATUS COMPLETIONS DURATION AGE
triliovault-datamover-db-init Complete 1/1 6s 13h
triliovault-datamover-db-sync Complete 1/1 9s 13h
triliovault-datamover-ks-endpoints Complete 1/1 17s 13h
triliovault-datamover-ks-service Complete 1/1 19s 13h
triliovault-datamover-ks-user Complete 1/1 20s 13h
triliovault-datamover-rabbit-init Complete 1/1 4s 13h
triliovault-wlm-cloud-trust Complete 1/1 93s 13h
triliovault-wlm-db-init Complete 1/1 5s 13h
triliovault-wlm-db-sync Complete 1/1 20s 13h
triliovault-wlm-ks-endpoints Complete 1/1 17s 13h
triliovault-wlm-ks-service Complete 1/1 19s 13h
triliovault-wlm-ks-user Complete 1/1 22s 13h
triliovault-wlm-rabbit-init Complete 1/1 4s 13hkubectl get pvc -n trilio-openstacktriliovault-nfs-pvc-172-25-0-10-mnt-tvault-42424 Bound triliovault-nfs-pv-172-25-0-10-mnt-tvault-42424 20Gi RWX nfs 6dkubectl get nodes --show-labels | grep triliovault-control-plane
-- SSH to these kubernetes nodes
ssh <KUBERNETES_NODE_NAME>
-- See logs
vi /var/log/triliovault-datamover-api/triliovault-datamover-api.log
## Other approach: kubectl stdout and stderr logs
-- List triliovault-datamover-api pods
kubectl get pods | grep triliovault-datamover-api
-- See logs
kubectl logs <triliovault-datamover-api-pod-name>
# Example:
root@helm1:~# kubectl get pods | grep triliovault-datamover-api
triliovault-datamover-api-c87899fb7-dq2sd 1/1 Running 0 3d18h
triliovault-datamover-api-c87899fb7-j4fdz 1/1 Running 0 3d18h
triliovault-datamover-api-c87899fb7-nm8pt 1/1 Running 0 3d18h
root@helm1:~# kubectl logs triliovault-datamover-api-c87899fb7-dq2sdkubectl get nodes --show-labels | grep openstack-compute-node
-- SSH to these kubernetes nodes
ssh <KUBERNETES_NODE_NAME>
-- See logs
vi /var/log/triliovault-datamover/triliovault-datamover.log
## Other approach: kubectl stdout and stderr logs
-- List triliovault-datamover pods running on compute nodes
kubectl get pods | grep triliovault-datamover-openstack
-- See logs
kubectl logs <triliovault-datamover-pod-name>
# Example:
root@helm1:~# kubectl get pods | grep triliovault-datamover-openstack
triliovault-datamover-openstack-compute-node-2krmj 1/1 Running 0 3d19h
triliovault-datamover-openstack-compute-node-9f5w7 1/1 Running 0 3d19h
root@helm1:~# kubectl logs triliovault-datamover-openstack-compute-node-2krmjkubectl get nodes --show-labels | grep triliovault-control-plane
-- SSH to these kubernetes nodes
ssh <KUBERNETES_NODE_NAME>
-- Log files are available in the following directory.
ls /var/log/triliovault-wlm/
## Sample command output
root@helm4:~# ls -ll /var/log/triliovault-wlm/
total 26576
-rw-r--r-- 1 42424 42424 2079322 Mar 20 07:55 triliovault-wlm-api.log
-rw-r--r-- 1 42424 42424 25000088 Mar 20 00:41 triliovault-wlm-api.log.1
-rw-r--r-- 1 42424 42424 12261 Mar 16 12:40 triliovault-wlm-cron.log
-rw-r--r-- 1 42424 42424 10263 Mar 16 12:36 triliovault-wlm-scheduler.log
-rw-r--r-- 1 42424 42424 87918 Mar 16 12:36 triliovault-wlm-workloads.log
## Other approach: kubectl stdout and stderr logs
-- List triliovault-wlm services pods
kubectl get pods | grep triliovault-wlm
-- See logs
kubectl logs <triliovault-wlm-service-pod-name>
# Example:
root@helm1:~# kubectl get pods | grep triliovault-wlm
triliovault-wlm-api-7b956f7b8-84gtw 1/1 Running 0 3d19h
triliovault-wlm-api-7b956f7b8-85mdk 1/1 Running 0 3d19h
triliovault-wlm-api-7b956f7b8-hpcpt 1/1 Running 0 3d19h
triliovault-wlm-cloud-trust-rdh8n 0/1 Completed 0 3d19h
triliovault-wlm-cron-78bdb4b959-wzrfs 1/1 Running 0 3d19h
triliovault-wlm-db-drop-dhfgj 0/1 Completed 0 3d19h
triliovault-wlm-db-init-snrsr 0/1 Completed 0 3d19h
triliovault-wlm-db-sync-wffk5 0/1 Completed 0 3d19h
triliovault-wlm-ks-endpoints-zvqtf 0/3 Completed 0 3d19h
triliovault-wlm-ks-service-6425q 0/1 Completed 0 3d19h
triliovault-wlm-ks-user-fmgsx 0/1 Completed 0 3d19h
triliovault-wlm-rabbit-init-vsdn6 0/1 Completed 0 3d19h
triliovault-wlm-scheduler-649b95ffd6-bkqxt 1/1 Running 0 3d19h
triliovault-wlm-workloads-6b98679d45-2kjdq 1/1 Running 0 3d19h
triliovault-wlm-workloads-6b98679d45-mxvhp 1/1 Running 0 3d19h
triliovault-wlm-workloads-6b98679d45-v4dn8 1/1 Running 0 3d19h
# kubectl logs triliovault-wlm-api-7b956f7b8-84gtw
# kubectl logs triliovault-wlm-cron-78bdb4b959-wzrfs
# kubectl logs triliovault-wlm-scheduler-649b95ffd6-bkqxt
# kubectl logs triliovault-wlm-workloads-6b98679d45-mxvhpkubectl describe secret triliovault-image-registry -n openstackkubectl create secret docker-registry triliovault-image-registry \
--docker-server="docker.io" \
--docker-username=<TRILIO_REGISTRY_USERNAME> \
--docker-password=<TRILIO_REGISTRY_PASSWORD> \
-n openstackkubectl -n openstack patch deployment horizon \
--type='strategic' \
-p '{
"spec": {
"template": {
"spec": {
"containers": [
{
"name": "horizon",
"image": "docker.io/trilio/trilio-horizon-plugin-helm:<TRILIOVAULT_HORIZON_PLUGIN_IMAGE_TAG>"
}
],
"imagePullSecrets": [
{
"name": "triliovault-image-registry"
}
]
}
}
}
}'kubectl get pods -n openstack -l application=horizon,component=server -o jsonpath="{.items[*].spec.containers[*].image}" | tr ' ' '\n'HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Thu, 21 Jan 2021 11:43:36 GMT
Content-Type: application/json
Content-Length: 868
Connection: keep-alive
X-Compute-Request-Id: req-2151b327-ea74-4eec-b606-f0df358bc2a0
{
"trust":[
{
"created_at":"2021-01-21T11:43:36.140407",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"name":"trust-b03daf38-1615-48d6-88f9-a807c728e786",
"project_id":"4dfe98a43bfa404785a812020066b4d6",
"user_id":"adfa32d7746a4341b27377d6f7c61adb",
"value":"1c981a15e7a54242ae54eee6f8d32e6a",
"description":"token id for user adfa32d7746a4341b27377d6f7c61adb project 4dfe98a43bfa404785a812020066b4d6",
"category":"identity",
"type":"trust_id",
"public":false,
"hidden":1,
"status":"available",
"is_public":false,
"is_hidden":true,
"metadata":[
]
}
]
}{
"trusts":{
"role_name":"member",
"is_cloud_trust":false
}
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Thu, 21 Jan 2021 11:39:12 GMT
Content-Type: application/json
Content-Length: 888
Connection: keep-alive
X-Compute-Request-Id: req-3c2f6acb-9973-4805-bae3-cd8dbcdc2cb4
{
"trust":{
"created_at":"2020-11-26T13:15:29.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"name":"trust-54e24d8d-6bcf-449e-8021-708b4ebc65e1",
"project_id":"4dfe98a43bfa404785a812020066b4d6",
"user_id":"adfa32d7746a4341b27377d6f7c61adb",
"value":"703dfabb4c5942f7a1960736dd84f4d4",
"description":"token id for user adfa32d7746a4341b27377d6f7c61adb project 4dfe98a43bfa404785a812020066b4d6",
"category":"identity",
"type":"trust_id",
"public":false,
"hidden":true,
"status":"available",
"metadata":[
{
"created_at":"2020-11-26T13:15:29.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"86aceea1-9121-43f9-b55c-f862052374ab",
"settings_name":"trust-54e24d8d-6bcf-449e-8021-708b4ebc65e1",
"settings_project_id":"4dfe98a43bfa404785a812020066b4d6",
"key":"role_name",
"value":"member"
}
]
}
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Thu, 21 Jan 2021 11:41:51 GMT
Content-Type: application/json
Content-Length: 888
Connection: keep-alive
X-Compute-Request-Id: req-d838a475-f4d3-44e9-8807-81a9c32ea2a8{
"scheduler_enabled":true,
"trust":{
"created_at":"2021-01-21T11:43:36.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"name":"trust-b03daf38-1615-48d6-88f9-a807c728e786",
"project_id":"4dfe98a43bfa404785a812020066b4d6",
"user_id":"adfa32d7746a4341b27377d6f7c61adb",
"value":"1c981a15e7a54242ae54eee6f8d32e6a",
"description":"token id for user adfa32d7746a4341b27377d6f7c61adb project 4dfe98a43bfa404785a812020066b4d6",
"category":"identity",
"type":"trust_id",
"public":false,
"hidden":true,
"status":"available",
"metadata":[
{
"created_at":"2021-01-21T11:43:36.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"d98d283a-b096-4a68-826a-36f99781787d",
"settings_name":"trust-b03daf38-1615-48d6-88f9-a807c728e786",
"settings_project_id":"4dfe98a43bfa404785a812020066b4d6",
"key":"role_name",
"value":"member"
}
]
},
"is_valid":true,
"scheduler_obj":{
"workload_id":"209c13fa-e743-4ccd-81f7-efdaff277a1f",
"user_id":"adfa32d7746a4341b27377d6f7c61adb",
"project_id":"4dfe98a43bfa404785a812020066b4d6",
"user_domain_id":"default",
"user":"adfa32d7746a4341b27377d6f7c61adb",
"tenant":"4dfe98a43bfa404785a812020066b4d6"
}
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Thu, 04 Feb 2021 11:55:43 GMT
Content-Type: application/json
Content-Length: 403
Connection: keep-alive
X-Compute-Request-Id: req-ac16c258-7890-4ae7-b7f4-015b5aa4eb99
{
"settings":[
{
"created_at":"2021-02-04T11:55:43.890855",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"name":"smtp_port",
"project_id":"4dfe98a43bfa404785a812020066b4d6",
"user_id":null,
"value":"8080",
"description":null,
"category":null,
"type":"email_settings",
"public":false,
"hidden":0,
"status":"available",
"is_public":false,
"is_hidden":false
}
]
}{
"settings":[
{
"category":null,
"name":<String Setting_name>,
"is_public":false,
"is_hidden":false,
"metadata":{
},
"type":<String Setting type>,
"value":<String Setting Value>,
"description":null
}
]
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Thu, 04 Feb 2021 12:01:27 GMT
Content-Type: application/json
Content-Length: 380
Connection: keep-alive
X-Compute-Request-Id: req-404f2808-7276-4c2b-8870-8368a048c28c
{
"setting":{
"created_at":"2021-02-04T11:55:43.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"name":"smtp_port",
"project_id":"4dfe98a43bfa404785a812020066b4d6",
"user_id":null,
"value":"8080",
"description":null,
"category":null,
"type":"email_settings",
"public":false,
"hidden":false,
"status":"available",
"metadata":[
]
}
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Thu, 04 Feb 2021 12:05:59 GMT
Content-Type: application/json
Content-Length: 403
Connection: keep-alive
X-Compute-Request-Id: req-e92e2c38-b43a-4046-984e-64cea3a0281f
{
"settings":[
{
"created_at":"2021-02-04T11:55:43.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"name":"smtp_port",
"project_id":"4dfe98a43bfa404785a812020066b4d6",
"user_id":null,
"value":"8080",
"description":null,
"category":null,
"type":"email_settings",
"public":false,
"hidden":0,
"status":"available",
"is_public":false,
"is_hidden":false
}
]
}{
"settings":[
{
"category":null,
"name":<String Setting_name>,
"is_public":false,
"is_hidden":false,
"metadata":{
},
"type":<String Setting type>,
"value":<String Setting Value>,
"description":null
}
]
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Thu, 04 Feb 2021 11:49:17 GMT
Content-Type: application/json
Content-Length: 1223
Connection: keep-alive
X-Compute-Request-Id: req-5a8303aa-6c90-4cd9-9b6a-8c200f9c2473HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Tue, 17 Nov 2020 10:34:10 GMT
Content-Type: application/json
Content-Length: 7888
Connection: keep-alive
X-Compute-Request-Id: req-9d73e5e6-ca5a-4c07-bdf2-ec2e688fc339
{
"workloads":[
{
"created_at":"2020-11-02T13:40:06.000000",
"updated_at":"2020-11-09T09:53:30.000000",
"id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"user_id":"ccddc7e7a015487fa02920f4d4979779",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"availability_zone":"nova",
"workload_type_id":"f82ce76f-17fe-438b-aa37-7a023058e50d",
"name":"Workload_1",
"description":"no-description",
"interval":null,
"storage_usage":null,
"instances":null,
"metadata":[
{
"created_at":"2020-11-09T09:57:23.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"ee27bf14-e460-454b-abf5-c17e3d484ec2",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"key":"63cd8d96-1c4a-4e61-b1e0-3ae6a17bf533",
"value":"c8468146-8117-48a4-bfd7-49381938f636"
},
{
"created_at":"2020-11-05T10:27:06.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"22d3e3d6-5a37-48e9-82a1-af2dda11f476",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"key":"67d6a100-fee6-4aa5-83a1-66b070d2eabe",
"value":"1fb104bf-7e2b-4cb6-84f6-96aabc8f1dd2"
},
{
"created_at":"2020-11-09T09:37:20.000000",
"updated_at":"2020-11-09T09:57:23.000000",
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"61615532-6165-45a2-91e2-fbad9eb0b284",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"key":"b083bb70-e384-4107-b951-8e9e7bbac380",
"value":"c8468146-8117-48a4-bfd7-49381938f636"
},
{
"created_at":"2020-11-02T13:40:24.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"5a53c8ee-4482-4d6a-86f2-654d2b06e28c",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"key":"backup_media_target",
"value":"10.10.2.20:/upstream"
},
{
"created_at":"2020-11-05T10:27:14.000000",
"updated_at":"2020-11-09T09:57:23.000000",
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"5cb4dc86-a232-4916-86bf-42a0d17f1439",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"key":"e33c1eea-c533-4945-864d-0da1fc002070",
"value":"c8468146-8117-48a4-bfd7-49381938f636"
},
{
"created_at":"2020-11-02T13:40:06.000000",
"updated_at":"2020-11-02T14:10:30.000000",
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"506cd466-1e15-416f-9f8e-b9bdb942f3e1",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"key":"hostnames",
"value":"[\"cirros-1\", \"cirros-2\"]"
},
{
"created_at":"2020-11-02T13:40:06.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"093a1221-edb6-4957-8923-cf271f7e43ce",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"key":"pause_at_snapshot",
"value":"0"
},
{
"created_at":"2020-11-02T13:40:06.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"79baaba8-857e-410f-9d2a-8b14670c4722",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"key":"policy_id",
"value":"b79aa5f3-405b-4da4-96e2-893abf7cb5fd"
},
{
"created_at":"2020-11-02T13:40:06.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"4e23fa3d-1a79-4dc8-86cb-dc1ecbd7008e",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"key":"preferredgroup",
"value":"[]"
},
{
"created_at":"2020-11-02T14:10:30.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"ed06cca6-83d8-4d4c-913b-30c8b8418b80",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"key":"topology",
"value":"\"\\\"\\\"\""
},
{
"created_at":"2020-11-02T13:40:23.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"4b6a80f7-b011-48d4-b5fd-f705448de076",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"key":"workload_approx_backup_size",
"value":"6"
}
],
"jobschedule":"(dp0\nVfullbackup_interval\np1\nV-1\np2\nsVretention_policy_type\np3\nVNumber of Snapshots to Keep\np4\nsVend_date\np5\nVNo End\np6\nsVstart_time\np7\nV01:45 PM\np8\nsVinterval\np9\nV5\np10\nsVenabled\np11\nI00\nsVretention_policy_value\np12\nV10\np13\nsVtimezone\np14\nVUTC\np15\nsVstart_date\np16\nV11/02/2020\np17\nsVappliance_timezone\np18\nVUTC\np19\ns.",
"status":"locked",
"error_msg":null,
"links":[
{
"rel":"self",
"href":"http://wlm_backend/v1/4dfe98a43bfa404785a812020066b4d6/workloads/18b809de-d7c8-41e2-867d-4a306407fb11"
},
{
"rel":"bookmark",
"href":"http://wlm_backend/4dfe98a43bfa404785a812020066b4d6/workloads/18b809de-d7c8-41e2-867d-4a306407fb11"
}
],
"scheduler_trust":null
}
]
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Tue, 17 Nov 2020 10:42:01 GMT
Content-Type: application/json
Content-Length: 120143
Connection: keep-alive
X-Compute-Request-Id: req-b443f6e7-8d8e-413f-8d91-7c30ba166e8c
{
"workloads":[
{
"created_at":"2019-04-24T14:09:20.000000",
"updated_at":"2019-05-16T09:10:17.000000",
"id":"0ed39f25-5df2-4cc5-820f-2af2cde6aa67",
"user_id":"6ef8135faedc4259baac5871e09f0044",
"project_id":"863b6e2a8e4747f8ba80fdce1ccf332e",
"availability_zone":"nova",
"workload_type_id":"f82ce76f-17fe-438b-aa37-7a023058e50d",
"name":"comdirect_test",
"description":"Daily UNIX Backup 03:15 PM Full 7D Keep 8",
"interval":null,
"storage_usage":null,
"instances":null,
"metadata":[
{
"workload_id":"0ed39f25-5df2-4cc5-820f-2af2cde6aa67",
"deleted":false,
"created_at":"2019-05-16T09:13:54.000000",
"updated_at":null,
"value":"ca544215-1182-4a8f-bf81-910f5470887a",
"version":"3.2.46",
"key":"40965cbb-d352-4618-b8b0-ea064b4819bb",
"deleted_at":null,
"id":"5184260e-8bb3-4c52-abfa-1adc05fe6997"
},
{
"workload_id":"0ed39f25-5df2-4cc5-820f-2af2cde6aa67",
"deleted":true,
"created_at":"2019-04-24T14:09:30.000000",
"updated_at":"2019-05-16T09:01:23.000000",
"value":"10.10.2.20:/upstream",
"version":"3.2.46",
"key":"backup_media_target",
"deleted_at":"2019-05-16T09:01:23.000000",
"id":"02dd0630-7118-485c-9e42-b01d23aa882c"
},
{
"workload_id":"0ed39f25-5df2-4cc5-820f-2af2cde6aa67",
"deleted":false,
"created_at":"2019-05-16T09:13:51.000000",
"updated_at":null,
"value":"51693eca-8714-49be-b409-f1f1709db595",
"version":"3.2.46",
"key":"eb7d6b13-21e4-45d1-b888-d3978ab37216",
"deleted_at":null,
"id":"4b79a4ef-83d6-4e5a-afb3-f4e160c5f257"
},
{
"workload_id":"0ed39f25-5df2-4cc5-820f-2af2cde6aa67",
"deleted":true,
"created_at":"2019-04-24T14:09:20.000000",
"updated_at":"2019-05-16T09:01:23.000000",
"value":"[\"Comdirect_test-2\", \"Comdirect_test-1\"]",
"version":"3.2.46",
"key":"hostnames",
"deleted_at":"2019-05-16T09:01:23.000000",
"id":"0cb6a870-8f30-4325-a4ce-e9604370198e"
},
{
"workload_id":"0ed39f25-5df2-4cc5-820f-2af2cde6aa67",
"deleted":false,
"created_at":"2019-04-24T14:09:20.000000",
"updated_at":"2019-05-16T09:01:23.000000",
"value":"0",
"version":"3.2.46",
"key":"pause_at_snapshot",
"deleted_at":null,
"id":"5d4f109c-9dc2-48f3-a12a-e8b8fa4f5be9"
},
{
"workload_id":"0ed39f25-5df2-4cc5-820f-2af2cde6aa67",
"deleted":true,
"created_at":"2019-04-24T14:09:20.000000",
"updated_at":"2019-05-16T09:01:23.000000",
"value":"[]",
"version":"3.2.46",
"key":"preferredgroup",
"deleted_at":"2019-05-16T09:01:23.000000",
"id":"9a223fbc-7cad-4c2c-ae8a-75e6ee8a6efc"
},
{
"workload_id":"0ed39f25-5df2-4cc5-820f-2af2cde6aa67",
"deleted":true,
"created_at":"2019-04-24T14:11:49.000000",
"updated_at":"2019-05-16T09:01:23.000000",
"value":"\"\\\"\\\"\"",
"version":"3.2.46",
"key":"topology",
"deleted_at":"2019-05-16T09:01:23.000000",
"id":"77e436c0-0921-4919-97f4-feb58fb19e06"
},
{
"workload_id":"0ed39f25-5df2-4cc5-820f-2af2cde6aa67",
"deleted":true,
"created_at":"2019-04-24T14:09:30.000000",
"updated_at":"2019-05-16T09:01:23.000000",
"value":"121",
"version":"3.2.46",
"key":"workload_approx_backup_size",
"deleted_at":"2019-05-16T09:01:23.000000",
"id":"79aa04dd-a102-4bd8-b672-5b7a6ce9e125"
}
],
"jobschedule":"(dp1\nVfullbackup_interval\np2\nV7\nsVretention_policy_type\np3\nVNumber of days to retain Snapshots\np4\nsVend_date\np5\nV05/31/2019\np6\nsVstart_time\np7\nS'02:15 PM'\np8\nsVinterval\np9\nV24 hrs\np10\nsVenabled\np11\nI01\nsVretention_policy_value\np12\nI8\nsS'appliance_timezone'\np13\nS'UTC'\np14\nsVtimezone\np15\nVAfrica/Porto-Novo\np16\nsVstart_date\np17\nS'04/24/2019'\np18\ns.",
"status":"locked",
"error_msg":null,
"links":[
{
"rel":"self",
"href":"http://wlm_backend/v1/4dfe98a43bfa404785a812020066b4d6/workloads/orphan_workloads/4dfe98a43bfa404785a812020066b4d6/workloads/0ed39f25-5df2-4cc5-820f-2af2cde6aa67"
},
{
"rel":"bookmark",
"href":"http://wlm_backend/4dfe98a43bfa404785a812020066b4d6/workloads/orphan_workloads/4dfe98a43bfa404785a812020066b4d6/workloads/0ed39f25-5df2-4cc5-820f-2af2cde6aa67"
}
],
"scheduler_trust":null
}
]
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Tue, 17 Nov 2020 11:03:55 GMT
Content-Type: application/json
Content-Length: 100
Connection: keep-alive
X-Compute-Request-Id: req-0e58b419-f64c-47e1-adb9-21ea2a255839
{
"workloads":{
"imported_workloads":[
"faa03-f69a-45d5-a6fc-ae0119c77974"
],
"failed_workloads":[
]
}
}{
"workload_ids":[
"<workload_id>"
],
"upgrade":true
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 18 Nov 2020 15:40:56 GMT
Content-Type: application/json
Content-Length: 1625
Connection: keep-alive
X-Compute-Request-Id: req-2ad95c02-54c6-4908-887b-c16c5e2f20fe
{
"quota_types":[
{
"created_at":"2020-10-19T10:05:52.000000",
"updated_at":"2020-10-19T10:07:32.000000",
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"1c5d4290-2e08-11ea-889c-7440bb00b67d",
"display_name":"Workloads",
"display_description":"Total number of workload creation allowed per project",
"status":"available"
},
{
"created_at":"2020-10-19T10:05:52.000000",
"updated_at":"2020-10-19T10:07:32.000000",
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"b7273a06-2e08-11ea-889c-7440bb00b67d",
"display_name":"Snapshots",
"display_description":"Total number of snapshot creation allowed per project",
"status":"available"
},
{
"created_at":"2020-10-19T10:05:52.000000",
"updated_at":"2020-10-19T10:07:32.000000",
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"be323f58-2e08-11ea-889c-7440bb00b67d",
"display_name":"VMs",
"display_description":"Total number of VMs allowed per project",
"status":"available"
},
{
"created_at":"2020-10-19T10:05:52.000000",
"updated_at":"2020-10-19T10:07:32.000000",
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"c61324d0-2e08-11ea-889c-7440bb00b67d",
"display_name":"Volumes",
"display_description":"Total number of volume attachments allowed per project",
"status":"available"
},
{
"created_at":"2020-10-19T10:05:52.000000",
"updated_at":"2020-10-19T10:07:32.000000",
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"f02dd7a6-2e08-11ea-889c-7440bb00b67d",
"display_name":"Storage",
"display_description":"Total storage (in Bytes) allowed per project",
"status":"available"
}
]
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 18 Nov 2020 15:44:43 GMT
Content-Type: application/json
Content-Length: 342
Connection: keep-alive
X-Compute-Request-Id: req-5bf629fe-ffa2-4c90-b704-5178ba2ab09b
{
"quota_type":{
"created_at":"2020-10-19T10:05:52.000000",
"updated_at":"2020-10-19T10:07:32.000000",
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"1c5d4290-2e08-11ea-889c-7440bb00b67d",
"display_name":"Workloads",
"display_description":"Total number of workload creation allowed per project",
"status":"available"
}
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 18 Nov 2020 15:51:51 GMT
Content-Type: application/json
Content-Length: 24
Connection: keep-alive
X-Compute-Request-Id: req-08c8cdb6-b249-4650-91fb-79a6f7497927
{
"allowed_quotas":[
{
}
]
}{
"allowed_quotas":[
{
"project_id":"<project_id>",
"quota_type_id":"<quota_type_id>",
"allowed_value":"<integer>",
"high_watermark":"<Integer>"
}
]
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 18 Nov 2020 16:01:39 GMT
Content-Type: application/json
Content-Length: 766
Connection: keep-alive
X-Compute-Request-Id: req-e570ce15-de0d-48ac-a9e8-60af429aebc0
{
"allowed_quotas":[
{
"id":"262b117d-e406-4209-8964-004b19a8d422",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"quota_type_id":"1c5d4290-2e08-11ea-889c-7440bb00b67d",
"allowed_value":5,
"high_watermark":4,
"version":"4.0.115",
"quota_type_name":"Workloads"
},
{
"id":"68e7203d-8a38-4776-ba58-051e6d289ee0",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"quota_type_id":"f02dd7a6-2e08-11ea-889c-7440bb00b67d",
"allowed_value":-1,
"high_watermark":-1,
"version":"4.0.115",
"quota_type_name":"Storage"
},
{
"id":"ed67765b-aea8-4898-bb1c-7c01ecb897d2",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"quota_type_id":"be323f58-2e08-11ea-889c-7440bb00b67d",
"allowed_value":50,
"high_watermark":25,
"version":"4.0.115",
"quota_type_name":"VMs"
}
]
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 18 Nov 2020 16:15:07 GMT
Content-Type: application/json
Content-Length: 268
Connection: keep-alive
X-Compute-Request-Id: req-d87a57cd-c14c-44dd-931e-363158376cb7
{
"allowed_quotas":{
"id":"262b117d-e406-4209-8964-004b19a8d422",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"quota_type_id":"1c5d4290-2e08-11ea-889c-7440bb00b67d",
"allowed_value":5,
"high_watermark":4,
"version":"4.0.115",
"quota_type_name":"Workloads"
}
}HTTP/1.1 202 Accepted
Server: nginx/1.16.1
Date: Wed, 18 Nov 2020 16:24:04 GMT
Content-Type: application/json
Content-Length: 24
Connection: keep-alive
X-Compute-Request-Id: req-a4c02ee5-b86e-4808-92ba-c363b287f1a2
{"allowed_quotas": [{}]}{
"allowed_quotas":{
"project_id":"c76b3355a164498aa95ddbc960adc238",
"allowed_value":"20000",
"high_watermark":"18000"
}
}HTTP/1.1 202 Accepted
Server: nginx/1.16.1
Date: Wed, 18 Nov 2020 16:33:09 GMT
Content-Type: text/html; charset=UTF-8
Content-Length: 0
Connection: keep-alivecd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
chmod +x update_helm_annotations.sh
./update_helm_annotations.shcd ctlplane-scripts
./deploy_tvo_control_plane.shmv triliovault-cfg-scripts triliovault-cfg-scripts-old
git clone -b {{ trilio_branch }} https://github.com/trilioData/triliovault-cfg-scripts.gitcd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
vi tvo-operator-inputs.yaml database:
common:
use_trilio_galera: false
root_user_name: "root"
host: "openstack.openstack.svc"
port: 3306cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
./deploy_tvo_control_plane.shchmod +x install_operator.sh
./install_operator.sh <TVO_OPERATOR_CONTAINER_IMAGE_URL>[openstackdev@localhost]$ oc get pods
NAME READY STATUS RESTARTS AGE
job-triliovault-datamover-api-db-init-kwt7q 0/1 Completed 0 8m43s
job-triliovault-datamover-api-keystone-init-gggcm 0/1 Completed 0 9m30s
job-triliovault-datamover-api-rabbitmq-init-mtck7 0/1 Completed 0 9m16s
job-triliovault-wlm-cloud-trust-f6xt2 0/1 Completed 0 8m27s
job-triliovault-wlm-db-init-c2tpm 0/1 Completed 0 8m38s
job-triliovault-wlm-keystone-init-mw8q8 0/1 Completed 0 9m12s
job-triliovault-wlm-rabbitmq-init-qwvgr 0/1 Completed 0 8m47s
trilio-rabbitmq-cluster-server-0 1/1 Running 0 10m
trilio-rabbitmq-cluster-server-1 1/1 Running 0 10m
trilio-rabbitmq-cluster-server-2 1/1 Running 0 10m
triliovault-datamover-api-7754567c94-57cqk 1/1 Running 0 8m27s
triliovault-datamover-api-7754567c94-9q47t 1/1 Running 0 8m27s
triliovault-datamover-api-7754567c94-rk6qc 1/1 Running 0 8m27s
triliovault-object-store-bt1-s3-fk7qs 1/1 Running 0 8m28s
triliovault-object-store-bt1-s3-lhvv7 1/1 Running 0 8m28s
triliovault-object-store-bt1-s3-p85p7 1/1 Running 0 8m28s
triliovault-wlm-api-85469585c7-8bbm2 1/1 Running 0 8m27s
triliovault-wlm-api-85469585c7-bf8nx 1/1 Running 0 8m27s
triliovault-wlm-api-85469585c7-z8q64 1/1 Running 0 8m27s
triliovault-wlm-cron-796c4d4554-7tgfh 1/1 Running 0 8m27s
triliovault-wlm-scheduler-65c6d68c94-8599n 1/1 Running 0 8m27s
triliovault-wlm-scheduler-65c6d68c94-pqqqp 1/1 Running 0 8m27s
triliovault-wlm-scheduler-65c6d68c94-rj6lb 1/1 Running 0 8m27s
triliovault-wlm-workloads-6b7f65fc4f-97gvj 1/1 Running 0 8m27s
triliovault-wlm-workloads-6b7f65fc4f-9mt7n 1/1 Running 0 8m27s
triliovault-wlm-workloads-6b7f65fc4f-wrs8f 1/1 Running 0 8m27smkdir ~/trilio/oc -n openstack get secret osp-secret -o jsonpath='{.data.DbRootPassword}' | base64 -doc -n openstack rsh openstack-galera-0mysqldump -u root -p\
--single-transaction \
--no-create-info \
--no-create-db \
--skip-triggers \
--skip-routines \
--skip-events \
--replace \
workloadmgr > /tmp/t40-workloadmgr.sql
### Exit from podcd ~/trilio/
oc -n openstack cp openstack-galera-0:/tmp/t40-workloadmgr.sql ./t40-workloadmgr.sqlcd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
vi tvo-operator-inputs.yaml database:
common:
use_trilio_galera: true
root_user_name: "root"
host: "trilio-galera-cluster.trilio-openstack.svc"
port: 3306cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
./deploy_tvo_control_plane.sh[openstackdev@localhost ctlplane-scripts]$ oc get pods
NAME READY STATUS RESTARTS AGE
job-triliovault-datamover-api-db-init-flcvl 0/1 Completed 0 7m58s
job-triliovault-datamover-api-keystone-init-rfzxb 0/1 Completed 0 8m34s
job-triliovault-datamover-api-rabbitmq-init-zff58 0/1 Completed 0 8m20s
job-triliovault-wlm-cloud-trust-r99fr 0/1 Completed 0 7m42s
job-triliovault-wlm-db-init-l84hl 0/1 Completed 0 7m53s
job-triliovault-wlm-keystone-init-6tp6r 0/1 Completed 0 8m16s
job-triliovault-wlm-rabbitmq-init-9vwd8 0/1 Completed 0 8m2s
trilio-galera-cluster-galera-0 1/1 Running 0 9m17s
trilio-galera-cluster-galera-1 1/1 Running 0 9m17s
trilio-galera-cluster-galera-2 1/1 Running 0 9m17s
trilio-galera-wait-job-l4klg 0/1 Completed 0 9m17s
trilio-rabbitmq-cluster-server-0 1/1 Running 0 10m
trilio-rabbitmq-cluster-server-1 1/1 Running 0 10m
trilio-rabbitmq-cluster-server-2 1/1 Running 0 10m
triliovault-datamover-api-7d6977869-7gtfr 1/1 Running 0 7m43s
triliovault-datamover-api-7d6977869-lnvjb 1/1 Running 0 7m43s
triliovault-datamover-api-7d6977869-zqhqx 1/1 Running 0 7m43s
triliovault-object-store-bt1-s3-27bxz 1/1 Running 0 7m43s
triliovault-object-store-bt1-s3-hfrhw 1/1 Running 0 7m43s
triliovault-object-store-bt1-s3-phr6n 1/1 Running 0 7m43s
triliovault-wlm-api-566d9778d-287xk 1/1 Running 0 7m42s
triliovault-wlm-api-566d9778d-gzqq6 1/1 Running 0 7m42s
triliovault-wlm-api-566d9778d-hsksj 1/1 Running 0 7m42s
triliovault-wlm-cron-85b45d4447-97r84 1/1 Running 0 7m42s
triliovault-wlm-scheduler-7558f58dc9-2lfnt 1/1 Running 0 7m42s
triliovault-wlm-scheduler-7558f58dc9-8v6ts 1/1 Running 0 7m42s
triliovault-wlm-scheduler-7558f58dc9-pqwqk 1/1 Running 0 7m42s
triliovault-wlm-workloads-8468959778-6r2tp 1/1 Running 0 7m42s
triliovault-wlm-workloads-8468959778-c5pzz 1/1 Running 0 7m42s
triliovault-wlm-workloads-8468959778-dffcw 1/1 Running 0 7m42scd ~/trilio/
oc -n trilio-openstack cp ./t40-workloadmgr.sql trilio-galera-cluster-galera-0:/tmp/t40-workloadmgr.sqloc -n trilio-openstack get secret trilio-openstack-secret -o jsonpath='{.data.DbRootPassword}' | base64 -doc -n trilio-openstack rsh trilio-galera-cluster-galera-0du -sh /tmp/t40-workloadmgr.sqlmysql -u root -p workloadmgr < /tmp/t40-workloadmgr.sqloc -n trilio-openstack get pods
oc -n trilio-openstack delete pod <wlm-cron-pod>cp triliovault-cfg-scripts-old/redhat-director-scripts/rhosp18/ctlplane-scripts/tvo-backup-target-cr-amazon-s3.yaml triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
cp triliovault-cfg-scripts-old/redhat-director-scripts/rhosp18/ctlplane-scripts/tvo-backup-target-cr-other-s3.yaml triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
cp triliovault-cfg-scripts-old/redhat-director-scripts/rhosp18/dataplane-scripts/<BACKUP-TARGET-NAME> triliovault-cfg-scripts/redhat-director-scripts/rhosp18/dataplane-scripts/cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts
vi tvo-backup-target-cr-amazon-s3.yaml
vi tvo-backup-target-cr-other-s3.yaml images:
triliovault_object_store: "registry.connect.redhat.com/trilio/trilio-wlm:<NEW-BUILD-TAG>"oc -n trilio-openstack apply -f tvo-backup-target-cr-amazon-s3.yaml
oc -n trilio-openstack apply -f tvo-backup-target-cr-other-s3.yamloc -n trilio-openstack get pods | grep <backup-target-name>
oc -n trilio-openstack describe pod <pod-name> | grep Image:cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/dataplane-scripts
vi <BACKUP-TARGET-NAME>/cm-trilio-backup-target.yaml
trilio_env.yml: |
triliovault_wlm_image: "registry.connect.redhat.com/trilio/trilio-wlm:<NEW-BUILD-TAG>"oc -n openstack apply -f <BACKUP-TARGET-NAME>/cm-trilio-backup-target.yamlvi <BACKUP-TARGET-NAME>/trilio-add-backup-target-service.yaml
openStackAnsibleEERunnerImage: registry.connect.redhat.com/trilio/trilio-ansible-runner-rhoso:<NEW-BUILD-TAG>
oc -n openstack apply -f <BACKUP-TARGET-NAME>/trilio-add-backup-target-service.yamlvi <BACKUP-TARGET-NAME>/trilio-add-backup-target-deployment.yaml
metadata:
name: edpm-trilio-add-backup-target-s3-bt4
oc -n openstack apply -f <BACKUP-TARGET-NAME>/trilio-add-backup-target-deployment.yamlpodman ps -a | grep <BACKUP-TARGET-NAME>trilio-openstacktvo-operator-inputs.yaml in HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 04 Nov 2020 12:58:38 GMT
Content-Type: application/json
Content-Length: 266
Connection: keep-alive
X-Compute-Request-Id: req-ed391cf9-aa56-4c53-8153-fd7fb238c4b9
{
"snapshots":[
{
"id":"1ff16412-a0cd-4e6a-9b4a-b5d4440fffc4",
"created_at":"2020-11-02T14:03:18.000000",
"status":"available",
"snapshot_type":"full",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"name":"snapshot",
"description":"-",
"host":"TVM1"
}
]
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 04 Nov 2020 13:58:38 GMT
Content-Type: application/json
Content-Length: 283
Connection: keep-alive
X-Compute-Request-Id: req-fb8dc382-e5de-4665-8d88-c75b2e473f5c
{
"snapshot":{
"id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"created_at":"2020-11-04T13:58:37.694637",
"status":"creating",
"snapshot_type":"full",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"name":"API taken 2",
"description":"API taken description 2",
"host":""
}
}{
"snapshot":{
"is_scheduled":<true/false>,
"name":"<name>",
"description":"<description>"
}
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 04 Nov 2020 14:07:18 GMT
Content-Type: application/json
Content-Length: 6609
Connection: keep-alive
X-Compute-Request-Id: req-f88fb28f-f4ce-4585-9c3c-ebe08a3f60cd
{
"snapshot":{
"id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"created_at":"2020-11-04T13:58:37.000000",
"updated_at":"2020-11-04T14:06:03.000000",
"finished_at":"2020-11-04T14:06:03.000000",
"user_id":"ccddc7e7a015487fa02920f4d4979779",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"status":"available",
"snapshot_type":"full",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"instances":[
{
"id":"67d6a100-fee6-4aa5-83a1-66b070d2eabe",
"name":"cirros-2",
"status":"available",
"metadata":{
"availability_zone":"nova",
"config_drive":"",
"data_transfer_time":"0",
"object_store_transfer_time":"0",
"root_partition_type":"Linux",
"trilio_ordered_interfaces":"192.168.100.80",
"vm_metadata":"{\"workload_name\": \"Workload_1\", \"workload_id\": \"18b809de-d7c8-41e2-867d-4a306407fb11\", \"trilio_ordered_interfaces\": \"192.168.100.80\", \"config_drive\": \"\"}",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"workload_name":"Workload_1"
},
"flavor":{
"vcpus":"1",
"ram":"512",
"disk":"1",
"ephemeral":"0"
},
"security_group":[
{
"name":"default",
"security_group_type":"neutron"
}
],
"nics":[
{
"mac_address":"fa:16:3e:cf:10:91",
"ip_address":"192.168.100.80",
"network":{
"id":"5fb7027d-a2ac-4a21-9ee1-438c281d2b26",
"name":"robert_internal",
"cidr":null,
"network_type":"neutron",
"subnet":{
"id":"b7b54304-aa82-4d50-91e6-66445ab56db4",
"name":"robert_internal",
"cidr":"192.168.100.0/24",
"ip_version":4,
"gateway_ip":"192.168.100.1"
}
}
}
],
"vdisks":[
{
"label":null,
"resource_id":"fa888089-5715-4228-9e5a-699f8f9d59ba",
"restore_size":1073741824,
"vm_id":"67d6a100-fee6-4aa5-83a1-66b070d2eabe",
"volume_id":"51491d30-9818-4332-b056-1f174e65d3e3",
"volume_name":"51491d30-9818-4332-b056-1f174e65d3e3",
"volume_size":"1",
"volume_type":"iscsi",
"volume_mountpoint":"/dev/vda",
"availability_zone":"nova",
"metadata":{
"readonly":"False",
"attached_mode":"rw"
}
}
]
},
{
"id":"e33c1eea-c533-4945-864d-0da1fc002070",
"name":"cirros-1",
"status":"available",
"metadata":{
"availability_zone":"nova",
"config_drive":"",
"data_transfer_time":"0",
"object_store_transfer_time":"0",
"root_partition_type":"Linux",
"trilio_ordered_interfaces":"192.168.100.176",
"vm_metadata":"{\"workload_name\": \"Workload_1\", \"workload_id\": \"18b809de-d7c8-41e2-867d-4a306407fb11\", \"trilio_ordered_interfaces\": \"192.168.100.176\", \"config_drive\": \"\"}",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"workload_name":"Workload_1"
},
"flavor":{
"vcpus":"1",
"ram":"512",
"disk":"1",
"ephemeral":"0"
},
"security_group":[
{
"name":"default",
"security_group_type":"neutron"
}
],
"nics":[
{
"mac_address":"fa:16:3e:cf:4d:27",
"ip_address":"192.168.100.176",
"network":{
"id":"5fb7027d-a2ac-4a21-9ee1-438c281d2b26",
"name":"robert_internal",
"cidr":null,
"network_type":"neutron",
"subnet":{
"id":"b7b54304-aa82-4d50-91e6-66445ab56db4",
"name":"robert_internal",
"cidr":"192.168.100.0/24",
"ip_version":4,
"gateway_ip":"192.168.100.1"
}
}
}
],
"vdisks":[
{
"label":null,
"resource_id":"c8293bb0-031a-4d33-92ee-188380211483",
"restore_size":1073741824,
"vm_id":"e33c1eea-c533-4945-864d-0da1fc002070",
"volume_id":"365ad75b-ca76-46cb-8eea-435535fd2e22",
"volume_name":"365ad75b-ca76-46cb-8eea-435535fd2e22",
"volume_size":"1",
"volume_type":"iscsi",
"volume_mountpoint":"/dev/vda",
"availability_zone":"nova",
"metadata":{
"readonly":"False",
"attached_mode":"rw"
}
}
]
}
],
"name":"API taken 2",
"description":"API taken description 2",
"host":"TVM1",
"size":44171264,
"restore_size":2147483648,
"uploaded_size":44171264,
"progress_percent":100,
"progress_msg":"Snapshot of workload is complete",
"warning_msg":null,
"error_msg":null,
"time_taken":428,
"pinned":false,
"metadata":[
{
"created_at":"2020-11-04T14:05:57.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"16fc1ce5-81b2-4c07-ac63-6c9232e0418f",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"key":"backup_media_target",
"value":"10.10.2.20:/upstream"
},
{
"created_at":"2020-11-04T13:58:37.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"5a56bbad-9957-4fb3-9bbc-469ec571b549",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"key":"cancel_requested",
"value":"0"
},
{
"created_at":"2020-11-04T14:05:29.000000",
"updated_at":"2020-11-04T14:05:45.000000",
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"d36abef7-9663-4d88-8f2e-ef914f068fb4",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"key":"data_transfer_time",
"value":"0"
},
{
"created_at":"2020-11-04T14:05:57.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"c75f9151-ef87-4a74-acf1-42bd2588ee64",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"key":"hostnames",
"value":"[\"cirros-1\", \"cirros-2\"]"
},
{
"created_at":"2020-11-04T14:05:29.000000",
"updated_at":"2020-11-04T14:05:45.000000",
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"02916cce-79a2-4ad9-a7f6-9d9f59aa8424",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"key":"object_store_transfer_time",
"value":"0"
},
{
"created_at":"2020-11-04T14:05:57.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"96efad2f-a24f-4cde-8e21-9cd78f78381b",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"key":"pause_at_snapshot",
"value":"0"
},
{
"created_at":"2020-11-04T14:05:57.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"572a0b21-a415-498f-b7fa-6144d850ef56",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"key":"policy_id",
"value":"b79aa5f3-405b-4da4-96e2-893abf7cb5fd"
},
{
"created_at":"2020-11-04T14:05:57.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"dfd7314d-8443-4a95-8e2a-7aad35ef97ea",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"key":"preferredgroup",
"value":"[]"
},
{
"created_at":"2020-11-04T14:05:57.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"2e17e1e4-4bb1-48a9-8f11-c4cd2cfca2a9",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"key":"topology",
"value":"\"\\\"\\\"\""
},
{
"created_at":"2020-11-04T14:05:57.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"33762790-8743-4e20-9f50-3505a00dbe76",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"key":"workload_approx_backup_size",
"value":"6"
}
],
"restores_info":""
}
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 04 Nov 2020 14:18:36 GMT
Content-Type: application/json
Content-Length: 56
Connection: keep-alive
X-Compute-Request-Id: req-82ffb2b6-b28e-4c73-89a4-310890960dbc
{"task": {"id": "a73de236-6379-424a-abc7-33d553e050b7"}}
HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Wed, 04 Nov 2020 14:26:44 GMT
Content-Type: application/json
Content-Length: 0
Connection: keep-alive
X-Compute-Request-Id: req-47a5a426-c241-429e-9d69-d40aed0dd68dtrilio_defaults.yaml in overcloud deploy command with `-e` option as shown below.triliovault_nfs_map_input.yml in the current directory and provide compute host and NFS share/IP map.triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/environments/trilio_nfs_map.yamltrilio_nfs_map.yaml) in overcloud deploy command with '-e' option as shown below.MultiIPNfsEnabled is set to true in the trilio_env.yaml file and that NFS is used as a backup target. oc get network-attachment-definition -n trilio-openstack NAME AGE
storage 2m5sgit clone -b {{ trilio_branch }} https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/oc create namespace trilio-openstackecho -n "<password_string>" | base64cd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
vi trilio-openstack-secret.yaml
oc apply -f trilio-openstack-secret.yaml -n trilio-openstackcd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
chmod +x create-image-pull-secret.sh
./create-image-pull-secret.sh <IMAGE_REGISTRY_URL> <IMAGE_REGISTRY_USER>
Note: Use the following URLs for Trilio image registries, as needed.
Default RedHat registry url for Trilio images: registry.connect.redhat.comcd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts
chmod +x install_operator.sh
./install_operator.sh <TVO_OPERATOR_CONTAINER_IMAGE_URL>
Example:-
Redhat_registry_tvo_operator_container_image_Url:- ./install_operator.sh registry.connect.redhat.com/trilio/trilio-openstack-operator-rhoso:6.1.0-rhoso18.0oc get pods -A | grep tvo-operatoroc get crds | grep tvocd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts
chmod +x set_operator_inputs.py
./set_operator_inputs.py <TRILIO-CONTAINER-IMAGE-TAG>vi tvo-operator-inputs.yaml database:
common:
use_trilio_galera: false
root_user_name: "root"
host: "openstack.openstack.svc"
port: 3306 database:
common:
use_trilio_galera: true
root_user_name: "root"
host: "trilio-galera-cluster.trilio-openstack.svc"
port: 3306oc get nodesoc label nodes <Openshift_node_name1> trilio-control-plane=enabled
oc label nodes <Openshift_node_name2> trilio-control-plane=enabled
oc label nodes <Openshift_node_name3> trilio-control-plane=enabledoc get nodes -l trilio-control-plane=enabledcd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
vi certificate.yamlcd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
./create_cert_secrets.shoc -n trilio-openstack describe secret cert-triliovault-datamover-public-svc
oc -n trilio-openstack describe secret cert-triliovault-datamover-internal-svc
oc -n trilio-openstack describe secret cert-triliovault-wlm-public-svc
oc -n trilio-openstack describe secret cert-triliovault-wlm-internal-svc
oc -n trilio-openstack describe secret cert-trilio-rabbitmq-cluster
oc -n trilio-openstack describe secret cert-trilio-galera-clustercd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/ctlplane-scripts/
./deploy_tvo_control_plane.shoc get pods -A | grep tvo-operator
oc -n <namespace> logs -f <pod-name> oc -n trilio-openstack get tvocontrolplane
oc describe tvocontrolplane <TVO_CONTROL_PLANE_OBJECT_NAME> -n trilio-openstack
[root@localhost ctlplane-scripts]# oc -n trilio-openstack get pods
NAME READY STATUS RESTARTS AGE
job-triliovault-datamover-api-db-init-7jcdd 0/1 Completed 0 2m40s
job-triliovault-datamover-api-keystone-init-kzp2b 0/1 Completed 0 3m12s
job-triliovault-datamover-api-rabbitmq-init-96967 0/1 Completed 0 3m
job-triliovault-wlm-cloud-trust-v5f6z 0/1 Completed 0 2m22s
job-triliovault-wlm-db-init-hgsmq 0/1 Completed 0 2m35s
job-triliovault-wlm-keystone-init-98jmw 0/1 Completed 0 2m56s
job-triliovault-wlm-rabbitmq-init-696s9 0/1 Completed 0 2m44s
trilio-galera-cluster-galera-0 1/1 Running 0 3m56s
trilio-galera-cluster-galera-1 1/1 Running 0 3m56s
trilio-galera-cluster-galera-2 1/1 Running 0 3m56s
trilio-galera-wait-job-zjlcb 0/1 Completed 0 3m56s
trilio-rabbitmq-cluster-server-0 1/1 Running 0 5m
trilio-rabbitmq-cluster-server-1 1/1 Running 0 5m
trilio-rabbitmq-cluster-server-2 1/1 Running 0 5m
triliovault-datamover-api-6df7bc9d9d-7rd44 1/1 Running 0 2m22s
triliovault-datamover-api-6df7bc9d9d-ftd98 1/1 Running 0 2m22s
triliovault-datamover-api-6df7bc9d9d-nrn4j 1/1 Running 0 2m22s
triliovault-object-store-bt1-s3-cvm54 1/1 Running 0 2m23s
triliovault-object-store-bt1-s3-j5lb6 1/1 Running 0 2m23s
triliovault-object-store-bt1-s3-r6rkr 1/1 Running 0 2m23s
triliovault-object-store-bt2-s3-25ltf 1/1 Running 0 2m22s
triliovault-object-store-bt2-s3-52hmb 1/1 Running 0 2m22s
triliovault-object-store-bt2-s3-krnlx 1/1 Running 0 2m22s
triliovault-wlm-api-54fbf49c9d-fw7ww 1/1 Running 0 2m22s
triliovault-wlm-api-54fbf49c9d-m97sw 1/1 Running 0 2m22s
triliovault-wlm-api-54fbf49c9d-xssws 1/1 Running 0 2m22s
triliovault-wlm-cron-5fd56b8685-zq65m 1/1 Running 0 2m22s
triliovault-wlm-scheduler-78d9f77499-gbfxn 1/1 Running 0 2m22s
triliovault-wlm-scheduler-78d9f77499-k77ct 1/1 Running 0 2m22s
triliovault-wlm-scheduler-78d9f77499-lg224 1/1 Running 0 2m22s
triliovault-wlm-workloads-579f587f64-g7jtb 1/1 Running 0 2m22s
triliovault-wlm-workloads-579f587f64-ktf9n 1/1 Running 0 2m22s
triliovault-wlm-workloads-579f587f64-tmv52 1/1 Running 0 2m22s
[root@localhost ctlplane-scripts]#oc -n trilio-openstack logs <job-triliovault-wlm-cloud-trust> oc config set-context --current --namespace=openstackcd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/dataplane-scripts/
oc -n openstack apply -f ../ctlplane-scripts/trilio-openstack-secret.yamlcd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/dataplane-scripts/
chmod +x set_data_plane_inputs.py
./set_data_plane_inputs.pycd triliovault-cfg-scripts/redhat-director-scripts/rhosp18/dataplane-scripts/
vi cm-trilio-datamover.yaml## Create config map
oc -n openstack apply -f cm-trilio-datamover.yaml
vi trilio-datamover-service.yaml
oc -n openstack apply -f trilio-datamover-service.yaml
oc -n openstack get OpenStackDataPlaneNodeSet
vi trilio-data-plane-deployment.yaml
## To check list of deployment names already used, please use following command
oc -n openstack get OpenStackDataPlaneDeployment## Trigger deployment
oc -n openstack apply -f trilio-data-plane-deployment.yamloc -n openstack get pod -l openstackdataplanedeployment=<OpenStackDataPlaneDeployment_NAME>
oc -n openstack logs -f <trilio-datamover-pod-name>podman ps | grep trilio[kni@localhost ~]$ oc get openstackversion -n openstack
NAME TARGET VERSION AVAILABLE VERSION DEPLOYED VERSION
openstack-controlplane 18.0.2-20240923.2 18.0.2-20240923.2 18.0.2-20240923.2oc edit openstackversion <OPENSTACKVERSION_RESOURCE_NAME> -n openstackoc edit openstackversion openstack-controlplane -n openstackapiVersion: core.openstack.org/v1beta1
kind: OpenStackVersion
metadata:
name: openstack-controlplane
spec:
customContainerImages:
horizonImage: docker.io/trilio/trilio-horizon-plugin:<IMAGE_TAG>
[...]oc describe openstackversion <OPENSTACKVERSION_RESOURCE_NAME> -n openstackoc annotate route horizon -n openstack haproxy.router.openshift.io/timeout=180s --overwriteoc get route horizon -n openstack -o yaml | grep timeouthaproxy.router.openshift.io/timeout: 180s oc get network-attachment-definition storage -n openstack -o yaml > storage-nad.yaml sed -i '/ namespace:/d' storage-nad.yaml
sed -i '/ resourceVersion:/d' storage-nad.yaml
sed -i '/ uid:/d' storage-nad.yaml
sed -i '/ creationTimestamp:/d' storage-nad.yaml oc apply -f storage-nad.yaml -n trilio-openstack pod:
triliovault_wlm_api:
networks: ""
debug_mode: false
triliovault_wlm_scheduler:
networks: ""
debug_mode: false
triliovault_wlm_cron:
networks: ""
debug_mode: false
triliovault_wlm_workloads:
networks: ""
debug_mode: false
triliovault_object_store:
networks: ""
debug_mode: false
triliovault_datamover_api:
networks: ""
debug_mode: false pod:
triliovault_wlm_api:
networks: "storage,tenant"workloadmgr restore-list [--snapshot_id <snapshot_id>]workloadmgr restore-show [--output <output>] <restore_id>workloadmgr restore-delete <restores_id>workloadmgr restore-cancel <restore_id>workloadmgr snapshot-oneclick-restore [--display-name <display-name>]
[--display-description <display-description>]
<snapshot_id>workloadmgr snapshot-selective-restore [--display-name <display-name>]
[--display-description <display-description>]
[--filename <filename>]
<snapshot_id>workloadmgr snapshot-inplace-restore [--display-name <display-name>]
[--display-description <display-description>]
[--filename <filename>]
<snapshot_id>{
oneclickrestore: False,
restore_type: selective,
type: openstack,
openstack:
{
instances:
[
{
include: True,
id: 890888bc-a001-4b62-a25b-484b34ac6e7e,
name: cdcentOS-1,
availability_zone:,
nics: [],
vdisks:
[
{
id: 4cc2b474-1f1b-4054-a922-497ef5564624,
new_volume_type:,
availability_zone: nova
}
],
flavor:
{
ram: 512,
ephemeral: 0,
vcpus: 1,
swap:,
disk: 1,
id: 1
}
}
],
restore_topology: True,
networks_mapping:
{
networks: []
}
}
}'instances':[
{
'name':'cdcentOS-1-selective',
'availability_zone':'US-East',
'nics':[
{
'mac_address':'fa:16:3e:00:bd:60',
'ip_address':'192.168.0.100',
'id':'8b871820-f92e-41f6-80b4-00555a649b4c',
'network':{
'subnet':{
'id':'2b1506f4-2a7a-4602-a8b9-b7e8a49f95b8'
},
'id':'d5047e84-077e-4b38-bc43-e3360b0ad174'
}
}
],
'vdisks':[
{
'id':'4cc2b474-1f1b-4054-a922-497ef5564624',
'new_volume_type':'ceph',
'availability_zone':'nova'
}
],
'flavor':{
'ram':2048,
'ephemeral':0,
'vcpus':1,
'swap':'',
'disk':20,
'id':'2'
},
'include':True,
'id':'890888bc-a001-4b62-a25b-484b34ac6e7e'
}
]restore_topology:Truerestore_topology:False{
'oneclickrestore':False,
'openstack':{
'instances':[
{
'name':'cdcentOS-1-selective',
'availability_zone':'US-East',
'nics':[
{
'mac_address':'fa:16:3e:00:bd:60',
'ip_address':'192.168.0.100',
'id':'8b871820-f92e-41f6-80b4-00555a649b4c',
'network':{
'subnet':{
'id':'2b1506f4-2a7a-4602-a8b9-b7e8a49f95b8'
},
'id':'d5047e84-077e-4b38-bc43-e3360b0ad174'
}
}
],
'vdisks':[
{
'id':'4cc2b474-1f1b-4054-a922-497ef5564624',
'new_volume_type':'ceph',
'availability_zone':'nova'
}
],
'flavor':{
'ram':2048,
'ephemeral':0,
'vcpus':1,
'swap':'',
'disk':20,
'id':'2'
},
'include':True,
'id':'890888bc-a001-4b62-a25b-484b34ac6e7e'
}
],
'restore_topology':False,
'networks_mapping':{
'networks':[
{
'snapshot_network':{
'subnet':{
'id':'8b609440-4abf-4acf-a36b-9a0fa70c383c'
},
'id':'8b871820-f92e-41f6-80b4-00555a649b4c'
},
'target_network':{
'subnet':{
'id':'2b1506f4-2a7a-4602-a8b9-b7e8a49f95b8'
},
'id':'d5047e84-077e-4b38-bc43-e3360b0ad174',
'name':'internal'
}
}
]
}
},
'restore_type':'selective',
'type':'openstack'
}{
'oneclickrestore':False,
'restore_type':'inplace',
'type':'openstack',
'openstack':{
'instances':[
{
'restore_boot_disk':True,
'include':True,
'id':'ba8c27ab-06ed-4451-9922-d919171078de',
'vdisks':[
{
'restore_cinder_volume':True,
'id':'04d66b70-6d7c-4d1b-98e0-11059b89cba6'
}
]
}
]
}
}cd /home/stack
source stackrc
git clone -b {{ trilio_branch }} https://github.com/trilioData/triliovault-cfg-scripts.git
cd triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>cd triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/scripts/
chmod +x *.sh cp <S3_SELF_SIGNED_CERT_CA_CHAIN_FILE> /home/stack/triliovault-cfg-scripts/redhat-director-scripts/rhosp17/puppet/trilio/files/s3-cert-<S3_BACKUP_TARGET_NAME>.pem
cp s3-ca.pem /home/stack/triliovault-cfg-scripts/redhat-director-scripts/rhosp17/puppet/trilio/files/s3-cert-BT2_S3.pem- OS::TripleO::Services::TrilioDatamoverApi
- OS::TripleO::Services::TrilioWlmApi
- OS::TripleO::Services::TrilioWlmWorkloads
- OS::TripleO::Services::TrilioWlmScheduler
- OS::TripleO::Services::TrilioWlmCron
- OS::TripleO::Services::TrilioObjectStore- OS::TripleO::Services::TrilioDatamover
- OS::TripleO::Services::TrilioObjectStoreparameter_defaults:
ContainerImagePrepare:
- push_destination: false
set:
namespace: registry.redhat.io/...
...
...
ContainerImageRegistryCredentials:
registry.redhat.io:
myuser: 'p@55w0rd!'
registry.connect.redhat.com:
myuser: 'p@55w0rd!'
ContainerImageRegistryLogin: true$ grep '<CONTAINER-TAG-VERSION>-rhosp17.1' trilio_env.yaml
ContainerTriliovaultDatamoverImage: undercloudqa162.ctlplane.trilio.local:8787/trilio/trilio-datamover:<CONTAINER-TAG-VERSION>-rhosp17.1
ContainerTriliovaultDatamoverApiImage: undercloudqa162.ctlplane.trilio.local:8787/trilio/trilio-datamover-api:<CONTAINER-TAG-VERSION>-rhosp17.1
ContainerTriliovaultWlmImage: undercloudqa162.ctlplane.trilio.local:8787/trilio/trilio-wlm:<CONTAINER-TAG-VERSION>-rhosp17.1
ContainerHorizonImage: undercloudqa162.ctlplane.trilio.local:8787/trilio/trilio-horizon-plugin:<CONTAINER-TAG-VERSION>-rhosp17.1cd /home/stack/triliovault-cfg-scripts/redhat-director-scripts/rhosp17/scripts/
sudo ./prepare_trilio_images.sh <UNDERCLOUD_REGISTRY_HOSTNAME> <CONTAINER-TAG-VERSION>-rhosp17.1
## Example of running the script with parameters
sudo ./prepare_trilio_images.sh undercloudqa17.ctlplane.trilio.local 6.0.0-rhosp17.1
## Verify changes
grep '<CONTAINER-TAG-VERSION>-rhosp17.1' ../environments/trilio_env.yaml
ContainerTriliovaultDatamoverImage: undercloudqa17.ctlplane.trilio.local:8787/trilio/trilio-datamover:<CONTAINER-TAG-VERSION>-rhosp17.1
ContainerTriliovaultDatamoverApiImage: undercloudqa17.ctlplane.trilio.local:8787/trilio/trilio-datamover-api:<CONTAINER-TAG-VERSION>-rhosp17.1
ContainerTriliovaultWlmImage: undercloudqa17.ctlplane.trilio.local:8787/trilio/trilio-wlm:<CONTAINER-TAG-VERSION>-rhosp17.1
ContainerHorizonImage: undercloudqa17.ctlplane.trilio.local:8787/trilio/trilio-horizon-plugin:<CONTAINER-TAG-VERSION>-rhosp17.1
$ openstack tripleo container image list | grep <CONTAINER-TAG-VERSION>-rhosp17.1
| docker://undercloudqa17.ctlplane.trilio.local:8787/trilio/trilio-datamover-api:<CONTAINER-TAG-VERSION>-rhosp17.1 |
| docker://undercloudqa17.ctlplane.trilio.local:8787/trilio/trilio-horizon-plugin:<CONTAINER-TAG-VERSION>-rhosp17.1 |
| docker://undercloudqa17.ctlplane.trilio.local:8787/trilio/trilio-datamover:<CONTAINER-TAG-VERSION>-rhosp17.1 |
| docker://undercloudqa17.ctlplane.trilio.local:8787/trilio/trilio-wlm:<CONTAINER-TAG-VERSION>-rhosp17.1 |cd /home/stack/triliovault-cfg-scripts/redhat-director-scripts/rhosp17/environments
$ grep '<CONTAINER-TAG-VERSION>-rhosp17.1' trilio_env.yaml
ContainerTriliovaultDatamoverImage: <SATELLITE_REGISTRY_URL>/trilio/trilio-datamover:<CONTAINER-TAG-VERSION>-rhosp17.1
ContainerTriliovaultDatamoverApiImage: <SATELLITE_REGISTRY_URL>/trilio/trilio-datamover-api:<CONTAINER-TAG-VERSION>-rhosp17.1
ContainerTriliovaultWlmImage: <SATELLITE_REGISTRY_URL>/trilio/trilio-wlm:<CONTAINER-TAG-VERSION>-rhosp17.1
ContainerHorizonImage: <SATELLITE_REGISTRY_URL>/trilio/trilio-horizon-plugin:<CONTAINER-TAG-VERSION>-rhosp17.1cd redhat-director-scripts/rhosp17/scripts/
dnf install python3-ruamel-yaml
python3 update_object_store_yaml.pycd /home/stack/triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/scripts/
./generate_passwords.sh-e /home/stack/triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/environments/passwords.yamlsource <OVERCLOUD_RC_FILE>vi /home/stack/triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/environments/trilio_env.yamlopenstack role add --user <cloud-Admin-UserName> --domain <Cloud-Admin-DomainName> admin
# Example
openstack role add --user admin --domain default admincd /home/stack/triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/scripts
./create_wlm_ids_conf.shcat /home/stack/triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/puppet/trilio/files/triliovault_wlm_ids.confmodprobe nbd nbds_max=128
lsmod | grep nbdmodprobe fuse
lsmod | grep fusesource stackrccd /home/stack/triliovault-cfg-scripts/redhat-director-scripts/rhosp17/scripts/
./upload_puppet_module.sh
## Output of above command looks like following
Creating tarball...
Tarball created.
renamed '/tmp/puppet-modules-MUIyvXI/puppet-modules.tar.gz' -> '/var/lib/tripleo/artifacts/overcloud-artifacts/puppet-modules.tar.gz'
Creating heat environment file: /home/stack/.tripleo/environments/puppet-modules-url.yaml
[stack@uc17-1 scripts]$ cat /home/stack/.tripleo/environments/puppet-modules-url.yaml
parameter_defaults:
DeployArtifactURLs:
- /var/lib/tripleo/artifacts/overcloud-artifacts/puppet-modules.tar.gz
## Above command creates following file.
ls -ll /home/stack/.tripleo/environments/puppet-modules-url.yaml-e /home/stack/triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/environments/trilio_defaults.yaml openstack overcloud deploy --stack overcloudtrain5 --templates \
--libvirt-type qemu \
--ntp-server 192.168.1.34 \
-e /home/stack/templates/node-info.yaml \
-e /home/stack/containers-prepare-parameter.yaml \
-e /home/stack/templates/ceph-config.yaml \
-e /home/stack/templates/cinder_size.yaml \
-e /usr/share/openstack-tripleo-heat-templates/environments/services/barbican.yaml \
-e /usr/share/openstack-tripleo-heat-templates/environments/barbican-backend-simple-crypto.yaml \
-e /home/stack/templates/configure-barbican.yaml \
-e /home/stack/templates/multidomain_horizon.yaml \
-e /usr/share/openstack-tripleo-heat-templates/environments/services/neutron-ovs.yaml \
-e /usr/share/openstack-tripleo-heat-templates/environments/ssl/enable-internal-tls.yaml \
-e /home/stack/templates/tls-parameters.yaml \
-e /usr/share/openstack-tripleo-heat-templates/environments/services/haproxy-public-tls-certmonger.yaml \
-e /home/stack/triliovault-cfg-scripts/redhat-director-scripts/rhosp17/environments/trilio_env.yaml \
-e /home/stack/triliovault-cfg-scripts/redhat-director-scripts/rhosp17/environments/trilio_env_tls_everywhere_dns.yaml \
-e /home/stack/triliovault-cfg-scripts/redhat-director-scripts/rhosp17/environments/trilio_defaults.yaml \
-e /home/stack/triliovault-cfg-scripts/redhat-director-scripts/rhosp17/environments/trilio_passwords.yaml \
-r /usr/share/openstack-tripleo-heat-templates/roles_data.yamlopenstack stack failures list overcloud
heat stack-list --show-nested -f "status=FAILED"
heat resource-list --nested-depth 5 overcloud | grep FAILEDpodman logs <trilio-container-name>
tailf /var/log/containers/<trilio-container-name>/<trilio-container-name>.logcd triliovault-cfg-scripts/common/(undercloud) [stack@ucqa161 ~]$ openstack server list
+--------------------------------------+-------------------------------+--------+----------------------+----------------+---------+
| ID | Name | Status | Networks | Image | Flavor |
+--------------------------------------+-------------------------------+--------+----------------------+----------------+---------+
| 8c3d04ae-fcdd-431c-afa6-9a50f3cb2c0d | overcloudtrain1-controller-2 | ACTIVE | ctlplane=172.30.5.18 | overcloud-full | control |
| 103dfd3e-d073-4123-9223-b8cf8c7398fe | overcloudtrain1-controller-0 | ACTIVE | ctlplane=172.30.5.11 | overcloud-full | control |
| a3541849-2e9b-4aa0-9fa9-91e7d24f0149 | overcloudtrain1-controller-1 | ACTIVE | ctlplane=172.30.5.25 | overcloud-full | control |
| 74a9f530-0c7b-49c4-9a1f-87e7eeda91c0 | overcloudtrain1-novacompute-0 | ACTIVE | ctlplane=172.30.5.30 | overcloud-full | compute |
| c1664ac3-7d9c-4a36-b375-0e4ee19e93e4 | overcloudtrain1-novacompute-1 | ACTIVE | ctlplane=172.30.5.15 | overcloud-full | compute |
+--------------------------------------+-------------------------------+--------+----------------------+----------------+---------+$ cat /home/stack/triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/environments/trilio_nfs_map.yaml
# TriliovaultMultiIPNfsMap represents datamover, WLM nodes (compute and controller nodes) and its NFS share mapping.
parameter_defaults:
TriliovaultMultiIPNfsMap:
overcloudtrain4-controller-0: 172.30.1.11:/rhospnfs
overcloudtrain4-controller-1: 172.30.1.11:/rhospnfs
overcloudtrain4-controller-2: 172.30.1.11:/rhospnfs
overcloudtrain4-novacompute-0: 172.30.1.12:/rhospnfs
overcloudtrain4-novacompute-1: 172.30.1.13:/rhospnfssudo pip3 install PyYAML==5.1 3python3 ./generate_nfs_map.pygrep ':.*:' triliovault_nfs_map_output.yml >> ../redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/environments/trilio_nfs_map.yaml
-e /home/stack/triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/environments/trilio_nfs_map.yaml/home/stack/triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/environments/trilio_env.yaml/var/lib/config-data/puppet-generated/haproxy/etc/haproxy/haproxy.cfglisten triliovault_datamover_api
bind 172.30.4.53:13784 transparent ssl crt /etc/pki/tls/private/overcloud_endpoint.pem
bind 172.30.4.53:8784 transparent ssl crt /etc/pki/tls/certs/haproxy/overcloud-haproxy-internal_api.pem
balance roundrobin
http-request set-header X-Forwarded-Proto https if { ssl_fc }
http-request set-header X-Forwarded-Proto http if !{ ssl_fc }
http-request set-header X-Forwarded-Port %[dst_port]
maxconn 50000
option httpchk
option httplog
option forwardfor
retries 5
timeout check 10m
timeout client 10m
timeout connect 10m
timeout http-request 10m
timeout queue 10m
timeout server 10m
server overcloudtraindev2-controller-0.internalapi.trilio.local 172.30.4.57:8784 check fall 5 inter 2000 rise 2 verifyhost overcloudtraindev2-controller-0.internalapi.trilio.localretries 5
timeout http-request 10m
timeout queue 10m
timeout connect 10m
timeout client 10m
timeout server 10m
timeout check 10m
balance roundrobin
maxconn 50000/home/stack/triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/services/triliovault-datamover-api.yaml tripleo::haproxy::trilio_datamover_api::options:
'retries': '5'
'maxconn': '50000'
'balance': 'roundrobin'
'timeout http-request': '10m'
'timeout queue': '10m'
'timeout connect': '10m'
'timeout client': '10m'
'timeout server': '10m'
'timeout check': '10m'triliovault-cfg-scripts/redhat-director-scripts/<RHOSP_RELEASE_DIRECTORY>/environments/trilio_env.yamlTrilioDatamoverOptVolumes:
- <mount-dir-on-compute-host>:<mount-dir-inside-the-datamover-container>
## For example, the `/mnt/mount-on-host` directory below is mounted on the Compute host, and you want to mount it as the `/mnt/mount-inside-container` directory inside the Datamover container
[root@overcloudtrain5-novacompute-0 heat-admin]# df -h | grep 172.
172.25.0.10:/mnt/tvault/42436 2.5T 2.3T 234G 91% /mnt/mount-on-host
## Then provide that mount in the below format
TrilioDatamoverOptVolumes:
- /mnt/mount-on-host:/mnt/mount-inside-container[root@overcloudtrain5-novacompute-0 heat-admin]# podman exec -itu root triliovault_datamover bash
[root@overcloudtrain5-novacompute-0 heat-admin]# df -h | grep 172.
172.25.0.10:/mnt/tvault/42436 2.5T 2.3T 234G 91% /mnt/mount-inside-containerThis document explains the concepts of Backup Targets and Backup Target Types in Trilio, their purpose, and how they provide additional flexibility and control for backup storage management.
















[DEFAULT]
.
.
enabled_backends = NFS_BT1, S3_BT2, S3_BT3, S3_BT4
.
.# NFS Backup Target-1 as a default backup target
[NFS_BT1]
vault_storage_type = nfs
vault_storage_filesystem_export = 192.168.1.35:/mnt/trilio/share1
vault_storage_nfs_options = nolock,soft,timeo=600,intr,lookupcache=none,retrans=10
is_default = 1
# Ceph S3 Backup Target-2
[S3_BT2]
vault_storage_type = s3
vault_s3_endpoint_url = https://cephs3.triliodata.demo
vault_s3_bucket = trilio-test-bucket
vault_storage_filesystem_export = cephs3.triliodata.demo/trilio-test-bucket
immutable = 0
is_default = 0
# Ceph S3 Backup Target-3 with Object-lock enabled bucket
[S3_BT3]
vault_storage_type = s3
vault_s3_endpoint_url = https://cephs3.triliodata.demo
vault_s3_bucket = object-locked-cephs3-bucket
immutable = 1
vault_storage_filesystem_export = cephs3.triliodata.demo/object-locked-cephs3-bucket
# AWS S3 Backup Target-4 with Object-lock enabled bucket
[S3_BT4]
vault_storage_type = s3
vault_s3_endpoint_url =
vault_s3_bucket = object-locked-aws-s3-01
immutable = 1
vault_storage_filesystem_export = object-locked-aws-s3-01
workloadmgr backup-target-createopenstack workloadmgr backup target create# workloadmgr help backup-target-create
usage: workloadmgr backup-target-create [-h] [-f {json,shell,table,value,yaml}] [-c COLUMN] [--noindent]
[--prefix PREFIX] [--max-width <integer>] [--fit-width] [--print-empty]
[--s3-endpoint-url <s3_endpoint_url>] [--s3-bucket <s3_bucket>]
[--filesystem-export <filesystem_export>] [--type <type>]
[--btt-name <btt_name>] [--default] [--immutable]
[--metadata metadata [metadata ...]]
Create backup target
optional arguments:
-h, --help show this help message and exit
--s3-endpoint-url <s3_endpoint_url>
S3 endpoint URL.
--s3-bucket <s3_bucket>
S3 bucket. Required for s3 backup target type
--filesystem-export <filesystem_export>
BT filesystem export path. Required for nfs backup target only. For s3 it's handled internally
--type <type>
Required BT type. Eg. nfs, s3
--btt-name <btt_name>
optional Backup Target Type name. If not provided, then we use filesystem export value to generate BTT name
--default denotes whether Backup Target is default
--immutable denotes whether Backup Target is immutable
--metadata metadata [metadata ...]
Specify a key value pairs to include in the BT metadata Eg. --metadata key1=value1 key2=value2 keyN=valueN
workloadmgr backup-target-deleteopenstack workloadmgr backup target delete# workloadmgr help backup-target-delete
usage: workloadmgr backup-target-delete [-h] <backup_target_id>
Delete existing backup target
positional arguments:
<backup_target_id>
ID of the backup target to delete.
workloadmgr backup-target-listopenstack workloadmgr backup target list# workloadmgr help backup-target-list
usage: workloadmgr backup-target-list [-h]
[-f {csv,json,table,value,yaml}]
[-c COLUMN]
[--quote {all,minimal,none,nonnumeric}]
[--noindent]
[--max-width <integer>]
[--fit-width] [--print-empty]
[--sort-column SORT_COLUMN]
[--sort-ascending | --sort-descending]
List all the backup targets.
options:
-h, --help show this help message and exit
output formatters:
output formatter options
-f {csv,json,table,value,yaml}, --format {csv,json,table,value,yaml}
the output format, defaults to table
-c COLUMN, --column COLUMN
specify the column(s) to include, can be repeated to
show multiple columns
--sort-column SORT_COLUMN
specify the column(s) to sort the data (columns
specified first have a priority, non-existing columns
are ignored), can be repeated
--sort-ascending sort the column(s) in ascending order
--sort-descending sort the column(s) in descending order
CSV Formatter:
--quote {all,minimal,none,nonnumeric}
when to include quotes, defaults to nonnumeric
json formatter:
--noindent whether to disable indenting the JSON
table formatter:
--max-width <integer>
Maximum display width, <1 to disable. You can also use
the CLIFF_MAX_TERM_WIDTH environment variable, but the
parameter takes precedence.
--fit-width Fit the table to the display width. Implied if --max-
width greater than 0. Set the environment variable
CLIFF_FIT_WIDTH=1 to always enable
--print-empty Print empty table if there is no data to show.
workloadmgr backup-target-showopenstack workloadmgr backup target show# workloadmgr help backup-target-show
usage: workloadmgr backup-target-show [-h]
[-f {json,shell,table,value,yaml}]
[-c COLUMN] [--noindent]
[--prefix PREFIX]
[--max-width <integer>]
[--fit-width] [--print-empty]
<backup_target_id>
Show details about backup targets
positional arguments:
<backup_target_id>
ID of the backup target.
options:
-h, --help show this help message and exit
output formatters:
output formatter options
-f {json,shell,table,value,yaml}, --format {json,shell,table,value,yaml}
the output format, defaults to table
-c COLUMN, --column COLUMN
specify the column(s) to include, can be repeated to
show multiple columns
json formatter:
--noindent whether to disable indenting the JSON
shell formatter:
a format a UNIX shell can parse (variable="value")
--prefix PREFIX
add a prefix to all variable names
table formatter:
--max-width <integer>
Maximum display width, <1 to disable. You can also use
the CLIFF_MAX_TERM_WIDTH environment variable, but the
parameter takes precedence.
--fit-width Fit the table to the display width. Implied if --max-
width greater than 0. Set the environment variable
CLIFF_FIT_WIDTH=1 to always enable
--print-empty Print empty table if there is no data to show.workloadmgr backup-target-set-defaultopenstack workloadmgr backup target set default# workloadmgr help backup-target-set-default
usage: workloadmgr backup-target-set-default [-h] [-f {json,shell,table,value,yaml}] [-c COLUMN] [--noindent]
[--prefix PREFIX] [--max-width <integer>] [--fit-width] [--print-empty]
<backup_target_id>
Set an existing backup target as the default; its respective BTT will be set as default. If the BTT doesn't exist, it will be created and
set as the default BTT
positional arguments:
<backup_target_id>
ID of the backup target which needs to be set as default
workloadmgr backup-target-type-listopenstack workloadmgr backup target type list# workloadmgr help backup-target-type-list
usage: workloadmgr backup-target-type-list [-h]
[-f {csv,json,table,value,yaml}]
[-c COLUMN]
[--quote {all,minimal,none,nonnumeric}]
[--noindent]
[--max-width <integer>]
[--fit-width] [--print-empty]
[--sort-column SORT_COLUMN]
[--sort-ascending | --sort-descending]
[--detail {True,False}]
[--project-id <project_id>]
List all the backup target types.
options:
-h, --help show this help message and exit
--detail {True,False}
List detail backup target types
--project-id <project_id>
ID of the project.
output formatters:
output formatter options
-f {csv,json,table,value,yaml}, --format {csv,json,table,value,yaml}
the output format, defaults to table
-c COLUMN, --column COLUMN
specify the column(s) to include, can be repeated to
show multiple columns
--sort-column SORT_COLUMN
specify the column(s) to sort the data (columns
specified first have a priority, non-existing columns
are ignored), can be repeated
--sort-ascending sort the column(s) in ascending order
--sort-descending sort the column(s) in descending order
CSV Formatter:
--quote {all,minimal,none,nonnumeric}
when to include quotes, defaults to nonnumeric
json formatter:
--noindent whether to disable indenting the JSON
table formatter:
--max-width <integer>
Maximum display width, <1 to disable. You can also use
the CLIFF_MAX_TERM_WIDTH environment variable, but the
parameter takes precedence.
--fit-width Fit the table to the display width. Implied if --max-
width greater than 0. Set the environment variable
CLIFF_FIT_WIDTH=1 to always enable
--print-empty Print empty table if there is no data to show.
workloadmgr backup-target-type-showopenstack workloadmgr backup target type show# workloadmgr help backup-target-type-show
usage: workloadmgr backup-target-type-show [-h] [-f {json,shell,table,value,yaml}]
[--print-empty]
<backup_target_id>
Show details about backup target types
positional arguments:
<backup_target_id>
ID of the backup target.
options:
-h, --help show this help message and exit
output formatters:
output formatter options
-f {json,shell,table,value,yaml}, --format {json,shell,table,value,yaml}
the output format, defaults to table
-c COLUMN, --column COLUMN
specify the column(s) to include, can be repeated to show
json formatter:
--noindent whether to disable indenting the JSON
shell formatter:
a format a UNIX shell can parse (variable="value")
--prefix PREFIX
add a prefix to all variable names
table formatter:
--max-width <integer>
Maximum display width, <1 to disable. You can also use the
--fit-width Fit the table to the display width. Implied if --max-width
--print-empty Print empty table if there is no data to show.
workloadmgr backup-target-type-createopenstack workloadmgr backup target type create# workloadmgr help backup-target-type-create
usage: workloadmgr backup-target-type-create [-h]
[-f {json,shell,table,value,yaml}]
[-c COLUMN] [--noindent]
[--prefix PREFIX]
[--max-width <integer>]
[--fit-width] [--print-empty]
[--default]
[--description <description>]
(--public | --project-ids <project-ids> [<project-ids> ...])
[--metadata <key=key-name>]
[--backup-targets-id <backup_targets_id>]
<name>
Create backup target type
positional arguments:
<name> required BTT name.
options:
-h, --help show this help message and exit
--default denotes whether BTT is default
--description <description>
Optional BTT description. (Default=None)
--public denotes whether BTT is of public type
--project-ids <project-ids> [<project-ids> ...]
Required to assign BTT to projects
--metadata <key=key-name>
Specify a key value pairs to include in the BTT metadata
Specify option multiple times to include multiple keys.
key=value
--backup-targets-id <backup_targets_id>
ID of the backup target for which BTT would be created
output formatters:
output formatter options
-f {json,shell,table,value,yaml}, --format {json,shell,table,value,yaml}
the output format, defaults to table
-c COLUMN, --column COLUMN
specify the column(s) to include, can be repeated to
show multiple columns
json formatter:
--noindent whether to disable indenting the JSON
shell formatter:
a format a UNIX shell can parse (variable="value")
--prefix PREFIX
add a prefix to all variable names
table formatter:
--max-width <integer>
Maximum display width, <1 to disable. You can also use
the CLIFF_MAX_TERM_WIDTH environment variable, but the
parameter takes precedence.
--fit-width Fit the table to the display width. Implied if --max-
width greater than 0. Set the environment variable
CLIFF_FIT_WIDTH=1 to always enable
--print-empty Print empty table if there is no data to show.
workloadmgr backup-target-type-modifyopenstack workloadmgr backup target type modify# workloadmgr help backup-target-type-modif
usage: workloadmgr backup-target-type-modify [-h] [-f {json,shell,table,value,yaml
[--print-empty] [--name <name>] [--de
(--public | --project-ids <project-id
[--backup-target-type-id <backup_targ
Modify existing backup target type
options:
-h, --help show this help message and exit
--name <name>
Optional BTT name. (Default=None)
--default denotes whether BTT is default
--description <description>
Optional BTT description. (Default=None)
--public denotes whether BTT is of public type
--project-ids <project-ids> [<project-ids> ...]
Required to assign BTT to projects
--metadata <key=key-name>
Specify a key value pairs to include in the BTT metadata S
--backup-target-type-id <backup_target_type_id>
ID of the backup target type for which given projects will
output formatters:
output formatter options
-f {json,shell,table,value,yaml}, --format {json,shell,table,value,yaml}
the output format, defaults to table
-c COLUMN, --column COLUMN
specify the column(s) to include, can be repeated to show
json formatter:
--noindent whether to disable indenting the JSON
shell formatter:
a format a UNIX shell can parse (variable="value")
--prefix PREFIX
add a prefix to all variable names
table formatter:
--max-width <integer>
Maximum display width, <1 to disable. You can also use the
--fit-width Fit the table to the display width. Implied if --max-width
--print-empty Print empty table if there is no data to show.
workloadmgr backup-target-type-add-projectsopenstack workloadmgr backup target type add projects# workloadmgr help backup-target-type-add-projects
usage: workloadmgr backup-target-type-add-projects [-h]
[-f {json,shell,table,value,yaml}]
[-c COLUMN]
[--noindent]
[--prefix PREFIX]
[--max-width <integer>]
[--fit-width] [--print-empty]
[--backup-target-type-id <backup_target_type_id>]
[--project-ids <project-ids> [<project-ids> ...]]
Assign projects to existing backup target type
options:
-h, --help show this help message and exit
--backup-target-type-id <backup_target_type_id>
ID of the backup target type for which given projects
will be assigned
--project-ids <project-ids> [<project-ids> ...]
Required to assign BTT to projects
output formatters:
output formatter options
-f {json,shell,table,value,yaml}, --format {json,shell,table,value,yaml}
the output format, defaults to table
-c COLUMN, --column COLUMN
specify the column(s) to include, can be repeated to
show multiple columns
json formatter:
--noindent whether to disable indenting the JSON
shell formatter:
a format a UNIX shell can parse (variable="value")
--prefix PREFIX
add a prefix to all variable names
table formatter:
--max-width <integer>
Maximum display width, <1 to disable. You can also use
the CLIFF_MAX_TERM_WIDTH environment variable, but the
parameter takes precedence.
--fit-width Fit the table to the display width. Implied if --max-
width greater than 0. Set the environment variable
CLIFF_FIT_WIDTH=1 to always enable
--print-empty Print empty table if there is no data to show.
workloadmgr backup-target-type-remove-projectsopenstack workloadmgr backup target type remove projects# workloadmgr help backup-target-type-remove-projects
usage: workloadmgr backup-target-type-remove-projects [-h]
[-f {json,shell,table,value,yaml}]
[-c COLUMN]
[--noindent]
[--prefix PREFIX]
[--max-width <integer>]
[--fit-width]
[--print-empty]
[--backup-target-type-id <backup_target_type_id>]
[--project-ids <project-ids> [<project-ids> ...]]
Remove already assigned projects from backup target types
options:
-h, --help show this help message and exit
--backup-target-type-id <backup_target_type_id>
ID of the backup target type for which given projects
will be assigned
--project-ids <project-ids> [<project-ids> ...]
Required to assign BTT to projects
output formatters:
output formatter options
-f {json,shell,table,value,yaml}, --format {json,shell,table,value,yaml}
the output format, defaults to table
-c COLUMN, --column COLUMN
specify the column(s) to include, can be repeated to
show multiple columns
json formatter:
--noindent whether to disable indenting the JSON
shell formatter:
a format a UNIX shell can parse (variable="value")
--prefix PREFIX
add a prefix to all variable names
table formatter:
--max-width <integer>
Maximum display width, <1 to disable. You can also use
the CLIFF_MAX_TERM_WIDTH environment variable, but the
parameter takes precedence.
--fit-width Fit the table to the display width. Implied if --max-
width greater than 0. Set the environment variable
CLIFF_FIT_WIDTH=1 to always enable --print-empty Print empty table if there is no data to show.
workloadmgr backup-target-type-add-metadataopenstack workloadmgr backup target type add metadata# workloadmgr help backup-target-type-add-metadata
usage: workloadmgr backup-target-type-add-metadata [-h]
[-f {json,shell,table,value,yaml}]
[-c COLUMN] [--noindent]
[--prefix PREFIX]
[--max-width <integer>]
[--fit-width]
[--print-empty]
[--backup-target-type-id <backup_target_type_id>]
[--metadata <key=key-name>]
Add metadata to existing backup target type
optional arguments:
-h, --help show this help message and exit
--backup-target-type-id <backup_target_type_id>
ID of the backup target type for which given metadata
will be created
--metadata <key=key-name>
Specify a key value pairs to include in the BTT
metadata Specify option multiple times to include
multiple keys. key=value
output formatters:
output formatter options
-f {json,shell,table,value,yaml}, --format {json,shell,table,value,yaml}
the output format, defaults to table
-c COLUMN, --column COLUMN
specify the column(s) to include, can be repeated to
show multiple columns
json formatter:
--noindent whether to disable indenting the JSON
shell formatter:
a format a UNIX shell can parse (variable="value")
--prefix PREFIX add a prefix to all variable names
table formatter:
--max-width <integer>
Maximum display width, <1 to disable. You can also use
the CLIFF_MAX_TERM_WIDTH environment variable, but the
parameter takes precedence.
--fit-width Fit the table to the display width. Implied if --max-
width greater than 0. Set the environment variable
CLIFF_FIT_WIDTH=1 to always enable
--print-empty Print empty table if there is no data to show.
workloadmgr backup-target-type-remove-metadataopenstack workloadmgr backup target type remove metadata# workloadmgr help backup-target-type-remove-metadata
usage: workloadmgr backup-target-type-remove-metadata [-h]
[-f {json,shell,table,value,yaml}]
[-c COLUMN] [--noindent]
[--prefix PREFIX]
[--max-width <integer>]
[--fit-width]
[--print-empty]
[--backup-target-type-id <backup_target_type_id>]
[--metadata-keys <metadata-keys> [<metadata-keys> ...]]
Remove metadata from existing backup target type
optional arguments:
-h, --help show this help message and exit
--backup-target-type-id <backup_target_type_id>
ID of the backup target type for which given projects
will be assigned
--metadata-keys <metadata-keys> [<metadata-keys> ...]
Required to remove metadata of BTT
output formatters:
output formatter options
-f {json,shell,table,value,yaml}, --format {json,shell,table,value,yaml}
the output format, defaults to table
-c COLUMN, --column COLUMN
specify the column(s) to include, can be repeated to
show multiple columns
json formatter:
--noindent whether to disable indenting the JSON
shell formatter:
a format a UNIX shell can parse (variable="value")
--prefix PREFIX add a prefix to all variable names
table formatter:
--max-width <integer>
Maximum display width, <1 to disable. You can also use
the CLIFF_MAX_TERM_WIDTH environment variable, but the
parameter takes precedence.
--fit-width Fit the table to the display width. Implied if --max-
width greater than 0. Set the environment variable
CLIFF_FIT_WIDTH=1 to always enable
--print-empty Print empty table if there is no data to show.
workloadmgr backup-target-type-deleteopenstack workloadmgr backup target type delete# workloadmgr help backup-target-type-delete
usage: workloadmgr backup-target-type-delete [-h] <backup_target_type_id>
Delete existing backup target type
positional arguments:
<backup_target_type_id>
ID of the backup target type to delete.
optional arguments:
-h, --help show this help message and exit
HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Thu, 05 Nov 2020 11:28:43 GMT
Content-Type: application/json
Content-Length: 4308
Connection: keep-alive
X-Compute-Request-Id: req-0bc531b6-be6e-43b4-90bd-39ef26ef1463
{
"restores":[
{
"id":"29fdc1f8-1d53-4a10-bb45-e539a64cdbfc",
"created_at":"2020-11-05T10:17:40.000000",
"updated_at":"2020-11-05T10:17:40.000000",
"finished_at":"2020-11-05T10:27:20.000000",
"user_id":"ccddc7e7a015487fa02920f4d4979779",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"status":"available",
"restore_type":"restore",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"links":[
{
"rel":"self",
"href":"http://wlm_backend/v1/c76b3355a164498aa95ddbc960adc238/restores/29fdc1f8-1d53-4a10-bb45-e539a64cdbfc"
},
{
"rel":"bookmark",
"href":"http://wlm_backend/c76b3355a164498aa95ddbc960adc238/restores/29fdc1f8-1d53-4a10-bb45-e539a64cdbfc"
}
],
"name":"OneClick Restore",
"description":"-",
"host":"TVM2",
"size":2147483648,
"uploaded_size":2147483648,
"progress_percent":100,
"progress_msg":"Restore from snapshot is complete",
"warning_msg":null,
"error_msg":null,
"time_taken":580,
"restore_options":{
"name":"OneClick Restore",
"oneclickrestore":true,
"restore_type":"oneclick",
"openstack":{
"instances":[
{
"name":"cirros-2",
"id":"67d6a100-fee6-4aa5-83a1-66b070d2eabe",
"availability_zone":"nova"
},
{
"name":"cirros-1",
"id":"e33c1eea-c533-4945-864d-0da1fc002070",
"availability_zone":"nova"
}
]
},
"type":"openstack",
"description":"-"
},
"metadata":[
{
"created_at":"2020-11-05T10:27:20.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"91ab2495-1903-4d75-982b-08a4e480835b",
"restore_id":"29fdc1f8-1d53-4a10-bb45-e539a64cdbfc",
"key":"data_transfer_time",
"value":"0"
},
{
"created_at":"2020-11-05T10:27:20.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"e0e01eec-24e0-4abd-9b8c-19993a320e9f",
"restore_id":"29fdc1f8-1d53-4a10-bb45-e539a64cdbfc",
"key":"object_store_transfer_time",
"value":"0"
},
{
"created_at":"2020-11-05T10:27:20.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"eb909267-ba9b-41d1-8861-a9ec22d6fd84",
"restore_id":"29fdc1f8-1d53-4a10-bb45-e539a64cdbfc",
"key":"restore_user_selected_value",
"value":"Oneclick Restore"
}
]
},
{
"id":"4673d962-f6a5-4209-8d3e-b9f2e9115f07",
"created_at":"2020-11-04T14:37:31.000000",
"updated_at":"2020-11-04T14:37:31.000000",
"finished_at":"2020-11-04T14:45:27.000000",
"user_id":"ccddc7e7a015487fa02920f4d4979779",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"status":"error",
"restore_type":"restore",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"links":[
{
"rel":"self",
"href":"http://wlm_backend/v1/c76b3355a164498aa95ddbc960adc238/restores/4673d962-f6a5-4209-8d3e-b9f2e9115f07"
},
{
"rel":"bookmark",
"href":"http://wlm_backend/c76b3355a164498aa95ddbc960adc238/restores/4673d962-f6a5-4209-8d3e-b9f2e9115f07"
}
],
"name":"OneClick Restore",
"description":"-",
"host":"TVM2",
"size":2147483648,
"uploaded_size":2147483648,
"progress_percent":100,
"progress_msg":"",
"warning_msg":null,
"error_msg":"Failed restoring snapshot: Error creating instance e271bd6e-f53e-4ebc-875a-5787cc4dddf7",
"time_taken":476,
"restore_options":{
"name":"OneClick Restore",
"oneclickrestore":true,
"restore_type":"oneclick",
"openstack":{
"instances":[
{
"name":"cirros-2",
"id":"67d6a100-fee6-4aa5-83a1-66b070d2eabe",
"availability_zone":"nova"
},
{
"name":"cirros-1",
"id":"e33c1eea-c533-4945-864d-0da1fc002070",
"availability_zone":"nova"
}
]
},
"type":"openstack",
"description":"-"
},
"metadata":[
{
"created_at":"2020-11-04T14:45:27.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"be6dc7e2-1be2-476b-9338-aed986be3b55",
"restore_id":"4673d962-f6a5-4209-8d3e-b9f2e9115f07",
"key":"data_transfer_time",
"value":"0"
},
{
"created_at":"2020-11-04T14:45:27.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"2e4330b7-6389-4e21-b31b-2503b5441c3e",
"restore_id":"4673d962-f6a5-4209-8d3e-b9f2e9115f07",
"key":"object_store_transfer_time",
"value":"0"
},
{
"created_at":"2020-11-04T14:45:27.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"561c806b-e38a-496c-a8de-dfe96cb3e956",
"restore_id":"4673d962-f6a5-4209-8d3e-b9f2e9115f07",
"key":"restore_user_selected_value",
"value":"Oneclick Restore"
}
]
}
]
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Thu, 05 Nov 2020 14:04:45 GMT
Content-Type: application/json
Content-Length: 2639
Connection: keep-alive
X-Compute-Request-Id: req-30640219-e94e-4651-9b9e-49f5574e2a7f
{
"restore":{
"id":"29fdc1f8-1d53-4a10-bb45-e539a64cdbfc",
"created_at":"2020-11-05T10:17:40.000000",
"updated_at":"2020-11-05T10:17:40.000000",
"finished_at":"2020-11-05T10:27:20.000000",
"user_id":"ccddc7e7a015487fa02920f4d4979779",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"status":"available",
"restore_type":"restore",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"snapshot_details":{
"created_at":"2020-11-04T13:58:37.000000",
"updated_at":"2020-11-05T10:27:22.000000",
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"user_id":"ccddc7e7a015487fa02920f4d4979779",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"snapshot_type":"full",
"display_name":"API taken 2",
"display_description":"API taken description 2",
"size":44171264,
"restore_size":2147483648,
"uploaded_size":44171264,
"progress_percent":100,
"progress_msg":"Creating Instance: cirros-2",
"warning_msg":null,
"error_msg":null,
"host":"TVM1",
"finished_at":"2020-11-04T14:06:03.000000",
"data_deleted":false,
"pinned":false,
"time_taken":428,
"vault_storage_id":null,
"status":"available"
},
"workload_id":"18b809de-d7c8-41e2-867d-4a306407fb11",
"instances":[
{
"id":"1fb104bf-7e2b-4cb6-84f6-96aabc8f1dd2",
"name":"cirros-2",
"status":"available",
"metadata":{
"config_drive":"",
"instance_id":"67d6a100-fee6-4aa5-83a1-66b070d2eabe",
"production":"1"
}
},
{
"id":"b083bb70-e384-4107-b951-8e9e7bbac380",
"name":"cirros-1",
"status":"available",
"metadata":{
"config_drive":"",
"instance_id":"e33c1eea-c533-4945-864d-0da1fc002070",
"production":"1"
}
}
],
"networks":[
],
"subnets":[
],
"routers":[
],
"links":[
{
"rel":"self",
"href":"http://wlm_backend/v1/c76b3355a164498aa95ddbc960adc238/restores/29fdc1f8-1d53-4a10-bb45-e539a64cdbfc"
},
{
"rel":"bookmark",
"href":"http://wlm_backend/c76b3355a164498aa95ddbc960adc238/restores/29fdc1f8-1d53-4a10-bb45-e539a64cdbfc"
}
],
"name":"OneClick Restore",
"description":"-",
"host":"TVM2",
"size":2147483648,
"uploaded_size":2147483648,
"progress_percent":100,
"progress_msg":"Restore from snapshot is complete",
"warning_msg":null,
"error_msg":null,
"time_taken":580,
"restore_options":{
"name":"OneClick Restore",
"oneclickrestore":true,
"restore_type":"oneclick",
"openstack":{
"instances":[
{
"name":"cirros-2",
"id":"67d6a100-fee6-4aa5-83a1-66b070d2eabe",
"availability_zone":"nova"
},
{
"name":"cirros-1",
"id":"e33c1eea-c533-4945-864d-0da1fc002070",
"availability_zone":"nova"
}
]
},
"type":"openstack",
"description":"-"
},
"metadata":[
]
}
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Thu, 05 Nov 2020 14:21:07 GMT
Content-Type: application/json
Content-Length: 0
Connection: keep-alive
X-Compute-Request-Id: req-0e155b21-8931-480a-a749-6d8764666e4dHTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Thu, 05 Nov 2020 15:13:30 GMT
Content-Type: application/json
Content-Length: 0
Connection: keep-alive
X-Compute-Request-Id: req-98d4853c-314c-4f27-bd3f-f81bda1a2840HTTP/1.1 202 Accepted
Server: nginx/1.16.1
Date: Thu, 05 Nov 2020 14:30:56 GMT
Content-Type: application/json
Content-Length: 992
Connection: keep-alive
X-Compute-Request-Id: req-7e18c309-19e5-49cb-a07e-90dd368fddae
{
"restore":{
"id":"3df1d432-2f76-4ebd-8f89-1275428842ff",
"created_at":"2020-11-05T14:30:56.048656",
"updated_at":"2020-11-05T14:30:56.048656",
"finished_at":null,
"user_id":"ccddc7e7a015487fa02920f4d4979779",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"status":"restoring",
"restore_type":"restore",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"links":[
{
"rel":"self",
"href":"http://wlm_backend/v1/c76b3355a164498aa95ddbc960adc238/restores/3df1d432-2f76-4ebd-8f89-1275428842ff"
},
{
"rel":"bookmark",
"href":"http://wlm_backend/c76b3355a164498aa95ddbc960adc238/restores/3df1d432-2f76-4ebd-8f89-1275428842ff"
}
],
"name":"One Click Restore",
"description":"One Click Restore",
"host":"",
"size":0,
"uploaded_size":0,
"progress_percent":0,
"progress_msg":null,
"warning_msg":null,
"error_msg":null,
"time_taken":0,
"restore_options":{
"openstack":{
},
"type":"openstack",
"oneclickrestore":true,
"vmware":{
},
"restore_type":"oneclick"
},
"metadata":[
]
}
}{
"restore":{
"options":{
"openstack":{
},
"type":"openstack",
"oneclickrestore":true,
"vmware":{},
"restore_type":"oneclick"
},
"name":"One Click Restore",
"description":"One Click Restore"
}
}HTTP/1.1 202 Accepted
Server: nginx/1.16.1
Date: Mon, 09 Nov 2020 09:53:31 GMT
Content-Type: application/json
Content-Length: 1713
Connection: keep-alive
X-Compute-Request-Id: req-84f00d6f-1b12-47ec-b556-7b3ed4c2f1d7
{
"restore":{
"id":"778baae0-6c64-4eb1-8fa3-29324215c43c",
"created_at":"2020-11-09T09:53:31.037588",
"updated_at":"2020-11-09T09:53:31.037588",
"finished_at":null,
"user_id":"ccddc7e7a015487fa02920f4d4979779",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"status":"restoring",
"restore_type":"restore",
"snapshot_id":"2e56d167-bad7-43c7-8ede-a613c3fe7844",
"links":[
{
"rel":"self",
"href":"http://wlm_backend/v1/c76b3355a164498aa95ddbc960adc238/restores/778baae0-6c64-4eb1-8fa3-29324215c43c"
},
{
"rel":"bookmark",
"href":"http://wlm_backend/c76b3355a164498aa95ddbc960adc238/restores/778baae0-6c64-4eb1-8fa3-29324215c43c"
}
],
"name":"API",
"description":"API Created",
"host":"",
"size":0,
"uploaded_size":0,
"progress_percent":0,
"progress_msg":null,
"warning_msg":null,
"error_msg":null,
"time_taken":0,
"restore_options":{
"openstack":{
"instances":[
{
"vdisks":[
{
"new_volume_type":"iscsi",
"id":"365ad75b-ca76-46cb-8eea-435535fd2e22",
"availability_zone":"nova"
}
],
"name":"cirros-1-selective",
"availability_zone":"nova",
"nics":[
],
"flavor":{
"vcpus":1,
"disk":1,
"swap":"",
"ram":512,
"ephemeral":0,
"id":"1"
},
"include":true,
"id":"e33c1eea-c533-4945-864d-0da1fc002070"
},
{
"include":false,
"id":"67d6a100-fee6-4aa5-83a1-66b070d2eabe"
}
],
"restore_topology":false,
"networks_mapping":{
"networks":[
{
"snapshot_network":{
"subnet":{
"id":"b7b54304-aa82-4d50-91e6-66445ab56db4"
},
"id":"5fb7027d-a2ac-4a21-9ee1-438c281d2b26"
},
"target_network":{
"subnet":{
"id":"b7b54304-aa82-4d50-91e6-66445ab56db4"
},
"id":"5fb7027d-a2ac-4a21-9ee1-438c281d2b26",
"name":"internal"
}
}
]
}
},
"restore_type":"selective",
"type":"openstack",
"oneclickrestore":false
},
"metadata":[
]
}
}{
"restore":{
"name":"<restore name>",
"description":"<restore description>",
"options":{
"openstack":{
"instances":[
{
"name":"<new name of instance>",
"include":<true/false>,
"id":"<original id of instance to be restored>",
"availability_zone":"<availability zone>",
"vdisks":[
{
"id":"<original ID of Volume>",
"new_volume_type":"<new volume type>",
"availability_zone":"<Volume availability zone>"
}
],
"nics":[
   {
      "mac_address":"<mac address of the pre-created port>",
      "ip_address":"<IP of the pre-created port>",
      "id":"<ID of the pre-created port>",
      "network":{
         "subnet":{
            "id":"<ID of the subnet of the pre-created port>"
         },
         "id":"<ID of the network of the pre-created port>"
      }
   }
],
"flavor":{
"vcpus":<Integer>,
"disk":<Integer>,
"swap":<Integer>,
"ram":<Integer>,
"ephemeral":<Integer>,
"id":<Integer>
}
}
],
"restore_topology":<true/false>,
"networks_mapping":{
"networks":[
{
"snapshot_network":{
"subnet":{
"id":"<ID of the original Subnet ID>"
},
"id":"<ID of the original Network ID>"
},
"target_network":{
"subnet":{
"id":"<ID of the target Subnet ID>"
},
"id":"<ID of the target Network ID>",
"name":"<name of the target network>"
}
}
]
}
},
"restore_type":"selective",
"type":"openstack",
"oneclickrestore":false
}
}
}HTTP/1.1 202 Accepted
Server: nginx/1.16.1
Date: Mon, 09 Nov 2020 12:53:03 GMT
Content-Type: application/json
Content-Length: 1341
Connection: keep-alive
X-Compute-Request-Id: req-311fa97e-0fd7-41ed-873b-482c149ee743
{
"restore":{
"id":"0bf96f46-b27b-425c-a10f-a861cc18b82a",
"created_at":"2020-11-09T12:53:02.726757",
"updated_at":"2020-11-09T12:53:02.726757",
"finished_at":null,
"user_id":"ccddc7e7a015487fa02920f4d4979779",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"status":"restoring",
"restore_type":"restore",
"snapshot_id":"ed4f29e8-7544-4e1c-af8a-a76031211926",
"links":[
{
"rel":"self",
"href":"http://wlm_backend/v1/c76b3355a164498aa95ddbc960adc238/restores/0bf96f46-b27b-425c-a10f-a861cc18b82a"
},
{
"rel":"bookmark",
"href":"http://wlm_backend/c76b3355a164498aa95ddbc960adc238/restores/0bf96f46-b27b-425c-a10f-a861cc18b82a"
}
],
"name":"API",
"description":"API description",
"host":"",
"size":0,
"uploaded_size":0,
"progress_percent":0,
"progress_msg":null,
"warning_msg":null,
"error_msg":null,
"time_taken":0,
"restore_options":{
"restore_type":"inplace",
"type":"openstack",
"oneclickrestore":false,
"openstack":{
"instances":[
{
"restore_boot_disk":true,
"include":true,
"id":"7c1bb5d2-aa5a-44f7-abcd-2d76b819b4c8",
"vdisks":[
{
"restore_cinder_volume":true,
"id":"f6b3fef6-4b0e-487e-84b5-47a14da716ca"
}
]
},
{
"restore_boot_disk":true,
"include":true,
"id":"08dab61c-6efd-44d3-a9ed-8e789d338c1b",
"vdisks":[
{
"restore_cinder_volume":true,
"id":"53204f34-019d-4ba8-ada1-e6ab7b8e5b43"
}
]
}
]
}
},
"metadata":[
]
}
}{
"restore":{
"name":"<restore-name>",
"description":"<restore-description>",
"options":{
"restore_type":"inplace",
"type":"openstack",
"oneclickrestore":false,
"openstack":{
"instances":[
{
"restore_boot_disk":<Boolean>,
"include":<Boolean>,
"id":"<ID of the instance the volumes are attached to>",
"vdisks":[
{
"restore_cinder_volume":<boolean>,
"id":"<ID of the Volume to restore>"
}
]
}
]
}
}
}
# mount <NFS B2-IP/NFS B2-FQDN>:/<VOL-Path> /mnt
workload_ac9cae9b-5e1b-4899-930c-6aa0600a2105
/…/workload_<id>/workload_db <<< Contains User ID and Project ID of Workload owner
/…/workload_<id>/workload_vms_db <<< Contains VM IDs and VM Names of all VMs actively protected by the Workload
# cp /mnt/workload_ac9cae9b-5e1b-4899-930c-6aa0600a2105 /var/triliovault-mounts/MTAuMTAuMi4yMDovdXBzdHJlYW0=/workload_ac9cae9b-5e1b-4899-930c-6aa0600a2105
# chown -R nova:nova /var/triliovault-mounts/MTAuMTAuMi4yMDovdXBzdHJlYW0=/workload_ac9cae9b-5e1b-4899-930c-6aa0600a2105
# chmod -R 644 /var/triliovault-mounts/MTAuMTAuMi4yMDovdXBzdHJlYW0=/workload_ac9cae9b-5e1b-4899-930c-6aa0600a2105#qemu-img info bd57ec9b-c4ac-4a37-a4fd-5c9aa002c778
image: bd57ec9b-c4ac-4a37-a4fd-5c9aa002c778
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 516K
cluster_size: 65536
backing file: /var/triliovault-mounts/MTAuMTAuMi4yMDovdXBzdHJlYW0=/workload_ac9cae9b-5e1b-4899-930c-6aa0600a2105/snapshot_1415095d-c047-400b-8b05-c88e57011263/vm_id_38b620f1-24ae-41d7-b0ab-85ffc2d7958b/vm_res_id_d4ab3431-5ce3-4a8f-a90b-07606e2ffa33_vda/7c39eb6a-6e42-418e-8690-b6368ecaa7bb
Format specific information:
compat: 1.1
lazy refcounts: false
refcount bits: 16
corrupt: false
# echo -n 10.10.2.20:/upstream_source | base64
MTAuMTAuMi4yMDovdXBzdHJlYW1fc291cmNl
# echo -n 10.20.3.22:/upstream_target | base64
MTAuMjAuMy4yMjovdXBzdHJlYW1fdGFyZ2V0
#mkdir /var/triliovault-mounts/MTAuMTAuMi4yMDovdXBzdHJlYW1fc291cmNl
#mount --bind
/var/triliovault-mounts/MTAuMjAuMy4yMjovdXBzdHJlYW1fdGFyZ2V0/ /var/triliovault-mounts/MTAuMTAuMi4yMDovdXBzdHJlYW1fc291cmNl#vi /etc/fstab
/var/triliovault-mounts/MTAuMjAuMy4yMjovdXBzdHJlYW1fdGFyZ2V0/ /var/triliovault-mounts/MTAuMTAuMi4yMDovdXBzdHJlYW1fc291cmNl none bind 0 0# source {customer admin rc file}
# openstack role add Admin --user <my_admin_user> --user-domain <admin_domain> --domain <target_domain>
# openstack role add Admin --user <my_admin_user> --user-domain <admin_domain> --project <target_project> --project-domain <target_domain>
# openstack role add <Backup Trustee Role> --user <my_admin_user> --user-domain <admin_domain> --project <destination_project> --project-domain <target_domain># workloadmgr workload-get-orphaned-workloads-list --migrate_cloud True
+------------+--------------------------------------+----------------------------------+----------------------------------+
| Name | ID | Project ID | User ID |
+------------+--------------------------------------+----------------------------------+----------------------------------+
| Workload_1 | 6639525d-736a-40c5-8133-5caaddaaa8e9 | 4224d3acfd394cc08228cc8072861a35 | 329880dedb4cd357579a3279835f392 |
| Workload_2 | 904e72f7-27bb-4235-9b31-13a636eb9c95 | 637a9ce3fd0d404cabf1a776696c9c04 | 329880dedb4cd357579a3279835f392 |
+------------+--------------------------------------+----------------------------------+----------------------------------+# openstack project list --domain <target_domain>
+----------------------------------+----------+
| ID | Name |
+----------------------------------+----------+
| 01fca51462a44bfa821130dce9baac1a | project1 |
| 33b4db1099ff4a65a4c1f69a14f932ee | project2 |
| 9139e694eb984a4a979b5ae8feb955af | project3 |
+----------------------------------+----------+ # openstack role assignment list --project <target_project> --project-domain <target_domain> --role <backup_trustee_role>
+----------------------------------+----------------------------------+-------+----------------------------------+--------+-----------+
| Role | User | Group | Project | Domain | Inherited |
+----------------------------------+----------------------------------+-------+----------------------------------+--------+-----------+
| 9fe2ff9ee4384b1894a90878d3e92bab | 72e65c264a694272928f5d84b73fe9ce | | 8e16700ae3614da4ba80a4e57d60cdb9 | | False |
| 9fe2ff9ee4384b1894a90878d3e92bab | d5fbd79f4e834f51bfec08be6d3b2ff2 | | 8e16700ae3614da4ba80a4e57d60cdb9 | | False |
| 9fe2ff9ee4384b1894a90878d3e92bab | f5b1d071816742fba6287d2c8ffcd6c4 | | 8e16700ae3614da4ba80a4e57d60cdb9 | | False |
+----------------------------------+----------------------------------+-------+----------------------------------+--------+-----------+# workloadmgr workload-reassign-workloads --new_tenant_id {target_project_id} --user_id {target_user_id} --workload_ids {workload_id} --migrate_cloud True
+-----------+--------------------------------------+----------------------------------+----------------------------------+
| Name | ID | Project ID | User ID |
+-----------+--------------------------------------+----------------------------------+----------------------------------+
| project1 | 904e72f7-27bb-4235-9b31-13a636eb9c95 | 4f2a91274ce9491481db795dcb10b04f | 3e05cac47338425d827193ba374749cc |
+-----------+--------------------------------------+----------------------------------+----------------------------------+ # workloadmgr workload-show ac9cae9b-5e1b-4899-930c-6aa0600a2105
+-------------------+------------------------------------------------------------------------------------------------------+
| Property | Value |
+-------------------+------------------------------------------------------------------------------------------------------+
| availability_zone | nova |
| created_at | 2019-04-18T02:19:39.000000 |
| description | Test Linux VMs |
| error_msg | None |
| id | ac9cae9b-5e1b-4899-930c-6aa0600a2105 |
| instances | [{"id": "38b620f1-24ae-41d7-b0ab-85ffc2d7958b", "name": "Test-Linux-1"}, {"id": |
| | "3fd869b2-16bd-4423-b389-18d19d37c8e0", "name": "Test-Linux-2"}] |
| interval | None |
| jobschedule | True |
| name | Test Linux |
| project_id | 2fc4e2180c2745629753305591aeb93b |
| scheduler_trust | None |
| status | available |
| storage_usage | {"usage": 60555264, "full": {"usage": 44695552, "snap_count": 1}, "incremental": {"usage": 15859712, |
| | "snap_count": 13}} |
| updated_at | 2019-11-15T02:32:43.000000 |
| user_id | 72e65c264a694272928f5d84b73fe9ce |
| workload_type_id | f82ce76f-17fe-438b-aa37-7a023058e50d |
+-------------------+------------------------------------------------------------------------------------------------------+# workloadmgr snapshot-list --workload_id ac9cae9b-5e1b-4899-930c-6aa0600a2105 --all True
+----------------------------+--------------+--------------------------------------+--------------------------------------+---------------+-----------+-----------+
| Created At | Name | ID | Workload ID | Snapshot Type | Status | Host |
+----------------------------+--------------+--------------------------------------+--------------------------------------+---------------+-----------+-----------+
| 2019-11-02T02:30:02.000000 | jobscheduler | f5b8c3fd-c289-487d-9d50-fe27a6561d78 | ac9cae9b-5e1b-4899-930c-6aa0600a2105 | full | available | Upstream2 |
| 2019-11-03T02:30:02.000000 | jobscheduler | 7e39e544-537d-4417-853d-11463e7396f9 | ac9cae9b-5e1b-4899-930c-6aa0600a2105 | incremental | available | Upstream2 |
| 2019-11-04T02:30:02.000000 | jobscheduler | 0c086f3f-fa5d-425f-b07e-a1adcdcafea9 | ac9cae9b-5e1b-4899-930c-6aa0600a2105 | incremental | available | Upstream2 |
+----------------------------+--------------+--------------------------------------+--------------------------------------+---------------+-----------+-----------+# workloadmgr snapshot-show --output networks 7e39e544-537d-4417-853d-11463e7396f9
+-------------------+--------------------------------------+
| Snapshot property | Value |
+-------------------+--------------------------------------+
| description | None |
| host | Upstream2 |
| id | 7e39e544-537d-4417-853d-11463e7396f9 |
| name | jobscheduler |
| progress_percent | 100 |
| restore_size | 44040192 Bytes or Approx (42.0MB) |
| restores_info | |
| size | 1310720 Bytes or Approx (1.2MB) |
| snapshot_type | incremental |
| status | available |
| time_taken | 154 Seconds |
| uploaded_size | 1310720 |
| workload_id | ac9cae9b-5e1b-4899-930c-6aa0600a2105 |
+-------------------+--------------------------------------+
+----------------+---------------------------------------------------------------------------------------------------------------------+
| Instances | Value |
+----------------+---------------------------------------------------------------------------------------------------------------------+
| Status | available |
| Security Group | [{u'name': u'Test', u'security_group_type': u'neutron'}, {u'name': u'default', u'security_group_type': u'neutron'}] |
| Flavor | {u'ephemeral': u'0', u'vcpus': u'1', u'disk': u'1', u'ram': u'512'} |
| Name | Test-Linux-1 |
| ID | 38b620f1-24ae-41d7-b0ab-85ffc2d7958b |
| | |
| Status | available |
| Security Group | [{u'name': u'Test', u'security_group_type': u'neutron'}, {u'name': u'default', u'security_group_type': u'neutron'}] |
| Flavor | {u'ephemeral': u'0', u'vcpus': u'1', u'disk': u'1', u'ram': u'512'} |
| Name | Test-Linux-2 |
| ID | 3fd869b2-16bd-4423-b389-18d19d37c8e0 |
| | |
+----------------+---------------------------------------------------------------------------------------------------------------------+
+-------------+----------------------------------------------------------------------------------------------------------------------------------------------+
| Networks | Value |
+-------------+----------------------------------------------------------------------------------------------------------------------------------------------+
| ip_address | 172.20.20.20 |
| vm_id | 38b620f1-24ae-41d7-b0ab-85ffc2d7958b |
| network | {u'subnet': {u'ip_version': 4, u'cidr': u'172.20.20.0/24', u'gateway_ip': u'172.20.20.1', u'id': u'3a756a89-d979-4cda-a7f3-dacad8594e44',
u'name': u'Trilio Test'}, u'cidr': None, u'id': u'5f0e5d34-569d-42c9-97c2-df944f3924b1', u'name': u'Trilio_Test_Internal', u'network_type': u'neutron'} |
| mac_address | fa:16:3e:74:58:bb |
| | |
| ip_address | 172.20.20.13 |
| vm_id | 3fd869b2-16bd-4423-b389-18d19d37c8e0 |
| network | {u'subnet': {u'ip_version': 4, u'cidr': u'172.20.20.0/24', u'gateway_ip': u'172.20.20.1', u'id': u'3a756a89-d979-4cda-a7f3-dacad8594e44',
u'name': u'Trilio Test'}, u'cidr': None, u'id': u'5f0e5d34-569d-42c9-97c2-df944f3924b1', u'name': u'Trilio_Test_Internal', u'network_type': u'neutron'} |
| mac_address | fa:16:3e:6b:46:ae |
+-------------+----------------------------------------------------------------------------------------------------------------------------------------------+[root@upstreamcontroller ~(keystone_admin)]# workloadmgr snapshot-show --output disks 7e39e544-537d-4417-853d-11463e7396f9
+-------------------+--------------------------------------+
| Snapshot property | Value |
+-------------------+--------------------------------------+
| description | None |
| host | Upstream2 |
| id | 7e39e544-537d-4417-853d-11463e7396f9 |
| name | jobscheduler |
| progress_percent | 100 |
| restore_size | 44040192 Bytes or Approx (42.0MB) |
| restores_info | |
| size | 1310720 Bytes or Approx (1.2MB) |
| snapshot_type | incremental |
| status | available |
| time_taken | 154 Seconds |
| uploaded_size | 1310720 |
| workload_id | ac9cae9b-5e1b-4899-930c-6aa0600a2105 |
+-------------------+--------------------------------------+
+----------------+---------------------------------------------------------------------------------------------------------------------+
| Instances | Value |
+----------------+---------------------------------------------------------------------------------------------------------------------+
| Status | available |
| Security Group | [{u'name': u'Test', u'security_group_type': u'neutron'}, {u'name': u'default', u'security_group_type': u'neutron'}] |
| Flavor | {u'ephemeral': u'0', u'vcpus': u'1', u'disk': u'1', u'ram': u'512'} |
| Name | Test-Linux-1 |
| ID | 38b620f1-24ae-41d7-b0ab-85ffc2d7958b |
| | |
| Status | available |
| Security Group | [{u'name': u'Test', u'security_group_type': u'neutron'}, {u'name': u'default', u'security_group_type': u'neutron'}] |
| Flavor | {u'ephemeral': u'0', u'vcpus': u'1', u'disk': u'1', u'ram': u'512'} |
| Name | Test-Linux-2 |
| ID | 3fd869b2-16bd-4423-b389-18d19d37c8e0 |
| | |
+----------------+---------------------------------------------------------------------------------------------------------------------+
+-------------------+--------------------------------------------------+
| Vdisks | Value |
+-------------------+--------------------------------------------------+
| volume_mountpoint | /dev/vda |
| restore_size | 22020096 |
| resource_id | ebc2fdd0-3c4d-4548-b92d-0e16734b5d9a |
| volume_name | 0027b140-a427-46cb-9ccf-7895c7624493 |
| volume_type | None |
| label | None |
| volume_size | 1 |
| volume_id | 0027b140-a427-46cb-9ccf-7895c7624493 |
| availability_zone | nova |
| vm_id | 38b620f1-24ae-41d7-b0ab-85ffc2d7958b |
| metadata | {u'readonly': u'False', u'attached_mode': u'rw'} |
| | |
| volume_mountpoint | /dev/vda |
| restore_size | 22020096 |
| resource_id | 8007ed89-6a86-447e-badb-e49f1e92f57a |
| volume_name | 2a7f9e78-7778-4452-af5b-8e2fa43853bd |
| volume_type | None |
| label | None |
| volume_size | 1 |
| volume_id | 2a7f9e78-7778-4452-af5b-8e2fa43853bd |
| availability_zone | nova |
| vm_id | 3fd869b2-16bd-4423-b389-18d19d37c8e0 |
| metadata | {u'readonly': u'False', u'attached_mode': u'rw'} |
| | |
+-------------------+--------------------------------------------------+{
u'description':u'<description of the restore>',
u'oneclickrestore':False,
u'restore_type':u'selective',
u'type':u'openstack',
   u'name':u'<name of the restore>',
u'openstack':{
u'instances':[
{
u'name':u'<name instance 1>',
u'availability_zone':u'<AZ instance 1>',
u'nics':[ #####Leave empty for network topology restore
],
u'vdisks':[
{
u'id':u'<old disk id>',
u'new_volume_type':u'<new volume type name>',
u'availability_zone':u'<new cinder volume AZ>'
}
],
u'flavor':{
u'ram':<RAM in MB>,
u'ephemeral':<GB of ephemeral disk>,
u'vcpus':<# vCPUs>,
u'swap':u'<GB of Swap disk>',
u'disk':<GB of boot disk>,
u'id':u'<id of the flavor to use>'
},
u'include':<True/False>,
u'id':u'<old id of the instance>'
} #####Repeat for each instance in the snapshot
],
u'restore_topology':<True/False>,
u'networks_mapping':{
u'networks':[ #####Leave empty for network topology restore
]
}
}
}
# workloadmgr snapshot-selective-restore --filename restore.json {snapshot id}[root@upstreamcontroller ~(keystone_admin)]# workloadmgr restore-list --snapshot_id 5928554d-a882-4881-9a5c-90e834c071af
+----------------------------+------------------+--------------------------------------+--------------------------------------+----------+-----------+
| Created At | Name | ID | Snapshot ID | Size | Status |
+----------------------------+------------------+--------------------------------------+--------------------------------------+----------+-----------+
| 2019-09-24T12:44:38.000000 | OneClick Restore | 5b4216d0-4bed-460f-8501-1589e7b45e01 | 5928554d-a882-4881-9a5c-90e834c071af | 41126400 | available |
+----------------------------+------------------+--------------------------------------+--------------------------------------+----------+-----------+
[root@upstreamcontroller ~(keystone_admin)]# workloadmgr restore-show 5b4216d0-4bed-460f-8501-1589e7b45e01
+------------------+------------------------------------------------------------------------------------------------------+
| Property | Value |
+------------------+------------------------------------------------------------------------------------------------------+
| created_at | 2019-09-24T12:44:38.000000 |
| description | - |
| error_msg | None |
| finished_at | 2019-09-24T12:46:07.000000 |
| host | Upstream2 |
| id | 5b4216d0-4bed-460f-8501-1589e7b45e01 |
| instances | [{"status": "available", "id": "b8506f04-1b99-4ca8-839b-6f5d2c20d9aa", "name": "temp", "metadata": |
| | {"instance_id": "c014a938-903d-43db-bfbb-ea4998ff1a0f", "production": "1", "config_drive": ""}}] |
| name | OneClick Restore |
| progress_msg | Restore from snapshot is complete |
| progress_percent | 100 |
| project_id | 8e16700ae3614da4ba80a4e57d60cdb9 |
| restore_options | {"description": "-", "oneclickrestore": true, "restore_type": "oneclick", "openstack": {"instances": |
| | [{"availability_zone": "US-West", "id": "c014a938-903d-43db-bfbb-ea4998ff1a0f", "name": "temp"}]}, |
| | "type": "openstack", "name": "OneClick Restore"} |
| restore_type | restore |
| size | 41126400 |
| snapshot_id | 5928554d-a882-4881-9a5c-90e834c071af |
| status | available |
| time_taken | 89 |
| updated_at | 2019-09-24T12:44:38.000000 |
| uploaded_size | 41126400 |
| user_id | d5fbd79f4e834f51bfec08be6d3b2ff2 |
| warning_msg | None |
| workload_id | 02b1aca2-c51a-454b-8c0f-99966314165e |
+------------------+------------------------------------------------------------------------------------------------------+# workloadmgr workload-delete <workload_id># source {customer admin rc file}
# openstack role remove Admin --user <my_admin_user> --user-domain <admin_domain> --domain <target_domain>
# openstack role remove Admin --user <my_admin_user> --user-domain <admin_domain> --project <target_project> --project-domain <target_domain>
# openstack role remove <Backup Trustee Role> --user <my_admin_user> --user-domain <admin_domain> --project <destination_project> --project-domain <target_domain>
# vi /etc/workloadmgr/workloadmgr.confvault_storage_nfs_export = <NFS_B1-IP/NFS_B1-FQDN>:/<VOL-B1-Path>vault_storage_nfs_export = <NFS-IP/NFS-FQDN>:/<VOL-1-Path>,<NFS-IP/NFS-FQDN>:/<VOL-2-Path># systemctl restart wlm-workloads# vi /etc/tvault-contego/tvault-contego.confvault_storage_nfs_export = <NFS_B1-IP/NFS_B1-FQDN>:/<VOL-B1-Path>vault_storage_nfs_export = <NFS_B1-IP/NFS-FQDN>:/<VOL-B1-Path>,<NFS_B2-IP/NFS-FQDN>:/<VOL-B2-Path># systemctl restart tvault-contego#qemu-img info bd57ec9b-c4ac-4a37-a4fd-5c9aa002c778
image: bd57ec9b-c4ac-4a37-a4fd-5c9aa002c778
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 516K
cluster_size: 65536
backing file: /var/triliovault-mounts/MTAuMTAuMi4yMDovdXBzdHJlYW0=/workload_ac9cae9b-5e1b-4899-930c-6aa0600a2105/snapshot_1415095d-c047-400b-8b05-c88e57011263/vm_id_38b620f1-24ae-41d7-b0ab-85ffc2d7958b/vm_res_id_d4ab3431-5ce3-4a8f-a90b-07606e2ffa33_vda/7c39eb6a-6e42-418e-8690-b6368ecaa7bb
Format specific information:
compat: 1.1
lazy refcounts: false
refcount bits: 16
corrupt: false
# echo -n 10.10.2.20:/upstream_source | base64
MTAuMTAuMi4yMDovdXBzdHJlYW1fc291cmNl
# echo -n 10.20.3.22:/upstream_target | base64
MTAuMjAuMy4yMjovdXBzdHJlYW1fdGFyZ2V0#mkdir /var/triliovault-mounts/MTAuMTAuMi4yMDovdXBzdHJlYW1fc291cmNl
#mount --bind
/var/triliovault-mounts/MTAuMjAuMy4yMjovdXBzdHJlYW1fdGFyZ2V0/ /var/triliovault-mounts/MTAuMTAuMi4yMDovdXBzdHJlYW1fc291cmNl#vi /etc/fstab
/var/triliovault-mounts/MTAuMjAuMy4yMjovdXBzdHJlYW1fdGFyZ2V0/ /var/triliovault-mounts/MTAuMTAuMi4yMDovdXBzdHJlYW1fc291cmNl none bind 0 0# source {customer admin rc file}
# openstack role add Admin --user <my_admin_user> --user-domain <admin_domain> --domain <target_domain>
# openstack role add Admin --user <my_admin_user> --user-domain <admin_domain> --project <target_project> --project-domain <target_domain>
# openstack role add <Backup Trustee Role> --user <my_admin_user> --user-domain <admin_domain> --project <destination_project> --project-domain <target_domain># workloadmgr workload-get-orphaned-workloads-list --migrate_cloud True
+------------+--------------------------------------+----------------------------------+----------------------------------+
| Name | ID | Project ID | User ID |
+------------+--------------------------------------+----------------------------------+----------------------------------+
| Workload_1 | 6639525d-736a-40c5-8133-5caaddaaa8e9 | 4224d3acfd394cc08228cc8072861a35 | 329880dedb4cd357579a3279835f392 |
| Workload_2 | 904e72f7-27bb-4235-9b31-13a636eb9c95 | 637a9ce3fd0d404cabf1a776696c9c04 | 329880dedb4cd357579a3279835f392 |
+------------+--------------------------------------+----------------------------------+----------------------------------+# openstack project list --domain <target_domain>
+----------------------------------+----------+
| ID | Name |
+----------------------------------+----------+
| 01fca51462a44bfa821130dce9baac1a | project1 |
| 33b4db1099ff4a65a4c1f69a14f932ee | project2 |
| 9139e694eb984a4a979b5ae8feb955af | project3 |
+----------------------------------+----------+ # openstack role assignment list --project <target_project> --project-domain <target_domain> --role <backup_trustee_role>
+----------------------------------+----------------------------------+-------+----------------------------------+--------+-----------+
| Role | User | Group | Project | Domain | Inherited |
+----------------------------------+----------------------------------+-------+----------------------------------+--------+-----------+
| 9fe2ff9ee4384b1894a90878d3e92bab | 72e65c264a694272928f5d84b73fe9ce | | 8e16700ae3614da4ba80a4e57d60cdb9 | | False |
| 9fe2ff9ee4384b1894a90878d3e92bab | d5fbd79f4e834f51bfec08be6d3b2ff2 | | 8e16700ae3614da4ba80a4e57d60cdb9 | | False |
| 9fe2ff9ee4384b1894a90878d3e92bab | f5b1d071816742fba6287d2c8ffcd6c4 | | 8e16700ae3614da4ba80a4e57d60cdb9 | | False |
+----------------------------------+----------------------------------+-------+----------------------------------+--------+-----------+# workloadmgr workload-reassign-workloads --new_tenant_id {target_project_id} --user_id {target_user_id} --workload_ids {workload_id} --migrate_cloud True
+-----------+--------------------------------------+----------------------------------+----------------------------------+
| Name | ID | Project ID | User ID |
+-----------+--------------------------------------+----------------------------------+----------------------------------+
| project1 | 904e72f7-27bb-4235-9b31-13a636eb9c95 | 4f2a91274ce9491481db795dcb10b04f | 3e05cac47338425d827193ba374749cc |
+-----------+--------------------------------------+----------------------------------+----------------------------------+ # workloadmgr workload-show ac9cae9b-5e1b-4899-930c-6aa0600a2105
+-------------------+------------------------------------------------------------------------------------------------------+
| Property | Value |
+-------------------+------------------------------------------------------------------------------------------------------+
| availability_zone | nova |
| created_at | 2019-04-18T02:19:39.000000 |
| description | Test Linux VMs |
| error_msg | None |
| id | ac9cae9b-5e1b-4899-930c-6aa0600a2105 |
| instances | [{"id": "38b620f1-24ae-41d7-b0ab-85ffc2d7958b", "name": "Test-Linux-1"}, {"id": |
| | "3fd869b2-16bd-4423-b389-18d19d37c8e0", "name": "Test-Linux-2"}] |
| interval | None |
| jobschedule | True |
| name | Test Linux |
| project_id | 2fc4e2180c2745629753305591aeb93b |
| scheduler_trust | None |
| status | available |
| storage_usage | {"usage": 60555264, "full": {"usage": 44695552, "snap_count": 1}, "incremental": {"usage": 15859712, |
| | "snap_count": 13}} |
| updated_at | 2019-11-15T02:32:43.000000 |
| user_id | 72e65c264a694272928f5d84b73fe9ce |
| workload_type_id | f82ce76f-17fe-438b-aa37-7a023058e50d |
+-------------------+------------------------------------------------------------------------------------------------------+# workloadmgr snapshot-list --workload_id ac9cae9b-5e1b-4899-930c-6aa0600a2105 --all True
+----------------------------+--------------+--------------------------------------+--------------------------------------+---------------+-----------+-----------+
| Created At | Name | ID | Workload ID | Snapshot Type | Status | Host |
+----------------------------+--------------+--------------------------------------+--------------------------------------+---------------+-----------+-----------+
| 2019-11-02T02:30:02.000000 | jobscheduler | f5b8c3fd-c289-487d-9d50-fe27a6561d78 | ac9cae9b-5e1b-4899-930c-6aa0600a2105 | full | available | Upstream2 |
| 2019-11-03T02:30:02.000000 | jobscheduler | 7e39e544-537d-4417-853d-11463e7396f9 | ac9cae9b-5e1b-4899-930c-6aa0600a2105 | incremental | available | Upstream2 |
| 2019-11-04T02:30:02.000000 | jobscheduler | 0c086f3f-fa5d-425f-b07e-a1adcdcafea9 | ac9cae9b-5e1b-4899-930c-6aa0600a2105 | incremental | available | Upstream2 |
+----------------------------+--------------+--------------------------------------+--------------------------------------+---------------+-----------+-----------+# workloadmgr snapshot-show --output networks 7e39e544-537d-4417-853d-11463e7396f9
+-------------------+--------------------------------------+
| Snapshot property | Value |
+-------------------+--------------------------------------+
| description | None |
| host | Upstream2 |
| id | 7e39e544-537d-4417-853d-11463e7396f9 |
| name | jobscheduler |
| progress_percent | 100 |
| restore_size | 44040192 Bytes or Approx (42.0MB) |
| restores_info | |
| size | 1310720 Bytes or Approx (1.2MB) |
| snapshot_type | incremental |
| status | available |
| time_taken | 154 Seconds |
| uploaded_size | 1310720 |
| workload_id | ac9cae9b-5e1b-4899-930c-6aa0600a2105 |
+-------------------+--------------------------------------+
+----------------+---------------------------------------------------------------------------------------------------------------------+
| Instances | Value |
+----------------+---------------------------------------------------------------------------------------------------------------------+
| Status | available |
| Security Group | [{u'name': u'Test', u'security_group_type': u'neutron'}, {u'name': u'default', u'security_group_type': u'neutron'}] |
| Flavor | {u'ephemeral': u'0', u'vcpus': u'1', u'disk': u'1', u'ram': u'512'} |
| Name | Test-Linux-1 |
| ID | 38b620f1-24ae-41d7-b0ab-85ffc2d7958b |
| | |
| Status | available |
| Security Group | [{u'name': u'Test', u'security_group_type': u'neutron'}, {u'name': u'default', u'security_group_type': u'neutron'}] |
| Flavor | {u'ephemeral': u'0', u'vcpus': u'1', u'disk': u'1', u'ram': u'512'} |
| Name | Test-Linux-2 |
| ID | 3fd869b2-16bd-4423-b389-18d19d37c8e0 |
| | |
+----------------+---------------------------------------------------------------------------------------------------------------------+
+-------------+----------------------------------------------------------------------------------------------------------------------------------------------+
| Networks | Value |
+-------------+----------------------------------------------------------------------------------------------------------------------------------------------+
| ip_address | 172.20.20.20 |
| vm_id | 38b620f1-24ae-41d7-b0ab-85ffc2d7958b |
| network | {u'subnet': {u'ip_version': 4, u'cidr': u'172.20.20.0/24', u'gateway_ip': u'172.20.20.1', u'id': u'3a756a89-d979-4cda-a7f3-dacad8594e44',
u'name': u'Trilio Test'}, u'cidr': None, u'id': u'5f0e5d34-569d-42c9-97c2-df944f3924b1', u'name': u'Trilio_Test_Internal', u'network_type': u'neutron'} |
| mac_address | fa:16:3e:74:58:bb |
| | |
| ip_address | 172.20.20.13 |
| vm_id | 3fd869b2-16bd-4423-b389-18d19d37c8e0 |
| network | {u'subnet': {u'ip_version': 4, u'cidr': u'172.20.20.0/24', u'gateway_ip': u'172.20.20.1', u'id': u'3a756a89-d979-4cda-a7f3-dacad8594e44',
u'name': u'Trilio Test'}, u'cidr': None, u'id': u'5f0e5d34-569d-42c9-97c2-df944f3924b1', u'name': u'Trilio_Test_Internal', u'network_type': u'neutron'} |
| mac_address | fa:16:3e:6b:46:ae |
+-------------+----------------------------------------------------------------------------------------------------------------------------------------------+[root@upstreamcontroller ~(keystone_admin)]# workloadmgr snapshot-show --output disks 7e39e544-537d-4417-853d-11463e7396f9
+-------------------+--------------------------------------+
| Snapshot property | Value |
+-------------------+--------------------------------------+
| description | None |
| host | Upstream2 |
| id | 7e39e544-537d-4417-853d-11463e7396f9 |
| name | jobscheduler |
| progress_percent | 100 |
| restore_size | 44040192 Bytes or Approx (42.0MB) |
| restores_info | |
| size | 1310720 Bytes or Approx (1.2MB) |
| snapshot_type | incremental |
| status | available |
| time_taken | 154 Seconds |
| uploaded_size | 1310720 |
| workload_id | ac9cae9b-5e1b-4899-930c-6aa0600a2105 |
+-------------------+--------------------------------------+
+----------------+---------------------------------------------------------------------------------------------------------------------+
| Instances | Value |
+----------------+---------------------------------------------------------------------------------------------------------------------+
| Status | available |
| Security Group | [{u'name': u'Test', u'security_group_type': u'neutron'}, {u'name': u'default', u'security_group_type': u'neutron'}] |
| Flavor | {u'ephemeral': u'0', u'vcpus': u'1', u'disk': u'1', u'ram': u'512'} |
| Name | Test-Linux-1 |
| ID | 38b620f1-24ae-41d7-b0ab-85ffc2d7958b |
| | |
| Status | available |
| Security Group | [{u'name': u'Test', u'security_group_type': u'neutron'}, {u'name': u'default', u'security_group_type': u'neutron'}] |
| Flavor | {u'ephemeral': u'0', u'vcpus': u'1', u'disk': u'1', u'ram': u'512'} |
| Name | Test-Linux-2 |
| ID | 3fd869b2-16bd-4423-b389-18d19d37c8e0 |
| | |
+----------------+---------------------------------------------------------------------------------------------------------------------+
+-------------------+--------------------------------------------------+
| Vdisks | Value |
+-------------------+--------------------------------------------------+
| volume_mountpoint | /dev/vda |
| restore_size | 22020096 |
| resource_id | ebc2fdd0-3c4d-4548-b92d-0e16734b5d9a |
| volume_name | 0027b140-a427-46cb-9ccf-7895c7624493 |
| volume_type | None |
| label | None |
| volume_size | 1 |
| volume_id | 0027b140-a427-46cb-9ccf-7895c7624493 |
| availability_zone | nova |
| vm_id | 38b620f1-24ae-41d7-b0ab-85ffc2d7958b |
| metadata | {u'readonly': u'False', u'attached_mode': u'rw'} |
| | |
| volume_mountpoint | /dev/vda |
| restore_size | 22020096 |
| resource_id | 8007ed89-6a86-447e-badb-e49f1e92f57a |
| volume_name | 2a7f9e78-7778-4452-af5b-8e2fa43853bd |
| volume_type | None |
| label | None |
| volume_size | 1 |
| volume_id | 2a7f9e78-7778-4452-af5b-8e2fa43853bd |
| availability_zone | nova |
| vm_id | 3fd869b2-16bd-4423-b389-18d19d37c8e0 |
| metadata | {u'readonly': u'False', u'attached_mode': u'rw'} |
| | |
+-------------------+--------------------------------------------------+{
u'description':u'<description of the restore>',
u'oneclickrestore':False,
u'restore_type':u'selective',
u'type':u'openstack',
   u'name':u'<name of the restore>',
u'openstack':{
u'instances':[
{
u'name':u'<name instance 1>',
u'availability_zone':u'<AZ instance 1>',
u'nics':[ #####Leave empty for network topology restore
],
u'vdisks':[
{
u'id':u'<old disk id>',
u'new_volume_type':u'<new volume type name>',
u'availability_zone':u'<new cinder volume AZ>'
}
],
u'flavor':{
u'ram':<RAM in MB>,
u'ephemeral':<GB of ephemeral disk>,
u'vcpus':<# vCPUs>,
u'swap':u'<GB of Swap disk>',
u'disk':<GB of boot disk>,
u'id':u'<id of the flavor to use>'
},
u'include':<True/False>,
u'id':u'<old id of the instance>'
} #####Repeat for each instance in the snapshot
],
u'restore_topology':<True/False>,
u'networks_mapping':{
u'networks':[ #####Leave empty for network topology restore
]
}
}
}
# workloadmgr snapshot-selective-restore --filename restore.json {snapshot id}[root@upstreamcontroller ~(keystone_admin)]# workloadmgr restore-list --snapshot_id 5928554d-a882-4881-9a5c-90e834c071af
+----------------------------+------------------+--------------------------------------+--------------------------------------+----------+-----------+
| Created At | Name | ID | Snapshot ID | Size | Status |
+----------------------------+------------------+--------------------------------------+--------------------------------------+----------+-----------+
| 2019-09-24T12:44:38.000000 | OneClick Restore | 5b4216d0-4bed-460f-8501-1589e7b45e01 | 5928554d-a882-4881-9a5c-90e834c071af | 41126400 | available |
+----------------------------+------------------+--------------------------------------+--------------------------------------+----------+-----------+
[root@upstreamcontroller ~(keystone_admin)]# workloadmgr restore-show 5b4216d0-4bed-460f-8501-1589e7b45e01
+------------------+------------------------------------------------------------------------------------------------------+
| Property | Value |
+------------------+------------------------------------------------------------------------------------------------------+
| created_at | 2019-09-24T12:44:38.000000 |
| description | - |
| error_msg | None |
| finished_at | 2019-09-24T12:46:07.000000 |
| host | Upstream2 |
| id | 5b4216d0-4bed-460f-8501-1589e7b45e01 |
| instances | [{"status": "available", "id": "b8506f04-1b99-4ca8-839b-6f5d2c20d9aa", "name": "temp", "metadata": |
| | {"instance_id": "c014a938-903d-43db-bfbb-ea4998ff1a0f", "production": "1", "config_drive": ""}}] |
| name | OneClick Restore |
| progress_msg | Restore from snapshot is complete |
| progress_percent | 100 |
| project_id | 8e16700ae3614da4ba80a4e57d60cdb9 |
| restore_options | {"description": "-", "oneclickrestore": true, "restore_type": "oneclick", "openstack": {"instances": |
| | [{"availability_zone": "US-West", "id": "c014a938-903d-43db-bfbb-ea4998ff1a0f", "name": "temp"}]}, |
| | "type": "openstack", "name": "OneClick Restore"} |
| restore_type | restore |
| size | 41126400 |
| snapshot_id | 5928554d-a882-4881-9a5c-90e834c071af |
| status | available |
| time_taken | 89 |
| updated_at | 2019-09-24T12:44:38.000000 |
| uploaded_size | 41126400 |
| user_id | d5fbd79f4e834f51bfec08be6d3b2ff2 |
| warning_msg | None |
| workload_id | 02b1aca2-c51a-454b-8c0f-99966314165e |
+------------------+------------------------------------------------------------------------------------------------------+# vi /etc/workloadmgr/workloadmgr.confvault_storage_nfs_export = <NFS_B1-IP/NFS-FQDN>:/<VOL-B1-Path>,<NFS_B2-IP/NFS-FQDN>:/<VOL—B2-Path>vault_storage_nfs_export = <NFS_B1-IP/NFS_B1-FQDN>:/<VOL-B1-Path># systemctl restart wlm-workloads# vi /etc/tvault-contego/tvault-contego.confvault_storage_nfs_export = <NFS_B1-IP/NFS-FQDN>:/<VOL-B1-Path>,<NFS_B2-IP/NFS-FQDN>:/<VOL—B2-Path>vault_storage_nfs_export = <NFS-IP/NFS-FQDN>:/<VOL-1-Path># systemctl restart tvault-contego# source {customer admin rc file}
# openstack role remove Admin --user <my_admin_user> --user-domain <admin_domain> --domain <target_domain>
# openstack role remove Admin --user <my_admin_user> --user-domain <admin_domain> --project <target_project> --project-domain <target_domain>
# openstack role remove <Backup Trustee Role> --user <my_admin_user> --user-domain <admin_domain> --project <destination_project> --project-domain <target_domain>

This page contains the API guide for various operations on Backup Targets and Backup Target Types.
{
"backup_target" :
{
"s3_endpoint_url": <s3 endpoint url>,
"s3_bucket": <s3 bucket name>,
"filesystem_export": <filesystem export required for NFS type>,
"type": <Backup Target type Eg. nfs, s3>,
"is_default": <integer value 0|1 to specify if default or non-default>,
"btt_name": <Backup Target Type name; defaults to filesystem_export if not provided>,
"immutable": <integer value 0|1 to specify if the s3 Backup Target has object lock enabled>,
"metadata": <dictionary of key-value pairs denoting metadata of the backup target>
}
}HTTP/1.1 200 OK
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:25:14 GMT'
'Content-Type': 'application/json'
'Content-Length': '430'
'Connection': 'keep-alive'
'X-Compute-Request-Id': 'req-5730cced-949b-4567-827e-8349a023e716'
{"backup_targets": {"id": "4720baae-5422-466d-a4b2-a4a2c94400f3", "type": "s3", "created_at": "2025-01-30T12:32:27.000000", "updated_at": "2025-01-30T12:32:27.000000", "version": "5.2.8.15", "filesystem_export": "cephs3.triliodata.demo/object-locked-cephs3-2", "filesystem_export_mount_path": "/var/trilio/triliovault-mounts/Y2VwaHMzLnRyaWxpb2RhdGEuZGVtby9vYmplY3QtbG9ja2VkLWNlcGhzMy0y", "is_default": false, "capacity": null, "used": null, "status": "offline", "backup_target_types": [{"created_at": "2025-01-30T12:32:27.000000", "updated_at": null, "version": "5.2.8.15", "user_id": "a62bf1546cdf4b02a3fc08b7aad79acb", "name": "cephs3.triliodata.demo/object-locked-cephs3-2", "description": null, "is_public": true, "is_default": false}], "backup_target_metadata": []}}HTTP/1.1 202 OK
'Server': 'nginx/1.20.1'
'Date': 'Thu, 30 Jan 2025 13:04:32 GMT'
'Content-Type': 'text/html; charset=UTF-8'
'Content-Length': '0'
'Connection': 'keep-alive'HTTP/1.1 200 OK
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:15:16 GMT',
'Content-Type': 'application/json',
'Content-Length': '1808',
'Connection': 'keep-alive',
'X-Compute-Request-Id': 'req-739464e0-3b84-4e94-866a-f34476915a38'
{
"backup_targets": [
{
"id": "b39847c8-bf65-4cec-9af6-dd65303ca485",
"type": "nfs",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"nfs_export": "192.168.1.34:/mnt/tvault/162",
"nfs_export_mount_path": "/var/triliovault-mounts/L21udC90dmF1bHQvMTYy",
"is_default": true,
"capacity": "2.4 TB",
"used": "1.2 TB",
"status": "available",
"backup_target_types": [
{
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": null,
"version": "5.0.204",
"user_id": null,
"name": "nfs_1",
"description": null,
"is_public": true,
"is_default": true
}
]
},
{
"id": "2af5f2db-3267-453f-bc57-19884837e274",
"type": "s3",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"nfs_export": "https://cephs3.triliodata.demo/trilio-qamanual",
"nfs_export_mount_path": "/var/triliovault-mounts/Y2VwaHMzLnRyaWxpb2RhdGEuZGVtby90cmlsaW8tcWFtYW51YWw=",
"is_default": false,
"capacity": null,
"used": null,
"status": "offline",
"backup_target_types": [
{
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": null,
"version": "5.0.204",
"user_id": null,
"name": "s3_2",
"description": null,
"is_public": true,
"is_default": false
}
]
},
{
"id": "1fd7af34-d723-428d-90f5-35d31bf24884",
"type": "nfs",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"nfs_export": "192.168.1.34:/mnt/tvault/tvm",
"nfs_export_mount_path": "/var/triliovault-mounts/L21udC90dmF1bHQvdHZt",
"is_default": false,
"capacity": "2.4 TB",
"used": "1.2 TB",
"status": "available",
"backup_target_types": [
{
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": null,
"version": "5.0.204",
"user_id": null,
"name": "nfs_2",
"description": null,
"is_public": true,
"is_default": false
}
]
}
]
}HTTP/1.1 200 OK
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:15:16 GMT',
'Content-Type': 'application/json',
'Content-Length': '1808',
'Connection': 'keep-alive',
'X-Compute-Request-Id': 'req-739464e0-3b84-4e94-866a-f34476915a38'
{
"backup_targets": [
{
"id": "b39847c8-bf65-4cec-9af6-dd65303ca485",
"type": "nfs",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"nfs_export": "192.168.1.34:/mnt/tvault/162",
"nfs_export_mount_path": "/var/triliovault-mounts/L21udC90dmF1bHQvMTYy",
"is_default": true,
"capacity": "2.4 TB",
"used": "1.2 TB",
"status": "available",
"backup_target_types": [
{
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": null,
"version": "5.0.204",
"user_id": null,
"name": "nfs_1",
"description": null,
"is_public": true,
"is_default": true
}
]
},
{
"id": "2af5f2db-3267-453f-bc57-19884837e274",
"type": "s3",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"nfs_export": "https://cephs3.triliodata.demo/trilio-qamanual",
"nfs_export_mount_path": "/var/triliovault-mounts/Y2VwaHMzLnRyaWxpb2RhdGEuZGVtby90cmlsaW8tcWFtYW51YWw=",
"is_default": false,
"capacity": null,
"used": null,
"status": "offline",
"backup_target_types": [
{
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": null,
"version": "5.0.204",
"user_id": null,
"name": "s3_2",
"description": null,
"is_public": true,
"is_default": false
}
]
},
{
"id": "1fd7af34-d723-428d-90f5-35d31bf24884",
"type": "nfs",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"nfs_export": "192.168.1.34:/mnt/tvault/tvm",
"nfs_export_mount_path": "/var/triliovault-mounts/L21udC90dmF1bHQvdHZt",
"is_default": false,
"capacity": "2.4 TB",
"used": "1.2 TB",
"status": "available",
"backup_target_types": [
{
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": null,
"version": "5.0.204",
"user_id": null,
"name": "nfs_2",
"description": null,
"is_public": true,
"is_default": false
}
]
}
]
}HTTP/1.1 200 OK
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:21:14 GMT'
'Content-Type': 'application/json'
'Content-Length': '600'
'Connection': 'keep-alive'
'X-Compute-Request-Id': 'req-0b298919-f2e8-4c50-aa55-ce88b4569f2a'
{
"backup_targets": {
"id": "b39847c8-bf65-4cec-9af6-dd65303ca485",
"type": "nfs",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"nfs_export": "192.168.1.34:/mnt/tvault/162",
"nfs_export_mount_path": "/var/triliovault-mounts/L21udC90dmF1bHQvMTYy",
"is_default": true,
"capacity": "2.4 TB",
"used": "1.2 TB",
"status": "available",
"backup_target_types": [
{
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": null,
"version": "5.0.204",
"user_id": null,
"name": "nfs_1",
"description": null,
"is_public": true,
"is_default": true
}
]
}
}'x-compute-request-id': 'req-295c03f7-8dfb-4b72-a654-9f06a51d9b0c'
'content-type': 'application/json'
'content-length': '911'
'date': 'Tue, 04 Feb 2025 14:19:05 GMT'
{"backup_targets": {"id": "0dcc632a-aed7-45ab-a40e-7cc2deae3994", "type": "s3", "created_at": "2025-02-04T10:43:37.000000", "updated_at": "2025-02-04T10:43:37.000000", "version": "5.2.8.15", "filesystem_export": "cephs3.triliodata.demo/object-locked-cephs3-2", "filesystem_export_mount_path": "/var/trilio/triliovault-mounts/Y2VwaHMzLnRyaWxpb2RhdGEuZGVtby9vYmplY3QtbG9ja2VkLWNlcGhzMy0y", "is_default": true, "capacity": null, "used": null, "status": "offline", "backup_target_types": [{"created_at": "2025-02-04T10:43:37.000000", "updated_at": "2025-02-04T14:19:05.000000", "version": "5.2.8.15", "user_id": "a62bf1546cdf4b02a3fc08b7aad79acb", "name": "cephs3.triliodata.demo/object-locked-cephs3-2", "description": null, "is_public": true, "is_default": true}], "backup_target_metadata": [{"key": "bucket", "value": "s3-object-lock"}, {"key": "immutable", "value": "1"}, {"key": "object_lock", "value": "1"}]}}```
</details>
***
## Backup Target Types: <a href="#backup-target-types" id="backup-target-types"></a>
### **List Backup Target Types** 
Provides the list of all backup target types
 <mark style="color:green;">`GET`</mark> `https://<wlm_api_endpoint>/backup_target_types` 
#### Input Parameters:
Path:
<table><thead><tr><th width="219">Parameter Name</th><th>Description</th></tr></thead><tbody><tr><td>wlm_api_endpoint</td><td>The endpoint URL of the <code>Workloadmgr</code> service</td></tr></tbody></table>
Headers:
| Header Name | Value/Description |
| ----------------- | --------------------------------------------- |
| X-Auth-Project-Id | Project ID to run the authentication against |
| X-Auth-Token | Authentication token to use |
| Accept | `application/json` |
| User-Agent | `python-workloadmgrclient` |
<details>
<summary>Sample Response</summary>
```json5
HTTP/1.1 200 OK
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:29:09 GMT'
'Content-Type': 'application/json'
'Content-Length': '1628'
'Connection': 'keep-alive'
'X-Compute-Request-Id': 'req-dd23df0a-37a6-4769-882a-40e67431d997'
{
"backup_target_types": [
{
"id": "13dd2bf2-12c5-4eb8-98a3-0f1dd9f8579f",
"backup_targets_id": "b39847c8-bf65-4cec-9af6-dd65303ca485",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"user_id": null,
"name": "nfs_1",
"is_default": true,
"description": null,
"is_public": true,
"backup_target_type_projects": [],
"backup_target_type_metadata": []
},
{
"id": "51f11fc5-854b-4cb7-9b64-0a0ace33b0d5",
"backup_targets_id": "1fd7af34-d723-428d-90f5-35d31bf24884",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"user_id": null,
"name": "nfs_2",
"is_default": false,
"description": null,
"is_public": true,
"backup_target_type_projects": [],
"backup_target_type_metadata": []
},
{
"id": "c65625c1-50bf-4ab7-aa26-f625001e60f1",
"backup_targets_id": "2af5f2db-3267-453f-bc57-19884837e274",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"user_id": null,
"name": "s3_2",
"is_default": false,
"description": null,
"is_public": true,
"backup_target_type_projects": [],
"backup_target_type_metadata": []
},
{
"id": "fdae1b10-9852-4c68-8879-64ead0aed31b",
"backup_targets_id": "b39847c8-bf65-4cec-9af6-dd65303ca485",
"created_at": "2024-03-13T10:25:14.000000",
"updated_at": "2024-03-13T10:25:14.000000",
"version": "5.0.204",
"user_id": "2b1189be3add4806bcb7e0c259b03597",
"name": "BTT-name",
"is_default": false,
"description": null,
"is_public": true,
"backup_target_type_projects": [],
"backup_target_type_metadata": [
{
"key": "nfs",
"value": "secondary"
}
]
}
]
}HTTP/1.1 200 OK
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:29:09 GMT'
'Content-Type': 'application/json'
'Content-Length': '1628'
'Connection': 'keep-alive'
'X-Compute-Request-Id': 'req-dd23df0a-37a6-4769-882a-40e67431d997'
{
"backup_target_types": [
{
"id": "13dd2bf2-12c5-4eb8-98a3-0f1dd9f8579f",
"backup_targets_id": "b39847c8-bf65-4cec-9af6-dd65303ca485",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"user_id": null,
"name": "nfs_1",
"is_default": true,
"description": null,
"is_public": true,
"backup_target_type_projects": [],
"backup_target_type_metadata": []
},
{
"id": "51f11fc5-854b-4cb7-9b64-0a0ace33b0d5",
"backup_targets_id": "1fd7af34-d723-428d-90f5-35d31bf24884",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"user_id": null,
"name": "nfs_2",
"is_default": false,
"description": null,
"is_public": true,
"backup_target_type_projects": [],
"backup_target_type_metadata": []
},
{
"id": "c65625c1-50bf-4ab7-aa26-f625001e60f1",
"backup_targets_id": "2af5f2db-3267-453f-bc57-19884837e274",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"user_id": null,
"name": "s3_2",
"is_default": false,
"description": null,
"is_public": true,
"backup_target_type_projects": [],
"backup_target_type_metadata": []
},
{
"id": "fdae1b10-9852-4c68-8879-64ead0aed31b",
"backup_targets_id": "b39847c8-bf65-4cec-9af6-dd65303ca485",
"created_at": "2024-03-13T10:25:14.000000",
"updated_at": "2024-03-13T10:25:14.000000",
"version": "5.0.204",
"user_id": "2b1189be3add4806bcb7e0c259b03597",
"name": "BTT-name",
"is_default": false,
"description": null,
"is_public": true,
"backup_target_type_projects": [],
"backup_target_type_metadata": [
{
"key": "nfs",
"value": "secondary"
}
]
}
]
}HTTP/1.1 200 OK
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:30:31 GMT'
'Content-Type': 'application/json'
'Content-Length': '406'
'Connection': 'keep-alive'
'X-Compute-Request-Id': 'req-f20d2e28-a083-47fe-8eda-a702ac484865'
{
"backup_target_types": {
"id": "13dd2bf2-12c5-4eb8-98a3-0f1dd9f8579f",
"backup_targets_id": "b39847c8-bf65-4cec-9af6-dd65303ca485",
"created_at": "2024-03-04T10:59:51.000000",
"updated_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"user_id": null,
"name": "nfs_1",
"is_default": true,
"description": null,
"is_public": true,
"backup_target_type_projects": [],
"backup_target_type_metadata": []
}
}{
"backup_target_type" :
{
"name": <name of the backup target type>,
"description": <description of backup target type>,
"backup_targets_id": <ID of the backup target>,
"is_default": <integer value 0|1 to specify if default or non-default>,
"is_public": <integer value 0|1 to specify if public or non-public>,
"project_list": [
<list of project IDs on which backup target type will be assigned>
],
"metadata": [
<list of dictionaries of key-value pairs>
{
"key":<meta-key>,
"value":<meta-value>
},
]
}
}HTTP/1.1 200 OK
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:25:14 GMT'
'Content-Type': 'application/json'
'Content-Length': '430'
'Connection': 'keep-alive'
'X-Compute-Request-Id': 'req-5730cced-949b-4567-827e-8349a023e716'
{
"backup_target_types": {
"id": "fdae1b10-9852-4c68-8879-64ead0aed31b",
"backup_targets_id": "b39847c8-bf65-4cec-9af6-dd65303ca485",
"created_at": "2024-03-13T10:25:14.000000",
"version": "5.0.204",
"user_id": "2b1189be3add4806bcb7e0c259b03597",
"name": "BTT-name",
"is_default": false,
"description": null,
"is_public": true,
"backup_target_type_projects": [],
"backup_target_type_metadata": [
{
"key": "nfs",
"value": "primary"
}
]
}
}{
"backup_target_type" :
{
"name": <name of the backup target type>,
"description": <description of backup target type>,
"is_default": <integer value 0|1 to specify if default or non-default>,
"is_public": <integer value 0|1 to specify if public or non-public>,
"project_list": [
<list of project IDs on which backup target type will be assigned>
],
"purge_projects": True <if all the assigned projects need to be purged>,
"metadata": [
<list of dictionaries of key-value pairs>
{
"key":<meta-key>,
"value":<meta-value>
},
],
"purge_metadata": True <if all the metadata needs to be purged>
}
}HTTP/1.1 200 OK
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:27:49 GMT'
'Content-Type': 'application/json'
'Content-Length': '432'
'Connection': 'keep-alive'
'X-Compute-Request-Id': 'req-14b39336-bc84-414a-a128-1d71e7dfb3cf'
{
"backup_target_types": {
"id": "fdae1b10-9852-4c68-8879-64ead0aed31b",
"backup_targets_id": "b39847c8-bf65-4cec-9af6-dd65303ca485",
"created_at": "2024-03-13T10:25:14.000000",
"version": "5.0.204",
"user_id": "2b1189be3add4806bcb7e0c259b03597",
"name": "BTT-name",
"is_default": false,
"description": null,
"is_public": true,
"backup_target_type_projects": [],
"backup_target_type_metadata": [
{
"key": "nfs",
"value": "secondary"
}
]
}
}{
"backup_target_type" :
{
"backup_target_type_id": <ID of the backup target type>,
"project_list": [
<list of project IDs on which backup target type will be assigned>
]
}
}HTTP/1.1 200 OK
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:40:05 GMT'
'Content-Type': 'application/json'
'Content-Length': '921'
'Connection': 'keep-alive'
'X-Compute-Request-Id': 'req-3dd1a577-ef35-4997-83bc-b2e50afaea73'
{
"backup_target_types": {
"id": "c65625c1-50bf-4ab7-aa26-f625001e60f1",
"backup_targets_id": "2af5f2db-3267-453f-bc57-19884837e274",
"created_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"user_id": "2b1189be3add4806bcb7e0c259b03597",
"name": "s3_2",
"is_default": false,
"description": null,
"is_public": false,
"backup_target_type_projects": [
{
"created_at": "2024-03-13T10:39:14.000000",
"updated_at": null,
"version": "5.0.204",
"id": "5a14a688-e16f-45f9-91c2-6906fb200825",
"backup_target_types_id": "c65625c1-50bf-4ab7-aa26-f625001e60f1",
"project_id": "fc439373e340459fb28202b2412e26c0"
},
{
"created_at": "2024-03-13T10:40:05.000000",
"updated_at": null,
"version": "5.0.204",
"id": "c0fe123e-e566-465a-acf2-56e2b27ae9b2",
"backup_target_types_id": "c65625c1-50bf-4ab7-aa26-f625001e60f1",
"project_id": "0920f871077c4c079057ce940d8105a8"
}
],
"backup_target_type_metadata": [
{
"key": "dg1",
"value": "dg2"
}
]
}
}{
"backup_target_type" :
{
"backup_target_type_id": <ID of the backup target type>,
"project_list": [
<list of assigned project IDs that need to be unassigned>
]
}
}HTTP/1.1 200 OK
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:41:18 GMT'
'Content-Type': 'application/json'
'Content-Length': '671'
'Connection': 'keep-alive'
'X-Compute-Request-Id': 'req-9dfe92c3-5b82-4062-98b0-b00294147b02'
{
"backup_target_types": {
"id": "c65625c1-50bf-4ab7-aa26-f625001e60f1",
"backup_targets_id": "2af5f2db-3267-453f-bc57-19884837e274",
"created_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"user_id": "2b1189be3add4806bcb7e0c259b03597",
"name": "s3_2",
"is_default": false,
"description": null,
"is_public": false,
"backup_target_type_projects": [
{
"created_at": "2024-03-13T10:39:14.000000",
"updated_at": null,
"version": "5.0.204",
"id": "5a14a688-e16f-45f9-91c2-6906fb200825",
"backup_target_types_id": "c65625c1-50bf-4ab7-aa26-f625001e60f1",
"project_id": "fc439373e340459fb28202b2412e26c0"
}
],
"backup_target_type_metadata": [
{
"key": "dg1",
"value": "dg2"
}
]
}
}{
"backup_target_type" :
{
"backup_target_type_id": <ID of the backup target type>,
"metadata": [
<list of dictionaries of key-value pairs to be added>
{
"key":<meta-key>,
"value":<meta-value>
},
],
}
}HTTP/1.1 200 OK
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:44:02 GMT'
'Content-Type': 'application/json'
'Content-Length': '671'
'Connection': 'keep-alive'
'X-Compute-Request-Id': 'req-870c776b-a160-41c0-82a9-d9e7131d77aa'
{
"backup_target_types": {
"id": "c65625c1-50bf-4ab7-aa26-f625001e60f1",
"backup_targets_id": "2af5f2db-3267-453f-bc57-19884837e274",
"created_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"user_id": "2b1189be3add4806bcb7e0c259b03597",
"name": "s3_2",
"is_default": false,
"description": null,
"is_public": false,
"backup_target_type_projects": [
{
"created_at": "2024-03-13T10:39:14.000000",
"updated_at": null,
"version": "5.0.204",
"id": "5a14a688-e16f-45f9-91c2-6906fb200825",
"backup_target_types_id": "c65625c1-50bf-4ab7-aa26-f625001e60f1",
"project_id": "fc439373e340459fb28202b2412e26c0"
}
],
"backup_target_type_metadata": [
{
"key": "dg1",
"value": "dg2"
}
]
}
}{
"backup_target_type" :
{
"backup_target_type_id": <ID of the backup target type>,
"metadata": [
<list of dictionaries of key-value pairs to be removed>
{
"key":<meta-key>,
"value":<meta-value>
},
],
}
}HTTP/1.1 200 OK
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:45:12 GMT'
'Content-Type': 'application/json'
'Content-Length': '671'
'Connection': 'keep-alive'
'X-Compute-Request-Id': 'req-7eee91f0-d02f-41e1-afe2-ebcbe39aaa85'
{
"backup_target_types": {
"id": "c65625c1-50bf-4ab7-aa26-f625001e60f1",
"backup_targets_id": "2af5f2db-3267-453f-bc57-19884837e274",
"created_at": "2024-03-04T10:59:51.000000",
"version": "5.0.204",
"user_id": "2b1189be3add4806bcb7e0c259b03597",
"name": "s3_2",
"is_default": false,
"description": null,
"is_public": false,
"backup_target_type_projects": [
{
"created_at": "2024-03-13T10:39:14.000000",
"updated_at": null,
"version": "5.0.204",
"id": "5a14a688-e16f-45f9-91c2-6906fb200825",
"backup_target_types_id": "c65625c1-50bf-4ab7-aa26-f625001e60f1",
"project_id": "fc439373e340459fb28202b2412e26c0"
}
],
"backup_target_type_metadata": [
{
"key": "dg1",
"value": "dg2"
}
]
}
}HTTP/1.1 202 Accepted
'Server': 'nginx/1.20.1'
'Date': 'Wed, 13 Mar 2024 10:33:02 GMT'
'Content-Type': 'text/html; charset=UTF-8'
'Content-Length': '0'
'Connection': 'keep-alive'HTTP/1.1 200 OK
x-compute-request-id: req-199f171f-b6fe-4172-8408-b069da3cfe19
content-type: application/json
content-length: 7615
date: Wed, 29 Jan 2025 09:39:36 GMT
{
"policy_list": [
{
"id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": "2025-01-29T09:45:47.000000",
"status": "available",
"name": "policy_api",
"description": "",
"metadata": [
],
"field_values": [
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "134a9886-1621-4c51-9951-456b8ed578af",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "start_time",
"value": "12:00 AM"
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "3de049cf-cb50-4c7f-82ff-88b5c256251f",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "daily",
"value": {
"backup_time": "['01:00']",
"retention": 7,
"snapshot_type": "incremental"
}
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "5e7146cc-4bdf-4b69-8c0d-77146b9b432c",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "yearly",
"value": {
"backup_month": "['mar']",
"retention": 1,
"snapshot_type": "full"
}
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "6ecaea3d-206d-4083-8d00-8fdea340d198",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "manual",
"value": {
"retention": 30
}
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "7f85955b-2079-4408-95a4-339e235526a9",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "retentionmanual",
"value": {
"retentionmanual": 30
}
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "b87eb463-2ed1-4869-92bc-256d09767d4d",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "monthly",
"value": {
"month_backup_day": "['3']",
"retention": 12,
"snapshot_type": "full"
}
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "ce970c60-b38d-4b6a-82d6-a2d1b9948947",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "hourly",
"value": {
"interval": "1",
"retention": 3,
"snapshot_type": "incremental"
}
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "fadef33d-9565-47f2-8180-37fadd967203",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "weekly",
"value": {
"backup_day": "['mon']",
"retention": 7,
"snapshot_type": "full"
}
}
]
}
]
}HTTP/1.1 200 OK
x-compute-request-id: req-d4ffd8c4-5f20-4b74-bba9-9243964b0a61
content-type: application/json
content-length: 3237
date: Wed, 29 Jan 2025 09:54:09 GMT
{
"policy": {
"id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": "2025-01-29T09:45:47.000000",
"user_id": "6bbb210a29a043af86b7b0c667747187",
"project_id": "dee550d3df5b497ca2e05044616bc8b1",
"status": "available",
"name": "policy_api",
"description": "",
"field_values": [
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "134a9886-1621-4c51-9951-456b8ed578af",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "start_time",
"value": "12:00 AM"
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "3de049cf-cb50-4c7f-82ff-88b5c256251f",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "daily",
"value": {
"backup_time": "['01:00']",
"retention": 7,
"snapshot_type": "incremental"
}
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "5e7146cc-4bdf-4b69-8c0d-77146b9b432c",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "yearly",
"value": {
"backup_month": "['mar']",
"retention": 1,
"snapshot_type": "full"
}
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "6ecaea3d-206d-4083-8d00-8fdea340d198",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "manual",
"value": {
"retention": 30
}
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "7f85955b-2079-4408-95a4-339e235526a9",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "retentionmanual",
"value": {
"retentionmanual": 30
}
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "b87eb463-2ed1-4869-92bc-256d09767d4d",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "monthly",
"value": {
"month_backup_day": "['3']",
"retention": 12,
"snapshot_type": "full"
}
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "ce970c60-b38d-4b6a-82d6-a2d1b9948947",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "hourly",
"value": {
"interval": "1",
"retention": 3,
"snapshot_type": "incremental"
}
},
{
"created_at": "2025-01-29T09:45:47.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "fadef33d-9565-47f2-8180-37fadd967203",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"policy_field_name": "weekly",
"value": {
"backup_day": "['mon']",
"retention": 7,
"snapshot_type": "full"
}
}
],
"metadata": [
],
"policy_assignments": [
{
"created_at": "2025-01-29T09:49:41.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "0d3f0678-4cce-4e6c-9b47-9a4a475a9b1d",
"policy_id": "d29cb349-1953-405d-8f16-301da9c7bc84",
"project_id": "dee550d3df5b497ca2e05044616bc8b1",
"policy_name": "policy_api",
"project_name": "cloudproject"
}
]
}
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Tue, 17 Nov 2020 09:14:01 GMT
Content-Type: application/json
Content-Length: 338
Connection: keep-alive
X-Compute-Request-Id: req-57175488-d267-4dcb-90b5-f239d8b02fe2
{
"policies":[
{
"created_at":"2020-10-29T15:39:13.000000",
"updated_at":null,
"deleted_at":null,
"deleted":false,
"version":"4.0.115",
"id":"8b4a6236-63f1-4e2d-b8d1-23b37f4b4346",
"policy_id":"b79aa5f3-405b-4da4-96e2-893abf7cb5fd",
"project_id":"c76b3355a164498aa95ddbc960adc238",
"policy_name":"Gold",
"project_name":"robert"
}
]
}{
"workload_policy": {
"display_name": "Api_policy_test",
"display_description": "No description",
"field_values": {
"start_time": "10:00 AM",
"hourly": {
"interval": "1",
"retention": "2",
"snapshot_type": "incremental"
},
"daily": {
"backup_time": [
"11:40"
],
"retention": "2",
"snapshot_type": "incremental"
},
"weekly": {
"backup_day": [
"fri"
],
"retention": "2",
"snapshot_type": "incremental"
},
"monthly": {
"month_backup_day": [
"1"
],
"snapshot_type": "full"
},
"yearly": {
"backup_month": [
"mar"
],
"retention": "1",
"snapshot_type": "full"
},
"manual": {
"retention": "4",
},
"retentionmanual": {
"retentionmanual": "5",
},
},
"metadata": {
}
}
}HTTP/1.1 200 OK
x-compute-request-id: req-538517fb-aca0-4abc-9dc7-ef1ee2af1cd7
content-type: application/json
content-length: 2943
date: Wed, 29 Jan 2025 10:23:38 GMT
{
"policy": {
"id": "43885a4d-f9c6-42fd-a8c4-2d1816dbd88d",
"created_at": "2025-01-29T10:23:38.000000",
"updated_at": "2025-01-29T10:23:38.000000",
"status": "available",
"name": "Api_policy_test",
"description": "No description",
"metadata": [
],
"field_values": [
{
"created_at": "2025-01-29T10:23:38.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "0492f64e-175b-4f8f-91da-e5986d3b9118",
"policy_id": "43885a4d-f9c6-42fd-a8c4-2d1816dbd88d",
"policy_field_name": "retentionmanual",
"value": "V4\np0\n."
},
{
"created_at": "2025-01-29T10:23:38.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "273f07f1-fa68-4883-aec9-8873095d9e0e",
"policy_id": "43885a4d-f9c6-42fd-a8c4-2d1816dbd88d",
"policy_field_name": "daily",
"value": "(dp0\nVbackup_time\np1\n(lp2\nV11:40\np3\nasVretention\np4\nV2\np5\nsVsnapshot_type\np6\nVincremental\np7\ns."
},
{
"created_at": "2025-01-29T10:23:38.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "31432273-d82c-4ea7-802b-353f818b6926",
"policy_id": "43885a4d-f9c6-42fd-a8c4-2d1816dbd88d",
"policy_field_name": "yearly",
"value": "(dp0\nVbackup_month\np1\n(lp2\nVmar\np3\nasVretention\np4\nV1\np5\nsVsnapshot_type\np6\nVfull\np7\ns."
},
{
"created_at": "2025-01-29T10:23:38.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "4ef22bcd-4b7a-4dac-9c02-1b41ec443778",
"policy_id": "43885a4d-f9c6-42fd-a8c4-2d1816dbd88d",
"policy_field_name": "start_time",
"value": "V10:00 AM\np0\n."
},
{
"created_at": "2025-01-29T10:23:38.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "58a13a57-08c3-457a-a912-0217c58ac351",
"policy_id": "43885a4d-f9c6-42fd-a8c4-2d1816dbd88d",
"policy_field_name": "hourly",
"value": "(dp0\nVinterval\np1\nV1\np2\nsVretention\np3\nV2\np4\nsVsnapshot_type\np5\nVincremental\np6\ns."
},
{
"created_at": "2025-01-29T10:23:38.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "979fc748-9434-46a8-8613-24524954ba6e",
"policy_id": "43885a4d-f9c6-42fd-a8c4-2d1816dbd88d",
"policy_field_name": "weekly",
"value": "(dp0\nVbackup_day\np1\n(lp2\nVfri\np3\nasVretention\np4\nV2\np5\nsVsnapshot_type\np6\nVincremental\np7\ns."
},
{
"created_at": "2025-01-29T10:23:38.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "cad874e6-9960-47a8-a904-25161c7704ee",
"policy_id": "43885a4d-f9c6-42fd-a8c4-2d1816dbd88d",
"policy_field_name": "monthly",
"value": "(dp0\nVmonth_backup_day\np1\n(lp2\nV1\np3\nasVsnapshot_type\np4\nVfull\np5\ns."
},
{
"created_at": "2025-01-29T10:23:38.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "f1670b32-0a3b-4876-a51c-14c24f999eab",
"policy_id": "43885a4d-f9c6-42fd-a8c4-2d1816dbd88d",
"policy_field_name": "manual",
"value": "V4\np0\n."
}
]
}
}HTTP/1.1 200 OK
x-compute-request-id: req-9c7473ce-468c-4688-b061-a761258f7c5e
content-type: application/json
content-length: 3013
date: Wed, 29 Jan 2025 10:37:40 GMT
{
"policy": {
"id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": "2025-01-29T10:21:11.000000",
"status": "available",
"name": "Api_update_policy",
"description": "No description",
"metadata": [
],
"field_values": [
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": "2025-01-29T10:36:59.000000",
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "36c76a9b-3599-409c-b226-c59a981d5693",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "hourly",
"value": "(dp0\nVinterval\np1\nV2\np2\nsVretention\np3\ng2\nsVsnapshot_type\np4\nVincremental\np5\ns."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "44633679-9fdf-4fe0-84e9-0e364cee8d02",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "monthly",
"value": "(dp0\nVmonth_backup_day\np1\n(lp2\nV1\np3\nasVsnapshot_type\np4\nVfull\np5\ns."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": "2025-01-29T10:36:59.000000",
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "8ddfa5a3-ae39-4430-a474-9074ea3fefbc",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "daily",
"value": "(dp0\nVbackup_time\np1\n(lp2\nV13:00\np3\nasVretention\np4\nV2\np5\nsVsnapshot_type\np6\nVincremental\np7\ns."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "a0944765-93bd-487a-9dd2-6a756fab1d2f",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "weekly",
"value": "(dp0\nVbackup_day\np1\n(lp2\nVfri\np3\nasVretention\np4\nV2\np5\nsVsnapshot_type\np6\nVincremental\np7\ns."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": "2025-01-29T10:36:59.000000",
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "ac6267c5-1923-4d24-b911-61427acfdc8b",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "start_time",
"value": "V11:00 AM\np0\n."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "b945acdd-3780-4494-b63c-42474ea65c24",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "manual",
"value": "V4\np0\n."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "bad15c15-5eb2-4b4b-8597-639a8c27b947",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "yearly",
"value": "(dp0\nVbackup_month\np1\n(lp2\nVmar\np3\nasVretention\np4\nV1\np5\nsVsnapshot_type\np6\nVfull\np7\ns."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "cf900224-2b21-4b29-9118-99ec3151d891",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "retentionmanual",
"value": "V4\np0\n."
}
]
}
}HTTP/1.1 200 OK
x-compute-request-id: req-a8569cd2-05a2-45ce-bae5-41a214759ff8
content-type: application/json
content-length: 3831
date: Wed, 29 Jan 2025 10:44:56 GMT
{
"policy": {
"id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": "2025-01-29T10:21:11.000000",
"user_id": "6bbb210a29a043af86b7b0c667747187",
"project_id": "dee550d3df5b497ca2e05044616bc8b1",
"status": "available",
"name": "Api_update_policy",
"description": "No description",
"field_values": [
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": "2025-01-29T10:36:59.000000",
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "36c76a9b-3599-409c-b226-c59a981d5693",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "hourly",
"value": "(dp0\nVinterval\np1\nV2\np2\nsVretention\np3\ng2\nsVsnapshot_type\np4\nVincremental\np5\ns."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "44633679-9fdf-4fe0-84e9-0e364cee8d02",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "monthly",
"value": "(dp0\nVmonth_backup_day\np1\n(lp2\nV1\np3\nasVsnapshot_type\np4\nVfull\np5\ns."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": "2025-01-29T10:36:59.000000",
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "8ddfa5a3-ae39-4430-a474-9074ea3fefbc",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "daily",
"value": "(dp0\nVbackup_time\np1\n(lp2\nV13:00\np3\nasVretention\np4\nV2\np5\nsVsnapshot_type\np6\nVincremental\np7\ns."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "a0944765-93bd-487a-9dd2-6a756fab1d2f",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "weekly",
"value": "(dp0\nVbackup_day\np1\n(lp2\nVfri\np3\nasVretention\np4\nV2\np5\nsVsnapshot_type\np6\nVincremental\np7\ns."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": "2025-01-29T10:36:59.000000",
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "ac6267c5-1923-4d24-b911-61427acfdc8b",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "start_time",
"value": "V11:00 AM\np0\n."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "b945acdd-3780-4494-b63c-42474ea65c24",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "manual",
"value": "V4\np0\n."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "bad15c15-5eb2-4b4b-8597-639a8c27b947",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "yearly",
"value": "(dp0\nVbackup_month\np1\n(lp2\nVmar\np3\nasVretention\np4\nV1\np5\nsVsnapshot_type\np6\nVfull\np7\ns."
},
{
"created_at": "2025-01-29T10:21:11.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "cf900224-2b21-4b29-9118-99ec3151d891",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"policy_field_name": "retentionmanual",
"value": "V4\np0\n."
}
],
"metadata": [
],
"policy_assignments": [
{
"created_at": "2025-01-29T10:44:56.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "2177bf6e-116f-45fc-986b-3d20f7084fc7",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"project_id": "c532c5b669304d20bbe9e2157986757c",
"policy_name": "Api_update_policy",
"project_name": "AP_test"
},
{
"created_at": "2025-01-29T10:43:28.000000",
"updated_at": null,
"deleted_at": null,
"deleted": false,
"version": "6.0.20",
"id": "5b3b445b-e4dd-4381-84cd-c90bdda0b7e7",
"policy_id": "d3b638c6-d26e-4949-8493-18a4df3123bf",
"project_id": "dee550d3df5b497ca2e05044616bc8b1",
"policy_name": "Api_update_policy",
"project_name": "cloudproject"
}
]
},
"failed_ids": [
]
}HTTP/1.1 202 Accepted
Server: nginx/1.16.1
Date: Tue, 17 Nov 2020 09:56:03 GMT
Content-Type: text/html; charset=UTF-8
Content-Length: 0
Connection: keep-alive{
"workload_policy":{
"field_values":{
"start_time": "<Time format: HH:MM AM/PM>",
"hourly": {
"interval": "<1, 2, 3, 4, 6, 12, 24 hours>",
"retention": "<Integer>",
"snapshot_type": "incremental/full"
},
"daily": {
"backup_time": [
"<HH:MM 24-hour format>"
],
"retention": "<Integer>",
"snapshot_type": "incremental/full"
},
"weekly": {
"backup_day": [
"<mon, tue, wed, thu, fri, sat, sun>"
],
"retention": "<Integer>",
"snapshot_type": "full"
},
"monthly": {
"month_backup_day": [
"<Integer: day of the month (1-31)>"
],
"retention": "<Integer>",
"snapshot_type": "full"
},
"yearly": {
"backup_month": [
"<jan, feb, mar, ... dec>"
],
"retention": "<Integer>",
"snapshot_type": "full"
},
"manual": {
"retention": "<Integer>"
},
"retentionmanual": {
"retentionmanual": "<Integer>"
}
},
"display_name":"<String>",
"display_description":"<String>",
"metadata":{
<key>:<value>
}
}
}{
"workload_policy":{
"field_values":{
"start_time": "<Time format: HH:MM AM/PM>",
"hourly": {
"interval": "<1, 2, 3, 4, 6, 12, 24 hours>",
"retention": "<Integer>",
"snapshot_type": "incremental/full"
},
"daily": {
"backup_time": [
"<HH:MM 24-hour format>"
],
"retention": "<Integer>",
"snapshot_type": "incremental/full"
},
"weekly": {
"backup_day": [
"<mon, tue, wed, thu, fri, sat, sun>"
],
"retention": "<Integer>",
"snapshot_type": "full"
},
"monthly": {
"month_backup_day": [
"<Integer: day of the month (1-31)>"
],
"retention": "<Integer>",
"snapshot_type": "full"
},
"yearly": {
"backup_month": [
"<jan, feb, mar, ... dec>"
],
"retention": "<Integer>",
"snapshot_type": "full"
},
"manual": {
"retention": "<Integer>"
},
"retentionmanual": {
"retentionmanual": "<Integer>"
}
},
"display_name":"<String>",
"display_description":"<String>",
"metadata":{
<key>:<value>
}
}
}{
"policy":{
"remove_projects":[
"<project_id>"
],
"add_projects":[
"<project_id>"
]
}
}HTTP/1.1 200 OK
Server: nginx/1.16.1
Date: Thu, 29 Oct 2020 14:55:40 GMT
Content-Type: application/json
Content-Length: 3480
Connection: keep-alive
X-Compute-Request-Id: req-a2e49b7e-ce0f-4dcb-9e61-c5a4756d9948
{
"workloads":[
{
"project_id":"4dfe98a43bfa404785a812020066b4d6",
"user_id":"adfa32d7746a4341b27377d6f7c61adb",
"id":"8ee7a61d-a051-44a7-b633-b495e6f8fc1d",
"name":"worklaod1",
"snapshots_info":"",
"description":"no-description",
"workload_type_id":"f82ce76f-17fe-438b-aa37-7a023058e50d",
"status":"available",
"created_at":"2020-10-26T12:07:01.000000",
"updated_at":"2020-10-29T12:22:26.000000",
"scheduler_trust":null,
"links":[
{
"rel":"self",
"href":"http://wlm_backend/v1/4dfe98a43bfa404785a812020066b4d6/workloads/8ee7a61d-a051-44a7-b633-b495e6f8fc1d"
},
{
"rel":"bookmark",
"href":"http://wlm_backend/4dfe98a43bfa404785a812020066b4d6/workloads/8ee7a61d-a051-44a7-b633-b495e6f8fc1d"
}
]
},
{
"project_id":"4dfe98a43bfa404785a812020066b4d6",
"user_id":"adfa32d7746a4341b27377d6f7c61adb",
"id":"a90d002a-85e4-44d1-96ac-7ffc5d0a5a84",
"name":"workload2",
"snapshots_info":"",
"description":"no-description",
"workload_type_id":"f82ce76f-17fe-438b-aa37-7a023058e50d",
"status":"available",
"created_at":"2020-10-20T09:51:15.000000",
"updated_at":"2020-10-29T10:03:33.000000",
"scheduler_trust":null,
"links":[
{
"rel":"self",
"href":"http://wlm_backend/v1/4dfe98a43bfa404785a812020066b4d6/workloads/a90d002a-85e4-44d1-96ac-7ffc5d0a5a84"
},
{
"rel":"bookmark",
"href":"http://wlm_backend/4dfe98a43bfa404785a812020066b4d6/workloads/a90d002a-85e4-44d1-96ac-7ffc5d0a5a84"
}
]
}
]
}hourly
daily
weekly
monthly
yearly{
"workload": {
"name": "workload_cli",
"description": null,
"source_platform": null,
"instances": [
{
"instance-id": "14309d25-23dd-47da-bf60-febc8c25b636"
}
],
"jobschedule": {
"start_date": "01/28/2025",
"enabled": true,
"start_time": "02:15 PM",
"timezone": "Etc/UTC",
"hourly": {
"interval": 1,
"retention": 2,
"snapshot_type": "incremental"
},
"daily": {
"backup_time": [
"14:15"
],
"retention": 2,
"snapshot_type": "incremental"
},
"weekly": {
"backup_day": [
"wed"
],
"retention": 2,
"snapshot_type": "full"
},
"monthly": {
"month_backup_day": [
20
],
"retention": 2,
"snapshot_type": "full"
},
"yearly": {
"backup_month": [
"mar"
],
"retention": 1,
"snapshot_type": "full"
},
"manual": {
"retention": 21
}
},
"metadata": {},
"encryption": false,
"secret_uuid": null,
"backup_target_types": "6ba9fd82-151b-4f5a-bdbf-44504c2e210e"
}
}
HTTP/1.1 202 Accepted
Server: nginx/1.16.1
Date: Thu, 29 Oct 2020 15:42:02 GMT
Content-Type: application/json
Content-Length: 703
Connection: keep-alive
X-Compute-Request-Id: req-443b9dea-36e6-4721-a11b-4dce3c651ede
{
"workload":{
"project_id":"c76b3355a164498aa95ddbc960adc238",
"user_id":"ccddc7e7a015487fa02920f4d4979779",
"id":"c4e3aeeb-7d87-4c49-99ed-677e51ba715e",
"name":"API created",
"snapshots_info":"",
"description":"API description",
"workload_type_id":"f82ce76f-17fe-438b-aa37-7a023058e50d",
"status":"creating",
"created_at":"2020-10-29T15:42:01.000000",
"updated_at":"2020-10-29T15:42:01.000000",
"scheduler_trust":null,
"links":[
{
"rel":"self",
"href":"http://wlm_backend/v1/c76b3355a164498aa95ddbc960adc238/workloads/c4e3aeeb-7d87-4c49-99ed-677e51ba715e"
},
{
"rel":"bookmark",
"href":"http://wlm_backend/c76b3355a164498aa95ddbc960adc238/workloads/c4e3aeeb-7d87-4c49-99ed-677e51ba715e"
}
]
}
}HTTP/1.1 200 OK
x-compute-request-id: req-3bd5dd1e-3064-4859-a530-7a5bf9f4278f
content-type: application/json
content-length: 1941
date: Wed, 29 Jan 2025 06:28:36 GMT
{
"workload": {
"created_at": "2025-01-28T12:23:49.000000",
"updated_at": "2025-01-28T14:15:06.000000",
"id": "4ddf0e47-618d-47d1-9f2f-48f342e1d9af",
"encryption": false,
"secret_uuid": null,
"user_id": "6bbb210a29a043af86b7b0c667747187",
"project_id": "dee550d3df5b497ca2e05044616bc8b1",
"availability_zone": "nova",
"workload_type_id": "f82ce76f-17fe-438b-aa37-7a023058e50d",
"name": "workload_API",
"description": "no-description",
"interval": null,
"storage_usage": {
"usage": 0,
"full": {
"snap_count": 0,
"usage": 0
},
"incremental": {
"snap_count": 1,
"usage": 0
}
},
"instances": [
{
"id": "14309d25-23dd-47da-bf60-febc8c25b636",
"name": "PM",
"metadata": {}
}
],
"metadata": {
"hostnames": "[]",
"preferredgroup": "[]",
"workload_approx_backup_size": "2.1",
"backup_media_target": "192.168.1.34:/mnt/tvault/42436",
"backup_target_types": "nfs_1",
"backup_target_type": "nfs_1"
},
"jobschedule": {
"start_date": "01/28/2025",
"enabled": true,
"start_time": "02:15 PM",
"hourly": {
"interval": "1",
"retention": "2",
"snapshot_type": "incremental"
},
"daily": {
"backup_time": ["14:15"],
"retention": "2",
"snapshot_type": "incremental"
},
"weekly": {
"backup_day": ["wed"],
"retention": "2",
"snapshot_type": "full"
},
"monthly": {
"month_backup_day": ["20"],
"retention": "2",
"snapshot_type": "full"
},
"yearly": {
"backup_month": ["mar"],
"retention": "1",
"snapshot_type": "full"
},
"manual": {
"retention": "21"
},
"retentionmanual": {
"retentionmanual": "5"
},
"timezone": "UTC",
"global_jobscheduler": true,
"nextrun": 2783.561769
},
"status": "locked",
"error_msg": null,
"links": [
{
"rel": "self",
"href": "http://kolla-external-wallaby-dev4.triliodata.demo:8781/v1/dee550d3df5b497ca2e05044616bc8b1/workloads/4ddf0e47-618d-47d1-9f2f-48f342e1d9af"
},
{
"rel": "bookmark",
"href": "http://kolla-external-wallaby-dev4.triliodata.demo:8781/dee550d3df5b497ca2e05044616bc8b1/workloads/4ddf0e47-618d-47d1-9f2f-48f342e1d9af"
}
],
"scheduler_trust": null,
"policy_id": null
}
}
hourly
daily
weekly
monthly
yearlyHTTP/1.1 202 Accepted
Server: nginx/1.16.1
Date: Mon, 02 Nov 2020 12:31:42 GMT
Content-Type: application/json
Content-Length: 0
Connection: keep-alive
X-Compute-Request-Id: req-674a5d71-4aeb-4f99-90ce-7e8d3158d137HTTP/1.1 202 Accepted
Server: nginx/1.16.1
Date: Mon, 02 Nov 2020 13:31:00 GMT
Content-Type: text/html; charset=UTF-8
Content-Length: 0
Connection: keep-aliveHTTP/1.1 202 Accepted
Server: nginx/1.16.1
Date: Mon, 02 Nov 2020 13:41:55 GMT
Content-Type: text/html; charset=UTF-8
Content-Length: 0
Connection: keep-aliveHTTP/1.1 202 Accepted
Server: nginx/1.16.1
Date: Mon, 02 Nov 2020 13:52:30 GMT
Content-Type: text/html; charset=UTF-8
Content-Length: 0
Connection: keep-alive{
"workload": {
"name": "<name of the Workload>",
"description": "<description of workload>",
"workload_type_id": "<ID of the chosen Workload Type>",
"source_platform": "openstack",
"instances": [
{
"instance-id": "<Instance ID>"
},
{
"instance-id": "<Instance ID>"
}
],
"jobschedule": {
"timezone": "<timezone>",
"start_date": "<Date format: MM/DD/YYYY>",
"end_date": "<Date format: MM/DD/YYYY>",
"start_time": "<Time format: HH:MM AM/PM>",
"enabled": "<True/False>",
"hourly": {
"interval": "<1, 2, 3, 4, 6, 12, 24 hours>",
"retention": "<Integer>",
"snapshot_type": "incremental/full"
},
"daily": {
"backup_time": [
"<HH:MM 24-hour format>"
],
"retention": "<Integer>",
"snapshot_type": "incremental/full"
},
"weekly": {
"backup_day": [
"<mon, tue, wed, thu, fri, sat, sun>"
],
"retention": "<Integer>",
"snapshot_type": "full"
},
"monthly": {
"month_backup_day": [
"<Integer: day of the month (1-31)>"
],
"retention": "<Integer>",
"snapshot_type": "full"
},
"yearly": {
"backup_month": [
"<jan, feb, mar, ... dec>"
],
"retention": "<Integer>",
"snapshot_type": "full"
},
"manual": {
"retention": "<Integer>"
},
"retentionmanual": {
"retentionmanual": "<Integer>"
}
},
"metadata": {
"<key>": "<value>",
"policy_id": "<policy_id>"
},
"backup_target_types": "<backup_target_type_id>"
}
}
{
"workload": {
"name": "<name of the Workload>",
"description": "<description of workload>",
"instances": [
{
"instance-id": "<Instance ID>"
},
{
"instance-id": "<Instance ID>"
}
],
"jobschedule": {
"timezone": "<timezone>",
"start_date": "<Date format: MM/DD/YYYY>",
"end_date": "<Date format: MM/DD/YYYY>",
"start_time": "<Time format: HH:MM AM/PM>",
"enabled": "<True/False>",
"hourly": {
"interval": "<1, 2, 3, 4, 6, 12, 24 hours>",
"retention": "<Integer>",
"snapshot_type": "incremental/full"
},
"daily": {
"backup_time": [
"<HH:MM 24-hour format>"
],
"retention": "<Integer>",
"snapshot_type": "incremental/full"
},
"weekly": {
"backup_day": [
"<mon, tue, wed, thu, fri, sat, sun>"
],
"retention": "<Integer>",
"snapshot_type": "full"
},
"monthly": {
"month_backup_day": [
"<Integer: day of the month (1-31)>"
],
"retention": "<Integer>",
"snapshot_type": "full"
},
"yearly": {
"backup_month": [
"<jan, feb, mar, ... dec>"
],
"retention": "<Integer>",
"snapshot_type": "full"
},
"manual": {
"retention": "<Integer>"
},
"retentionmanual": {
"retentionmanual": "<Integer>"
}
},
"metadata": {
"<key>": "<value>",
"policy_id": "<policy_id>"
}
}
}