[Pacemaker] More issues in deleting cloned resources

K Mehta kiranmehta1981 at gmail.com
Tue May 20 15:36:51 UTC 2014

Summary of what is pasted below: "pcs resource delete" on the master/slave resource fails with a CIB schema error, "pcs resource unclone" reports it cannot find the resource, and deleting the underlying primitive succeeds but leaves an ORPHANED copy showing in pcs status. Package versions and the full output follow.

[root@vsan-test2 ~]# rpm -qa | grep pcs
pcs-0.9.26-10.el6.noarch
[root@vsan-test2 ~]# rpm -qa | grep ccs
ccs-0.16.2-63.el6.x86_64
[root@vsan-test2 ~]# rpm -qa | grep pace
pacemaker-libs-1.1.8-7.el6.x86_64
pacemaker-cli-1.1.8-7.el6.x86_64
pacemaker-1.1.8-7.el6.x86_64
pacemaker-cluster-libs-1.1.8-7.el6.x86_64
[root@vsan-test2 ~]# rpm -qa | grep coro
corosync-1.4.1-15.el6.x86_64
corosynclib-1.4.1-15.el6.x86_64
[root@vsan-test2 ~]# rpm -qa | grep cman
cman-3.0.12.1-49.el6.x86_64
[root@vsan-test2 ~]# cat /etc/redhat-release
Red Hat Enterprise Linux Server release 6.4 (Santiago)
[root@vsan-test2 ~]# uname -a
Linux vsan-test2 2.6.32-358.el6.x86_64 #1 SMP Tue Jan 29 11:47:41 EST 2013
x86_64 x86_64 x86_64 GNU/Linux

[root@vsan-test2 ~]# pcs status
Last updated: Tue May 20 08:30:47 2014
Last change: Tue May 20 08:30:43 2014 via cibadmin on vsan-test2
Stack: cman
Current DC: vsan-test2 - partition with quorum
Version: 1.1.8-7.el6-394e906
2 Nodes configured, unknown expected votes
2 Resources configured.


Online: [ vsan-test1 vsan-test2 ]

Full list of resources:

 Master/Slave Set: ms-3e6a5f58-d95e-461f-a376-a5064cecf144 [vha-3e6a5f58-d95e-461f-a376-a5064cecf144]
     Masters: [ vsan-test1 ]
     Slaves: [ vsan-test2 ]


[root@vsan-test2 ~]# pcs config
Corosync Nodes:

Pacemaker Nodes:
 vsan-test1 vsan-test2

Resources:

Location Constraints:
  Resource: vha-3e6a5f58-d95e-461f-a376-a5064cecf144
    Enabled on: vsan-test2
    Enabled on: vsan-test1
  Resource: ms-3e6a5f58-d95e-461f-a376-a5064cecf144
    Enabled on: vsan-test2
    Enabled on: vsan-test1
Ordering Constraints:
Colocation Constraints:

Cluster Properties:
 dc-version: 1.1.8-7.el6-394e906
 cluster-infrastructure: cman
 last-lrm-refresh: 1400492826
 stonith-enabled: false
 no-quorum-policy: ignore
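
(As an aside, "Resources:" is empty in the pcs config output above even though the master/slave set clearly exists; I do not know whether that is expected with pcs 0.9.26. The resources section of the CIB can be dumped directly to double-check, e.g.:

  # print only the <resources> section of the live CIB
  cibadmin --query --scope resources
)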




[root@vsan-test2 ~]# pcs resource delete ms-3e6a5f58-d95e-461f-a376-a5064cecf144
ERROR: Unable to update cib
Call cib_replace failed (-1003): Update does not conform to the configured schema
<cib admin_epoch="0" cib-last-written="Tue May 20 08:30:43 2014"
crm_feature_set="3.0.7" dc-uuid="vsan-test2" epoch="7893" have-quorum="1"
num_updates="1" update-client="cibadmin" update-origin="vsan-test2"
validate-with="pacemaker-1.2">
  <configuration>
    <crm_config>
      <cluster_property_set id="cib-bootstrap-options">
        <nvpair id="cib-bootstrap-options-dc-version" name="dc-version"
value="1.1.8-7.el6-394e906"/>
        <nvpair id="cib-bootstrap-options-cluster-infrastructure"
name="cluster-infrastructure" value="cman"/>
        <nvpair id="cib-bootstrap-options-last-lrm-refresh"
name="last-lrm-refresh" value="1400492826"/>
        <nvpair id="cib-bootstrap-options-stonith-enabled"
name="stonith-enabled" value="false"/>
        <nvpair id="cib-bootstrap-options-no-quorum-policy"
name="no-quorum-policy" value="ignore"/>
      </cluster_property_set>
    </crm_config>
    <nodes>
      <node id="vsan-test1" uname="vsan-test1"/>
      <node id="vsan-test2" uname="vsan-test2"/>
    </nodes>
    <resources/>
    <constraints>
      <rsc_location
id="location-vha-3e6a5f58-d95e-461f-a376-a5064cecf144-vsan-test2-INFINITY"
node="vsan-test2" rsc="vha-3e6a5f58-d95e-461f-a376-a5064cecf144"
score="INFINITY"/>
      <rsc_location
id="location-vha-3e6a5f58-d95e-461f-a376-a5064cecf144-vsan-test1-INFINITY"
node="vsan-test1" rsc="vha-3e6a5f58-d95e-461f-a376-a5064cecf144"
score="INFINITY"/>
      <rsc_location
id="location-ms-3e6a5f58-d95e-461f-a376-a5064cecf144-vsan-test2-INFINITY"
node="vsan-test2" rsc="ms-3e6a5f58-d95e-461f-a376-a5064cecf144"
score="INFINITY"/>
      <rsc_location
id="location-ms-3e6a5f58-d95e-461f-a376-a5064cecf144-vsan-test1-INFINITY"
node="vsan-test1" rsc="ms-3e6a5f58-d95e-461f-a376-a5064cecf144"
score="INFINITY"/>
    </constraints>
    <rsc_defaults>
      <meta_attributes id="rsc_defaults-options">
        <nvpair id="rsc_defaults-options-resource-stickiness"
name="resource-stickiness" value="100"/>
      </meta_attributes>
    </rsc_defaults>
  </configuration>
  <status>
    <node_state crm-debug-origin="do_update_resource" crmd="online"
expected="member" id="vsan-test2" in_ccm="true" join="member"
uname="vsan-test2">
      <transient_attributes id="vsan-test2">
        <instance_attributes id="status-vsan-test2">
          <nvpair id="status-vsan-test2-probe_complete"
name="probe_complete" value="true"/>
          <nvpair
id="status-vsan-test2-master-vha-3e6a5f58-d95e-461f-a376-a5064cecf144"
name="master-vha-3e6a5f58-d95e-461f-a376-a5064cecf144" value="3"/>
        </instance_attributes>
      </transient_attributes>
      <lrm id="vsan-test2">
        <lrm_resources>
          <lrm_resource id="vha-3e6a5f58-d95e-461f-a376-a5064cecf144"
type="vgc-cm-agent.ocf" class="ocf" provider="heartbeat">
            <lrm_rsc_op
id="vha-3e6a5f58-d95e-461f-a376-a5064cecf144_last_0"
operation_key="vha-3e6a5f58-d95e-461f-a376-a5064cecf144_start_0"
operation="start" crm-debug-origin="do_update_resource"
crm_feature_set="3.0.7"
transition-key="9:1:0:5916460b-c0c3-4bd0-9e9a-845dc8745f2c"
transition-magic="0:0;9:1:0:5916460b-c0c3-4bd0-9e9a-845dc8745f2c"
call-id="9" rc-code="0" op-status="0" interval="0" last-run="1400599846"
last-rc-change="0" exec-time="108" queue-time="0"
op-digest="7bb186e694658d06bedbfcf9fb962098"/>
            <lrm_rsc_op
id="vha-3e6a5f58-d95e-461f-a376-a5064cecf144_monitor_31000"
operation_key="vha-3e6a5f58-d95e-461f-a376-a5064cecf144_monitor_31000"
operation="monitor" crm-debug-origin="do_update_resource"
crm_feature_set="3.0.7"
transition-key="7:2:0:5916460b-c0c3-4bd0-9e9a-845dc8745f2c"
transition-magic="0:0;7:2:0:5916460b-c0c3-4bd0-9e9a-845dc8745f2c"
call-id="12" rc-code="0" op-status="0" interval="31000" last-rc-change="0"
exec-time="57" queue-time="0" op-digest="e3a7ce7334d52de15f8b2480d2d72ad3"/>
          </lrm_resource>
        </lrm_resources>
      </lrm>
    </node_state>
    <node_state crm-debug-origin="do_update_resource" crmd="online"
expected="member" id="vsan-test1" in_ccm="true" join="member"
uname="vsan-test1">
      <transient_attributes id="vsan-test1">
        <instance_attributes id="status-vsan-test1">
          <nvpair id="status-vsan-test1-probe_complete"
name="probe_complete" value="true"/>
          <nvpair
id="status-vsan-test1-master-vha-3e6a5f58-d95e-461f-a376-a5064cecf144"
name="master-vha-3e6a5f58-d95e-461f-a376-a5064cecf144" value="4"/>
        </instance_attributes>
      </transient_attributes>
      <lrm id="vsan-test1">
        <lrm_resources>
          <lrm_resource id="vha-3e6a5f58-d95e-461f-a376-a5064cecf144"
type="vgc-cm-agent.ocf" class="ocf" provider="heartbeat">
            <lrm_rsc_op
id="vha-3e6a5f58-d95e-461f-a376-a5064cecf144_last_0"
operation_key="vha-3e6a5f58-d95e-461f-a376-a5064cecf144_promote_0"
operation="promote" crm-debug-origin="do_update_resource"
crm_feature_set="3.0.7"
transition-key="10:2:0:5916460b-c0c3-4bd0-9e9a-845dc8745f2c"
transition-magic="0:0;10:2:0:5916460b-c0c3-4bd0-9e9a-845dc8745f2c"
call-id="12" rc-code="0" op-status="0" interval="0" last-run="1400599846"
last-rc-change="0" exec-time="171" queue-time="0"
op-digest="7bb186e694658d06bedbfcf9fb962098"/>
          </lrm_resource>
        </lrm_resources>
      </lrm>
    </node_state>
  </status>
</cib>
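
If I read the rejected CIB above correctly, pcs tried to push a configuration with an empty <resources/> section while the four rsc_location constraints still reference the vha-/ms- ids, and that dangling reference is presumably what the pacemaker-1.2 schema rejects (running crm_verify -V -x on the saved XML should point at the offending element). A workaround I am considering (untested here) is to drop the location constraints first and then retry the delete:

  # remove the location constraints that still reference the master/slave resource
  cibadmin --delete --xml-text '<rsc_location id="location-ms-3e6a5f58-d95e-461f-a376-a5064cecf144-vsan-test2-INFINITY"/>'
  cibadmin --delete --xml-text '<rsc_location id="location-ms-3e6a5f58-d95e-461f-a376-a5064cecf144-vsan-test1-INFINITY"/>'
  cibadmin --delete --xml-text '<rsc_location id="location-vha-3e6a5f58-d95e-461f-a376-a5064cecf144-vsan-test2-INFINITY"/>'
  cibadmin --delete --xml-text '<rsc_location id="location-vha-3e6a5f58-d95e-461f-a376-a5064cecf144-vsan-test1-INFINITY"/>'
  # then retry deleting the master/slave resource
  pcs resource delete ms-3e6a5f58-d95e-461f-a376-a5064cecf144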



[root@vsan-test2 ~]# pcs status
Last updated: Tue May 20 08:32:42 2014
Last change: Tue May 20 08:30:43 2014 via cibadmin on vsan-test2
Stack: cman
Current DC: vsan-test2 - partition with quorum
Version: 1.1.8-7.el6-394e906
2 Nodes configured, unknown expected votes
2 Resources configured.


Online: [ vsan-test1 vsan-test2 ]

Full list of resources:

 Master/Slave Set: ms-3e6a5f58-d95e-461f-a376-a5064cecf144 [vha-3e6a5f58-d95e-461f-a376-a5064cecf144]
     Masters: [ vsan-test1 ]
     Slaves: [ vsan-test2 ]

[root@vsan-test2 ~]# pcs resource unclone ms-3e6a5f58-d95e-461f-a376-a5064cecf144
Error: could not find resource or group: ms-3e6a5f58-d95e-461f-a376-a5064cecf144
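
I am not sure unclone is even supposed to handle a master/slave set in pcs 0.9.26; from the error it looks like it only searches primitives and groups, so maybe it wants the inner resource id rather than the ms- wrapper, something like:

  # guess: pass the contained primitive instead of the ms- id (not verified)
  pcs resource unclone vha-3e6a5f58-d95e-461f-a376-a5064cecf144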



[root@vsan-test2 ~]# pcs status
Last updated: Tue May 20 08:33:07 2014
Last change: Tue May 20 08:30:43 2014 via cibadmin on vsan-test2
Stack: cman
Current DC: vsan-test2 - partition with quorum
Version: 1.1.8-7.el6-394e906
2 Nodes configured, unknown expected votes
2 Resources configured.


Online: [ vsan-test1 vsan-test2 ]

Full list of resources:

 Master/Slave Set: ms-3e6a5f58-d95e-461f-a376-a5064cecf144 [vha-3e6a5f58-d95e-461f-a376-a5064cecf144]
     Masters: [ vsan-test1 ]
     Slaves: [ vsan-test2 ]

[root@vsan-test2 ~]# pcs resource delete vha-3e6a5f58-d95e-461f-a376-a5064cecf144
Removing Constraint - location-ms-3e6a5f58-d95e-461f-a376-a5064cecf144-vsan-test2-INFINITY
Removing Constraint - location-ms-3e6a5f58-d95e-461f-a376-a5064cecf144-vsan-test1-INFINITY
Removing Constraint - location-vha-3e6a5f58-d95e-461f-a376-a5064cecf144-vsan-test2-INFINITY
Removing Constraint - location-vha-3e6a5f58-d95e-461f-a376-a5064cecf144-vsan-test1-INFINITY
Deleting Resource - vha-3e6a5f58-d95e-461f-a376-a5064cecf144
[root@vsan-test2 ~]# pcs status
Last updated: Tue May 20 08:33:22 2014
Last change: Tue May 20 08:33:19 2014 via cibadmin on vsan-test2
Stack: cman
Current DC: vsan-test2 - partition with quorum
Version: 1.1.8-7.el6-394e906
2 Nodes configured, unknown expected votes
0 Resources configured.


Online: [ vsan-test1 vsan-test2 ]

Full list of resources:

 vha-3e6a5f58-d95e-461f-a376-a5064cecf144 (ocf::heartbeat:vgc-cm-agent.ocf):       ORPHANED Started vsan-test2
 <<<<<
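
The leftover entry above appears to exist only in the status section (the configuration no longer has any resources), i.e. it is an orphaned LRM history entry rather than a configured resource. I would guess a cleanup clears it, something like:

  # clear the stale operation history for the deleted resource
  crm_resource --cleanup --resource vha-3e6a5f58-d95e-461f-a376-a5064cecf144

and with stop-orphan-resources at its default of true, pacemaker should stop the orphaned instance on its own.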




Regards,
 Kiran