Merge lp:~bhill/epics-base/pcas-deadlock-fix-3.14 into lp:~epics-core/epics-base/3.14

Proposed by Bruce Hill
Status: Merged
Approved by: Andrew Johnson
Approved revision: 12683
Merged at revision: 12686
Proposed branch: lp:~bhill/epics-base/pcas-deadlock-fix-3.14
Merge into: lp:~epics-core/epics-base/3.14
Diff against target: 83 lines (+12/-4)
3 files modified
src/cas/generic/casChannelI.cc (+1/-0)
src/cas/generic/casChannelI.h (+7/-0)
src/cas/generic/casStrmClient.cc (+4/-4)
To merge this branch: bzr merge lp:~bhill/epics-base/pcas-deadlock-fix-3.14
Reviewer Review Type Date Requested Status
Andrew Johnson Approve
Review via email: mp+312638@code.launchpad.net

Description of the change

Avoid deadlock by moving the call to casPVI::nativeCount()
out of the casEventSys processing to the casPVI constructor.
The nativeCount (max array size) doesn't change for a casPVI
instance so no need to lock the casPVI mutex each time we need
to test its size.

To post a comment you must log in.
Revision history for this message
Andrew Johnson (anj) wrote :

Thanks Bruce, much appreciated!

review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'src/cas/generic/casChannelI.cc'
2--- src/cas/generic/casChannelI.cc 2016-05-22 03:43:09 +0000
3+++ src/cas/generic/casChannelI.cc 2016-12-07 09:42:35 +0000
4@@ -21,6 +21,7 @@
5 casChannel & chanIn, casPVI & pvIn, ca_uint32_t cidIn ) :
6 privateForPV ( clientIn, *this ),
7 pv ( pvIn ),
8+ maxElem( pvIn.nativeCount() ),
9 chan ( chanIn ),
10 cid ( cidIn ),
11 serverDeletePending ( false ),
12
13=== modified file 'src/cas/generic/casChannelI.h'
14--- src/cas/generic/casChannelI.h 2016-05-22 03:43:09 +0000
15+++ src/cas/generic/casChannelI.h 2016-12-07 09:42:35 +0000
16@@ -45,6 +45,7 @@
17 void clearOutstandingReads ();
18 void postAccessRightsEvent ();
19 const gddEnumStringTable & enumStringTable () const;
20+ ca_uint32_t getMaxElem () const;
21 void setOwner ( const char * const pUserName,
22 const char * const pHostName );
23 bool readAccess () const;
24@@ -58,6 +59,7 @@
25 chanIntfForPV privateForPV;
26 tsDLList < casAsyncIOI > ioList;
27 casPVI & pv;
28+ ca_uint32_t maxElem;
29 casChannel & chan;
30 caResId cid; // client id
31 bool serverDeletePending;
32@@ -77,6 +79,11 @@
33 return this->pv;
34 }
35
36+inline ca_uint32_t casChannelI::getMaxElem () const
37+{
38+ return this->maxElem;
39+}
40+
41 inline const caResId casChannelI::getCID ()
42 {
43 return this->cid;
44
45=== modified file 'src/cas/generic/casStrmClient.cc'
46--- src/cas/generic/casStrmClient.cc 2016-11-30 17:58:24 +0000
47+++ src/cas/generic/casStrmClient.cc 2016-12-07 09:42:35 +0000
48@@ -409,7 +409,7 @@
49 //
50 // element count out of range ?
51 //
52- if ( ctx.msg.m_count > pChan->getPVI().nativeCount() ||
53+ if ( ctx.msg.m_count > pChan->getMaxElem() ||
54 ( !allowdyn && ctx.msg.m_count == 0u ) ) {
55 return ECA_BADCOUNT;
56 }
57@@ -895,7 +895,7 @@
58 gdd * pDBRDD = 0;
59 if ( completionStatus == S_cas_success ) {
60 caStatus status = createDBRDD ( msg.m_dataType, count,
61- chan.getPVI().nativeCount(), pDBRDD );
62+ chan.getMaxElem(), pDBRDD );
63 if ( status != S_cas_success ) {
64 caStatus ecaStatus;
65 if ( status == S_cas_badType ) {
66@@ -1866,7 +1866,7 @@
67 // the protocol buffer.
68 //
69 assert ( nativeTypeDBR <= 0xffff );
70- aitIndex nativeCount = chan.getPVI().nativeCount();
71+ aitIndex nativeCount = chan.getMaxElem();
72 assert ( nativeCount <= 0xffffffff );
73 assert ( hdr.m_cid == chan.getCID() );
74 status = this->out.copyInHeader ( CA_PROTO_CREATE_CHAN, 0,
75@@ -2626,7 +2626,7 @@
76 {
77 gdd * pDD = 0;
78 caStatus status = createDBRDD ( pHdr->m_dataType, pHdr->m_count,
79- this->ctx.getChannel()->getPVI().nativeCount(), pDD );
80+ this->ctx.getChannel()->getMaxElem(), pDD );
81 if ( status != S_cas_success ) {
82 return status;
83 }

Subscribers

People subscribed via source and target branches