Merge lp:~esys-p-dev/esys-particle/esys-darcy into lp:esys-particle

Proposed by Dion Weatherley
Status: Merged
Merged at revision: 1202
Proposed branch: lp:~esys-p-dev/esys-particle/esys-darcy
Merge into: lp:esys-particle
Diff against target: 13423 lines (+8253/-737) (has conflicts)
55 files modified
Doc/ESyS-Darcy/examples/seepage/input.geo (+522/-0)
Doc/ESyS-Darcy/examples/seepage/run.sh (+17/-0)
Doc/ESyS-Darcy/examples/seepage/upward_seepage.py (+204/-0)
Fields/FieldMaster.cpp (+28/-2)
Fields/FieldMaster.h (+8/-1)
Fields/FluidFieldMaster.cpp (+566/-0)
Fields/FluidFieldMaster.h (+106/-0)
Fields/FluidInteractionFieldMaster.cpp (+695/-0)
Fields/FluidInteractionFieldMaster.h (+124/-0)
Fields/Makefile.am (+13/-1)
Fields/ScalarFluidFieldSlave.h (+51/-0)
Fields/ScalarFluidFieldSlave.hpp (+121/-0)
Fields/ScalarFluidInteractionFieldSlave.h (+55/-0)
Fields/ScalarFluidInteractionFieldSlave.hpp (+176/-0)
Fields/VectorFluidFieldSlave.h (+47/-0)
Fields/VectorFluidFieldSlave.hpp (+54/-0)
Fields/VectorFluidInteractionFieldSlave.h (+54/-0)
Fields/VectorFluidInteractionFieldSlave.hpp (+160/-0)
Fields/field_const.h (+14/-2)
Model/FluidCell.cpp (+145/-0)
Model/FluidCell.h (+133/-0)
Model/FluidInteraction.cpp (+99/-0)
Model/FluidInteraction.h (+72/-0)
Model/Makefile.am (+5/-1)
Parallel/ASubLattice.h (+21/-5)
Parallel/GMRESSolverMaster.cpp (+143/-0)
Parallel/GMRESSolverMaster.h (+81/-0)
Parallel/GMRESSolverSlave.cpp (+573/-0)
Parallel/GMRESSolverSlave.h (+76/-0)
Parallel/LatticeMaster.cpp (+657/-199)
Parallel/LatticeMaster.h (+55/-28)
Parallel/Makefile.am (+8/-0)
Parallel/SubLattice.h (+34/-8)
Parallel/SubLattice.hpp (+487/-205)
Parallel/SubLatticeControler.cpp (+41/-28)
Parallel/sublattice_cmd.h (+23/-2)
Python/esys/lsm/ExportModuleLsm.cpp (+4/-0)
Python/esys/lsm/FluidFieldSaverPrmsPy.cpp (+148/-0)
Python/esys/lsm/FluidFieldSaverPrmsPy.h (+60/-0)
Python/esys/lsm/FluidInteractionFieldSaverPrmsPy.cpp (+147/-0)
Python/esys/lsm/FluidInteractionFieldSaverPrmsPy.h (+61/-0)
Python/esys/lsm/LsmMpiPy.cpp (+257/-65)
Python/esys/lsm/LsmMpiPy.h (+26/-9)
Python/esys/lsm/Makefile.am (+5/-1)
Tools/ExtractStrain/DataExtractor.cpp (+2/-2)
Tools/ExtractStrain/DataExtractor.h (+1/-1)
Tools/ExtractStrain/main.cpp (+1/-1)
ntable/src/ntable.h (+72/-14)
ntable/src/ntable.hpp (+1092/-49)
pis/Makefile.am (+2/-1)
pis/pi_storage_f.h (+78/-0)
pis/pi_storage_f.hpp (+196/-0)
ppa/src/pp_array.h (+47/-16)
ppa/src/pp_array.hpp (+290/-95)
tml/message/pack.cpp (+96/-1)
Text conflict in Parallel/LatticeMaster.cpp
Text conflict in Parallel/Makefile.am
Text conflict in Parallel/SubLattice.hpp
Text conflict in Parallel/sublattice_cmd.h
Text conflict in Python/esys/lsm/LsmMpiPy.h
Text conflict in ppa/src/pp_array.h
To merge this branch: bzr merge lp:~esys-p-dev/esys-particle/esys-darcy
Reviewer: Dion Weatherley
Status: Approve
Review via email: mp+370693@code.launchpad.net

Commit message

Merge of esys-darcy into main trunk

Merging the integrated Darcy flow solver developed by Dr. Qi Shao with the main trunk of ESyS-Particle.

Description of the change

This merge incorporates Dr. Qi Shao's integrated Darcy flow solver into the main ESyS-Particle trunk.
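
The new user-facing entry point is a single LsmMpi method, demonstrated by the bundled example (Doc/ESyS-Darcy/examples/seepage/upward_seepage.py, included in this diff). A minimal sketch of enabling the fluid coupling, with illustrative parameter values taken from that example:

    from esys.lsm import *
    from esys.lsm.util import Vec3

    sim = LsmMpi(numWorkerProcesses=1, mpiDimList=[1, 1, 1])
    sim.initNeighbourSearch(particleType="RotSphere", gridSpacing=0.008, verletDist=0.0003)

    # couple a Darcy fluid grid to the particle assembly (new in this branch):
    sim.createFluidInteraction(
        cellside=0.01,           # side length of a fluid cell
        Bw=1.0e6, Bp=1.0e6,      # bulk moduli of fluid and particles
        Mu=1.0e-3,               # fluid viscosity
        alpha=0.5,               # adjusting factor between two time steps
        flowrate=0.1, pressure=0.0,
        inflow=Vec3(0, 0, 1), outflow=Vec3(0, 0, 1),
        fluid_timestep=1.0e-4    # time step size for updating the fluid phase
    )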

Revision history for this message
Dion Weatherley (d-weatherley) :
review: Approve

Preview Diff

1=== added directory 'Doc/ESyS-Darcy'
2=== added directory 'Doc/ESyS-Darcy/examples'
3=== added directory 'Doc/ESyS-Darcy/examples/seepage'
4=== added file 'Doc/ESyS-Darcy/examples/seepage/input.geo'
5--- Doc/ESyS-Darcy/examples/seepage/input.geo 1970-01-01 00:00:00 +0000
6+++ Doc/ESyS-Darcy/examples/seepage/input.geo 2019-07-28 14:55:57 +0000
7@@ -0,0 +1,522 @@
8+LSMGeometry 1.2
9+BoundingBox 0 0 -0.01 0.04 0.02 0.07
10+PeriodicBoundaries 0 0 0
11+Dimension 3D
12+BeginParticles
13+Simple
14+511
15+0.00740403 0.00723698 0.00744442 0.00312945 0 0
16+0.0171073 0.00808333 0.0162018 0.00331158 1 0
17+0.00765378 0.00853487 0.0261773 0.00175397 2 0
18+0.0159496 0.00777501 0.0349267 0.00332675 3 0
19+0.00884835 0.00779284 0.0465758 0.00276472 4 0
20+0.015543 0.00832276 0.0536022 0.00169508 5 0
21+0.0221331 0.003481 0.0153119 0.00348576 6 0
22+0.00596406 0.00553242 0.0407641 0.00150957 7 0
23+0.0323902 0.00701856 0.0392024 0.00298129 8 0
24+0.0150359 0.00834329 0.0453691 0.00234418 9 0
25+0.00855937 0.00342381 0.0391418 0.00210383 10 0
26+0.00179315 0.0182046 0.0142976 0.00179823 11 0
27+0.0111781 0.00192011 0.0240379 0.00192042 12 0
28+0.0196628 0.00689996 0.0112512 0.00241598 13 0
29+0.024942 0.00842001 0.0262652 0.00308795 14 0
30+0.027295 0.013957 0.016231 0.00150564 15 0
31+0.00742343 0.0141679 0.0380271 0.00278792 16 0
32+0.0216727 0.0102069 0.00749932 0.00304783 17 0
33+0.0120856 0.0150207 0.0367895 0.00153555 18 0
34+0.0100415 0.0168587 0.0426142 0.00314168 19 0
35+0.00596843 0.0030228 0.0347412 0.00302346 20 0
36+0.0112935 0.0031166 0.010052 0.00311947 21 0
37+0.00751719 0.00466006 0.0432232 0.00152708 22 0
38+0.0201785 0.0165272 0.0365523 0.00347492 23 0
39+0.0214643 0.0106991 0.0341478 0.0029656 24 0
40+0.0114554 0.0134664 0.0538252 0.00285964 25 0
41+0.0314729 0.00639405 0.0446898 0.00262311 26 0
42+0.0230855 0.00459689 0.022573 0.00255474 27 0
43+0.0320717 0.0129698 0.0308292 0.00221269 28 0
44+0.0230772 0.0168748 0.0194676 0.00274072 29 0
45+0.0271645 0.00728577 0.013058 0.00255968 30 0
46+0.0299018 0.0128437 0.0404557 0.00347676 31 0
47+0.00332364 0.0151418 0.0276371 0.00332515 32 0
48+0.0373931 0.00810864 0.0121803 0.00261654 33 0
49+0.0332908 0.00940968 0.00967425 0.00242495 34 0
50+0.0202158 0.0172934 0.00537452 0.00270846 35 0
51+0.0371413 0.0066962 0.0458402 0.00285904 36 0
52+0.0134183 0.00679717 0.00534724 0.00321175 37 0
53+0.0382342 0.0164371 0.0259019 0.00176599 38 0
54+0.0254657 0.0113559 0.0516564 0.00237733 39 0
55+0.0200455 0.00177476 0.0198161 0.001776 40 0
56+0.00302217 0.00803457 0.0245716 0.00302484 41 0
57+0.0103181 0.00952543 0.0124983 0.00314521 42 0
58+0.0384043 0.00722139 0.0515612 0.00159589 43 0
59+0.0198441 0.00613669 0.0272463 0.00258811 44 0
60+0.0349719 0.017209 0.0195774 0.00218768 45 0
61+0.0147406 0.0180795 0.0440457 0.0019206 46 0
62+0.0163462 0.0024843 0.0142653 0.00248488 47 0
63+0.00448848 0.00247625 0.00782811 0.0024806 48 0
64+0.00269461 0.0176299 0.0350801 0.00237049 49 0
65+0.0162774 0.0115099 0.0127853 0.00160631 50 0
66+0.0291583 0.00175865 0.0343068 0.00175969 51 0
67+0.0176758 0.0117416 0.0384466 0.002257 52 0
68+0.0113942 0.0144846 0.0332913 0.00207561 53 0
69+0.0105094 0.00663566 0.0419762 0.00227142 54 0
70+0.0277693 0.00849944 0.0207598 0.00313154 55 0
71+0.00225796 0.0139071 0.0449231 0.00225884 56 0
72+0.0327355 0.00869802 0.0524756 0.00210723 57 0
73+0.0116614 0.0152128 0.0267079 0.00171612 58 0
74+0.0128076 0.0117423 0.0356005 0.00178029 59 0
75+0.0231826 0.00339772 0.0490703 0.00339785 60 0
76+0.0257734 0.0132393 0.0362276 0.00245328 61 0
77+0.0100857 0.018045 0.0282694 0.00188899 62 0
78+0.00649234 0.0019779 0.0148566 0.001979 63 0
79+0.0190784 0.0168062 0.0129325 0.00244603 64 0
80+0.0274388 0.00283526 0.0186635 0.00283635 65 0
81+0.00980859 0.011643 0.0238988 0.00266876 66 0
82+0.00530997 0.0104109 0.0279439 0.00169793 67 0
83+0.0150089 0.00230255 0.0188572 0.00230241 68 0
84+0.0104202 0.00988296 0.034132 0.00158411 69 0
85+0.0326759 0.0148408 0.036397 0.00183511 70 0
86+0.00537875 0.017851 0.0127402 0.00214993 71 0
87+0.0141031 0.0131392 0.0156461 0.00234038 72 0
88+0.022045 0.0130303 0.0256106 0.00172639 73 0
89+0.030883 0.0055074 0.0515429 0.00170148 74 0
90+0.0171122 0.0156672 0.0189835 0.00282233 75 0
91+0.0248191 0.0106904 0.0565557 0.0019531 76 0
92+0.00998799 0.00230269 0.0303509 0.00230314 77 0
93+0.0233557 0.00776256 0.0308694 0.0018403 78 0
94+0.0153734 0.00613475 0.0407651 0.00277294 79 0
95+0.0112266 0.0154402 0.00669456 0.0021556 80 0
96+0.0284372 0.00330752 0.0246966 0.00330897 81 0
97+0.0296624 0.0180387 0.0063879 0.00196153 82 0
98+0.025597 0.0109553 0.00522174 0.0015032 83 0
99+0.0115065 0.00724476 0.05159 0.00293994 84 0
100+0.0266011 0.00629966 0.0541349 0.0033685 85 0
101+0.0351079 0.00773157 0.0342485 0.00272183 86 0
102+0.0229427 0.0116912 0.0474661 0.00252904 87 0
103+0.00581224 0.00536645 0.0292007 0.0030036 88 0
104+0.0305311 0.0104105 0.0564955 0.00278929 89 0
105+0.0173384 0.00953158 0.0299498 0.00213646 90 0
106+0.00391293 0.0139655 0.0328758 0.00207938 91 0
107+0.00621244 0.0135989 0.0427517 0.00189021 92 0
108+0.024043 0.00728268 0.0388119 0.00337211 93 0
109+0.0186136 0.00722557 0.0482368 0.00237387 94 0
110+0.0340442 0.00251141 0.0228404 0.00251184 95 0
111+0.00491825 0.0183503 0.00613648 0.00165301 96 0
112+0.0147637 0.012986 0.00900628 0.00273441 97 0
113+0.0332292 0.00229225 0.0467351 0.00229396 98 0
114+0.0282865 0.00300698 0.0408854 0.00300894 99 0
115+0.00428008 0.00479348 0.0121972 0.00248558 100 0
116+0.0083076 0.00619411 0.0218279 0.00322861 101 0
117+0.0147392 0.00271629 0.0274105 0.00271994 102 0
118+0.0376745 0.0177152 0.0565854 0.00228494 103 0
119+0.019959 0.0183052 0.0166754 0.0016982 104 0
120+0.038242 0.00860656 0.031193 0.00176244 105 0
121+0.0242836 0.0151937 0.0399219 0.00198545 106 0
122+0.0255509 0.0181291 0.0369613 0.00187095 107 0
123+0.00838219 0.00505231 0.0135851 0.00185281 108 0
124+0.0277088 0.00151638 0.049422 0.00151663 109 0
125+0.0149918 0.00738756 0.0106314 0.00234305 110 0
126+0.0365618 0.00566455 0.00834941 0.00200565 111 0
127+0.01974 0.0115456 0.011923 0.00196263 112 0
128+0.00685994 0.017412 0.0317913 0.00258912 113 0
129+0.0104388 0.00590398 0.0334389 0.00245599 114 0
130+0.0233839 0.00156916 0.0198111 0.0015725 115 0
131+0.00785617 0.00833543 0.0376739 0.00307195 116 0
132+0.0144027 0.0145482 0.0415487 0.00190752 117 0
133+0.00974302 0.0178158 0.0355729 0.0021843 118 0
134+0.0357762 0.0178467 0.052576 0.0021534 119 0
135+0.00317369 0.00881829 0.0335775 0.00317579 120 0
136+0.0135396 0.0150709 0.0229688 0.0024835 121 0
137+0.0250317 0.0169316 0.032195 0.00307149 122 0
138+0.0249193 0.0117534 0.0409116 0.00164436 123 0
139+0.00230726 0.0131174 0.0368666 0.00230746 124 0
140+0.0271016 0.0112457 0.0312579 0.00307011 125 0
141+0.0338569 0.00788284 0.0245417 0.00294891 126 0
142+0.015087 0.017037 0.0291434 0.00286593 127 0
143+0.0235944 0.0133136 0.00457506 0.00165768 128 0
144+0.021186 0.00823015 0.0440303 0.00266366 129 0
145+0.0117129 0.00238318 0.00238331 0.00238526 130 0
146+0.0106024 0.0112625 0.0075197 0.0020241 131 0
147+0.0204841 0.00993927 0.0251963 0.00176161 132 0
148+0.0269842 0.0122714 0.0107032 0.00244944 133 0
149+0.0273927 0.00236758 0.0130094 0.00236832 134 0
150+0.00319804 0.00965197 0.0416454 0.00319901 135 0
151+0.0326107 0.00239774 0.0181546 0.0023979 136 0
152+0.00187391 0.00652176 0.0378804 0.00187417 137 0
153+0.0174243 0.00813536 0.0246287 0.001506 138 0
154+0.0313724 0.00744182 0.0159657 0.00193022 139 0
155+0.0263637 0.0133749 0.0196433 0.00206739 140 0
156+0.0368989 0.00309771 0.0148131 0.00310353 141 0
157+0.0331317 0.0184727 0.016621 0.00152978 142 0
158+0.030803 0.0179462 0.0371897 0.00187845 143 0
159+0.0384495 0.0155004 0.0356344 0.00155192 144 0
160+0.030126 0.016506 0.0142283 0.00279956 145 0
161+0.00464258 0.00791393 0.0149828 0.00172216 146 0
162+0.0192921 0.00307373 0.0376961 0.00307455 147 0
163+0.0305916 0.0068819 0.0292489 0.0028724 148 0
164+0.0126891 0.0136203 0.0449236 0.00163748 149 0
165+0.0105185 0.0177893 0.051347 0.00221122 150 0
166+0.00318776 0.015905 0.0184879 0.00318971 151 0
167+0.030311 0.0127497 0.0346018 0.00181544 152 0
168+0.0296683 0.00533651 0.0336603 0.0018926 153 0
169+0.00248031 0.0141469 0.0131478 0.00248124 154 0
170+0.00216428 0.00952655 0.00619678 0.00216537 155 0
171+0.0199893 0.00735672 0.0314117 0.00160125 156 0
172+0.0268247 0.0183491 0.0212189 0.001651 157 0
173+0.0222275 0.00288889 0.0428809 0.00288937 158 0
174+0.00756801 0.0168824 0.0229114 0.00311793 159 0
175+0.0312656 0.00623918 0.00333778 0.00333877 160 0
176+0.0127276 0.0180344 0.0384488 0.00196729 161 0
177+0.00185072 0.010301 0.028726 0.00185086 162 0
178+0.0192515 0.0170171 0.0240123 0.00282793 163 0
179+0.0088248 0.00553153 0.0170241 0.00164829 164 0
180+0.0174593 0.0135792 0.0461189 0.00328253 165 0
181+0.00808536 0.0140608 0.0190272 0.00171072 166 0
182+0.0062331 0.0165946 0.0479246 0.00340578 167 0
183+0.0357472 0.00829438 0.0419716 0.00155678 168 0
184+0.0173856 0.0100909 0.0422682 0.00192304 169 0
185+0.0230821 0.0140217 0.0102382 0.00185391 170 0
186+0.00184968 0.0067189 0.0090493 0.00184971 171 0
187+0.028432 0.0017601 0.0526059 0.00176021 172 0
188+0.030613 0.00179218 0.0299505 0.00178407 173 0
189+0.036528 0.0119222 0.0385787 0.00347314 174 0
190+0.0185294 0.00300098 0.0316728 0.00300401 175 0
191+0.022188 0.0175745 0.0429847 0.00242628 176 0
192+0.020683 0.0130888 0.0515288 0.00232643 177 0
193+0.0258479 0.00328883 0.0308183 0.00329078 178 0
194+0.0234504 0.0090752 0.00245722 0.00247003 179 0
195+0.00879877 0.0129209 0.0294136 0.00284995 180 0
196+0.0258572 0.0142383 0.00195802 0.00196253 181 0
197+0.0029827 0.00429277 0.0174766 0.00297787 182 0
198+0.0256372 0.00738693 0.0338708 0.00182949 183 0
199+0.0237353 0.00563493 0.00917512 0.00224192 184 0
200+0.0251589 0.00158402 0.0532679 0.00158404 185 0
201+0.00155557 0.00526344 0.042439 0.00155593 186 0
202+0.0354467 0.00743304 0.0501311 0.00169709 187 0
203+0.022148 0.0170426 0.0483429 0.00295866 188 0
204+0.00161019 0.00161185 0.0105829 0.00161257 189 0
205+0.0166357 0.0034659 0.0522416 0.00346604 190 0
206+0.0280514 0.0176819 0.0278071 0.00231822 191 0
207+0.038291 0.00872478 0.00155462 0.001551 192 0
208+0.00652405 0.0033354 0.00291777 0.00292349 193 0
209+0.0148099 0.00168521 0.0387253 0.00168542 194 0
210+0.0325501 0.0177729 0.0274042 0.00222942 195 0
211+0.00234334 0.00234271 0.0486866 0.0023434 196 0
212+0.0235571 0.0178086 0.00889742 0.00219226 197 0
213+0.00650385 0.00776521 0.0176745 0.00158606 198 0
214+0.038139 0.018139 0.014236 0.00186139 199 0
215+0.00560776 0.00640613 0.0529907 0.00200518 200 0
216+0.0327698 0.00168103 0.0412371 0.00168104 201 0
217+0.00321623 0.00782948 0.0485721 0.00321733 202 0
218+0.00168006 0.00167935 0.0138615 0.00168074 203 0
219+0.0120099 0.0122088 0.00289867 0.00290008 204 0
220+0.00684948 0.0184589 0.0272349 0.0015438 205 0
221+0.0172233 0.0124645 0.0560698 0.00341054 206 0
222+0.0307111 0.0174269 0.0319002 0.00257369 207 0
223+0.0317224 0.0131959 0.0056655 0.0033502 208 0
224+0.0223864 0.0121828 0.0385944 0.00181601 209 0
225+0.0333741 0.00162267 0.0379904 0.00162341 210 0
226+0.0126385 0.00474775 0.0451379 0.00198505 211 0
227+0.0333118 0.00975449 0.047728 0.00226902 212 0
228+0.00175565 0.0182005 0.0390556 0.00175591 213 0
229+0.00205201 0.00539685 0.00538364 0.00205289 214 0
230+0.00323014 0.0146552 0.0406688 0.00189881 215 0
231+0.0264494 0.0149921 0.0068019 0.00233734 216 0
232+0.0273275 0.0180763 0.0176918 0.00192378 217 0
233+0.0240693 0.00191583 0.0385519 0.00191617 218 0
234+0.00916667 0.0124688 0.0453142 0.0020909 219 0
235+0.0118018 0.00792646 0.0179651 0.00228676 220 0
236+0.00211426 0.0020685 0.0302846 0.00206859 221 0
237+0.0325379 0.00517236 0.00846175 0.00205096 222 0
238+0.0174031 0.00675963 0.0570005 0.00237341 223 0
239+0.0308439 0.00335187 0.036816 0.00166135 224 0
240+0.0378043 0.0117057 0.00526434 0.00219581 225 0
241+0.0187597 0.0183295 0.0450964 0.00167053 226 0
242+0.0306812 0.0113159 0.0255473 0.00183528 227 0
243+0.0257224 0.014471 0.0443686 0.00247729 228 0
244+0.00564528 0.0136085 0.00794232 0.00348379 229 0
245+0.00690026 0.0016242 0.0110379 0.00162423 230 0
246+0.010398 0.002126 0.0161074 0.0021265 231 0
247+0.0291003 0.00998606 0.0491639 0.00218657 232 0
248+0.01055 0.00175199 0.0343268 0.0017523 233 0
249+0.0146657 0.0174941 0.0344697 0.00250615 234 0
250+0.0367023 0.0129797 0.0230008 0.00300049 235 0
251+0.0170752 0.00958292 0.00687754 0.00163407 236 0
252+0.0305487 0.0140316 0.0103414 0.00154308 237 0
253+0.012143 0.00512642 0.037407 0.00193262 238 0
254+0.0144416 0.00239774 0.0348906 0.00223713 239 0
255+0.0208874 0.0152295 0.0161386 0.00155911 240 0
256+0.017102 0.0119374 0.023918 0.00239269 241 0
257+0.0380168 0.0116568 0.0333423 0.0019839 242 0
258+0.020783 0.017238 0.0536266 0.00232437 243 0
259+0.00459883 0.00936175 0.0108443 0.00177585 244 0
260+0.00979842 0.011681 0.04944 0.0021586 245 0
261+0.0157955 0.0168473 0.0513997 0.00315337 246 0
262+0.0222891 0.0142212 0.0562616 0.00195534 247 0
263+0.0328359 0.00573027 0.0487517 0.00171545 248 0
264+0.0233948 0.00708164 0.0189688 0.00181446 249 0
265+0.0167859 0.00612698 0.0211226 0.00201806 250 0
266+0.0366613 0.0134709 0.0108203 0.00298431 251 0
267+0.0117147 0.0182589 0.0250377 0.00174352 252 0
268+0.0351538 0.0178268 0.0238751 0.00217522 253 0
269+0.0343496 0.0125793 0.0176437 0.00286872 254 0
270+0.0311049 0.0052083 0.0205014 0.00156194 255 0
271+0.0382786 0.0182785 0.0178056 0.00172253 256 0
272+0.0280292 0.003005 0.0077153 0.00301014 257 0
273+0.00156874 0.0110855 0.00953688 0.0015688 258 0
274+0.0336558 0.0173626 0.040316 0.00240105 259 0
275+0.0101468 0.0129838 0.0162281 0.00166167 260 0
276+0.0365685 0.0163658 0.0311024 0.00343461 261 0
277+0.0084464 0.018151 0.00767331 0.00184901 262 0
278+0.00183102 0.00828557 0.0128648 0.00183638 263 0
279+0.0144202 0.0171772 0.0580798 0.00242996 264 0
280+0.037643 0.0101872 0.0274533 0.00235827 265 0
281+0.0306139 0.00214476 0.0558196 0.00214483 266 0
282+0.0369547 0.0125392 0.0450446 0.00304545 267 0
283+0.0137442 0.0118058 0.0271828 0.00231246 268 0
284+0.00572437 0.0112326 0.0138614 0.00195718 269 0
285+0.013553 0.0174369 0.00257317 0.00256338 270 0
286+0.0178381 0.0167656 0.0416959 0.00218456 271 0
287+0.0327479 0.0174337 0.00964386 0.00256805 272 0
288+0.0115658 0.0170655 0.018374 0.00293691 273 0
289+0.00722468 0.00201345 0.0187804 0.00201356 274 0
290+0.0380832 0.00191675 0.0263024 0.00191783 275 0
291+0.00151268 0.0184873 0.0220424 0.00151274 276 0
292+0.0188008 0.0134214 0.0281418 0.00240862 277 0
293+0.0370407 0.0108358 0.053941 0.00278181 278 0
294+0.0382324 0.0149963 0.0189764 0.00176843 279 0
295+0.0333946 0.00268916 0.0333306 0.00269044 280 0
296+0.0379133 0.00495567 0.0316106 0.00192929 281 0
297+0.0139653 0.00453043 0.031477 0.00181791 282 0
298+0.0165215 0.00176899 0.0416556 0.00171339 283 0
299+0.0233985 0.00402321 0.0266658 0.00159446 284 0
300+0.0289804 0.0135746 0.047063 0.00184511 285 0
301+0.0276874 0.00829591 0.0427048 0.00206229 286 0
302+0.0104164 0.0050226 0.0267236 0.00225462 287 0
303+0.0299703 0.0043149 0.0152598 0.00156971 288 0
304+0.0355399 0.00830889 0.00553889 0.00189664 289 0
305+0.0211319 0.00999705 0.0288925 0.0018021 290 0
306+0.012526 0.0183266 0.0546273 0.00167363 291 0
307+0.00215674 0.0178438 0.0429304 0.00215679 292 0
308+0.036927 0.0169288 0.00589332 0.00307357 293 0
309+0.00711901 0.0106929 0.0199971 0.00177195 294 0
310+0.00608676 0.0124662 0.0516043 0.00212781 295 0
311+0.0332294 0.00995106 0.0430838 0.0016568 296 0
312+0.00160766 0.00453795 0.0350303 0.00160967 297 0
313+0.0326308 0.0131346 0.0510869 0.00254587 298 0
314+0.0234443 0.0110963 0.0157333 0.00331821 299 0
315+0.0161996 0.00450485 0.0462712 0.00176098 300 0
316+0.0217518 0.00172445 0.0286186 0.00161838 301 0
317+0.0309133 0.0112472 0.0136353 0.00255638 302 0
318+0.00872013 0.0113484 0.0413458 0.00175616 303 0
319+0.0352245 0.0110546 0.0303423 0.0015085 304 0
320+0.037468 0.00814346 0.0204297 0.00253284 305 0
321+0.0383311 0.0150886 0.0537058 0.00166904 306 0
322+0.0120816 0.00233167 0.0416034 0.00233396 307 0
323+0.0294322 0.00800664 0.025063 0.00150829 308 0
324+0.0343668 0.0147665 0.0556005 0.00225467 309 0
325+0.0207971 0.0176529 0.0289065 0.00235005 310 0
326+0.0319273 0.00217282 0.0128965 0.00217288 311 0
327+0.0371121 0.00416696 0.0037222 0.002892 312 0
328+0.00775536 0.00188664 0.0263674 0.00188868 313 0
329+0.0331224 0.0174383 0.0483866 0.00256238 314 0
330+0.0112521 0.0181341 0.0318277 0.00186785 315 0
331+0.0282315 0.0149082 0.0508115 0.00220593 316 0
332+0.0228505 0.00329858 0.00329885 0.00329944 317 0
333+0.0178559 0.00243378 0.0233342 0.00243497 318 0
334+0.00164242 0.0183563 0.00638179 0.00164843 319 0
335+0.0186533 0.00744749 0.00287275 0.00262119 320 0
336+0.0347361 0.0180106 0.0359326 0.00199199 321 0
337+0.0196083 0.00662829 0.0231802 0.00152232 322 0
338+0.0334753 0.00183238 0.00183527 0.00183807 323 0
339+0.0105506 0.0160335 0.0122247 0.00338778 324 0
340+0.0287291 0.00917687 0.0360068 0.00234093 325 0
341+0.0231773 0.0134731 0.0290722 0.001944 326 0
342+0.0204786 0.0099285 0.0207758 0.00265931 327 0
343+0.0259044 0.00788416 0.0467202 0.00235282 328 0
344+0.0221521 0.00567684 0.0339741 0.00207081 329 0
345+0.00220569 0.0130599 0.0495825 0.00220624 330 0
346+0.0166981 0.00261089 0.00260843 0.0026122 331 0
347+0.0352549 0.00744536 0.0295519 0.00184709 332 0
348+0.0181356 0.00514363 0.00685589 0.00201028 333 0
349+0.0279339 0.0105814 0.00227513 0.00227643 334 0
350+0.0369969 0.0030031 0.0503873 0.0030034 335 0
351+0.0206375 0.00897867 0.0569603 0.00154939 336 0
352+0.0382062 0.00179401 0.00760958 0.0017943 337 0
353+0.0271071 0.0149506 0.0558297 0.00293743 338 0
354+0.0354024 0.00407603 0.0263096 0.00153291 339 0
355+0.0187072 0.00154247 0.0481111 0.00154347 340 0
356+0.0150173 0.0163305 0.00624998 0.00155225 341 0
357+0.00686868 0.0102587 0.00260768 0.0026095 342 0
358+0.0100899 0.0169072 0.0566386 0.00179012 343 0
359+0.0137025 0.00746051 0.0286139 0.00226749 344 0
360+0.0129312 0.0109103 0.0315169 0.00220639 345 0
361+0.0158624 0.0142056 0.00150858 0.00150852 346 0
362+0.017519 0.0183293 0.0556543 0.00167074 347 0
363+0.0079414 0.00892021 0.031694 0.00183696 348 0
364+0.0382482 0.0182475 0.0214964 0.00175278 349 0
365+0.0259955 0.00656889 0.0169037 0.0015487 350 0
366+0.0161249 0.00187418 0.00996327 0.00187507 351 0
367+0.00865046 0.0101062 0.0169744 0.00166719 352 0
368+0.006204 0.0121718 0.0171857 0.00153468 353 0
369+0.0384895 0.0184891 0.0502027 0.00151113 354 0
370+0.0161742 0.0184681 0.0388438 0.00153249 355 0
371+0.0117072 0.00161091 0.0374811 0.00161099 356 0
372+0.0197249 0.0126478 0.00269504 0.00270316 357 0
373+0.0379822 0.010017 0.0493093 0.00201857 358 0
374+0.0079389 0.00970356 0.0546289 0.00235297 359 0
375+0.00634777 0.0147387 0.0148706 0.00174477 360 0
376+0.0160297 0.0113212 0.0199518 0.00177307 361 0
377+0.0262327 0.00692446 0.00516674 0.00200391 362 0
378+0.0190803 0.00167763 0.0058822 0.00155 363 0
379+0.021295 0.00167028 0.0534442 0.00167044 364 0
380+0.0354161 0.0165134 0.0159228 0.00156673 365 0
381+0.0259647 0.0183665 0.0505247 0.00163387 366 0
382+0.00323668 0.00262533 0.0396654 0.00262566 367 0
383+0.00170074 0.0182995 0.0311909 0.00170109 368 0
384+0.0202307 0.00595664 0.0192722 0.00155746 369 0
385+0.0380636 0.00591639 0.0275328 0.00193819 370 0
386+0.0353452 0.00778029 0.0163646 0.00207745 371 0
387+0.0223714 0.00840571 0.0502387 0.00180997 372 0
388+0.00221456 0.00647535 0.054996 0.00193698 373 0
389+0.0289452 0.00529415 0.0483547 0.00196326 374 0
390+0.0287647 0.00832274 0.00847278 0.00242411 375 0
391+0.0172068 0.0179303 0.00898301 0.0020752 376 0
392+0.00202381 0.00924055 0.00202347 0.00202428 377 0
393+0.0245742 0.0170822 0.0245145 0.00253671 378 0
394+0.0379998 0.00200056 0.0392715 0.00200151 379 0
395+0.0334767 0.00593786 0.0121811 0.00186137 380 0
396+0.0180743 0.00181884 0.0448255 0.00182027 381 0
397+0.0122542 0.0108769 0.0466382 0.00162706 382 0
398+0.00655569 0.0168082 0.0539671 0.00264169 383 0
399+0.0383407 0.0183407 0.00165259 0.00166263 384 0
400+0.0214504 0.00172539 0.0252907 0.00172862 385 0
401+0.00214564 0.0175114 0.00999983 0.00215176 386 0
402+0.0150882 0.0183661 0.0248541 0.00163821 387 0
403+0.0384032 0.00870137 0.00750029 0.00159694 388 0
404+0.0161057 0.010794 0.00405664 0.00158625 389 0
405+0.0328496 0.00829805 0.0196323 0.00209146 390 0
406+0.0156947 0.00564037 0.0244717 0.00154824 391 0
407+0.00195455 0.00195538 0.00195608 0.00195661 392 0
408+0.0372154 0.0145755 0.0499549 0.00227933 393 0
409+0.0243971 0.0170321 0.0139171 0.002969 394 0
410+0.00762674 0.00305111 0.0497952 0.00304934 395 0
411+0.0379301 0.00482145 0.0235776 0.00207029 396 0
412+0.0264092 0.00350722 0.035982 0.00190991 397 0
413+0.0193412 0.0144205 0.00912034 0.00207234 398 0
414+0.035643 0.00411888 0.0370425 0.00188387 399 0
415+0.0297379 0.0104753 0.0452151 0.00184417 400 0
416+0.00583399 0.00269151 0.0453085 0.00180035 401 0
417+0.0358471 0.00253241 0.0293966 0.00195015 402 0
418+0.0182762 0.0131768 0.0149761 0.00179601 403 0
419+0.0350586 0.00545458 0.0562953 0.00341809 404 0
420+0.00218552 0.0128416 0.0227735 0.00218788 405 0
421+0.0152214 0.0178986 0.0150455 0.00209452 406 0
422+0.0353873 0.0050777 0.0187091 0.00150126 407 0
423+0.0384627 0.0015372 0.0314742 0.00153768 408 0
424+0.00211531 0.00211565 0.044241 0.00211629 409 0
425+0.0313858 0.0151228 0.0236641 0.00247037 410 0
426+0.00923771 0.00179787 0.0445522 0.00179785 411 0
427+0.0383583 0.0108847 0.0152595 0.00164174 412 0
428+0.0238522 0.0178169 0.00218615 0.00218987 413 0
429+0.014146 0.00867598 0.0210404 0.00165307 414 0
430+0.037655 0.00201786 0.0347177 0.0018402 415 0
431+0.0320816 0.00203275 0.050898 0.0020332 416 0
432+0.0154327 0.0151909 0.0126162 0.00155675 417 0
433+0.0313455 0.00965986 0.032855 0.00174282 418 0
434+0.0281756 0.0177571 0.0475384 0.00213551 419 0
435+0.0291461 0.0181753 0.0404917 0.00182473 420 0
436+0.0186263 0.00174812 0.0567437 0.0017482 421 0
437+0.0329754 0.0176709 0.00230144 0.00230164 422 0
438+0.0127313 0.00532341 0.0145625 0.00212689 423 0
439+0.0376574 0.0176587 0.0465346 0.00234295 424 0
440+0.0271144 0.0128908 0.0239936 0.00236246 425 0
441+0.0215385 0.00459648 0.0562059 0.00236118 426 0
442+0.00738788 0.00904432 0.0507336 0.00163676 427 0
443+0.0382684 0.00173155 0.0547663 0.00173191 428 0
444+0.0257193 0.0183717 0.0408645 0.00162862 429 0
445+0.0126253 0.00219756 0.04845 0.00219787 430 0
446+0.0109563 0.00196688 0.0201579 0.00196749 431 0
447+0.0254353 0.0184314 0.045143 0.00156921 432 0
448+0.00570082 0.0109462 0.045671 0.00171668 433 0
449+0.0282435 0.0110536 0.0168082 0.00160284 434 0
450+0.00179531 0.00403575 0.0269695 0.00180045 435 0
451+0.016429 0.0137832 0.00513348 0.00157774 436 0
452+0.0126431 0.00782114 0.02426 0.00200227 437 0
453+0.00241284 0.0105334 0.0534433 0.00241296 438 0
454+0.00536789 0.0179582 0.0402004 0.00204203 439 0
455+0.00729154 0.018056 0.0163168 0.00194866 440 0
456+0.00413258 0.00214982 0.0535375 0.00214994 441 0
457+0.0236329 0.00873546 0.0114316 0.00160723 442 0
458+0.00264859 0.00264829 0.0228373 0.00264955 443 0
459+0.0378351 0.00650969 0.0394615 0.00216522 444 0
460+0.0380916 0.018083 0.0105314 0.0018534 445 0
461+0.0321456 0.0132646 0.0452373 0.00182279 446 0
462+0.00966394 0.00166826 0.00580348 0.00166644 447 0
463+0.00168093 0.0046194 0.0519373 0.00168113 448 0
464+0.0347692 0.0102218 0.00210724 0.00210747 449 0
465+0.00255495 0.0172316 0.00251778 0.00252309 450 0
466+0.0112111 0.00226283 0.0536347 0.0022629 451 0
467+0.0129764 0.00955392 0.0551326 0.00153717 452 0
468+0.0132403 0.00456442 0.0220498 0.00197873 453 0
469+0.0299834 0.0143163 0.0183655 0.00188999 454 0
470+0.032864 0.00190089 0.0270448 0.00190282 455 0
471+0.0068479 0.00166642 0.0229382 0.00166951 456 0
472+0.0203736 0.00242782 0.00956418 0.00243119 457 0
473+0.00675756 0.00172359 0.0420532 0.00172428 458 0
474+0.0311484 0.0119802 0.0208795 0.00173721 459 0
475+0.0222478 0.00944784 0.053868 0.00196903 460 0
476+0.0168253 0.01213 0.0508497 0.00170684 461 0
477+0.0183685 0.00504764 0.0437566 0.00160412 462 0
478+0.0232737 0.0131858 0.02223 0.00187716 463 0
479+0.0293226 0.0140501 0.00154323 0.00155941 464 0
480+0.0187692 0.0092528 0.0520583 0.0019576 465 0
481+0.0365652 0.0138516 0.0019462 0.00194697 466 0
482+0.0215012 0.0130162 0.0424346 0.00221918 467 0
483+0.0381017 0.0144179 0.0153672 0.00190392 468 0
484+0.00332102 0.0129332 0.00189674 0.00189745 469 0
485+0.0177389 0.0182206 0.00177586 0.00178121 470 0
486+0.0307391 0.017877 0.0200459 0.00212367 471 0
487+0.0363481 0.00156879 0.0191933 0.00156882 472 0
488+0.0290274 0.0184173 0.0235843 0.00158279 473 0
489+0.0339247 0.0137638 0.0271061 0.00201849 474 0
490+0.0283487 0.0176869 0.00233598 0.00231694 475 0
491+0.0282852 0.0184908 0.0350055 0.00151042 476 0
492+0.0330134 0.00185311 0.00549684 0.00185315 477 0
493+0.0242935 0.00160266 0.010027 0.00160277 478 0
494+0.030886 0.0172073 0.044091 0.00228919 479 0
495+0.0312084 0.0175061 0.0533512 0.00249442 480 0
496+0.0341718 0.0183149 0.0135427 0.00168831 481 0
497+0.0120748 0.0105452 0.0429707 0.00205716 482 0
498+0.012124 0.0120933 0.0196146 0.00222838 483 0
499+0.0147474 0.0184644 0.0115119 0.00154335 484 0
500+0.0378522 0.0178499 0.0386472 0.00215028 485 0
501+0.00192368 0.0146758 0.0546279 0.00192372 486 0
502+0.0155852 0.00901374 0.0502825 0.00169673 487 0
503+0.00803869 0.0170089 0.00298788 0.0029914 488 0
504+0.0362089 0.00233206 0.0431987 0.00233296 489 0
505+0.0330852 0.010437 0.0281984 0.00158274 490 0
506+0.0122583 0.0182774 0.0467047 0.00172264 491 0
507+0.00226078 0.0177388 0.0517969 0.00226149 492 0
508+0.00280152 0.00993607 0.018667 0.00280199 493 0
509+0.0146218 0.00189707 0.00652101 0.00189122 494 0
510+0.0384981 0.0184995 0.0351118 0.00150211 495 0
511+0.00425058 0.00164772 0.0266976 0.00164977 496 0
512+0.0133873 0.0182043 0.00848053 0.00179734 497 0
513+0.0383755 0.00427334 0.0191478 0.00162463 498 0
514+0.0383818 0.00897087 0.0576935 0.00161829 499 0
515+0.0382429 0.0137606 0.0571964 0.00175716 500 0
516+0.0168499 0.0133127 0.0326882 0.00254127 501 0
517+0.0150799 0.00917761 0.00156015 0.00156038 502 0
518+0.0246948 0.0182753 0.0536291 0.00172506 503 0
519+0.0350383 0.0021399 0.00991198 0.00214006 504 0
520+0.00725777 0.0119493 0.0338505 0.00194902 505 0
521+0.0277642 0.0178239 0.0100572 0.00217661 506 0
522+0.00168331 0.00557014 0.00168047 0.00168212 507 0
523+0.011902 0.0124199 0.0392896 0.0020792 508 0
524+0.00526411 0.012825 0.0557359 0.00190477 509 0
525+0.0275611 0.00235525 0.0459032 0.00210683 510 0
526+EndParticles
527+BeginConnect
528+0
529+EndConnect
530
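For reference, the "Simple" particle records in the LSMGeometry 1.2 file above are one particle per row: x y z radius id tag. A sketch of a parser for that block (read_simple_geo is a hypothetical helper, not part of this branch):

    def read_simple_geo(path):
        """Parse the Simple particle block of an LSMGeometry file:
        one 'x y z radius id tag' row per particle (hypothetical helper)."""
        particles = []
        with open(path) as f:
            lines = iter(f.read().splitlines())
            for line in lines:
                if line.strip() == "Simple":
                    break
            count = int(next(lines))       # 511 in input.geo
            for _ in range(count):
                x, y, z, r, pid, tag = next(lines).split()
                particles.append((float(x), float(y), float(z),
                                  float(r), int(pid), int(tag)))
        return particles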
531=== added file 'Doc/ESyS-Darcy/examples/seepage/run.sh'
532--- Doc/ESyS-Darcy/examples/seepage/run.sh 1970-01-01 00:00:00 +0000
533+++ Doc/ESyS-Darcy/examples/seepage/run.sh 2019-07-28 14:55:57 +0000
534@@ -0,0 +1,17 @@
535+#!/bin/bash
536+
537+#A bash script to run the simulation and create VTK files for visualization in ParaView.
538+#Run it with "sh run.sh" in a terminal.
539+
540+
541+if [ -d "results" ]; then
542+ rm -r results
543+fi
544+
545+mkdir results
546+
547+mpirun -np 2 `which esysparticle` upward_seepage.py
548+
549+dump2vtk -i results/snap -o results/particles_ -rot -t 0 6 100000
550+
551+printf "\nSimulation done, results saved in ./results\n"
552
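The dump2vtk call above follows the usual "-t <first frame index> <number of frames> <interval>" convention and is meant to match the CheckPointer in upward_seepage.py; a quick consistency check, assuming that convention:

    begin, end, incr = 0, 500000, 100000   # CheckPointPrms in upward_seepage.py
    n_snaps = (end - begin) // incr + 1    # 6 snapshots: t = 0, 100000, ..., 500000
    assert n_snaps == 6                    # matches "dump2vtk ... -t 0 6 100000"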
553=== added file 'Doc/ESyS-Darcy/examples/seepage/upward_seepage.py'
554--- Doc/ESyS-Darcy/examples/seepage/upward_seepage.py 1970-01-01 00:00:00 +0000
555+++ Doc/ESyS-Darcy/examples/seepage/upward_seepage.py 2019-07-28 14:55:57 +0000
556@@ -0,0 +1,204 @@
557+#upward_seepage.py: An upward seepage simulation through a particle assembly using ESyS-Darcy (a fluid-coupled version of ESyS-Particle)
558+# Author: Q.Shao
559+# Date: 21 August 2017
560+# Organisation: University of Queensland
561+# (C) All rights reserved, 2017.
562+#
563+#
564+
565+#import the appropriate ESyS-Particle modules:
566+from esys.lsm import *
567+from esys.lsm.util import *
568+from esys.lsm.geometry import *
569+
570+#instantiate a simulation object
571+#and initialise the neighbour search algorithm:
572+sim = LsmMpi(numWorkerProcesses=1, mpiDimList=[1,1,1])
573+
574+sim.initNeighbourSearch(
575+ particleType="RotSphere",
576+ gridSpacing=0.008,
577+ verletDist=0.0003
578+)
579+
580+#specify the number of timesteps and timestep increment:
581+sim.setNumTimeSteps(500000)
582+sim.setTimeStepSize(1.0e-6)
583+
584+#specify the spatial domain for the simulation:
585+domain = BoundingBox(Vec3(0.,0.,-0.01), Vec3(0.04,0.02,0.07))
586+sim.setSpatialDomain(domain)
587+
588+#add fluid to the domain:
589+sim.createFluidInteraction(
590+ cellside=0.01, #side length of fluid cell
591+ Bw=1.0e6, #bulk modulus of fluid
592+ Bp=1.0e6, #bulk modulus of particle
593+ Mu=1.0e-3, #fluid viscosity
594+ alpha=0.5, #adjusting factor between two time steps
595+ flowrate=0.1, #rate of inflow
596+ pressure=0., #hydraulic pressure
597+ inflow=Vec3(0,0,1), #directions of inflows
598+ outflow=Vec3(0,0,1), #directions of outflows
599+ fluid_timestep=1.0e-4 #time step size for updating fluid phase
600+)
601+
602+#read in geometry input file
603+sim.readGeometry("input.geo")
604+
605+#set particle density
606+sim.setParticleDensity (
607+ tag = 0,
608+ mask = -1,
609+ Density = 2600
610+)
611+
612+#add a left side wall to the model:
613+sim.createWall (
614+ name = "left_wall",
615+ posn = Vec3(0.0000, 0.0000, 0.0000),
616+ normal = Vec3(1.0000, 0.0000, 0.0000)
617+)
618+
619+#add a right side wall to the model:
620+sim.createWall (
621+ name = "right_wall",
622+ posn = Vec3(0.04, 0.0000, 0.0000),
623+ normal = Vec3(-1.0000, 0.0000, 0.0000)
624+)
625+
626+#add a back side wall to the model:
627+sim.createWall (
628+ name = "back_wall",
629+ posn = Vec3(0.0000, 0.0000, 0.0000),
630+ normal = Vec3(0.0000, 1.0000, 0.0000)
631+)
632+
633+#add a front side wall to the model:
634+sim.createWall (
635+ name = "front_wall",
636+ posn = Vec3(0.0000, 0.02, 0.0000),
637+ normal = Vec3(0.0000, -1.0000, 0.0000)
638+)
639+
640+#add a bottom side wall to the model:
641+sim.createWall (
642+ name = "bottom_wall",
643+ posn = Vec3(0.0000, 0.0000, 0.0000),
644+ normal = Vec3(0.0000, 0.0000, 1.0000)
645+)
646+
647+#specify that particles undergo frictional interactions:
648+sim.createInteractionGroup (
649+ RotFrictionPrms (
650+ name = "friction",
651+ normalK = 1.0e6,
652+ dynamicMu = 0.4,
653+ staticMu = 0.6,
654+ shearK = 1.0e6,
655+ scaling = True,
656+ rigid = True,
657+ meanR_scaling = True
658+ )
659+)
660+
661+#specify that particles undergo elastic repulsion
662+#from the left side wall:
663+sim.createInteractionGroup (
664+ NRotElasticWallPrms (
665+ name = "lw_repel",
666+ wallName = "left_wall",
667+ normalK = 1.0e6
668+ )
669+)
670+
671+#specify that particles undergo elastic repulsion
672+#from the right side wall:
673+sim.createInteractionGroup (
674+ NRotElasticWallPrms (
675+ name = "rw_repel",
676+ wallName = "right_wall",
677+ normalK = 1.0e6
678+ )
679+)
680+
681+#specify that particles undergo elastic repulsion
682+#from the back side wall:
683+sim.createInteractionGroup (
684+ NRotElasticWallPrms (
685+ name = "bkw_repel",
686+ wallName = "back_wall",
687+ normalK = 1.0e6
688+ )
689+)
690+
691+
692+#specify that particles undergo elastic repulsion
693+#from the front side wall:
694+sim.createInteractionGroup (
695+ NRotElasticWallPrms (
696+ name = "fw_repel",
697+ wallName = "front_wall",
698+ normalK = 1.0e6
699+ )
700+)
701+
702+
703+#specify that particles undergo elastic repulsion
704+#from the bottom side wall:
705+sim.createInteractionGroup (
706+ NRotElasticWallPrms (
707+ name = "btw_repel",
708+ wallName = "bottom_wall",
709+ normalK = 1.0e6
710+ )
711+)
712+
713+#add a CheckPointer to store simulation data
714+sim.createCheckPointer (
715+ CheckPointPrms (
716+ fileNamePrefix = "results/snap",
717+ beginTimeStep=0,
718+ endTimeStep=500000,
719+ timeStepIncr=100000
720+ )
721+)
722+
723+#create a FieldSaver to store distributed pore pressures
724+sim.createFieldSaver (
725+ FluidScalarFieldSaverPrms(
726+ fieldName="disP",
727+ fileName="results/Pressure",
728+ fileFormat="VTI",
729+ beginTimeStep=0,
730+ endTimeStep=500000,
731+ timeStepIncr=100000
732+ )
733+)
734+
735+#create a FieldSaver to store porosities
736+sim.createFieldSaver (
737+ FluidScalarFieldSaverPrms(
738+ fieldName="Phi",
739+ fileName="results/Porosity",
740+ fileFormat="VTI",
741+ beginTimeStep=0,
742+ endTimeStep=500000,
743+ timeStepIncr=100000
744+ )
745+)
746+
747+#create a FieldSaver to store permeabilities
748+sim.createFieldSaver (
749+ FluidScalarFieldSaverPrms(
750+ fieldName="K",
751+ fileName="results/Permeability",
752+ fileFormat="VTI",
753+ beginTimeStep=0,
754+ endTimeStep=500000,
755+ timeStepIncr=100000
756+ )
757+)
758+
759+#Execute the simulation:
760+sim.run()
761
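The three FluidScalarFieldSaverPrms savers above write ASCII .vti ImageData files (see ScalarFluidFieldMaster::writeAsVTI later in this diff); with the filename convention from makeFilename() they land at e.g. results/Pressure.<timestep>.vti. Besides loading them in ParaView, they can be read directly; a sketch assuming that layout:

    import xml.etree.ElementTree as ET
    import numpy as np

    def read_vti_scalars(path):
        # parse one ASCII .vti written by ScalarFluidFieldMaster::writeAsVTI
        piece = ET.parse(path).getroot().find("ImageData/Piece")
        ext = [int(v) for v in piece.get("Extent").split()]
        nx, ny, nz = ext[1], ext[3], ext[5]
        text = piece.find("CellData/DataArray").text
        # cell values are written x-fastest, z-slowest
        return np.array(text.split(), dtype=float).reshape((nz, ny, nx))

    pressure = read_vti_scalars("results/Pressure.100000.vti")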
762=== modified file 'Fields/FieldMaster.cpp'
763--- Fields/FieldMaster.cpp 2017-01-04 08:26:43 +0000
764+++ Fields/FieldMaster.cpp 2019-07-28 14:55:57 +0000
765@@ -68,6 +68,18 @@
766 m_write_type=WRITE_TYPE_RAW_WITH_ID;
767 }else if(savetype=="RAW_WITH_POS_ID"){
768 m_write_type=WRITE_TYPE_RAW_WITH_POS_ID;
769+ /****fluid contents: begin****/
770+ }else if(savetype=="RAW_WITH_POS"){
771+ m_write_type=WRITE_TYPE_RAW_WITH_POS;
772+ }else if(savetype=="RAW_WITH_ID_POS"){
773+ m_write_type=WRITE_TYPE_RAW_WITH_ID_POS;
774+ }else if(savetype=="RAW_WITH_PARTICLE"){
775+ m_write_type=WRITE_TYPE_RAW_WITH_PARTICLE;
776+ }else if(savetype=="VTI"){
777+ m_write_type=WRITE_TYPE_VTI;
778+ }else if(savetype=="VTU"){
779+ m_write_type=WRITE_TYPE_VTU;
780+ /****fluid contents: end****/
781 } else {
782 cerr
783 << "AFieldMaster: unknown output file format '"
784@@ -99,9 +111,16 @@
785 case WRITE_TYPE_RAW : suffix=".dat"; break;
786 case WRITE_TYPE_RAW_WITH_ID : suffix=".dat"; break;
787 case WRITE_TYPE_RAW_WITH_POS_ID : suffix=".dat"; break;
788+ /****fluid contents: begin****/
789+ case WRITE_TYPE_RAW_WITH_POS: suffix=".dat"; break;
790+ case WRITE_TYPE_RAW_WITH_ID_POS : suffix=".dat"; break;
791+ case WRITE_TYPE_RAW_WITH_PARTICLE : suffix=".dat"; break;
792+ case WRITE_TYPE_VTI : suffix=".vti"; break;
793+ case WRITE_TYPE_VTU : suffix=".vtu"; break;
794+ /****fluid contents: end****/
795 default : cerr << "AFieldMaster: wrong m_write_type in makeFilename" << endl; // can't happen
796 }
797- fn << m_file_name << "." << m_save_count << suffix;
798+ fn << m_file_name << "." << m_save_count*m_dt << suffix;
799 m_save_count++;
800
801 return fn.str();
802@@ -149,6 +168,13 @@
803 case WRITE_TYPE_RAW2 : writeAsRAW2(); break;
804 case WRITE_TYPE_RAW_WITH_ID : writeAsRawWithID(); break;
805 case WRITE_TYPE_RAW_WITH_POS_ID : writeAsRawWithPosID(); break;
806- default : cerr << "AFieldMaster: wrong m_write_type in write" << endl;
807+ /***fluid contents:begin***/
808+ case WRITE_TYPE_RAW_WITH_POS : writeAsRawWithPos(); break;
809+ case WRITE_TYPE_RAW_WITH_ID_POS : writeAsRawWithIDPos(); break;
810+ case WRITE_TYPE_RAW_WITH_PARTICLE : writeAsRawWithParticle(); break;
811+ case WRITE_TYPE_VTI : writeAsVTI(); break;
812+ case WRITE_TYPE_VTU : writeAsVTU(); break;
813+ /***fluid contents:end***/
814+ default : cerr << "AFieldMaster: wrong m_write_type in write" << endl;
815 }
816 }
817
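In summary, the two hunks above add the following fileFormat strings and the suffix makeFilename() appends for each; note that makeFilename() now numbers output files by timestep (save_count * dt) rather than by save index. The dict below is a restatement for reviewers, not code from the branch:

    # new fileFormat values accepted after this merge, and their suffixes
    FLUID_SAVE_TYPES = {
        "RAW_WITH_POS":      ".dat",
        "RAW_WITH_ID_POS":   ".dat",
        "RAW_WITH_PARTICLE": ".dat",
        "VTI":               ".vti",  # image-data grid (scalar fluid fields)
        "VTU":               ".vtu",  # unstructured points (vector fluid fields)
    }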
818=== modified file 'Fields/FieldMaster.h'
819--- Fields/FieldMaster.h 2017-01-04 08:26:43 +0000
820+++ Fields/FieldMaster.h 2019-07-28 14:55:57 +0000
821@@ -40,7 +40,7 @@
822 {
823 private:
824 static int s_field_count;
825-
826+
827 protected:
828 TML_Comm *m_comm;
829 string m_field_name;
830@@ -64,6 +64,13 @@
831 virtual void writeAsRAW(){console.Error()<<"writeAsRAW NOT IMPLEMENTED\n";};
832 virtual void writeAsRawWithID(){console.Error()<<"writeAsRawWithID NOT IMPLEMENTED\n";};
833 virtual void writeAsRawWithPosID(){console.Error()<<"writeAsRawWithPosID NOT IMPLEMENTED\n";};
834+ /****fluid contents: begin***/
835+ virtual void writeAsRawWithPos(){console.Error()<<"writeAsRawWithPos NOT IMPLEMENTED\n";};
836+ virtual void writeAsRawWithIDPos(){console.Error()<<"writeAsRawWithIDPos NOT IMPLEMENTED\n";};
837+ virtual void writeAsRawWithParticle(){console.Error()<<"writeAsRawWithParticle NOT IMPLEMENTED\n";};
838+ virtual void writeAsVTI(){console.Error()<<"writeAsVTI NOT IMPLEMENTED\n";};
839+ virtual void writeAsVTU(){console.Error()<<"writeAsVTU NOT IMPLEMENTED\n";};
840+ /****fluid contents: end***/
841
842 public:
843 AFieldMaster(TML_Comm*,const string&,const string&,const string&,int,int,int);
844
845=== added file 'Fields/FluidFieldMaster.cpp'
846--- Fields/FluidFieldMaster.cpp 1970-01-01 00:00:00 +0000
847+++ Fields/FluidFieldMaster.cpp 2019-07-28 14:55:57 +0000
848@@ -0,0 +1,566 @@
849+/////////////////////////////////////////////////////////////
850+// //
851+// Copyright (c) 2003-2014 by The University of Queensland //
852+// Centre for Geoscience Computing //
853+// http://earth.uq.edu.au/centre-geoscience-computing //
854+// //
855+// Primary Business: Brisbane, Queensland, Australia //
856+// Licensed under the Open Software License version 3.0 //
857+// http://www.apache.org/licenses/LICENSE-2.0 //
858+// //
859+/////////////////////////////////////////////////////////////
860+
861+// --- TML includes ---
862+#include "tml/comm/comm.h"
863+
864+//--- project includes ---
865+#include "FieldMaster.h"
866+#include "Foundation/vec3.h"
867+#include "field_const.h"
868+#include "FluidFieldMaster.h"
869+
870+// --- TML includes ---
871+#include "tml/comm/comm.h"
872+
873+//--- IO includes ---
874+#include <iostream>
875+#include <fstream>
876+#include <algorithm>
877+
878+//--- STL includes ---
879+#include <string>
880+#include <map>
881+
882+
883+using std::map;
884+using std::multimap;
885+using std::cout;
886+using std::endl;
887+using std::ofstream;
888+using std::string;
889+
890+//==== SCALAR PFM ===
891+
892+/*!
893+ Constructor. Set up the Master and broadcast parameters to the slaves.
894+
895+ \param comm the communicator
896+ \param fieldname the name of the field to be saved
897+ \param filename the name of the file to be saved into or the base for the generation of the filenames if the saving format requires multiple files
898+ \param savetype the way to save data, currently supported are DX,SUM
899+ \param t0 the first timestep to be saved
900+ \param tend the last timestep to be saved
901+ \param dt the interval between timesteps to be saved
902+*/
903+ScalarFluidFieldMaster::ScalarFluidFieldMaster(TML_Comm* comm,const string& fieldname,const string& filename,const string& savetype,int t0,int tend,int dt)
904+ :AFieldMaster(comm,fieldname,filename,savetype,t0,tend,dt)
905+{
906+ m_comm->broadcast_cont(fieldname);
907+ m_comm->broadcast(m_id);
908+}
909+
910+
911+void ScalarFluidFieldMaster::collect()
912+{
913+ // send field id to slave
914+ m_comm->broadcast(m_id);
915+
916+ if((m_write_type==WRITE_TYPE_SUM)||(m_write_type==WRITE_TYPE_MAX)) {
917+ collectSum();
918+ } else {
919+ collectFull();
920+ }
921+}
922+
923+/*!
924+ collect the full set of data, i.e. value, and posn of fluid cells
925+*/
926+void ScalarFluidFieldMaster::collectFull()
927+{
928+ multimap<int,pair<Vec3,double> > temp_mm;
929+
930+ // send type of collect to slaves
931+ int coll_type=1;
932+ m_comm->broadcast(coll_type);
933+
934+ // get data from slaves
935+ m_comm->gather(temp_mm);
936+
937+ // collate received data from the multimap into the save vector
938+ for(multimap<int,pair<Vec3,double> >::iterator iter=temp_mm.begin();
939+ iter!=temp_mm.end();
940+ iter++){
941+ m_save_vector.push_back(iter->second);
942+ }
943+}
944+
945+
946+/*!
947+ collect sum of values only
948+*/
949+void ScalarFluidFieldMaster::collectSum()
950+{
951+
952+ multimap<int,double> temp_mm;
953+
954+ // send type of collect to slaves
955+ m_comm->broadcast(m_write_type);
956+
957+ // get data from slaves
958+ m_comm->gather(temp_mm);
959+
960+ // collate received data from the multimap into the sum vector
961+ for(multimap<int,double>::iterator iter=temp_mm.begin();
962+ iter!=temp_mm.end();
963+ iter++){
964+ m_sum_vector.push_back(iter->second);
965+ }
966+}
967+
968+
969+/*!
970+ sum data and write them out into a single continuous file
971+
972+ \warning untested
973+*/
974+void ScalarFluidFieldMaster::writeAsSUM()
975+{
976+
977+ // sum data
978+ double sum_data=0.0;
979+ for(vector<double>::iterator iter=m_sum_vector.begin();
980+ iter!=m_sum_vector.end();
981+ iter++){
982+ sum_data+=*iter;
983+ }
984+ // open file for appending
985+ ofstream out_file(m_file_name.c_str(),ios::app);
986+ // write data
987+ out_file << sum_data << endl;
988+ // close file
989+ out_file.close();
990+ m_save_vector.erase(m_save_vector.begin(),m_save_vector.end());
991+ m_sum_vector.erase(m_sum_vector.begin(),m_sum_vector.end());
992+}
993+
994+/*!
995+ find max datum and write it out into a single continuous file
996+*/
997+void ScalarFluidFieldMaster::writeAsMAX()
998+{
999+
1000+ // get max data
1001+ double max_data=*m_sum_vector.begin();
1002+ for(vector<double>::iterator iter=m_sum_vector.begin();
1003+ iter!=m_sum_vector.end();
1004+ iter++){
1005+ max_data=(*iter > max_data) ? *iter : max_data;
1006+ }
1007+ // open file for appending
1008+ ofstream out_file(m_file_name.c_str(),ios::app);
1009+ // write data
1010+ out_file << max_data << endl;
1011+ // close file
1012+ out_file.close();
1013+ m_save_vector.erase(m_save_vector.begin(),m_save_vector.end());
1014+ m_sum_vector.erase(m_sum_vector.begin(),m_sum_vector.end());
1015+}
1016+
1017+/*!
1018+ write data as a raw series of values, one row of values per time step,
1019+ all timesteps into the same file
1020+*/
1021+void ScalarFluidFieldMaster::writeAsRAW_SERIES()
1022+{
1023+
1024+ // open file
1025+ ofstream out_file(m_file_name.c_str(),ios::app);
1026+
1027+ // write data
1028+ for(vector<pair<Vec3,double> >::iterator iter=m_save_vector.begin();
1029+ iter!=m_save_vector.end();
1030+ iter++){
1031+ out_file << iter->first << " " << iter->second << endl;
1032+ }
1033+
1034+ out_file << endl;
1035+ out_file.close();
1036+ m_save_vector.erase(m_save_vector.begin(),m_save_vector.end());
1037+ m_sum_vector.erase(m_sum_vector.begin(),m_sum_vector.end());
1038+}
1039+
1040+/*!
1041+ write data as raw position, value, one time step per file
1042+*/
1043+
1044+void ScalarFluidFieldMaster::writeAsRAW()
1045+{
1046+
1047+ //generate filename
1048+ string fn=makeFilename();
1049+
1050+ // open file
1051+ ofstream out_file(fn.c_str());
1052+
1053+ // write data
1054+
1055+ for(vector<pair<Vec3,double> >::iterator iter=m_save_vector.begin();
1056+ iter!=m_save_vector.end();
1057+ iter++){
1058+ out_file << iter->first << " " << iter->second << endl;
1059+ }
1060+
1061+ // close file
1062+ out_file.close();
1063+
1064+ // clean up
1065+ m_save_vector.erase(m_save_vector.begin(),m_save_vector.end());
1066+ m_sum_vector.erase(m_sum_vector.begin(),m_sum_vector.end());
1067+}
1068+
1069+
1070+bool sortOnZ(const pair<Vec3,double> a, const pair<Vec3,double> b) {
1071+ Vec3 a_index=a.first;
1072+ Vec3 b_index=b.first;
1073+ return a_index.Z() < b_index.Z();
1074+}
1075+
1076+bool sortOnY(const pair<Vec3,double> a, const pair<Vec3,double> b) {
1077+ Vec3 a_index=a.first;
1078+ Vec3 b_index=b.first;
1079+ return a_index.Y() < b_index.Y();
1080+}
1081+
1082+bool sortOnX(const pair<Vec3,double> a, const pair<Vec3,double> b) {
1083+ Vec3 a_index=a.first;
1084+ Vec3 b_index=b.first;
1085+ return a_index.X() < b_index.X();
1086+}
1087+
1088+vector<pair<Vec3,double> > sortVector(vector<pair<Vec3,double> > unsorted)
1089+{
1090+ vector<pair<Vec3,double> > sorted = unsorted;
1091+ std::stable_sort(sorted.begin(), sorted.end(), sortOnX);
1092+ std::stable_sort(sorted.begin(), sorted.end(), sortOnY);
1093+ std::stable_sort(sorted.begin(), sorted.end(), sortOnZ);
1094+ return sorted;
1095+}
1096+
1097+
1098+/*!
1099+ write data as VTI file for paraview, one time step per file
1100+*/
1101+
1102+void ScalarFluidFieldMaster::writeAsVTI()
1103+{
1104+
1105+ //generate filename
1106+ string fn=makeFilename();
1107+
1108+ // open file
1109+ ofstream out_file(fn.c_str());
1110+
1111+ vector<pair<Vec3,double> > sorted;
1112+ sorted=sortVector(m_save_vector);
1113+ double minX,minY,minZ,maxX,maxY,maxZ;
1114+ vector<pair<Vec3,double> >::iterator iter_begin=sorted.begin();
1115+ Vec3 pos_begin=iter_begin->first;
1116+ minX=pos_begin.X();minY=pos_begin.Y();minZ=pos_begin.Z();
1117+ vector<pair<Vec3,double> >::iterator iter_end=sorted.end()-1;
1118+ Vec3 pos_end=iter_end->first;
1119+ maxX=pos_end.X();maxY=pos_end.Y();maxZ=pos_end.Z();
1120+
1121+ int z_count=0;
1122+ for(vector<pair<Vec3,double> >::iterator iter0=sorted.begin();iter0!=sorted.end();iter0++){
1123+ Vec3 pos=iter0->first;
1124+ if(pos.Z()==minZ) {z_count++;}
1125+ else {break;};
1126+ }
1127+ int nz=sorted.size()/z_count;
1128+
1129+ int y_count=0;
1130+ for(vector<pair<Vec3,double> >::iterator iter0=sorted.begin();iter0!=sorted.end();iter0++){
1131+ Vec3 pos=iter0->first;
1132+ if(pos.Y()==minY) {y_count++;}
1133+ else {break;};
1134+ }
1135+ int ny=z_count/y_count;
1136+
1137+ int x_count=0;
1138+ for(vector<pair<Vec3,double> >::iterator iter0=sorted.begin();iter0!=sorted.end();iter0++){
1139+ Vec3 pos=iter0->first;
1140+ if(pos.X()==minX) {x_count++;}
1141+ else {break;};
1142+ }
1143+ int nx=y_count/x_count;
1144+
1145+ double xside,yside,zside;
1146+ if(nx==1){
1147+ xside=maxX-minX;
1148+ }else{
1149+ xside=(maxX-minX)/double(nx-1);
1150+ }
1151+
1152+ if(ny==1){
1153+ yside=maxY-minY;
1154+ }else{
1155+ yside=(maxY-minY)/double(ny-1);
1156+ }
1157+ if(nz==1){
1158+ zside=maxZ-minZ;
1159+ }else{
1160+ zside=(maxZ-minZ)/double(nz-1);
1161+ }
1162+
1163+ out_file<<"<VTKFile type=\"ImageData\" version=\"1.0\" byte_order=\"LittleEndian\" header_type=\"UInt64\">"<< endl;
1164+ out_file<<"<ImageData WholeExtent=\"0 "<<nx<<" 0 "<<ny<<" 0 " <<nz<<"\" Origin=\""<<minX-0.5*xside<<" "<<minY-0.5*yside<<" "<<minZ-0.5*zside<<"\" Spacing=\""\
1165+ <<xside<<" "<<yside<<" "<<zside<<"\">" << endl;
1166+ out_file << "<Piece Extent=\"0 "<<nx<<" 0 "<<ny<<" 0 "<<nz<<"\">" << endl;
1167+ out_file << "<CellData>" << endl;
1168+ out_file << "<DataArray type=\"Float32\" Name=\"fluid scalar data\" format=\"ascii\">" << endl;
1169+
1170+ vector<pair<Vec3,double> >::iterator iter=sorted.begin();
1171+ for (int z=0;z<nz;z++){
1172+ for (int y=0;y<ny;y++){
1173+ for (int x=0;x<nx;x++){
1174+ out_file << iter->second << " ";
1175+ iter++;
1176+ };
1177+ out_file<<endl;
1178+ };
1179+ out_file<<endl;
1180+ };
1181+
1182+ out_file << "</DataArray>" << endl <<endl;
1183+ out_file << "</CellData>" << endl;
1184+ out_file << "</Piece>" << endl;
1185+ out_file << "</ImageData>" << endl;
1186+ out_file << "</VTKFile>" << endl;
1187+
1188+ // close file
1189+ out_file.close();
1190+
1191+ // clean up
1192+ m_save_vector.erase(m_save_vector.begin(),m_save_vector.end());
1193+ m_sum_vector.erase(m_sum_vector.begin(),m_sum_vector.end());
1194+}
1195+
1196+
1197+// === VECTOR PFM ===
1198+/*!
1199+ Constructor. Set up the Master and broadcast parameters to the slaves.
1200+
1201+ \param comm the communicator
1202+ \param fieldname the name of the field to be saved
1203+ \param filename the name of the file to be saved into or the base for the generation of the filenames if the saving format requires multiple files
1204+ \param savetype the way to save data, currently supported are DX,SUM
1205+ \param t0 the first timestep to be saved
1206+ \param tend the last timestep to be saved
1207+ \param dt the interval between timesteps to be saved
1208+*/
1209+VectorFluidFieldMaster::VectorFluidFieldMaster(TML_Comm* comm,const string& fieldname,const string& filename,const string& savetype,int t0,int tend,int dt)
1210+ :AFieldMaster(comm,fieldname,filename,savetype,t0,tend,dt)
1211+{
1212+ m_comm->broadcast_cont(fieldname);
1213+ m_comm->broadcast(m_id);
1214+}
1215+
1216+void VectorFluidFieldMaster::collect()
1217+{
1218+ multimap<int,pair<Vec3,Vec3> > temp_mm;
1219+
1220+ // send field id to slaves
1221+ m_comm->broadcast(m_id);
1222+
1223+ //receive data from fields
1224+ m_comm->gather(temp_mm);
1225+
1226+ // collate received data from the multimap into the save vector
1227+ for(multimap<int,pair<Vec3,Vec3> >::iterator iter=temp_mm.begin();
1228+ iter!=temp_mm.end();
1229+ iter++){
1230+ m_save_vector.push_back(iter->second);
1231+ }
1232+}
1233+
1234+
1235+/*!
1236+ sum data and write them out into a single continuous file
1237+
1238+ \warning untested
1239+*/
1240+void VectorFluidFieldMaster::writeAsSUM()
1241+{
1242+ Vec3 sum_data=Vec3(0.0,0.0,0.0);
1243+ for(vector<pair<Vec3,Vec3> >::iterator iter=m_save_vector.begin();
1244+ iter!=m_save_vector.end();
1245+ iter++){
1246+ sum_data+=iter->second;
1247+ }
1248+ // open file for appending
1249+ ofstream out_file(m_file_name.c_str(),ios::app);
1250+ // write data
1251+ out_file << sum_data << endl;
1252+ // close file
1253+ out_file.close();
1254+ m_save_vector.erase(m_save_vector.begin(),m_save_vector.end());
1255+}
1256+
1257+/*!
1258+ get max datum and write it out into a single continuous file
1259+
1260+ \warning untested
1261+*/
1262+void VectorFluidFieldMaster::writeAsMAX()
1263+{
1264+ Vec3 max_data=(m_save_vector.begin())->second;
1265+ for(vector<pair<Vec3,Vec3> >::iterator iter=m_save_vector.begin();
1266+ iter!=m_save_vector.end();
1267+ iter++){
1268+ max_data=(iter->second.norm2() > max_data.norm2()) ? iter->second : max_data;
1269+ }
1270+ // open file for appending
1271+ ofstream out_file(m_file_name.c_str(),ios::app);
1272+ // write data
1273+ out_file << max_data << endl;
1274+ // close file
1275+ out_file.close();
1276+ m_save_vector.erase(m_save_vector.begin(),m_save_vector.end());
1277+}
1278+
1279+
1280+/*!
1281+ write data as a raw series of values, one row of values per time step,
1282+ all timesteps into the same file
1283+*/
1284+void VectorFluidFieldMaster::writeAsRAW_SERIES()
1285+{
1286+ // open file
1287+ ofstream out_file(m_file_name.c_str(),ios::app);
1288+
1289+ // write data
1290+ for(vector<pair<Vec3,Vec3> >::iterator iter=m_save_vector.begin();
1291+ iter!=m_save_vector.end();
1292+ iter++){
1293+ out_file << iter->first << " " << iter->second << endl;
1294+ }
1295+ out_file << endl;
1296+ out_file.close();
1297+ m_save_vector.erase(m_save_vector.begin(),m_save_vector.end());
1298+}
1299+
1300+/*!
1301+ write data as raw position, value pairs, one time step per file
1302+*/
1303+void VectorFluidFieldMaster::writeAsRAW()
1304+{
1305+ //generate filename
1306+ string fn=makeFilename();
1307+
1308+ // open file
1309+ ofstream out_file(fn.c_str());
1310+
1311+ for(vector<pair<Vec3,Vec3> >::iterator iter=m_save_vector.begin();
1312+ iter!=m_save_vector.end();
1313+ iter++){
1314+ out_file << iter->first << " " << iter->second << endl;
1315+ }
1316+
1317+ // close file
1318+ out_file.close();
1319+
1320+ // clean up
1321+ m_save_vector.erase(m_save_vector.begin(),m_save_vector.end());
1322+}
1323+
1324+/*!
1325+ write data as VTU file for paraview, position, value, one time step per file
1326+*/
1327+void VectorFluidFieldMaster::writeAsVTU()
1328+{
1329+ //generate filename
1330+ string fn=makeFilename();
1331+
1332+ // open file
1333+ ofstream out_file(fn.c_str());
1334+
1335+ int n=0;
1336+ for(vector<pair<Vec3,Vec3> >::iterator iter=m_save_vector.begin();
1337+ iter!=m_save_vector.end();
1338+ iter++){
1339+ n++;
1340+ }
1341+ double xside,yside,zside;
1342+ Vec3 number0;
1343+ vector<pair<Vec3,Vec3> >::iterator iter0=m_save_vector.begin();
1344+ Vec3 pos0=iter0->first;
1345+
1346+ for(iter0=m_save_vector.begin();iter0!=m_save_vector.end();iter0++){
1347+ number0=iter0->first;
1348+ if(number0.X()!=pos0.X()){xside=fabs(number0.X()-pos0.X());break;};
1349+ }
1350+ for(iter0=m_save_vector.begin();iter0!=m_save_vector.end();iter0++){
1351+ number0=iter0->first;
1352+ if(number0.Y()!=pos0.Y()){yside=fabs(number0.Y()-pos0.Y());break;};
1353+ }
1354+ for(iter0=m_save_vector.begin();iter0!=m_save_vector.end();iter0++){
1355+ number0=iter0->first;
1356+ if(number0.Z()!=pos0.Z()){zside=fabs(number0.Z()-pos0.Z());break;};
1357+ }
1358+
1359+ double cellsize=(xside+yside+zside)/3.0;
1360+ out_file << "<VTKFile type=\"UnstructuredGrid\" version=\"0.1\">" << endl;
1361+ out_file << "<UnstructuredGrid>" << endl;
1362+ out_file << "<Piece NumberOfPoints=\"" << n << "\" NumberOfCells=\"" << 0 << "\">" << endl;
1363+
1364+ //write positions
1365+ out_file << "<Points>" << endl;
1366+ out_file << "<DataArray NumberOfComponents=\"3\" type=\"Float64\" format=\"ascii\">" << endl;
1367+ for(vector<pair<Vec3,Vec3> >::iterator iter=m_save_vector.begin();
1368+ iter!=m_save_vector.end();
1369+ iter++){
1370+ out_file << iter->first<< endl;
1371+ }
1372+ out_file << "</DataArray>" << endl;
1373+ out_file << "</Points>" << endl;
1374+
1375+ //write cell size
1376+ out_file << "<PointData Scalars=\"cellsize\">" << endl;
1377+ out_file << "<DataArray type=\"Float64\" Name=\"cellsize\" NumberOfComponents=\"1\" format=\"ascii\">" << endl;
1378+ for(vector<pair<Vec3,Vec3> >::iterator iter=m_save_vector.begin();
1379+ iter!=m_save_vector.end();
1380+ iter++){
1381+ out_file << cellsize << endl;
1382+ }
1383+ out_file << "</DataArray>" << endl;
1384+
1385+ //write vector value
1386+ out_file << "<DataArray type=\"Float64\" Name=\"vector value\" NumberOfComponents=\"3\" format=\"ascii\">" << endl;
1387+ for(vector<pair<Vec3,Vec3> >::iterator iter=m_save_vector.begin();
1388+ iter!=m_save_vector.end();
1389+ iter++){
1390+ out_file << iter->second << endl;
1391+ }
1392+
1393+ out_file << "</DataArray>" << endl;
1394+ out_file << "</PointData>" << endl;
1395+ out_file << "<Cells>" << endl;
1396+ out_file << "<DataArray type=\"Int32\" NumberOfComponents=\"1\" Name=\"connectivity\" format=\"ascii\">" << endl;
1397+ out_file << "</DataArray>" << endl;
1398+ out_file << "<DataArray type=\"Int32\" NumberOfComponents=\"1\" Name=\"offsets\" format=\"ascii\">" << endl;
1399+ out_file << "</DataArray>" << endl;
1400+ out_file << "<DataArray type=\"UInt8\" NumberOfComponents=\"1\" Name=\"types\" format=\"ascii\">" << endl;
1401+ out_file << "</DataArray>" << endl;
1402+ out_file << "</Cells>" << endl;
1403+ out_file << "</Piece>" << endl;
1404+ out_file << "</UnstructuredGrid>" << endl;
1405+ out_file << "</VTKFile>" << endl;
1406+
1407+ // close file
1408+ out_file.close();
1409+
1410+ // clean up
1411+ m_save_vector.erase(m_save_vector.begin(),m_save_vector.end());
1412+}
1413+
1414+
1415
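Editor's note on the VTU writer above: the cell size is estimated by scanning for the first coordinate that differs from the first saved position along each axis, then averaging the three spacings. A minimal standalone sketch of that heuristic (Vec3 and its X()/Y()/Z() accessors are the project's; the function name and everything else here are hypothetical):

#include <cmath>
#include <utility>
#include <vector>

double estimateCellSize(const std::vector<std::pair<Vec3,Vec3> >& pts)
{
  if(pts.size()<2) return 0.0;                 // no spacing to measure
  const Vec3 p0=pts.front().first;
  double side[3]={0.0,0.0,0.0};                // stays 0.0 along degenerate axes
  for(size_t i=1;i<pts.size();++i){
    const Vec3& p=pts[i].first;
    if(side[0]==0.0 && p.X()!=p0.X()) side[0]=std::fabs(p.X()-p0.X());
    if(side[1]==0.0 && p.Y()!=p0.Y()) side[1]=std::fabs(p.Y()-p0.Y());
    if(side[2]==0.0 && p.Z()!=p0.Z()) side[2]=std::fabs(p.Z()-p0.Z());
  }
  return (side[0]+side[1]+side[2])/3.0;        // mean edge length
}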
1416=== added file 'Fields/FluidFieldMaster.h'
1417--- Fields/FluidFieldMaster.h 1970-01-01 00:00:00 +0000
1418+++ Fields/FluidFieldMaster.h 2019-07-28 14:55:57 +0000
1419@@ -0,0 +1,106 @@
1420+/////////////////////////////////////////////////////////////
1421+// //
1422+// Copyright (c) 2003-2014 by The University of Queensland //
1423+// Centre for Geoscience Computing //
1424+// http://earth.uq.edu.au/centre-geoscience-computing //
1425+// //
1426+// Primary Business: Brisbane, Queensland, Australia //
1427+// Licensed under the Open Software License version 3.0 //
1428+// http://www.apache.org/licenses/LICENSE-2.0 //
1429+// //
1430+/////////////////////////////////////////////////////////////
1431+
1432+#ifndef __FLUIDFIELDMASTER_H
1433+#define __FLUIDFIELDMASTER_H
1434+
1435+//--- project includes ---
1436+#include "FieldMaster.h"
1437+#include "Foundation/vec3.h"
1438+#include "field_const.h"
1439+
1440+// --- TML includes ---
1441+#include "tml/comm/comm.h"
1442+
1443+//--- IO includes ---
1444+#include <iostream>
1445+#include <fstream>
1446+
1447+//--- STL includes ---
1448+#include <string>
1449+#include <map>
1450+#include <vector>
1451+using std::vector;
1452+using std::map;
1453+using std::multimap;
1454+using std::cout;
1455+using std::endl;
1456+using std::ofstream;
1457+using std::string;
1458+
1459+
1460+class TML_Comm;
1461+
1462+bool sortOnZ(const pair<Vec3,double>, const pair<Vec3,double>);
1463+bool sortOnY(const pair<Vec3,double>, const pair<Vec3,double>);
1464+bool sortOnX(const pair<Vec3,double>, const pair<Vec3,double>);
1465+vector<pair<Vec3,double> > sortVector(vector<pair<Vec3,double> >);
1466+
1467+/*!
1468+ \class ScalarFluidFieldMaster
1469+ \brief Class for master part of a scalar field which is defined on all fluid cells
1470+
1471+ \author Qi Shao
1472+ $Revision$
1473+ $Date$
1474+*/
1475+class ScalarFluidFieldMaster : public AFieldMaster
1476+{
1477+ protected:
1478+ vector<pair<Vec3,double> > m_save_vector;
1479+ vector<double> m_sum_vector;
1480+
1481+ void collectFull();
1482+ void collectSum();
1483+ virtual void writeAsSUM();
1484+ virtual void writeAsMAX();
1485+ virtual void writeAsRAW_SERIES();
1486+ virtual void writeAsRAW();
1487+ virtual void writeAsVTI();
1488+
1489+ public:
1490+ ScalarFluidFieldMaster(TML_Comm*,const string&,const string&,const string&,int,int,int);
1491+ virtual ~ScalarFluidFieldMaster(){};
1492+
1493+ virtual void collect();
1494+
1495+};
1496+
1497+/*!
1498+ \class VectorFluidFieldMaster
1499+ \brief Class for master part of a vector field which is defined on all fluid cells
1500+
1501+ \author Qi Shao
1502+ $Revision$
1503+ $Date$
1504+*/
1505+class VectorFluidFieldMaster : public AFieldMaster
1506+{
1507+ protected:
1508+ vector<pair<Vec3,Vec3> > m_save_vector;
1509+
1510+ virtual void writeAsSUM();
1511+ virtual void writeAsMAX();
1512+ virtual void writeAsRAW_SERIES();
1513+ virtual void writeAsRAW();
1514+ virtual void writeAsVTU();
1515+
1516+ public:
1517+ VectorFluidFieldMaster(TML_Comm*,const string&,const string&,const string&,int,int,int);
1518+
1519+ virtual ~VectorFluidFieldMaster(){};
1520+
1521+ void collect();
1522+};
1523+
1524+
1525+#endif //__FLUIDFIELDMASTER_H
1526
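For reviewers unfamiliar with the field-saver architecture: the fluid field masters drive a fixed handshake with the slave processes. A condensed sketch of the message order, reusing only the TML calls that appear in this diff (the wrapper function names and the FULL_WITH_POS choice are illustrative; the field-id reception on the slave side happens elsewhere and is omitted):

void masterCollect(TML_Comm* comm, int field_id)
{
  comm->broadcast(field_id);                         // 1. which field to collect
  int coll_type=COLL_TYPE_FULL_WITH_POS;             // 2. how to collect it
  comm->broadcast(coll_type);
  std::multimap<int,std::pair<Vec3,double> > mm;     // keyed by slave rank
  comm->gather(mm);                                  // 3. pull the data in
}

void slaveRespond(TML_Comm* comm)
{
  int coll_type;
  comm->recv_broadcast(coll_type,0);                 // pairs with step 2
  std::vector<std::pair<Vec3,double> > data;         // filled from local storage
  comm->send_gather(data,0);                         // pairs with step 3
}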
1527=== added file 'Fields/FluidInteractionFieldMaster.cpp'
1528--- Fields/FluidInteractionFieldMaster.cpp 1970-01-01 00:00:00 +0000
1529+++ Fields/FluidInteractionFieldMaster.cpp 2019-07-28 14:55:57 +0000
1530@@ -0,0 +1,695 @@
1531+/////////////////////////////////////////////////////////////
1532+// //
1533+// Copyright (c) 2003-2014 by The University of Queensland //
1534+// Centre for Geoscience Computing //
1535+// http://earth.uq.edu.au/centre-geoscience-computing //
1536+// //
1537+// Primary Business: Brisbane, Queensland, Australia //
1538+// Licensed under the Open Software License version 3.0 //
1539+// http://www.apache.org/licenses/LICENSE-2.0 //
1540+// //
1541+/////////////////////////////////////////////////////////////
1542+
1543+// --- TML includes ---
1544+#include "tml/comm/comm.h"
1545+
1546+// --- Project includes ---
1547+#include "FluidInteractionFieldMaster.h"
1548+#include "field_const.h"
1549+#include "console.h"
1550+#include "FieldMaster.h"
1551+#include "Foundation/vec3.h"
1552+#include "field_const.h"
1553+
1554+
1555+//--- IO includes ---
1556+#include <iostream>
1557+#include <fstream>
1558+
1559+//--- STL includes ---
1560+#include <string>
1561+#include <map>
1562+
1563+using std::cout;
1564+using std::endl;
1565+using std::ofstream;
1566+using std::string;
1567+using std::multimap;
1568+
1569+
1570+// === SCALAR FLUID INTERACTION FIELD MASTER ===
1571+/*!
1572+ Constructor. Setup master and broadcast parameters to slaves
1573+
1574+ \param comm the communicator
1575+ \param fieldname the name of the field to be saved
1576+ \param filename the name of the file to be saved into or the base for the generation of the filenames if the saving format requires multiple files
1577+  \param savetype the way to save data; SUM, MAX and the RAW_* variants are currently supported
1578+ \param t0 the first timestep to be saved
1579+ \param tend the last timestep to be saved
1580+  \param dt the interval between saved timesteps
1582+*/
1583+ScalarFluidInteractionFieldMaster::ScalarFluidInteractionFieldMaster(TML_Comm* comm,const string& fieldname,const string& filename,const string& savetype,int t0,int tend,int dt)
1584+ :AFieldMaster(comm,fieldname,filename,savetype,t0,tend,dt)
1585+{
1586+ m_comm->broadcast_cont(fieldname);
1587+ m_comm->broadcast(m_id);
1588+}
1589+
1590+
1591+void ScalarFluidInteractionFieldMaster::collect()
1592+{
1593+ // send field id to slave
1594+ m_comm->broadcast(m_id);
1595+
1596+ switch(m_write_type){
1597+ case WRITE_TYPE_SUM: collectSum(); break;
1598+ case WRITE_TYPE_MAX: collectMax(); break;
1599+ case WRITE_TYPE_RAW_WITH_POS: collectFullwithPos(); break;
1600+ case WRITE_TYPE_RAW_WITH_ID: collectFullwithID(); break;
1601+ case WRITE_TYPE_RAW_WITH_ID_POS: collectFullwithIDPos(); break;
1602+ case WRITE_TYPE_RAW_WITH_PARTICLE: collectFullwithParticle(); break;
1603+
1604+ default: collectFullwithParticle();
1605+ }
1606+}
1607+
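Each branch above pairs a WRITE_TYPE_* value with the COLL_TYPE_* that the matching collect helper broadcasts; both enums gain corresponding members in the Fields/field_const.h hunk later in this diff. A trace of the flow for one request (the savetype-string column is illustrative; the string parsing happens outside this hunk):

// savetype string      ->  m_write_type                   ->  broadcast to slaves
// "RAW_WITH_PARTICLE"      WRITE_TYPE_RAW_WITH_PARTICLE       COLL_TYPE_FULL_WITH_PARTICLE
// "SUM"                    WRITE_TYPE_SUM                     COLL_TYPE_SUM
// "MAX"                    WRITE_TYPE_MAX                     COLL_TYPE_MAX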
1608+
1609+/*!
1610+ collect full data set, particle position
1611+*/
1612+void ScalarFluidInteractionFieldMaster::collectFullwithPos()
1613+{
1614+ multimap<int,pair<Vec3,double> > temp_mm;
1615+
1616+ // send type of collect to slaves
1617+ int coll_type=COLL_TYPE_FULL_WITH_POS;
1618+ m_comm->broadcast(coll_type);
1619+
1620+ // get data from slaves
1621+ m_comm->gather(temp_mm);
1622+  // collate the received data from the multimap into a single vector
1623+ for(multimap<int,pair<Vec3,double> >::iterator iter=temp_mm.begin();
1624+ iter!=temp_mm.end();
1625+ iter++){
1626+ m_data_with_pos.push_back(iter->second);
1627+ }
1628+}
1629+
1630+/*!
1631+ collect full data set, particle id
1632+*/
1633+void ScalarFluidInteractionFieldMaster::collectFullwithID()
1634+{
1635+ multimap<int,pair<int,double> > temp_mm;
1636+
1637+ // send type of collect to slaves
1638+ int coll_type=COLL_TYPE_FULL_WITH_ID;
1639+ m_comm->broadcast(coll_type);
1640+
1641+ // get data from slaves
1642+ m_comm->gather(temp_mm);
1643+  // collate the received data from the multimap into a single vector
1644+ for(multimap<int,pair<int,double> >::iterator iter=temp_mm.begin();
1645+ iter!=temp_mm.end();
1646+ iter++){
1647+ m_data_with_id.push_back(iter->second);
1648+ }
1649+}
1650+
1651+/*!
1652+ collect full data set, particle id and position
1653+*/
1654+void ScalarFluidInteractionFieldMaster::collectFullwithIDPos()
1655+{
1656+ multimap<int,pair<pair<int,Vec3>,double> > temp_mm;
1657+
1658+ // send type of collect to slaves
1659+ int coll_type=COLL_TYPE_FULL_WITH_ID_POS;
1660+ m_comm->broadcast(coll_type);
1661+
1662+ // get data from slaves
1663+ m_comm->gather(temp_mm);
1664+  // collate the received data from the multimap into a single vector
1665+ for(multimap<int,pair<pair<int,Vec3>,double> >::iterator iter=temp_mm.begin();
1666+ iter!=temp_mm.end();
1667+ iter++){
1668+ m_data_with_id_pos.push_back(iter->second);
1669+ }
1670+}
1671+
1672+/*!
1673+ collect full data set, particle id, position and radius
1674+*/
1675+void ScalarFluidInteractionFieldMaster::collectFullwithParticle()
1676+{
1677+ multimap<int,pair<esys::lsm::triplet<int,Vec3,double>,double> > temp_mm;
1678+
1679+ // send type of collect to slaves
1680+ int coll_type=COLL_TYPE_FULL_WITH_PARTICLE;
1681+ m_comm->broadcast(coll_type);
1682+
1683+ // get data from slaves
1684+ m_comm->gather(temp_mm);
1685+  // collate the received data from the multimap into a single vector
1686+ for(multimap<int,pair<esys::lsm::triplet<int,Vec3,double>,double> >::iterator iter=temp_mm.begin();
1687+ iter!=temp_mm.end();
1688+ iter++){
1689+ m_data_with_particle.push_back(iter->second);
1690+ }
1691+}
1692+
1693+/*!
1694+ collect sum of values only
1695+*/
1696+void ScalarFluidInteractionFieldMaster::collectSum()
1697+{
1698+ multimap<int,double> temp_mm;
1699+
1700+ // send type of collect to slaves
1701+ int coll_type=COLL_TYPE_SUM;
1702+ m_comm->broadcast(coll_type);
1703+
1704+ // get data from slaves
1705+ m_comm->gather(temp_mm);
1706+
1707+  // collate the received data from the multimap into a single vector
1708+ for(multimap<int,double>::iterator iter=temp_mm.begin();
1709+ iter!=temp_mm.end();
1710+ iter++){
1711+ m_sum_vec.push_back(iter->second);
1712+ }
1713+}
1714+
1715+/*!
1716+ collect max of values only
1717+*/
1718+void ScalarFluidInteractionFieldMaster::collectMax()
1719+{
1720+ multimap<int,double> temp_mm;
1721+
1722+ // send type of collect to slaves
1723+ int coll_type=COLL_TYPE_MAX;
1724+ m_comm->broadcast(coll_type);
1725+
1726+ // get data from slaves
1727+ m_comm->gather(temp_mm);
1728+
1729+  // collate the received data from the multimap into a single vector
1730+ for(multimap<int,double>::iterator iter=temp_mm.begin();
1731+ iter!=temp_mm.end();
1732+ iter++){
1733+ m_max_vec.push_back(iter->second);
1734+ }
1735+}
1736+
1737+
1738+/*!
1739+ write data as pos,value groups
1740+*/
1741+void ScalarFluidInteractionFieldMaster::writeAsRawWithPos()
1742+{
1743+ //generate filename
1744+ string fn=makeFilename();
1745+
1746+ // write data
1747+ ofstream out_file(fn.c_str());
1748+  // check if the file was successfully opened
1749+  if(!out_file){
1750+    console.Error() << "cannot open output file " << fn << "\n";
1751+ } else {
1752+ int count=0;
1753+ console.XDebug() << m_data_with_pos.size() << " vectors to be written\n";
1754+ for(vector<pair<Vec3,double> >::iterator iter=m_data_with_pos.begin();
1755+ iter!=m_data_with_pos.end();
1756+ iter++) {
1757+ out_file << iter->first << " " << iter->second << endl;
1758+ count++;
1759+ // write debug message every 10k writes
1760+ if((count % 10000)==0){
1761+ console.XDebug() << count << " vectors written\n";
1762+ }
1763+ }
1764+ console.XDebug() << "finished writing " << count << " vectors \n";
1765+ // close file
1766+ out_file.close();
1767+ }
1768+ //clean up
1769+ m_data_with_pos.clear();
1770+}
1771+
1772+/*!
1773+ write data as id,value groups
1774+*/
1775+void ScalarFluidInteractionFieldMaster::writeAsRawWithID()
1776+{
1777+ //generate filename
1778+ string fn=makeFilename();
1779+
1780+ // write data
1781+ ofstream out_file(fn.c_str());
1782+  // check if the file was successfully opened
1783+  if(!out_file){
1784+    console.Error() << "cannot open output file " << fn << "\n";
1785+ } else {
1786+ int count=0;
1787+ console.XDebug() << m_data_with_id.size() << " vectors to be written\n";
1788+ for(vector<pair<int,double> >::iterator iter=m_data_with_id.begin();
1789+ iter!=m_data_with_id.end();
1790+ iter++) {
1791+ out_file << iter->first << " " << iter->second << endl;
1792+ count++;
1793+ // write debug message every 10k writes
1794+ if((count % 10000)==0){
1795+ console.XDebug() << count << " vectors written\n";
1796+ }
1797+ }
1798+ console.XDebug() << "finished writing " << count << " vectors \n";
1799+ // close file
1800+ out_file.close();
1801+ }
1802+ //clean up
1803+ m_data_with_id.clear();
1804+}
1805+
1806+/*!
1807+ write data as id,pos,value groups
1808+*/
1809+void ScalarFluidInteractionFieldMaster::writeAsRawWithIDPos()
1810+{
1811+  std::cerr << "ScalarFluidInteractionFieldMaster::writeAsRawWithIDPos not implemented" << endl;
1812+}
1813+
1814+/*!
1815+ write data as id,pos,radius,value groups
1816+*/
1817+void ScalarFluidInteractionFieldMaster::writeAsRawWithParticle()
1818+{
1819+ //generate filename
1820+ string fn=makeFilename();
1821+
1822+ // write data
1823+ ofstream out_file(fn.c_str());
1824+  // check if the file was successfully opened
1825+  if(!out_file){
1826+    console.Error() << "cannot open output file " << fn << "\n";
1827+ } else {
1828+ int count=0;
1829+ console.XDebug() << m_data_with_particle.size() << " vectors to be written\n";
1830+ for(vector<pair<esys::lsm::triplet<int,Vec3,double>,double> >::iterator iter=m_data_with_particle.begin();
1831+ iter!=m_data_with_particle.end();
1832+ iter++) {
1833+ out_file
1834+ << (iter->first).get<0>()
1835+ << " "
1836+ << (iter->first).get<1>()
1837+ << " "
1838+ << (iter->first).get<2>()
1839+ << " "
1840+ << iter->second
1841+ << endl;
1842+ count++;
1843+ // write debug message every 10k writes
1844+ if((count % 10000)==0){
1845+ console.XDebug() << count << " vectors written\n";
1846+ }
1847+ }
1848+ console.XDebug() << "finished writing " << count << " vectors \n";
1849+ // close file
1850+ out_file.close();
1851+ }
1852+ //clean up
1853+ m_data_with_particle.clear();
1854+}
1855+
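The loop above flattens each particle triplet into one whitespace-separated record. A sketch of a single record, with hypothetical values, matching the get<0..2>() accesses used there:

int    id     = 42;            // (iter->first).get<0>()
Vec3   pos(1.5, 0.0, 2.25);    // (iter->first).get<1>()
double radius = 0.35;          // (iter->first).get<2>()
double value  = 0.00981;       // iter->second
out_file << id << " " << pos << " " << radius << " " << value << endl;
// -> "42 1.5 0.0 2.25 0.35 0.00981" (Vec3's operator<< supplies its own spacing)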
1856+
1857+/*!
1858+ sum data and write them out into a single continuous file
1859+*/
1860+void ScalarFluidInteractionFieldMaster::writeAsSUM()
1861+{
1862+ // sum data
1863+ double sum_data=0.0;
1864+ for(vector<double>::iterator iter=m_sum_vec.begin();
1865+ iter!=m_sum_vec.end();
1866+ iter++){
1867+ sum_data+=*iter;
1868+ }
1869+ // open file for appending
1870+ ofstream out_file(m_file_name.c_str(),ios::app);
1871+
1872+ // set output precision to 10 significant digits, scientific format
1873+ out_file.setf(ios_base::scientific, ios_base::floatfield);
1874+ out_file.precision(10);
1875+
1876+ // write data
1877+ out_file << sum_data << endl;
1878+
1879+ // close file
1880+ out_file.close();
1881+ //clean up
1882+ m_sum_vec.erase(m_sum_vec.begin(),m_sum_vec.end());
1883+
1884+}
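writeAsSUM pins the stream format before writing; the effect of those two calls, for anyone checking output compatibility downstream (value and filename are made up):

std::ofstream out("sum.dat", std::ios::app);
out.setf(std::ios_base::scientific, std::ios_base::floatfield);
out.precision(10);
out << 12345.6789 << std::endl;   // writes 1.2345678900e+04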
1885+
1886+/*!
1887+ get the maximum of the data and write it out into a single continuous file
1888+*/
1889+void ScalarFluidInteractionFieldMaster::writeAsMAX()
1890+{
1891+  // find maximum (0.0 if nothing was collected)
1892+  double max_data=m_max_vec.empty() ? 0.0 : m_max_vec.front();
1893+ for(vector<double>::iterator iter=m_max_vec.begin();
1894+ iter!=m_max_vec.end();
1895+ iter++){
1896+ max_data=(*iter > max_data) ? *iter : max_data;
1897+ }
1898+ // open file for appending
1899+ ofstream out_file(m_file_name.c_str(),ios::app);
1900+ // write data
1901+ out_file << max_data << endl;
1902+
1903+ // close file
1904+ out_file.close();
1905+ //clean up
1906+ m_max_vec.erase(m_max_vec.begin(),m_max_vec.end());
1907+
1908+}
1909+
1910+/*!
1911+ write data as a raw series of values, one row of values per time step,
1912+ all timesteps into the same file
1913+*/
1914+void ScalarFluidInteractionFieldMaster::writeAsRAW_SERIES()
1915+{
1916+  std::cerr << "ScalarFluidInteractionFieldMaster::writeAsRAW_SERIES not implemented" << endl;
1917+}
1918+
1919+
1920+
1921+
1922+
1923+// === VECTOR FLUID INTERACTION FIELD MASTER ===
1924+/*!
1925+ Constructor. Setup master and broadcast parameters to slaves
1926+
1927+ \param comm the communicator
1928+ \param fieldname the name of the field to be saved
1929+ \param filename the name of the file to be saved into or the base for the generation of the filenames if the saving format requires multiple files
1930+  \param savetype the way to save data; SUM and the RAW_* variants are currently supported (MAX is not yet implemented for vector fields)
1931+ \param t0 the first timestep to be saved
1932+ \param tend the last timestep to be saved
1933+  \param dt the interval between saved timesteps
1935+*/
1936+VectorFluidInteractionFieldMaster::VectorFluidInteractionFieldMaster(TML_Comm* comm,const string& fieldname,const string& filename,const string& savetype,int t0,int tend,int dt)
1937+ :AFieldMaster(comm,fieldname,filename,savetype,t0,tend,dt)
1938+{
1939+ m_comm->broadcast_cont(fieldname);
1940+ m_comm->broadcast(m_id);
1941+}
1942+
1943+
1944+void VectorFluidInteractionFieldMaster::collect()
1945+{
1946+ // send field id to slave
1947+ m_comm->broadcast(m_id);
1948+
1949+ switch(m_write_type){
1950+ case WRITE_TYPE_SUM: collectSum(); break;
1951+ //case WRITE_TYPE_MAX: collectMax(); break;
1952+ case WRITE_TYPE_RAW_WITH_POS: collectFullwithPos(); break;
1953+ case WRITE_TYPE_RAW_WITH_ID: collectFullwithID(); break;
1954+ case WRITE_TYPE_RAW_WITH_ID_POS: collectFullwithIDPos(); break;
1955+ case WRITE_TYPE_RAW_WITH_PARTICLE: collectFullwithParticle(); break;
1956+
1957+ default: collectFullwithParticle();
1958+ }
1959+}
1960+
1961+
1962+/*!
1963+ collect full data set, particle position
1964+*/
1965+void VectorFluidInteractionFieldMaster::collectFullwithPos()
1966+{
1967+ multimap<int,pair<Vec3,Vec3 > > temp_mm;
1968+
1969+ // send type of collect to slaves
1970+ int coll_type=COLL_TYPE_FULL_WITH_POS;
1971+ m_comm->broadcast(coll_type);
1972+
1973+ // get data from slaves
1974+ m_comm->gather(temp_mm);
1975+  // collate the received data from the multimap into a single vector
1976+ for(multimap<int,pair<Vec3,Vec3> >::iterator iter=temp_mm.begin();
1977+ iter!=temp_mm.end();
1978+ iter++){
1979+ m_data_with_pos.push_back(iter->second);
1980+ }
1981+}
1982+
1983+/*!
1984+ collect full data set, particle id
1985+*/
1986+void VectorFluidInteractionFieldMaster::collectFullwithID()
1987+{
1988+ multimap<int,pair<int,Vec3> > temp_mm;
1989+
1990+ // send type of collect to slaves
1991+ int coll_type=COLL_TYPE_FULL_WITH_ID;
1992+ m_comm->broadcast(coll_type);
1993+
1994+ // get data from slaves
1995+ m_comm->gather(temp_mm);
1996+  // collate the received data from the multimap into a single vector
1997+ for(multimap<int,pair<int,Vec3> >::iterator iter=temp_mm.begin();
1998+ iter!=temp_mm.end();
1999+ iter++){
2000+ m_data_with_id.push_back(iter->second);
2001+ }
2002+}
2003+
2004+/*!
2005+ collect full data set, particle id and position
2006+*/
2007+void VectorFluidInteractionFieldMaster::collectFullwithIDPos()
2008+{
2009+ multimap<int,pair<pair<int,Vec3>,Vec3> > temp_mm;
2010+
2011+ // send type of collect to slaves
2012+ int coll_type=COLL_TYPE_FULL_WITH_ID_POS;
2013+ m_comm->broadcast(coll_type);
2014+
2015+ // get data from slaves
2016+ m_comm->gather(temp_mm);
2017+  // collate the received data from the multimap into a single vector
2018+ for(multimap<int,pair<pair<int,Vec3>,Vec3> >::iterator iter=temp_mm.begin();
2019+ iter!=temp_mm.end();
2020+ iter++){
2021+ m_data_with_id_pos.push_back(iter->second);
2022+ }
2023+}
2024+
2025+/*!
2026+ collect full data set, particle id, position and radius
2027+*/
2028+void VectorFluidInteractionFieldMaster::collectFullwithParticle()
2029+{
2030+ multimap<int,pair<esys::lsm::triplet<int,Vec3,double>,Vec3> > temp_mm;
2031+
2032+ // send type of collect to slaves
2033+ int coll_type=COLL_TYPE_FULL_WITH_PARTICLE;
2034+ m_comm->broadcast(coll_type);
2035+
2036+ // get data from slaves
2037+ m_comm->gather(temp_mm);
2038+  // collate the received data from the multimap into a single vector
2039+ for(multimap<int,pair<esys::lsm::triplet<int,Vec3,double>,Vec3> >::iterator iter=temp_mm.begin();
2040+ iter!=temp_mm.end();
2041+ iter++){
2042+ m_data_with_particle.push_back(iter->second);
2043+ }
2044+}
2045+
2046+/*!
2047+ collect sum of values only
2048+*/
2049+void VectorFluidInteractionFieldMaster::collectSum()
2050+{
2051+ multimap<int,Vec3> temp_mm;
2052+
2053+ // send type of collect to slaves
2054+ int coll_type=COLL_TYPE_SUM;
2055+ m_comm->broadcast(coll_type);
2056+
2057+ // get data from slaves
2058+ m_comm->gather(temp_mm);
2059+
2060+  // collate the received data from the multimap into a single vector
2061+ for(multimap<int,Vec3>::iterator iter=temp_mm.begin();
2062+ iter!=temp_mm.end();
2063+ iter++){
2064+ m_sum_vec.push_back(iter->second);
2065+ }
2066+}
2067+
2068+
2069+/*!
2070+ write data as pos,value groups
2071+*/
2072+void VectorFluidInteractionFieldMaster::writeAsRawWithPos()
2073+{
2074+ //generate filename
2075+ string fn=makeFilename();
2076+
2077+ // write data
2078+ ofstream out_file(fn.c_str());
2079+  // check if the file was successfully opened
2080+  if(!out_file){
2081+    console.Error() << "cannot open output file " << fn << "\n";
2082+ } else {
2083+ int count=0;
2084+ console.XDebug() << m_data_with_pos.size() << " vectors to be written\n";
2085+ for(vector<pair<Vec3,Vec3> >::iterator iter=m_data_with_pos.begin();
2086+ iter!=m_data_with_pos.end();
2087+ iter++) {
2088+ out_file << iter->first << " " << iter->second << endl;
2089+ count++;
2090+ // write debug message every 10k writes
2091+ if((count % 10000)==0){
2092+ console.XDebug() << count << " vectors written\n";
2093+ }
2094+ }
2095+ console.XDebug() << "finished writing " << count << " vectors \n";
2096+ // close file
2097+ out_file.close();
2098+ }
2099+ //clean up
2100+ m_data_with_pos.clear();
2101+}
2102+
2103+/*!
2104+ write data as id,value groups
2105+*/
2106+void VectorFluidInteractionFieldMaster::writeAsRawWithID()
2107+{
2108+ //generate filename
2109+ string fn=makeFilename();
2110+
2111+ // write data
2112+ ofstream out_file(fn.c_str());
2113+  // check if the file was successfully opened
2114+  if(!out_file){
2115+    console.Error() << "cannot open output file " << fn << "\n";
2116+ } else {
2117+ int count=0;
2118+ console.XDebug() << m_data_with_id.size() << " vectors to be written\n";
2119+ for(vector<pair<int,Vec3> >::iterator iter=m_data_with_id.begin();
2120+ iter!=m_data_with_id.end();
2121+ iter++) {
2122+ out_file << iter->first << " " << iter->second << endl;
2123+ count++;
2124+ // write debug message every 10k writes
2125+ if((count % 10000)==0){
2126+ console.XDebug() << count << " vectors written\n";
2127+ }
2128+ }
2129+ console.XDebug() << "finished writing " << count << " vectors \n";
2130+ // close file
2131+ out_file.close();
2132+ }
2133+ //clean up
2134+ m_data_with_id.clear();
2135+}
2136+
2137+/*!
2138+ write data as id,pos,value groups
2139+*/
2140+void VectorFluidInteractionFieldMaster::writeAsRawWithIDPos()
2141+{
2142+  std::cerr << "VectorFluidInteractionFieldMaster::writeAsRawWithIDPos not implemented" << endl;
2143+}
2144+
2145+/*!
2146+ write data as id,pos,radius,value groups
2147+*/
2148+void VectorFluidInteractionFieldMaster::writeAsRawWithParticle()
2149+{
2150+ //generate filename
2151+ string fn=makeFilename();
2152+
2153+ // write data
2154+ ofstream out_file(fn.c_str());
2155+  // check if the file was successfully opened
2156+  if(!out_file){
2157+    console.Error() << "cannot open output file " << fn << "\n";
2158+ } else {
2159+ int count=0;
2160+ console.XDebug() << m_data_with_particle.size() << " vectors to be written\n";
2161+ for(vector<pair<esys::lsm::triplet<int,Vec3,double>,Vec3> >::iterator iter=m_data_with_particle.begin();
2162+ iter!=m_data_with_particle.end();
2163+ iter++) {
2164+ out_file
2165+ << (iter->first).get<0>()
2166+ << " "
2167+ << (iter->first).get<1>()
2168+ << " "
2169+ << (iter->first).get<2>()
2170+ << " "
2171+ << iter->second
2172+ << endl;
2173+ count++;
2174+ // write debug message every 10k writes
2175+ if((count % 10000)==0){
2176+ console.XDebug() << count << " vectors written\n";
2177+ }
2178+ }
2179+ console.XDebug() << "finished writing " << count << " vectors \n";
2180+ // close file
2181+ out_file.close();
2182+ }
2183+ //clean up
2184+ m_data_with_particle.clear();
2185+}
2186+
2187+
2188+/*!
2189+ sum data and write them out into a single continuous file
2190+*/
2191+void VectorFluidInteractionFieldMaster::writeAsSUM()
2192+{
2193+ // sum data
2194+ Vec3 sum_data=Vec3(0.0,0.0,0.0);
2195+ for(vector<Vec3>::iterator iter=m_sum_vec.begin();
2196+ iter!=m_sum_vec.end();
2197+ iter++){
2198+ sum_data+=*iter;
2199+ }
2200+ // open file for appending
2201+ ofstream out_file(m_file_name.c_str(),ios::app);
2202+
2203+ // set output precision to 10 significant digits, scientific format
2204+ out_file.setf(ios_base::scientific, ios_base::floatfield);
2205+ out_file.precision(10);
2206+
2207+ // write data
2208+ out_file << sum_data << endl;
2209+
2210+ // close file
2211+ out_file.close();
2212+ //clean up
2213+ m_sum_vec.erase(m_sum_vec.begin(),m_sum_vec.end());
2214+
2215+}
2216+
2217+/*!
2218+ write data as a raw series of values, one row of values per time step,
2219+ all timesteps into the same file
2220+*/
2221+void VectorFluidInteractionFieldMaster::writeAsRAW_SERIES()
2222+{
2223+  std::cerr << "VectorFluidInteractionFieldMaster::writeAsRAW_SERIES not implemented" << endl;
2224+}
2225+
2226
2227=== added file 'Fields/FluidInteractionFieldMaster.h'
2228--- Fields/FluidInteractionFieldMaster.h 1970-01-01 00:00:00 +0000
2229+++ Fields/FluidInteractionFieldMaster.h 2019-07-28 14:55:57 +0000
2230@@ -0,0 +1,124 @@
2231+/////////////////////////////////////////////////////////////
2232+// //
2233+// Copyright (c) 2003-2014 by The University of Queensland //
2234+// Centre for Geoscience Computing //
2235+// http://earth.uq.edu.au/centre-geoscience-computing //
2236+// //
2237+// Primary Business: Brisbane, Queensland, Australia //
2238+// Licensed under the Open Software License version 3.0 //
2239+// http://www.apache.org/licenses/LICENSE-2.0 //
2240+// //
2241+/////////////////////////////////////////////////////////////
2242+
2243+#ifndef __FLUIDINTERACTIONFIELDMASTER_H
2244+#define __FLUIDINTERACTIONFIELDMASTER_H
2245+
2246+//--- project includes ---
2247+#include "FieldMaster.h"
2248+#include "Foundation/vec3.h"
2249+#include "Foundation/quintuple.h"
2250+#include "Foundation/triplet.h"
2251+#include "field_const.h"
2252+
2253+// --- TML includes ---
2254+#include "tml/comm/comm.h"
2255+
2256+//--- IO includes ---
2257+#include <iostream>
2258+#include <fstream>
2259+
2260+//--- STL includes ---
2261+#include <vector>
2262+#include <string>
2263+#include <map>
2264+
2265+using std::vector;
2266+using std::map;
2267+using std::multimap;
2268+using std::cout;
2269+using std::endl;
2270+using std::ofstream;
2271+using std::string;
2272+
2273+class TML_Comm;
2274+
2275+/*!
2276+ \class ScalarFluidInteractionFieldMaster
2277+ \brief Class for master part of a scalar field which is defined on all fluid interactions
2278+
2279+ \author Qi Shao
2280+ $Revision$
2281+ $Date$
2282+*/
2283+class ScalarFluidInteractionFieldMaster : public AFieldMaster
2284+{
2285+ protected:
2286+ vector<pair<Vec3,double> > m_data_with_pos; // vector of <position,value> pairs
2287+ vector<pair<int,double> > m_data_with_id;
2288+ vector<pair<pair<int,Vec3>,double> > m_data_with_id_pos;
2289+ vector<pair<esys::lsm::triplet<int,Vec3,double>,double> > m_data_with_particle;
2290+
2291+ vector<double> m_sum_vec;
2292+ vector<double> m_max_vec;
2293+
2294+ virtual void writeAsRawWithPos();
2295+ virtual void writeAsRawWithID();
2296+ virtual void writeAsRawWithIDPos();
2297+ virtual void writeAsRawWithParticle();
2298+ virtual void writeAsSUM();
2299+ virtual void writeAsMAX();
2300+ virtual void writeAsRAW_SERIES();
2301+
2302+ void collectFullwithPos();
2303+ void collectFullwithID();
2304+ void collectFullwithIDPos();
2305+ void collectFullwithParticle();
2306+ void collectSum();
2307+ void collectMax();
2308+
2309+ public:
2310+ ScalarFluidInteractionFieldMaster(TML_Comm*,const string&,const string&,const string&,int,int,int);
2311+ virtual ~ScalarFluidInteractionFieldMaster(){};
2312+
2313+ void collect();
2314+};
2315+
2316+
2317+/*!
2318+ \class VectorFluidInteractionFieldMaster
2319+ \brief Class for master part of a vector field which is defined on all fluid interactions
2320+
2321+ \author Qi Shao
2322+ $Revision$
2323+ $Date$
2324+*/
2325+class VectorFluidInteractionFieldMaster : public AFieldMaster
2326+{
2327+ protected:
2328+ vector<pair<Vec3,Vec3> > m_data_with_pos; // vector of <position,value> pairs
2329+ vector<pair<int,Vec3> > m_data_with_id;
2330+ vector<pair<pair<int,Vec3>,Vec3> > m_data_with_id_pos;
2331+ vector<pair<esys::lsm::triplet<int,Vec3,double>,Vec3> > m_data_with_particle;
2332+
2333+ vector<Vec3> m_sum_vec;
2334+
2335+ virtual void writeAsRawWithPos();
2336+ virtual void writeAsRawWithID();
2337+ virtual void writeAsRawWithIDPos();
2338+ virtual void writeAsRawWithParticle();
2339+ virtual void writeAsSUM();
2340+ virtual void writeAsRAW_SERIES();
2341+
2342+ void collectFullwithPos();
2343+ void collectFullwithID();
2344+ void collectFullwithIDPos();
2345+ void collectFullwithParticle();
2346+ void collectSum();
2347+
2348+ public:
2349+ VectorFluidInteractionFieldMaster(TML_Comm*,const string&,const string&,const string&,int,int,int);
2350+ virtual ~VectorFluidInteractionFieldMaster(){};
2351+
2352+ void collect();
2353+};
2354+#endif //__FLUIDINTERACTIONFIELDMASTER_H
2355
2356=== modified file 'Fields/Makefile.am'
2357--- Fields/Makefile.am 2017-01-04 08:26:43 +0000
2358+++ Fields/Makefile.am 2019-07-28 14:55:57 +0000
2359@@ -61,7 +61,19 @@
2360 VectorEdge2DFieldSlave.h \
2361 VectorEdge2DFieldSlave.cpp \
2362 MaxTrigger.cpp \
2363- MaxTrigger.h
2364+ MaxTrigger.h \
2365+ FluidFieldMaster.h \
2366+ FluidFieldMaster.cpp \
2367+ ScalarFluidFieldSlave.hpp \
2368+ ScalarFluidFieldSlave.h \
2369+ VectorFluidFieldSlave.hpp \
2370+ VectorFluidFieldSlave.h \
2371+ FluidInteractionFieldMaster.h \
2372+ FluidInteractionFieldMaster.cpp \
2373+ ScalarFluidInteractionFieldSlave.hpp \
2374+ ScalarFluidInteractionFieldSlave.h \
2375+ VectorFluidInteractionFieldSlave.hpp \
2376+ VectorFluidInteractionFieldSlave.h
2377
2378 libFields_la_LIBADD = \
2379 $(top_builddir)/Foundation/libFoundation.la \
2380
2381=== added file 'Fields/ScalarFluidFieldSlave.h'
2382--- Fields/ScalarFluidFieldSlave.h 1970-01-01 00:00:00 +0000
2383+++ Fields/ScalarFluidFieldSlave.h 2019-07-28 14:55:57 +0000
2384@@ -0,0 +1,51 @@
2385+/////////////////////////////////////////////////////////////
2386+// //
2387+// Copyright (c) 2003-2014 by The University of Queensland //
2388+// Centre for Geoscience Computing //
2389+// http://earth.uq.edu.au/centre-geoscience-computing //
2390+// //
2391+// Primary Business: Brisbane, Queensland, Australia //
2392+// Licensed under the Open Software License version 3.0 //
2393+// http://www.apache.org/licenses/LICENSE-2.0 //
2394+// //
2395+/////////////////////////////////////////////////////////////
2396+
2397+#ifndef __SCALAR_FLUID_FIELD_SLAVE_H
2398+#define __SCALAR_FLUID_FIELD_SLAVE_H
2399+
2400+// -- project includes --
2401+#include "FieldSlave.h"
2402+#include "Model/FluidCell.h"
2403+
2404+template <class T> class ParallelParticleArray;
2405+class TML_Comm;
2406+
2407+/*!
2408+ \class ScalarFluidFieldSlave
2409+ \brief class for slave part of scalar field defined on the fluid cells
2410+
2411+ \author Qi Shao
2412+ $Revision$
2413+ $Date$
2414+*/
2415+template <typename T>
2416+class ScalarFluidFieldSlave : public AFieldSlave
2417+{
2418+ private:
2419+ virtual void SendDataFull();
2420+ virtual void SendDataSum();
2421+ virtual void SendDataMax();
2422+
2423+ protected:
2424+ CFluidCell::ScalarFieldFunction m_rdf;
2425+ ParallelParticleArray<T>* m_ppa;
2426+
2427+ public:
2428+ ScalarFluidFieldSlave(TML_Comm*,ParallelParticleArray<T>*,CFluidCell::ScalarFieldFunction);
2429+
2430+ virtual void sendData();
2431+};
2432+
2433+#include "ScalarFluidFieldSlave.hpp"
2434+
2435+#endif //__SCALAR_FLUID_FIELD_SLAVE_H
2436
2437=== added file 'Fields/ScalarFluidFieldSlave.hpp'
2438--- Fields/ScalarFluidFieldSlave.hpp 1970-01-01 00:00:00 +0000
2439+++ Fields/ScalarFluidFieldSlave.hpp 2019-07-28 14:55:57 +0000
2440@@ -0,0 +1,121 @@
2441+/////////////////////////////////////////////////////////////
2442+// //
2443+// Copyright (c) 2003-2014 by The University of Queensland //
2444+// Centre for Geoscience Computing //
2445+// http://earth.uq.edu.au/centre-geoscience-computing //
2446+// //
2447+// Primary Business: Brisbane, Queensland, Australia //
2448+// Licensed under the Open Software License version 3.0 //
2449+// http://www.apache.org/licenses/LICENSE-2.0 //
2450+// //
2451+/////////////////////////////////////////////////////////////
2452+
2453+//-- STL includes --
2454+#include <vector>
2455+#include <utility>
2456+
2457+using std::vector;
2458+using std::pair;
2459+
2460+// -- IO includes --
2461+#include <iostream>
2462+
2463+using std::cout;
2464+using std::endl;
2465+
2466+/*!
2467+ constructor
2468+
2469+ \param comm the TML communicator used for sending the data back to the master
2470+ \param ppa a pointer to the particle array
2471+ \param rdf the fluid cell member function to access the data
2472+*/
2473+template <typename T>
2474+ScalarFluidFieldSlave<T>::ScalarFluidFieldSlave(TML_Comm* comm,ParallelParticleArray<T>* ppa,CFluidCell::ScalarFieldFunction rdf):AFieldSlave(comm)
2475+{
2476+ m_ppa=ppa;
2477+ m_rdf=rdf;
2478+}
2479+
2480+
2481+
2482+/*!
2483+ send full field data and position of the fluid cells
2484+*/
2485+template <typename T>
2486+void ScalarFluidFieldSlave<T>::SendDataFull()
2487+{
2488+ vector<pair<Vec3,double> > data_vec;
2489+
2490+ data_vec=m_ppa->forAllInnerCellsGet(m_rdf);
2491+
2492+ // send data to master
2493+ m_comm->send_gather(data_vec,0);
2494+}
2495+
2496+/*!
2497+ send sum only
2498+*/
2499+template <typename T>
2500+void ScalarFluidFieldSlave<T>::SendDataSum()
2501+{
2502+ vector<double> data_vec;
2503+
2504+  // get data from the fluid cells
2505+ data_vec=m_ppa->forAllInnerCellsGetSum(m_rdf);
2506+
2507+ // sum data
2508+ double sum=0.0;
2509+ for(vector<double>::iterator iter=data_vec.begin();
2510+ iter!=data_vec.end();
2511+ iter++){
2512+ sum+=*iter;
2513+ }
2514+
2515+ vector<double> sum_vec;
2516+ sum_vec.push_back(sum);
2517+ m_comm->send_gather(sum_vec,0);
2518+}
2519+
2520+
2521+/*!
2522+ send maximum only
2523+*/
2524+template <typename T>
2525+void ScalarFluidFieldSlave<T>::SendDataMax()
2526+{
2527+ vector<double> data_vec;
2528+
2529+  // get data from the fluid cells
2530+ data_vec=m_ppa->forAllInnerCellsGetSum(m_rdf);
2531+
2532+  // find maximum (0.0 if there are no local cells)
2533+  double max=data_vec.empty() ? 0.0 : data_vec.front();
2534+ for(vector<double>::iterator iter=data_vec.begin();
2535+ iter!=data_vec.end();
2536+ iter++){
2537+ max=(*iter > max) ? *iter : max;
2538+ }
2539+
2540+ vector<double> max_vec;
2541+ max_vec.push_back(max);
2542+ m_comm->send_gather(max_vec,0);
2543+}
2544+
2545+
2546+/*!
2547+ send data back to master
2548+*/
2549+template <typename T>
2550+void ScalarFluidFieldSlave<T>::sendData()
2551+{
2552+ int coll_type;
2553+ m_comm->recv_broadcast(coll_type,0);
2554+
2555+ switch(coll_type){
2556+ case 1: SendDataFull();break;
2557+ case 2: SendDataSum();break;
2558+ case 3: SendDataMax();break;
2559+ default: std::cerr << "unknown collection type" << std::endl;
2560+ }
2561+}
2562
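How a ScalarFluidFieldSlave is expected to be wired up, as a sketch. The CParticle template argument, the comm and ppa variables, and the "P" field name are stand-ins (the actual registration happens in the SubLattice code elsewhere in this diff), and getScalarFieldFunction is assumed to be callable as a static lookup:

CFluidCell::ScalarFieldFunction rdf =
    CFluidCell::getScalarFieldFunction("P");       // pressure accessor
ScalarFluidFieldSlave<CParticle>* slave =
    new ScalarFluidFieldSlave<CParticle>(comm, ppa, rdf);
slave->sendData();   // answers one collect broadcast from the master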
2563=== added file 'Fields/ScalarFluidInteractionFieldSlave.h'
2564--- Fields/ScalarFluidInteractionFieldSlave.h 1970-01-01 00:00:00 +0000
2565+++ Fields/ScalarFluidInteractionFieldSlave.h 2019-07-28 14:55:57 +0000
2566@@ -0,0 +1,55 @@
2567+/////////////////////////////////////////////////////////////
2568+// //
2569+// Copyright (c) 2003-2014 by The University of Queensland //
2570+// Centre for Geoscience Computing //
2571+// http://earth.uq.edu.au/centre-geoscience-computing //
2572+// //
2573+// Primary Business: Brisbane, Queensland, Australia //
2574+// Licensed under the Open Software License version 3.0 //
2575+// http://www.apache.org/licenses/LICENSE-2.0 //
2576+// //
2577+/////////////////////////////////////////////////////////////
2578+
2579+#ifndef __SCALAR_FLUID_INTERACTION_FIELD_SLAVE_H
2580+#define __SCALAR_FLUID_INTERACTION_FIELD_SLAVE_H
2581+
2582+// -- project includes --
2583+#include "Fields/FieldSlave.h"
2584+#include "Model/FluidInteraction.h"
2585+#include "pis/pi_storage_f.h"
2586+
2587+//template <typename I> class TParallelInteractionStorage;
2588+template <typename T> class ParallelInteractionStorage_F;
2589+class TML_Comm;
2590+
2591+/*!
2592+ \class ScalarFluidInteractionFieldSlave
2593+ \brief class for slave part of scalar field defined on the fluid interactions
2594+
2595+ \author Qi Shao
2596+ $Revision$
2597+ $Date$
2598+*/
2599+
2600+template <typename T>
2601+class ScalarFluidInteractionFieldSlave : public AFieldSlave
2602+{
2603+protected:
2604+ virtual void SendDataWithPos();
2605+ virtual void SendDataWithID();
2606+ virtual void SendDataWithIDPos();
2607+ virtual void SendDataWithParticle();
2608+ virtual void SendDataSum();
2609+ virtual void SendDataMax();
2610+ virtual void sendData();
2611+
2612+ CFluidInteraction::ScalarFieldFunction m_rdf;
2613+ ParallelInteractionStorage_F<T>* m_pis;
2614+
2615+ public:
2616+ ScalarFluidInteractionFieldSlave(TML_Comm*,ParallelInteractionStorage_F<T>*,CFluidInteraction::ScalarFieldFunction);
2617+};
2618+
2619+#include "Fields/ScalarFluidInteractionFieldSlave.hpp"
2620+
2621+#endif //__SCALAR_FLUID_INTERACTION_FIELD_SLAVE_H
2622
2623=== added file 'Fields/ScalarFluidInteractionFieldSlave.hpp'
2624--- Fields/ScalarFluidInteractionFieldSlave.hpp 1970-01-01 00:00:00 +0000
2625+++ Fields/ScalarFluidInteractionFieldSlave.hpp 2019-07-28 14:55:57 +0000
2626@@ -0,0 +1,176 @@
2627+/////////////////////////////////////////////////////////////
2628+// //
2629+// Copyright (c) 2003-2014 by The University of Queensland //
2630+// Centre for Geoscience Computing //
2631+// http://earth.uq.edu.au/centre-geoscience-computing //
2632+// //
2633+// Primary Business: Brisbane, Queensland, Australia //
2634+// Licensed under the Open Software License version 3.0 //
2635+// http://www.apache.org/licenses/LICENSE-2.0 //
2636+// //
2637+/////////////////////////////////////////////////////////////
2638+
2639+//-- STL includes --
2640+#include <vector>
2641+#include <utility>
2642+
2643+using std::vector;
2644+using std::pair;
2645+
2646+// -- IO includes --
2647+#include <iostream>
2648+
2649+using std::cout;
2650+using std::endl;
2651+
2652+#include "Foundation/triplet.h"
2653+#include "pis/pi_storage_f.h"
2654+
2655+/*!
2656+ constructor
2657+
2658+ \param comm the TML communicator used for sending the data back to the master
2659+ \param pis a pointer to the fluid interaction storage
2660+ \param rdf the fluid interaction member function to access the data
2661+*/
2662+template <typename T>
2663+ScalarFluidInteractionFieldSlave<T>::ScalarFluidInteractionFieldSlave(TML_Comm* comm,ParallelInteractionStorage_F<T>* pis,CFluidInteraction::ScalarFieldFunction rdf):AFieldSlave(comm)
2664+{
2665+ m_rdf=rdf;
2666+ m_pis=pis;
2667+}
2668+
2669+/*!
2670+ Send data back to master. Determine the type of data (full/sum) to send back from
2671+ the received coll_type and call the according send function.
2672+*/
2673+template <typename T>
2674+void ScalarFluidInteractionFieldSlave<T>::sendData()
2675+{
2676+ // debug output
2677+ console.XDebug() << "InteractionFieldSlave<T>::sendData()\n";
2678+
2679+ int coll_type;
2680+ m_comm->recv_broadcast(coll_type,0);
2681+
2682+ // debug output
2683+ console.XDebug() << "received coll_type=" << coll_type << "\n";
2684+
2685+ switch(coll_type){
2686+ case COLL_TYPE_FULL_WITH_POS : SendDataWithPos();break;
2687+ case COLL_TYPE_FULL_WITH_ID : SendDataWithID();break;
2688+ case COLL_TYPE_FULL_WITH_ID_POS : SendDataWithIDPos();break;
2689+ case COLL_TYPE_FULL_WITH_PARTICLE : SendDataWithParticle();break;
2690+ case COLL_TYPE_SUM : SendDataSum();break;
2691+ case COLL_TYPE_MAX : SendDataMax();break;
2692+
2693+ default: std::cerr << "unknown collection type" << std::endl;
2694+ }
2695+}
2696+
2697+
2698+/*!
2699+ send full field data and position of the interaction
2700+*/
2701+template <typename T>
2702+void ScalarFluidInteractionFieldSlave<T>::SendDataWithPos()
2703+{
2704+ vector<pair<Vec3,double> > data;
2705+
2706+ data=this->m_pis->forAllInnerInteractionsGetDataWithPos(m_rdf);
2707+
2708+ // send data to master
2709+ this->m_comm->send_gather(data,0);
2710+}
2711+
2712+/*!
2713+ send full field data and id of the particle
2714+*/
2715+template <typename T>
2716+void ScalarFluidInteractionFieldSlave<T>::SendDataWithID()
2717+{
2718+ vector<pair<int,double> > data;
2719+
2720+ data=this->m_pis->forAllInnerInteractionsGetDataWithID(m_rdf);
2721+
2722+ // send data to master
2723+ this->m_comm->send_gather(data,0);
2724+}
2725+
2726+
2727+/*!
2728+ send full field data and id, position of the particle
2729+*/
2730+template <typename T>
2731+void ScalarFluidInteractionFieldSlave<T>::SendDataWithIDPos()
2732+{
2733+ vector<pair<pair<int,Vec3>,double> > data;
2734+
2735+ data=this->m_pis->forAllInnerInteractionsGetDataWithIDPos(m_rdf);
2736+
2737+ // send data to master
2738+ this->m_comm->send_gather(data,0);
2739+}
2740+
2741+/*!
2742+ send full field data and id, position and radius of the particle
2743+*/
2744+template <typename T>
2745+void ScalarFluidInteractionFieldSlave<T>::SendDataWithParticle()
2746+{
2747+ vector<pair<esys::lsm::triplet<int,Vec3,double>,double> > data;
2748+
2749+ data=this->m_pis->forAllInnerInteractionsGetDataWithParticle(m_rdf);
2750+
2751+ // send data to master
2752+ this->m_comm->send_gather(data,0);
2753+}
2754+
2755+/*!
2756+ send sum only
2757+*/
2758+template <typename T>
2759+void ScalarFluidInteractionFieldSlave<T>::SendDataSum()
2760+{
2761+ vector<double> data_vec;
2762+
2763+ // get data from interactions
2764+ this->m_pis->forAllInnerInteractionsGet(data_vec,m_rdf);
2765+
2766+ // sum data
2767+ double sum=0.0;
2768+ for(vector<double>::iterator iter=data_vec.begin();
2769+ iter!=data_vec.end();
2770+ iter++){
2771+ sum+=*iter;
2772+ }
2773+
2774+ vector<double> sum_vec;
2775+ sum_vec.push_back(sum);
2776+ this->m_comm->send_gather(sum_vec,0);
2777+}
2778+
2779+
2780+/*!
2781+ send maximum only
2782+*/
2783+template <typename T>
2784+void ScalarFluidInteractionFieldSlave<T>::SendDataMax()
2785+{
2786+ vector<double> data_vec;
2787+
2788+ // get data from interactions
2789+ this->m_pis->forAllInnerInteractionsGet(data_vec,m_rdf);
2790+
2791+  // find maximum (0.0 if there are no local interactions)
2792+  double max=data_vec.empty() ? 0.0 : data_vec.front();
2793+ for(vector<double>::iterator iter=data_vec.begin();
2794+ iter!=data_vec.end();
2795+ iter++){
2796+ max=(*iter > max) ? *iter : max;
2797+ }
2798+
2799+ vector<double> max_vec;
2800+ max_vec.push_back(max);
2801+ this->m_comm->send_gather(max_vec,0);
2802+}
2803
2804=== added file 'Fields/VectorFluidFieldSlave.h'
2805--- Fields/VectorFluidFieldSlave.h 1970-01-01 00:00:00 +0000
2806+++ Fields/VectorFluidFieldSlave.h 2019-07-28 14:55:57 +0000
2807@@ -0,0 +1,47 @@
2808+/////////////////////////////////////////////////////////////
2809+// //
2810+// Copyright (c) 2003-2014 by The University of Queensland //
2811+// Centre for Geoscience Computing //
2812+// http://earth.uq.edu.au/centre-geoscience-computing //
2813+// //
2814+// Primary Business: Brisbane, Queensland, Australia //
2815+// Licensed under the Open Software License version 3.0 //
2816+// http://www.apache.org/licenses/LICENSE-2.0 //
2817+// //
2818+/////////////////////////////////////////////////////////////
2819+
2820+#ifndef __VECTOR_FLUID_FIELD_SLAVE_H
2821+#define __VECTOR_FLUID_FIELD_SLAVE_H
2822+
2823+// -- project includes --
2824+#include "FieldSlave.h"
2825+#include "Model/FluidCell.h"
2826+
2827+template <class T> class ParallelParticleArray;
2828+class TML_Comm;
2829+
2830+/*!
2831+ \class VectorFluidFieldSlave
2832+ \brief class for slave part of vector field defined on the fluid cells
2833+
2834+ \author Qi Shao
2835+ $Revision$
2836+ $Date$
2837+*/
2838+template <typename T>
2839+class VectorFluidFieldSlave : public AFieldSlave
2840+{
2841+ private:
2842+
2843+ protected:
2844+ CFluidCell::VectorFieldFunction m_rdf;
2845+ ParallelParticleArray<T>* m_ppa;
2846+
2847+ public:
2848+ VectorFluidFieldSlave(TML_Comm*,ParallelParticleArray<T>*,CFluidCell::VectorFieldFunction);
2849+ virtual void sendData();
2850+};
2851+
2852+#include "VectorFluidFieldSlave.hpp"
2853+
2854+#endif //__VECTOR_FLUID_FIELD_SLAVE_H
2855
2856=== added file 'Fields/VectorFluidFieldSlave.hpp'
2857--- Fields/VectorFluidFieldSlave.hpp 1970-01-01 00:00:00 +0000
2858+++ Fields/VectorFluidFieldSlave.hpp 2019-07-28 14:55:57 +0000
2859@@ -0,0 +1,54 @@
2860+/////////////////////////////////////////////////////////////
2861+// //
2862+// Copyright (c) 2003-2014 by The University of Queensland //
2863+// Centre for Geoscience Computing //
2864+// http://earth.uq.edu.au/centre-geoscience-computing //
2865+// //
2866+// Primary Business: Brisbane, Queensland, Australia //
2867+// Licensed under the Open Software License version 3.0 //
2868+// http://www.apache.org/licenses/LICENSE-2.0 //
2869+// //
2870+/////////////////////////////////////////////////////////////
2871+
2872+//-- STL includes --
2873+#include <vector>
2874+#include <utility>
2875+
2876+using std::vector;
2877+using std::pair;
2878+
2879+// -- IO includes --
2880+#include <iostream>
2881+
2882+using std::cout;
2883+using std::endl;
2884+
2885+/*!
2886+ constructor
2887+
2888+ \param comm the TML communicator used for sending the data back to the master
2889+ \param ppa a pointer to the particle array
2890+ \param rdf the fluid cell member function to access the data
2891+*/
2892+template <typename T>
2893+VectorFluidFieldSlave<T>::VectorFluidFieldSlave(TML_Comm* comm,ParallelParticleArray<T>* ppa,CFluidCell::VectorFieldFunction rdf):AFieldSlave(comm)
2894+{
2895+ m_ppa=ppa;
2896+ m_rdf=rdf;
2897+}
2898+
2899+/*!
2900+ send data back to master
2901+*/
2902+template <typename T>
2903+void VectorFluidFieldSlave<T>::sendData()
2904+{
2905+ vector<pair<Vec3,Vec3> > data_vec;
2906+
2907+ data_vec=m_ppa->forAllInnerCellsGet(m_rdf);
2908+
2909+ // send data to master
2910+ m_comm->send_gather(data_vec,0);
2911+}
2912+
2913+
2914
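Review note: unlike ScalarFluidFieldSlave::sendData(), this slave never reads a coll_type broadcast, so the master side presumably sends only the field id for vector fluid fields (the master's collect() sits outside this hunk). If collection modes were ever added here, a scalar-style handshake would look like the following sketch (hypothetical extension with a hypothetical name, not current behaviour):

template <typename T>
void VectorFluidFieldSlave<T>::sendDataDispatched()   // hypothetical
{
  int coll_type;
  m_comm->recv_broadcast(coll_type,0);                // pair with a master broadcast
  vector<pair<Vec3,Vec3> > data_vec=m_ppa->forAllInnerCellsGet(m_rdf);
  m_comm->send_gather(data_vec,0);
}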
2915=== added file 'Fields/VectorFluidInteractionFieldSlave.h'
2916--- Fields/VectorFluidInteractionFieldSlave.h 1970-01-01 00:00:00 +0000
2917+++ Fields/VectorFluidInteractionFieldSlave.h 2019-07-28 14:55:57 +0000
2918@@ -0,0 +1,54 @@
2919+/////////////////////////////////////////////////////////////
2920+// //
2921+// Copyright (c) 2003-2014 by The University of Queensland //
2922+// Centre for Geoscience Computing //
2923+// http://earth.uq.edu.au/centre-geoscience-computing //
2924+// //
2925+// Primary Business: Brisbane, Queensland, Australia //
2926+// Licensed under the Open Software License version 3.0 //
2927+// http://www.apache.org/licenses/LICENSE-2.0 //
2928+// //
2929+/////////////////////////////////////////////////////////////
2930+
2931+#ifndef __VECTOR_FLUID_INTERACTION_FIELD_SLAVE_H
2932+#define __VECTOR_FLUID_INTERACTION_FIELD_SLAVE_H
2933+
2934+// -- project includes --
2935+#include "Fields/FieldSlave.h"
2936+#include "Model/FluidInteraction.h"
2937+#include "pis/pi_storage_f.h"
2938+
2939+template <typename T> class ParallelInteractionStorage_F;
2940+class TML_Comm;
2941+
2942+/*!
2943+ \class VectorFluidInteractionFieldSlave
2944+ \brief class for slave part of vector field defined on the fluid interactions
2945+
2946+ \author Qi Shao
2947+ $Revision$
2948+ $Date$
2949+*/
2950+
2951+template <typename T>
2952+class VectorFluidInteractionFieldSlave : public AFieldSlave
2953+{
2954+protected:
2955+ virtual void SendDataWithPos();
2956+ virtual void SendDataWithID();
2957+ virtual void SendDataWithIDPos();
2958+ virtual void SendDataWithParticle();
2959+ virtual void SendDataSum();
2960+ virtual void SendDataMax();
2961+ virtual void sendData();
2962+
2963+ CFluidInteraction::VectorFieldFunction m_rdf;
2964+ ParallelInteractionStorage_F<T>* m_pis;
2965+
2966+ public:
2967+ VectorFluidInteractionFieldSlave(TML_Comm*,ParallelInteractionStorage_F<T>*,CFluidInteraction::VectorFieldFunction);
2968+};
2969+
2970+#include "Fields/VectorFluidInteractionFieldSlave.hpp"
2971+
2972+#endif //__VECTOR_FLUID_INTERACTION_FIELD_SLAVE_H
2973
2974=== added file 'Fields/VectorFluidInteractionFieldSlave.hpp'
2975--- Fields/VectorFluidInteractionFieldSlave.hpp 1970-01-01 00:00:00 +0000
2976+++ Fields/VectorFluidInteractionFieldSlave.hpp 2019-07-28 14:55:57 +0000
2977@@ -0,0 +1,160 @@
2978+/////////////////////////////////////////////////////////////
2979+// //
2980+// Copyright (c) 2003-2014 by The University of Queensland //
2981+// Centre for Geoscience Computing //
2982+// http://earth.uq.edu.au/centre-geoscience-computing //
2983+// //
2984+// Primary Business: Brisbane, Queensland, Australia //
2985+// Licensed under the Open Software License version 3.0 //
2986+// http://www.apache.org/licenses/LICENSE-2.0 //
2987+// //
2988+/////////////////////////////////////////////////////////////
2989+
2990+//-- STL includes --
2991+#include <vector>
2992+#include <utility>
2993+
2994+using std::vector;
2995+using std::pair;
2996+
2997+// -- IO includes --
2998+#include <iostream>
2999+
3000+using std::cout;
3001+using std::endl;
3002+
3003+#include "Foundation/triplet.h"
3004+#include "pis/pi_storage_f.h"
3005+
3006+/*!
3007+ constructor
3008+
3009+ \param comm the TML communicator used for sending the data back to the master
3010+ \param pis a pointer to the fluid interaction storage
3011+ \param rdf the fluid interaction member function to access the data
3012+*/
3013+template <typename T>
3014+VectorFluidInteractionFieldSlave<T>::VectorFluidInteractionFieldSlave(TML_Comm* comm,ParallelInteractionStorage_F<T>* pis,CFluidInteraction::VectorFieldFunction rdf):AFieldSlave(comm)
3015+{
3016+ m_rdf=rdf;
3017+ m_pis=pis;
3018+}
3019+
3020+/*!
3021+ Send data back to master. Determine the type of data (full/sum) to send back from
3022+ the received coll_type and call the according send function.
3023+*/
3024+template <typename T>
3025+void VectorFluidInteractionFieldSlave<T>::sendData()
3026+{
3027+ // debug output
3028+  console.XDebug() << "VectorFluidInteractionFieldSlave<T>::sendData()\n";
3029+
3030+ int coll_type;
3031+ m_comm->recv_broadcast(coll_type,0);
3032+
3033+ // debug output
3034+ console.XDebug() << "received coll_type=" << coll_type << "\n";
3035+
3036+ switch(coll_type){
3037+ case COLL_TYPE_FULL_WITH_POS : SendDataWithPos();break;
3038+ case COLL_TYPE_FULL_WITH_ID : SendDataWithID();break;
3039+ case COLL_TYPE_FULL_WITH_ID_POS : SendDataWithIDPos();break;
3040+ case COLL_TYPE_FULL_WITH_PARTICLE : SendDataWithParticle();break;
3041+ case COLL_TYPE_SUM : SendDataSum();break;
3042+ case COLL_TYPE_MAX : SendDataMax();break;
3043+
3044+ default: std::cerr << "unknown collection type" << std::endl;
3045+ }
3046+}
3047+
3048+
3049+/*!
3050+ send full field data and position of the interaction
3051+*/
3052+template <typename T>
3053+void VectorFluidInteractionFieldSlave<T>::SendDataWithPos()
3054+{
3055+ vector<pair<Vec3,Vec3> > data;
3056+
3057+ data=this->m_pis->forAllInnerInteractionsGetDataWithPos(m_rdf);
3058+
3059+ // send data to master
3060+ this->m_comm->send_gather(data,0);
3061+}
3062+
3063+/*!
3064+ send full field data and id of the particle
3065+*/
3066+template <typename T>
3067+void VectorFluidInteractionFieldSlave<T>::SendDataWithID()
3068+{
3069+ vector<pair<int,Vec3> > data;
3070+
3071+ data=this->m_pis->forAllInnerInteractionsGetDataWithID(m_rdf);
3072+
3073+ // send data to master
3074+ this->m_comm->send_gather(data,0);
3075+}
3076+
3077+
3078+/*!
3079+ send full field data and id, position of the particle
3080+*/
3081+template <typename T>
3082+void VectorFluidInteractionFieldSlave<T>::SendDataWithIDPos()
3083+{
3084+ vector<pair<pair<int,Vec3>,Vec3> > data;
3085+
3086+ data=this->m_pis->forAllInnerInteractionsGetDataWithIDPos(m_rdf);
3087+
3088+ // send data to master
3089+ this->m_comm->send_gather(data,0);
3090+}
3091+
3092+/*!
3093+ send full field data and id, position and radius of the particle
3094+*/
3095+template <typename T>
3096+void VectorFluidInteractionFieldSlave<T>::SendDataWithParticle()
3097+{
3098+ vector<pair<esys::lsm::triplet<int,Vec3,double>,Vec3> > data;
3099+
3100+ data=this->m_pis->forAllInnerInteractionsGetDataWithParticle(m_rdf);
3101+
3102+ // send data to master
3103+ this->m_comm->send_gather(data,0);
3104+}
3105+
3106+/*!
3107+ send sum only
3108+*/
3109+template <typename T>
3110+void VectorFluidInteractionFieldSlave<T>::SendDataSum()
3111+{
3112+ vector<Vec3> data_vec;
3113+
3114+ // get data from interactions
3115+ this->m_pis->forAllInnerInteractionsGet(data_vec,m_rdf);
3116+
3117+ // sum data
3118+ Vec3 sum=Vec3(0.0,0.0,0.0);
3119+ for(vector<Vec3>::iterator iter=data_vec.begin();
3120+ iter!=data_vec.end();
3121+ iter++){
3122+ sum+=*iter;
3123+ }
3124+
3125+ vector<Vec3> sum_vec;
3126+ sum_vec.push_back(sum);
3127+ this->m_comm->send_gather(sum_vec,0);
3128+}
3129+
3130+
3131+/*!
3132+ send maximum only
3133+*/
3134+template <typename T>
3135+void VectorFluidInteractionFieldSlave<T>::SendDataMax()
3136+{
3137+  // intentionally empty: a maximum is not defined for vector-valued fields
3138+}
3138
3139=== modified file 'Fields/field_const.h'
3140--- Fields/field_const.h 2017-01-04 08:26:43 +0000
3141+++ Fields/field_const.h 2019-07-28 14:55:57 +0000
3142@@ -23,7 +23,14 @@
3143 WRITE_TYPE_RAW,
3144 WRITE_TYPE_RAW_WITH_ID,
3145 WRITE_TYPE_RAW_WITH_POS_ID,
3146- WRITE_TYPE_SILO
3147+ WRITE_TYPE_SILO,
3148+ /****fluid contents: begin****/
3149+ WRITE_TYPE_RAW_WITH_POS,
3150+ WRITE_TYPE_RAW_WITH_ID_POS,
3151+ WRITE_TYPE_RAW_WITH_PARTICLE,
3152+ WRITE_TYPE_VTI,
3153+ WRITE_TYPE_VTU
3154+ /****fluid contents: end****/
3155 };
3156
3157 enum {
3158@@ -33,7 +40,12 @@
3159 COLL_TYPE_FULL2 = 5,
3160 COLL_TYPE_FULL_DX,
3161 COLL_TYPE_FULL_WITH_ID,
3162- COLL_TYPE_FULL_WITH_POS_ID
3163+ COLL_TYPE_FULL_WITH_POS_ID,
3164+ /****fluid contents: begin****/
3165+ COLL_TYPE_FULL_WITH_POS,
3166+ COLL_TYPE_FULL_WITH_ID_POS,
3167+ COLL_TYPE_FULL_WITH_PARTICLE
3168+ /****fluid contents: end****/
3169 };
3170
3171 #endif // __FIELD_CONST_H
3172
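A note on the enum edits above: C++ enumerators without initialisers continue counting from the last explicit value, so appending the fluid entries just before the closing brace preserves every pre-existing constant. For example:

enum { FULL2 = 5, FULL_DX, FULL_WITH_ID };   // FULL_DX==6, FULL_WITH_ID==7
// Appending new members after FULL_WITH_ID yields 8, 9, ... without
// disturbing values already relied upon by existing savers.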
3173=== added file 'Model/FluidCell.cpp'
3174--- Model/FluidCell.cpp 1970-01-01 00:00:00 +0000
3175+++ Model/FluidCell.cpp 2019-07-28 14:55:57 +0000
3176@@ -0,0 +1,145 @@
3177+/////////////////////////////////////////////////////////////
3178+// //
3179+// Copyright (c) 2003-2014 by The University of Queensland //
3180+// Centre for Geoscience Computing //
3181+// http://earth.uq.edu.au/centre-geoscience-computing //
3182+// //
3183+// Primary Business: Brisbane, Queensland, Australia //
3184+// Licensed under the Open Software License version 3.0 //
3185+// http://www.apache.org/licenses/LICENSE-2.0 //
3186+// //
3187+/////////////////////////////////////////////////////////////
3188+
3189+
3190+// -- project includes --
3191+#include "Model/FluidCell.h"
3192+#include "tml/message/packed_message_interface.h"
3193+
3194+
3195+CFluidCell::CFluidCell()
3196+{
3197+ m_Mu=0;m_Phi=0;m_newPhi=0;m_effPhi=0;m_D=0;m_K=0;m_effK=0;m_Bf=0;m_effBf=0;m_P=0;m_disP=0;m_effP=0;m_Volume=0;m_Size=Vec3(0,0,0);
3198+ m_Vp=Vec3(0,0,0);m_Vf=Vec3(0,0,0);m_Pg=Vec3(0,0,0);m_Pos=Vec3(0,0,0);
3199+ c_W=0;c_E=0;c_N=0;c_S=0;c_D=0;c_U=0;c_C=0;c_B=0;
3200+}
3201+
3202+
3203+CFluidCell& CFluidCell::operator=(const CFluidCell& rhs)
3204+{
3205+ m_Mu=rhs.m_Mu;
3206+ m_Phi=rhs.m_Phi;
3207+ m_newPhi=rhs.m_newPhi;
3208+ m_effPhi=rhs.m_effPhi;
3209+ m_D=rhs.m_D;
3210+ m_K=rhs.m_K;
3211+ m_effK=rhs.m_effK;
3212+ m_Bf=rhs.m_Bf;
3213+ m_effBf=rhs.m_effBf;
3214+ m_P=rhs.m_P;
3215+ m_disP=rhs.m_disP;
3216+ m_effP=rhs.m_effP;
3217+ m_Volume=rhs.m_Volume;
3218+ m_Size=rhs.m_Size;
3219+ m_Vp=rhs.m_Vp;
3220+ m_Vf=rhs.m_Vf;
3221+ m_Pg=rhs.m_Pg;
3222+ m_Pos=rhs.m_Pos;
3223+ m_Index=rhs.m_Index;
3224+ c_W=rhs.c_W;
3225+ c_E=rhs.c_E;
3226+ c_N=rhs.c_N;
3227+ c_S=rhs.c_S;
3228+ c_D=rhs.c_D;
3229+ c_U=rhs.c_U;
3230+ c_C=rhs.c_C;
3231+ c_B=rhs.c_B;
3232+ return *this;
3233+}
3234+
3235+
3236+/*!
3237+ Get the fluidcell member function which returns a scalar field of a given name.
3238+
3239+ \param name the name of the field
3240+*/
3241+CFluidCell::ScalarFieldFunction CFluidCell::getScalarFieldFunction(const string& name)
3242+{
3243+ CFluidCell::ScalarFieldFunction sf;
3244+
3245+ if(name=="Mu"){
3246+ sf=&CFluidCell::getMu;
3247+ } else if(name=="Phi"){
3248+ sf=&CFluidCell::geteffPhi;
3249+ } else if(name=="dPhi"){
3250+ sf=&CFluidCell::getdPhi;
3251+ } else if(name=="D"){
3252+ sf=&CFluidCell::getD;
3253+ } else if(name=="K"){
3254+ sf=&CFluidCell::geteffK;
3255+ } else if(name=="Bf"){
3256+ sf=&CFluidCell::geteffBf;
3257+ } else if(name=="P"){
3258+ sf=&CFluidCell::getP;
3259+ } else if(name=="disP"){
3260+ sf=&CFluidCell::getdisP;
3261+ } else if(name=="effP"){
3262+ sf=&CFluidCell::geteffP;
3263+ } else if(name=="dP"){
3264+ sf=&CFluidCell::getdP;
3265+ } else if(name=="Volume"){
3266+ sf=&CFluidCell::getVolume;
3267+ } else if(name=="Vp_abs"){
3268+ sf=&CFluidCell::getAbsVp;
3269+ } else if(name=="Vf_abs"){
3270+ sf=&CFluidCell::getAbsVf;
3271+ } else if(name=="c_W"){
3272+ sf=&CFluidCell::getc_W;
3273+ } else if(name=="c_E"){
3274+ sf=&CFluidCell::getc_E;
3275+ } else if(name=="c_N"){
3276+ sf=&CFluidCell::getc_N;
3277+ } else if(name=="c_S"){
3278+ sf=&CFluidCell::getc_S;
3279+ } else if(name=="c_D"){
3280+ sf=&CFluidCell::getc_D;
3281+ } else if(name=="c_U"){
3282+ sf=&CFluidCell::getc_U;
3283+ } else if(name=="c_C"){
3284+ sf=&CFluidCell::getc_C;
3285+ } else if(name=="c_B"){
3286+ sf=&CFluidCell::getc_B;
3287+ } else {
3288+ sf=NULL;
3289+ cerr << "ERROR - invalid name for fluid cell scalar access function" << endl;
3290+ }
3291+
3292+ return sf;
3293+}
3294+
3295+
3296+/*!
3297+ Get the fluid cell member function which returns a vector field of a given name.
3298+
3299+ \param name the name of the field
3300+*/
3301+
3302+CFluidCell::VectorFieldFunction CFluidCell::getVectorFieldFunction(const string& name)
3303+{
3304+ CFluidCell::VectorFieldFunction sf;
3305+
3306+ if(name=="Vp"){
3307+ sf=&CFluidCell::getVp;
3308+ } else if(name=="Vf"){
3309+ sf=&CFluidCell::getVf;
3310+ } else if(name=="Pos"){
3311+ sf=&CFluidCell::getPos;
3312+ } else if(name=="Index"){
3313+ sf=&CFluidCell::getIndex;
3314+ } else {
3315+ sf=NULL;
3316+ cerr << "ERROR - invalid name for fluid cell vector access function" << endl;
3317+ }
3318+
3319+ return sf;
3320+}
3321+
3322
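
A note on the accessor tables above: getScalarFieldFunction() hands back a pointer-to-member, so callers resolve a field name once and then evaluate it per cell with the .* operator. A minimal sketch of that calling convention (hypothetical driver code, not part of this merge; it assumes nothing beyond the CFluidCell API shown in this file and its header below):

#include "Model/FluidCell.h"
#include <iostream>
#include <string>

// Resolve a named scalar field once, then evaluate it on a cell.
void printCellField(const CFluidCell &cell, const std::string &name)
{
  CFluidCell::ScalarFieldFunction fn = CFluidCell::getScalarFieldFunction(name);
  if (fn != NULL) {
    std::cout << name << " = " << (cell.*fn)() << std::endl; // pointer-to-member call
  }
}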
3323=== added file 'Model/FluidCell.h'
3324--- Model/FluidCell.h 1970-01-01 00:00:00 +0000
3325+++ Model/FluidCell.h 2019-07-28 14:55:57 +0000
3326@@ -0,0 +1,133 @@
3327+/////////////////////////////////////////////////////////////
3328+// //
3329+// Copyright (c) 2003-2014 by The University of Queensland //
3330+// Centre for Geoscience Computing //
3331+// http://earth.uq.edu.au/centre-geoscience-computing //
3332+// //
3333+// Primary Business: Brisbane, Queensland, Australia //
3334+// Licensed under the Open Software License version 3.0 //
3335+// http://www.apache.org/licenses/LICENSE-2.0 //
3336+// //
3337+/////////////////////////////////////////////////////////////
3338+
3339+#ifndef __FLUID_CELL_H
3340+#define __FLUID_CELL_H
3341+
3342+// -- project includes --
3343+#include "Foundation/vec3.h"
3344+#include "tml/message/packed_message_interface.h"
3345+
3346+/*!
3347+ \class CFluidCell
3348+ \brief base class for fluid cells
3349+
3350+ \author Qi Shao
3351+ $Revision$
3352+ $Date$
3353+*/
3354+
3355+class CFluidCell
3356+{
3357+ protected:
3358+ double m_Mu;//fluid viscosity
3359+ double m_Phi,m_newPhi,m_effPhi;//porosity
3360+ double m_D, m_K, m_effK;//permeability
3361+ double m_Bf, m_effBf;//fluid bulk modulus
3362+ double m_P, m_disP, m_effP;//pore fluid pressure
3363+ double m_Volume; //total particle volume
3364+ Vec3 m_Size;
3365+ Vec3 m_Vp, m_Vf;//mean particle velocity/fluid velocity
3366+ Vec3 m_Pg;//pressure gradient
3367+ Vec3 m_Pos;//global position of cell centre
3368+ Vec3 m_Index;//global index of cell
3369+
3370+ //coefficients for linear equations:
3371+ double c_W;
3372+ double c_E;
3373+ double c_N;
3374+ double c_S;
3375+ double c_D;
3376+ double c_U;
3377+ double c_C;
3378+ double c_B;
3379+
3380+ public:
3381+ CFluidCell();
3382+ virtual ~CFluidCell(){};
3383+
3384+ inline double getMu() const {return m_Mu;};
3385+ inline double getPhi() const {return m_Phi;};
3386+ inline double getnewPhi() const {return m_newPhi;};
3387+ inline double geteffPhi() const {return m_effPhi;};
3388+ inline double getdPhi() const {return m_newPhi-m_Phi;};
3389+ inline double getD() const {return m_D;};
3390+ inline double getK() const {return m_K;};
3391+ inline double geteffK() const {return m_effK;};
3392+ inline double getBf() const {return m_Bf;};
3393+ inline double geteffBf() const {return m_effBf;};
3394+ inline double getP() const {return m_P;};
3395+ inline double getdisP() const {return m_disP;};
3396+ inline double geteffP() const {return m_effP;};
3397+ inline double getdP() const {return m_disP-m_P;};
3398+ inline double getVolume() const {return m_Volume;};
3399+ inline Vec3 getSize() const {return m_Size;};
3400+ inline Vec3 getVp() const {return m_Vp;};
3401+ inline Vec3 getVf() const {return m_Vf;};
3402+ inline Vec3 getPg() const {return m_Pg;};
3403+ inline Vec3 getPos() const {return m_Pos;};
3404+ inline Vec3 getIndex() const {return m_Index;};
3405+ inline double getAbsVp() const {return m_Vp.norm();};
3406+ inline double getAbsVf() const {return m_Vf.norm();};
3407+ //get coefficients
3408+ inline double getc_W() const {return c_W;};
3409+ inline double getc_E() const {return c_E;};
3410+ inline double getc_N() const {return c_N;};
3411+ inline double getc_S() const {return c_S;};
3412+ inline double getc_D() const {return c_D;};
3413+ inline double getc_U() const {return c_U;};
3414+ inline double getc_C() const {return c_C;};
3415+ inline double getc_B() const {return c_B;};
3416+
3417+ void setMu(double Mu){m_Mu=Mu;};
3418+ void setPhi(double Phi){m_Phi=Phi;};
3419+ void setnewPhi(double newPhi){m_newPhi=newPhi;};
3420+ void seteffPhi(double effPhi){m_effPhi=effPhi;};
3421+ void setD(double D){m_D=D;};
3422+ void setK(double K){m_K=K;};
3423+ void seteffK(double effK){m_effK=effK;};
3424+ void setBf(double Bf){m_Bf=Bf;};
3425+ void seteffBf(double effBf){m_effBf=effBf;};
3426+ void setP(double P){m_P=P;};
3427+ void setdisP(double disP){m_disP=disP;};
3428+ void seteffP(double effP){m_effP=effP;};
3429+ void setVolume(double Volume){m_Volume=Volume;};
3430+ void setSize(Vec3 Size){m_Size=Size;};
3431+ void setVp(Vec3 Vp){m_Vp=Vp;};
3432+ void setVf(Vec3 Vf){m_Vf=Vf;};
3433+ void setPg(Vec3 Pg){m_Pg=Pg;};
3434+ void setPos(Vec3 Pos){m_Pos=Pos;};
3435+ void setIndex(Vec3 Index){m_Index=Index;};
3436+ //set coefficients
3437+ void setc_W(double W){c_W=W;};
3438+ void setc_E(double E){c_E=E;};
3439+ void setc_N(double N){c_N=N;};
3440+ void setc_S(double S){c_S=S;};
3441+ void setc_D(double D){c_D=D;};
3442+ void setc_U(double U){c_U=U;};
3443+ void setc_C(double C){c_C=C;};
3444+ void setc_B(double B){c_B=B;};
3445+
3446+ CFluidCell& operator=(const CFluidCell&);
3447+
3448+ typedef double (CFluidCell::* ScalarFieldFunction)() const;
3449+ typedef Vec3 (CFluidCell::* VectorFieldFunction)() const;
3450+
3451+ static ScalarFieldFunction getScalarFieldFunction(const string&);
3452+ static VectorFieldFunction getVectorFieldFunction(const string&);
3453+
3454+ friend class TML_PackedMessageInterface;
3455+
3456+};
3457+
3458+
3459+#endif //__FLUID_CELL_H
3460
3461=== added file 'Model/FluidInteraction.cpp'
3462--- Model/FluidInteraction.cpp 1970-01-01 00:00:00 +0000
3463+++ Model/FluidInteraction.cpp 2019-07-28 14:55:57 +0000
3464@@ -0,0 +1,99 @@
3465+/////////////////////////////////////////////////////////////
3466+// //
3467+// Copyright (c) 2003-2014 by The University of Queensland //
3468+// Centre for Geoscience Computing //
3469+// http://earth.uq.edu.au/centre-geoscience-computing //
3470+// //
3471+// Primary Business: Brisbane, Queensland, Australia //
3472+// Licensed under the Open Software License version 3.0 //
3473+// http://www.apache.org/licenses/LICENSE-2.0 //
3474+// //
3475+/////////////////////////////////////////////////////////////
3476+
3477+
3478+#include "Model/FluidInteraction.h"
3479+
3480+
3481+CFluidInteraction::CFluidInteraction (CParticle* p,CFluidCell* cell)
3482+{
3483+ m_p=p;
3484+ m_cell=cell;
3485+}
3486+
3487+
3488+/*!
3489+Calculate the drag and buoyancy forces exerted by the fluid and apply them to the particle.
3490+*/
3491+void CFluidInteraction::calcForces()
3492+{
3493+ double Mu=m_cell->getMu();
3494+ double Phi=m_cell->geteffPhi();
3495+ double D=m_cell->getD();
3496+ Vec3 vrel=m_cell->getVf()-m_p->getVel();
3497+ //drag force per unit volume (Ergun-type closure; the 1000.0 below is the hard-coded fluid density in kg/m^3)
3498+ Vec3 drag;
3499+ if(D==0){drag=Vec3(0,0,0);}
3500+ else{drag=(150.0*Mu*(1.0-Phi)/Phi/D/D+1.75*1000.0*vrel.norm()/D)*vrel;};
3501+ //buoyancy force per unit volume (the local pressure gradient)
3502+ Vec3 buoyancy=m_cell->getPg();
3503+ //force applied on particle
3504+ double r=m_p->getRad();
3505+ double Volume=4.0/3.0*3.1415926*r*r*r; //particle volume (4/3)*pi*r^3
3506+ m_drag=drag*Volume;
3507+ m_buoyancy=buoyancy*Volume;
3508+ m_force=m_drag+m_buoyancy;
3509+ m_p->applyForce(m_force,m_p->getPos());
3510+}
3511+
3512+
3513+/*!
3514+ Get the fluid interaction member function which returns a scalar field of a given name.
3515+
3516+ \param name the name of the field
3517+*/
3518+CFluidInteraction::ScalarFieldFunction CFluidInteraction::getScalarFieldFunction(const string& name)
3519+{
3520+ CFluidInteraction::ScalarFieldFunction sf;
3521+ if(name=="ID"){
3522+ sf=&CFluidInteraction::getParticleID;
3523+ } else if(name=="Drag_abs"){
3524+ sf=&CFluidInteraction::getVbsDrag;
3525+ } else if(name=="Buoyancy_abs"){
3526+ sf=&CFluidInteraction::getVbsBuoyancy;
3527+ } else if(name=="Force_abs"){
3528+ sf=&CFluidInteraction::getVbsForce;
3529+ } else {
3530+ sf=NULL;
3531+ cerr << "ERROR - invalid name for fluid interaction scalar access function" << endl;
3532+ }
3533+
3534+ return sf;
3535+}
3536+
3537+
3538+/*!
3539+ Get the fluid interaction member function which returns a vector field of a given name.
3540+
3541+ \param name the name of the field
3542+*/
3543+CFluidInteraction::VectorFieldFunction CFluidInteraction::getVectorFieldFunction(const string& name)
3544+{
3545+ CFluidInteraction::VectorFieldFunction sf;
3546+
3547+ if(name=="Position"){
3548+ sf=&CFluidInteraction::getParticlePos;
3549+ } else if(name=="Drag"){
3550+ sf=&CFluidInteraction::getDrag;
3551+ } else if(name=="Buoyancy"){
3552+ sf=&CFluidInteraction::getBuoyancy;
3553+ } else if(name=="Force"){
3554+ sf=&CFluidInteraction::getForce;
3555+ } else {
3556+ sf=NULL;
3557+ cerr << "ERROR - invalid name for fluid interaction vector access function" << endl;
3558+ }
3559+
3560+ return sf;
3561+}
3562+
3563+
3564
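
For reference, the force law in calcForces() above, reconstructed directly from the code (\rho_f = 1000 is the hard-coded fluid density in kg/m^3, \phi the effective porosity, \mu the viscosity, D the cell's D parameter, and \mathbf{v}_{\mathrm{rel}} = \mathbf{v}_f - \mathbf{v}_p the fluid velocity relative to the particle):

\mathbf{f}_{\mathrm{drag}} = \left( \frac{150\,\mu\,(1-\phi)}{\phi\,D^{2}} + \frac{1.75\,\rho_f\,\lVert\mathbf{v}_{\mathrm{rel}}\rVert}{D} \right) \mathbf{v}_{\mathrm{rel}}, \qquad \mathbf{f}_{\mathrm{buoy}} = \nabla P

both per unit volume, with the drag set to zero when D = 0. The total force applied to a particle of radius r is \mathbf{F} = \tfrac{4}{3}\pi r^{3} (\mathbf{f}_{\mathrm{drag}} + \mathbf{f}_{\mathrm{buoy}}).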
3565=== added file 'Model/FluidInteraction.h'
3566--- Model/FluidInteraction.h 1970-01-01 00:00:00 +0000
3567+++ Model/FluidInteraction.h 2019-07-28 14:55:57 +0000
3568@@ -0,0 +1,72 @@
3569+/////////////////////////////////////////////////////////////
3570+// //
3571+// Copyright (c) 2003-2014 by The University of Queensland //
3572+// Centre for Geoscience Computing //
3573+// http://earth.uq.edu.au/centre-geoscience-computing //
3574+// //
3575+// Primary Business: Brisbane, Queensland, Australia //
3576+// Licensed under the Open Software License version 3.0 //
3577+// http://www.apache.org/licenses/LICENSE-2.0 //
3578+// //
3579+/////////////////////////////////////////////////////////////
3580+
3581+
3582+#ifndef __FLUID_INTERACTION_H
3583+#define __FLUID_INTERACTION_H
3584+
3585+
3586+#include "Model/FluidCell.h"
3587+#include "Model/Particle.h"
3588+#include "Foundation/vec3.h"
3589+#include "Foundation/triplet.h"
3590+
3591+/*!
3592+ \class CFluidInteraction
3593+ \brief base class for fluid interactions
3594+
3595+ \author Qi Shao
3596+ $Revision$
3597+ $Date$
3598+*/
3599+
3600+class CFluidInteraction {
3601+ protected:
3602+ CFluidCell *m_cell;
3603+ CParticle *m_p;
3604+ Vec3 m_drag;
3605+ Vec3 m_buoyancy;
3606+ Vec3 m_force;
3607+
3608+ public:
3609+ CFluidInteraction (CParticle*,CFluidCell*);
3610+ virtual ~CFluidInteraction(){};
3611+
3612+ void calcForces();
3613+
3614+ inline Vec3 getParticlePos() const {return m_p->getPos();};
3615+ inline Vec3 getDrag() const {return m_drag;};
3616+ inline Vec3 getBuoyancy() const {return m_buoyancy;};
3617+ inline Vec3 getForce() const {return m_force;};
3618+ inline double getVbsDrag() const {return m_drag.norm();};
3619+ inline double getVbsBuoyancy() const {return m_buoyancy.norm();};
3620+ inline double getVbsForce() const {return m_force.norm();};
3621+ inline double getParticleID() const {return m_p->getID();};
3622+ inline esys::lsm::triplet<int,Vec3,double> getParticleData() const
3623+ {
3624+ return
3625+ esys::lsm::triplet<int,Vec3,double>(
3626+ m_p->getID(),
3627+ m_p->getPos(),
3628+ m_p->getRad()
3629+ );
3630+ };
3631+
3632+ typedef double (CFluidInteraction::* ScalarFieldFunction)() const;
3633+ typedef Vec3 (CFluidInteraction::* VectorFieldFunction)() const;
3634+
3635+ static ScalarFieldFunction getScalarFieldFunction(const string&);
3636+ static VectorFieldFunction getVectorFieldFunction(const string&);
3637+
3638+};
3639+
3640+#endif //__FLUID_INTERACTION_H
3641
3642=== modified file 'Model/Makefile.am'
3643--- Model/Makefile.am 2018-06-26 06:26:59 +0000
3644+++ Model/Makefile.am 2019-07-28 14:55:57 +0000
3645@@ -86,6 +86,10 @@
3646 EWallInteractionGroup.hpp \
3647 EWallInteraction.h \
3648 EWallInteraction.hpp \
3649+ FluidCell.cpp \
3650+ FluidCell.h \
3651+ FluidInteraction.cpp \
3652+ FluidInteraction.h \
3653 FractalFriction.cpp \
3654 FractalFriction.h \
3655 FrictionInteraction.cpp \
3656@@ -227,4 +231,4 @@
3657 $(top_builddir)/Geometry/libGgGeometry.la \
3658 $(top_builddir)/tml/message/libTmlMessage.la \
3659 $(top_builddir)/tml/type/libTmlType.la \
3660- $(top_builddir)/tml/comm/libTmlComm.la
3661+ $(top_builddir)/tml/comm/libTmlComm.la
3662
3663=== modified file 'Parallel/ASubLattice.h'
3664--- Parallel/ASubLattice.h 2017-09-07 08:07:19 +0000
3665+++ Parallel/ASubLattice.h 2019-07-28 14:55:57 +0000
3666@@ -46,7 +46,7 @@
3667 typedef std::pair<int,int> ParticleIdPair;
3668 typedef std::vector<ParticleIdPair> ParticleIdPairVector;
3669 typedef std::vector<int> IdVector;
3670-
3671+
3672 virtual ~ASubLattice();
3673 void setNTSize(int);
3674 virtual void setParticleType(const std::string &particleType)
3675@@ -57,6 +57,16 @@
3676 {
3677 return m_particleType;
3678 }
3679+ /****fluid contents: begin***/
3680+ virtual void addFluidInteraction()=0;
3681+ virtual void updateFluid()=0;
3682+ virtual void exchangeCells()=0;
3683+ virtual void sendCoeffi()=0;
3684+ virtual void recvPressure()=0;
3685+ virtual void solveMatrix()=0;
3686+
3687+ /****fluid contents: end***/
3688+
3689 virtual void setTimeStepSize(double dt) = 0;
3690 virtual vector<int> getCommCoords() const=0;
3691 virtual vector<int> getCommDims() const=0;
3692@@ -126,8 +136,8 @@
3693 virtual void applyForceToWall()=0;
3694 virtual void setVelocityOfWall()=0;
3695 virtual void setParticleVelocity()=0;
3696- virtual void setParticleDensity()=0;
3697- virtual void setTaggedParticleVel()=0;
3698+ virtual void setParticleDensity()=0;
3699+ virtual void setTaggedParticleVel()=0;
3700 virtual void setParticleAngularVelocity(){};
3701 virtual void setParticleNonDynamic()=0;
3702 virtual void setParticleNonRot()=0;
3703@@ -137,7 +147,7 @@
3704 virtual void translateMeshBy(const std::string &meshName, const Vec3 &translation)=0;
3705
3706 virtual void setTimer(MpiWTimers &timers) = 0;
3707-
3708+
3709 virtual void do2dCalculations(bool do2d) = 0;
3710
3711 // --- setting interaction parameters during a simulation ---
3712@@ -156,12 +166,18 @@
3713 virtual void addScalarTriangleField()=0;
3714 virtual void sendFieldData()=0;
3715 virtual void addVectorWallField()=0;
3716+ /****fluid contents: begin***/
3717+ virtual void addScalarFluidField()=0;
3718+ virtual void addVectorFluidField()=0;
3719+ virtual void addScalarFluidInteractionField()=0;
3720+ virtual void addVectorFluidInteractionField()=0;
3721+ /****fluid contents: end***/
3722
3723 // output
3724 virtual void printStruct()=0;
3725 virtual void printData()=0;
3726 virtual void printTimes()=0;
3727-
3728+
3729 // -- mesh data exchange --
3730 virtual void getMeshNodeRef()=0;
3731 virtual void getMeshFaceRef()=0;
3732
3733=== added file 'Parallel/GMRESSolverMaster.cpp'
3734--- Parallel/GMRESSolverMaster.cpp 1970-01-01 00:00:00 +0000
3735+++ Parallel/GMRESSolverMaster.cpp 2019-07-28 14:55:57 +0000
3736@@ -0,0 +1,143 @@
3737+/////////////////////////////////////////////////////////////
3738+// //
3739+// Copyright (c) 2003-2014 by The University of Queensland //
3740+// Centre for Geoscience Computing //
3741+// http://earth.uq.edu.au/centre-geoscience-computing //
3742+// //
3743+// Primary Business: Brisbane, Queensland, Australia //
3744+// Licensed under the Open Software License version 3.0 //
3745+// http://www.apache.org/licenses/LICENSE-2.0 //
3746+// //
3747+/////////////////////////////////////////////////////////////
3748+
3749+
3750+#include "GMRESSolverMaster.h"
3751+
3752+#include <iostream>
3753+#include <fstream>
3754+#include <math.h>
3755+#include <string>
3756+#include <sstream>
3757+#include <cstring>
3758+#include <iomanip>
3759+#include <algorithm>
3760+#include <stdlib.h>
3761+#include <stdio.h>
3762+#include "mpi.h"
3763+
3764+#include "Parallel/mpicmdbuf.h"
3765+#include "Parallel/mpisgvbuf.h"
3766+#include "Parallel/sublattice_cmd.h"
3767+#include "Parallel/mpibarrier.h"
3768+#include "Parallel/BroadCast_cmd.h"
3769+#include "Parallel/mpi_tag_defs.h"
3770+#include "tml/comm/comm.h"
3771+
3772+using namespace std;
3773+
3774+
3775+GMRESSolverMaster::GMRESSolverMaster(
3776+ MPI_Comm* comm,
3777+ TML_Comm* tml_comm,
3778+ vector<pair<Vec3,double> > co_W,
3779+ vector<pair<Vec3,double> > co_E,
3780+ vector<pair<Vec3,double> > co_N,
3781+ vector<pair<Vec3,double> > co_S,
3782+ vector<pair<Vec3,double> > co_D,
3783+ vector<pair<Vec3,double> > co_U,
3784+ vector<pair<Vec3,double> > co_C,
3785+ vector<pair<Vec3,double> > co_B,
3786+ int nx,
3787+ int ny,
3788+ int nz
3789+)
3790+{
3791+ m_comm=comm;
3792+ m_tml_comm=tml_comm;
3793+ MPI_Comm_rank(*m_comm, &m_rank);
3794+
3795+ Nx=nx;Ny=ny;Nz=nz;
3796+ Number=Nx*Ny*Nz;
3797+ W.resize(Number); E.resize(Number); N.resize(Number); S.resize(Number); D.resize(Number); U.resize(Number); C.resize(Number); B.resize(Number);
3798+ irow.resize(Number*7); icol.resize(Number*7); var.resize(Number*7);
3799+ index.resize(Number);
3800+ for(int n=0;n<Number;n++){
3801+ W[n]=(&co_W[n])->second; E[n]=(&co_E[n])->second;
3802+ N[n]=(&co_N[n])->second; S[n]=(&co_S[n])->second;
3803+ D[n]=(&co_D[n])->second; U[n]=(&co_U[n])->second;
3804+ C[n]=(&co_C[n])->second; B[n]=(&co_B[n])->second;
3805+ index[n]=(&co_W[n])->first;
3806+ }
3807+}
3808+
3809+
3810+GMRESSolverMaster::~GMRESSolverMaster()
3811+{
3812+ irow.clear();icol.clear();
3813+ W.clear();E.clear();N.clear();S.clear();D.clear();U.clear();C.clear();B.clear();
3814+ var.clear();index.clear();
3815+}
3816+
3817+
3818+vector<pair<Vec3,double> > GMRESSolverMaster::MatrixSolving()
3819+{
3820+ CMPILCmdBuffer cmd_buffer(*m_comm,m_rank);
3821+ CMPIBarrier barrier(*m_comm);
3822+
3823+ GlobalMatrixSetting();
3824+
3825+ cmd_buffer.broadcast(CMD_SOLVE);
3826+
3827+ m_tml_comm->broadcast_cont(B);
3828+ m_tml_comm->broadcast_cont(irow);
3829+ m_tml_comm->broadcast_cont(icol);
3830+ m_tml_comm->broadcast_cont(var);
3831+ m_tml_comm->broadcast(Number);
3832+ m_tml_comm->broadcast(nnz);
3833+
3834+ multimap<int,double> temp;
3835+ m_tml_comm->gather(temp);
3836+
3837+ barrier.wait("solve matrix");
3838+
3839+ vector<double> x;
3840+ for(multimap<int,double>::iterator iter=temp.begin();
3841+ iter!=temp.end();
3842+ iter++){
3843+ x.push_back(iter->second);
3844+ };
3845+
3846+ //putting x into results vector
3847+ vector<pair<Vec3,double> > results;
3848+ for(int n=0;n<Number;n++){
3849+ results.push_back(make_pair(index[n],x[n]));
3850+ }
3851+
3852+ x.clear();
3853+ temp.clear();
3854+ return results;
3855+}
3856+
3857+
3858+void GMRESSolverMaster::GlobalMatrixSetting()
3859+{
3860+ nnz=0;
3861+ for(int n=1,m1=0,m2=0;n<=Number;n++){
3862+ if (n>Nx*Ny){irow[nnz]=n;icol[nnz]=n-Nx*Ny;var[nnz]=D[n-1];nnz++;};//down3
3863+ if (n>Nx*(1+Ny*m1) && n<=Nx*Ny*(m1+1)) {
3864+ irow[nnz]=n;icol[nnz]=n-Nx;var[nnz]=N[n-1];nnz++; //down2
3865+ if(n==Nx*Ny*(m1+1)){m1++;};
3866+ };
3867+ if(n%Nx!=1){irow[nnz]=n;icol[nnz]=n-1;var[nnz]=W[n-1];nnz++;};//down1
3868+ irow[nnz]=n;icol[nnz]=n;var[nnz]=C[n-1];nnz++; //centre
3869+ if(n%Nx!=0){irow[nnz]=n;icol[nnz]=n+1;var[nnz]=E[n-1];nnz++;};//up1
3870+ if (n>m2*Nx*Ny && n<=(m2+1)*Ny*Nx-Nx) {
3871+ irow[nnz]=n;icol[nnz]=n+Nx;var[nnz]=S[n-1];nnz++;//up2
3872+ if(n==(m2+1)*Ny*Nx-Nx){m2++;};
3873+ };
3874+ if (n<=Nx*Ny*(Nz-1)){irow[nnz]=n;icol[nnz]=n+Nx*Ny;var[nnz]=U[n-1];nnz++;};//up3
3875+ };
3876+}
3877+
3878+
3879+
3880
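
GlobalMatrixSetting() above packs the seven-point stencil into one-based COO triplets (irow, icol, var), emitting up to seven entries per row in the order D, N, W, C, E, S, U; the m1/m2 slab bookkeeping is just a way of testing the (i,j,k) bounds row by row. A self-contained sketch of the same layout, assuming the x-fastest cell ordering n = 1 + i + Nx*j + Nx*Ny*k implied by the neighbour offsets (hypothetical helper, not part of the merge):

#include <vector>

struct Coo { std::vector<int> irow, icol; std::vector<double> var; };

// Build the 7-point-stencil COO triplets for an Nx x Ny x Nz grid, mirroring
// GMRESSolverMaster::GlobalMatrixSetting. W..C are the per-cell coefficients.
Coo buildStencil(int Nx, int Ny, int Nz,
                 const std::vector<double>& W, const std::vector<double>& E,
                 const std::vector<double>& N, const std::vector<double>& S,
                 const std::vector<double>& D, const std::vector<double>& U,
                 const std::vector<double>& C)
{
  Coo m;
  auto add = [&m](int r, int c, double v)
    { m.irow.push_back(r); m.icol.push_back(c); m.var.push_back(v); };
  for (int k = 0; k < Nz; k++)
    for (int j = 0; j < Ny; j++)
      for (int i = 0; i < Nx; i++) {
        const int n = 1 + i + Nx*j + Nx*Ny*k;    // one-based row number
        if (k > 0)    add(n, n - Nx*Ny, D[n-1]); // lower z neighbour
        if (j > 0)    add(n, n - Nx,    N[n-1]); // lower y neighbour
        if (i > 0)    add(n, n - 1,     W[n-1]); // west
        add(n, n, C[n-1]);                       // diagonal
        if (i < Nx-1) add(n, n + 1,     E[n-1]); // east
        if (j < Ny-1) add(n, n + Nx,    S[n-1]); // upper y neighbour
        if (k < Nz-1) add(n, n + Nx*Ny, U[n-1]); // upper z neighbour
      }
  return m;
}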
3881=== added file 'Parallel/GMRESSolverMaster.h'
3882--- Parallel/GMRESSolverMaster.h 1970-01-01 00:00:00 +0000
3883+++ Parallel/GMRESSolverMaster.h 2019-07-28 14:55:57 +0000
3884@@ -0,0 +1,81 @@
3885+/////////////////////////////////////////////////////////////
3886+// //
3887+// Copyright (c) 2003-2014 by The University of Queensland //
3888+// Centre for Geoscience Computing //
3889+// http://earth.uq.edu.au/centre-geoscience-computing //
3890+// //
3891+// Primary Business: Brisbane, Queensland, Australia //
3892+// Licensed under the Open Software License version 3.0 //
3893+// http://www.apache.org/licenses/LICENSE-2.0 //
3894+// //
3895+/////////////////////////////////////////////////////////////
3896+
3897+
3898+#ifndef __GMRESSOLVERMASTER_H
3899+#define __GMRESSOLVERMASTER_H
3900+
3901+//--- STL includes ---
3902+#include <string>
3903+using std::string;
3904+
3905+// --- project includes ---
3906+#include "Foundation/console.h"
3907+#include "Foundation/vec3.h"
3908+
3909+// --- TML includes ---
3910+#include "tml/comm/comm.h"
3911+
3912+class TML_Comm;
3913+
3914+/*!
3915+ \class GMRESSolverMaster
3916+ \brief Master class of the GMRES solver for sparse systems of linear equations
3917+
3918+ \author Qi Shao
3919+ $Revision$
3920+ $Date$
3921+*/
3922+class GMRESSolverMaster
3923+{
3924+ private:
3925+ int Nx,Ny,Nz,Number,nnz;
3926+ vector<int> irow,icol;
3927+ vector<double> W, E, N, S, D, U, C, B;
3928+ vector <double> var;
3929+ vector<Vec3> index;
3930+ int m_rank;
3931+
3932+ protected:
3933+ MPI_Comm *m_comm;
3934+ TML_Comm *m_tml_comm;
3935+
3936+ void GlobalMatrixSetting();
3937+
3938+ public:
3939+ vector<pair<Vec3,double> > MatrixSolving();
3940+
3941+ GMRESSolverMaster(
3942+ MPI_Comm*,
3943+ TML_Comm*,
3944+ vector<pair<Vec3,double> >,
3945+ vector<pair<Vec3,double> >,
3946+ vector<pair<Vec3,double> >,
3947+ vector<pair<Vec3,double> >,
3948+ vector<pair<Vec3,double> >,
3949+ vector<pair<Vec3,double> >,
3950+ vector<pair<Vec3,double> >,
3951+ vector<pair<Vec3,double> >,
3952+ int,
3953+ int,
3954+ int
3955+ );
3956+ ~GMRESSolverMaster();
3957+};
3958+
3959+#endif //__GMRESSOLVERMASTER_H
3960+
3961+
3962+
3963+
3964+
3965+
3966
3967=== added file 'Parallel/GMRESSolverSlave.cpp'
3968--- Parallel/GMRESSolverSlave.cpp 1970-01-01 00:00:00 +0000
3969+++ Parallel/GMRESSolverSlave.cpp 2019-07-28 14:55:57 +0000
3970@@ -0,0 +1,573 @@
3971+/////////////////////////////////////////////////////////////
3972+// //
3973+// Copyright (c) 2003-2014 by The University of Queensland //
3974+// Centre for Geoscience Computing //
3975+// http://earth.uq.edu.au/centre-geoscience-computing //
3976+// //
3977+// Primary Business: Brisbane, Queensland, Australia //
3978+// Licensed under the Open Software License version 3.0 //
3979+// http://www.apache.org/licenses/LICENSE-2.0 //
3980+// //
3981+/////////////////////////////////////////////////////////////
3982+
3983+
3984+
3985+#include "GMRESSolverSlave.h"
3986+
3987+#include <iostream>
3988+#include <fstream>
3989+#include <math.h>
3990+#include <string>
3991+#include <sstream>
3992+#include <cstring>
3993+#include <iomanip>
3994+#include <algorithm>
3995+#include <stdlib.h>
3996+#include <stdio.h>
3997+#include "mpi.h"
3998+
3999+using namespace std;
4000+
4001+
4002+
4003+GMRESSolverSlave::GMRESSolverSlave(
4004+ MPI_Comm* comm,
4005+ TML_Comm* tml_comm,
4006+ vector<double> B0,
4007+ vector<int> row,
4008+ vector<int> col,
4009+ vector<double> v,
4010+ int number,
4011+ int nz
4012+)
4013+{
4014+ m_worker_comm=comm;
4015+ m_tml_global_comm=tml_comm;
4016+ MPI_Comm_rank(*m_worker_comm, &id);
4017+ MPI_Comm_size(*m_worker_comm, &nproc);
4018+
4019+ Number=number;
4020+ nrowsI=Number/nproc;
4021+ B.resize(Number);
4022+ for(int n=0;n<Number;n++){
4023+ B[n]=B0[n];
4024+ }
4025+ nnz=nz;
4026+ irow.resize(nnz); icol.resize(nnz);var.resize(nnz);
4027+ for(int n=0;n<nnz;n++){
4028+ irow[n]=row[n];
4029+ icol[n]=col[n];
4030+ var[n]=v[n];
4031+ }
4032+}
4033+
4034+GMRESSolverSlave::~GMRESSolverSlave()
4035+{
4036+ vector<int> zero;
4037+ irow.swap(zero);icol.swap(zero);ia.swap(zero);ja.swap(zero);
4038+ vector<double> zero_db;
4039+ var.swap(zero_db);A.swap(zero_db);B.swap(zero_db);b.swap(zero_db);
4040+}
4041+
4042+
4043+
4044+void GMRESSolverSlave::LocalMatrixSolving()
4045+{
4046+ /* local vars */
4047+ int precond_flag = 0;//0 for normal GMRES and 1 for preconditioned GMRES algorithm
4048+ double gmres_res = 1.e-5; //gmres residual tolerance
4049+ int i = 0, j = 0, k, ss, k_end = 200, mm = 10, k_sum=0;//k_end=max GMRES iterations per cycle; mm=max number of restarts;
4050+
4051+ LocalMatrixSetting();
4052+
4053+ //localize
4054+ int **receive = NULL;
4055+ int *cntowner= NULL;
4056+ int **to_be_sent= NULL;
4057+ int *cntsent = NULL;
4058+ localize(&receive, &cntowner, &to_be_sent, &cntsent);
4059+
4060+ int n_total = max_int_array(ja, my_nnz)+1;
4061+ double norm_r0 = 0.;
4062+
4063+ double *u = (double *)calloc( nrowsI , sizeof(double) );
4064+ double **v = (double **)malloc( 1 * sizeof(double *) );
4065+ double **h = (double **)malloc( 1 * sizeof(double *) );
4066+ double *x0 = (double *)calloc( n_total , sizeof(double) );
4067+ double *r0 = (double *)calloc( nrowsI , sizeof(double) );
4068+ double *g = (double *)calloc( 1 , sizeof(double) );
4069+ double *c = NULL, *s = NULL, *resi = NULL;
4070+ double delta = 0., gamma = 0.;
4071+
4072+ vector<double> x_local;
4073+ x_local.resize(nrowsI);
4074+
4075+ //pick the diagonal entries for preconditioning (if it is preconditioned)
4076+ double *diag = (double *) calloc( nrowsI , sizeof(double) );
4077+ double *yk = (double *) calloc( n_total , sizeof(double) );
4078+ if( precond_flag ) //if it is preconditioned
4079+ {
4080+ for (i = 0 ; i < nrowsI; i++ )
4081+ for (j = ia[i] ; j < ia[i+1]; j++)
4082+ if(ja[j] == i )
4083+ diag[i] = A[j];
4084+ }
4085+
4086+ //init x0
4087+ for(i = 0; i < nrowsI; i++)
4088+ x0[i] = 1.0;
4089+
4090+ //restarting vars
4091+ int rst = 0, completed = 0, total_itr = 1;
4092+
4093+ //restarting loop
4094+ for(rst = 0; rst < mm; rst++){
4095+ gather_sync(receive, cntowner, to_be_sent, cntsent, x0);
4096+ //matrix vector product
4097+ sparse_matmult(x0, r0);
4098+ //updating the final r0
4099+ for(i = 0; i < nrowsI; i++)
4100+ r0[i] = b[i] - r0[i];
4101+
4102+ // normalization ...
4103+ dotproduct(r0, r0, &norm_r0); //dot product
4104+ norm_r0 = sqrt(norm_r0); //root
4105+ for(i = 0; i < nrowsI; i++)
4106+ r0[i] /= norm_r0; //normalizing
4107+
4108+ // initial allocation of v
4109+ h[0] = (double *)calloc(1 , sizeof(double) );
4110+ v[0] = (double *)calloc(n_total , sizeof(double) );
4111+ for(i = 0; i < nrowsI; i++)
4112+ v[0][i] = r0[i]; //initializing ...
4113+
4114+ g[0] = norm_r0; //initial g
4115+
4116+ //gmres loop
4117+ for(k = 0; k < k_end; k++)
4118+ {
4119+ //reallocating vars to have enough space for the next items
4120+ g = (double *) realloc (g, (k+2) * sizeof(double) );
4121+ g[k+1] = 0.;
4122+ c = (double *) realloc (c, (k+1) * sizeof(double) );
4123+ s = (double *) realloc (s, (k+1) * sizeof(double) );
4124+ resi = (double *) realloc (resi, (k+1) * sizeof(double) );
4125+ v = (double **)realloc (v, (k+2) * sizeof(double *) );
4126+ v[k+1] = (double *)calloc(n_total , sizeof(double) );
4127+ h = (double **)realloc (h , (k+2) * sizeof(double *) );
4128+ h[k+1] = (double *)calloc(1 , sizeof(double) );
4129+
4130+ for (j = 0; j <= (k+1); j++)
4131+ h[j] = (double *)realloc( h[j], (k+1) *sizeof(double) );
4132+
4133+ if( precond_flag )
4134+ {
4135+ for (i = 0 ; i < nrowsI; i++ )
4136+ if( diag[i] )
4137+ yk[i] = v[k][i] / diag[i];
4138+ else
4139+ yk[i] = v[k][i];
4140+ //gather/sync
4141+ gather_sync(receive, cntowner, to_be_sent, cntsent, yk);
4142+ //matrix vector product
4143+ sparse_matmult(yk, u);
4144+ }
4145+ else
4146+ {
4147+ //gather/sync
4148+ gather_sync(receive, cntowner, to_be_sent, cntsent, v[k]);
4149+ //matrix vector product
4150+ sparse_matmult(v[k], u);
4151+ }
4152+
4153+ for (j = 0 ; j <= k; j++)
4154+ {
4155+ dotproduct(v[j], u, &h[j][k]); //dot product
4156+ for( ss = 0; ss < nrowsI; ss++)
4157+ u[ss] -= h[j][k] * v[j][ss];
4158+ }
4159+ dotproduct(u, u, &h[k+1][k]); //dot product
4160+ h[k+1][k] = sqrt(h[k+1][k]); //norm
4161+ //updating v[k+1]
4162+ for(ss = 0; ss < nrowsI; ss++)
4163+ v[k+1][ss] = u[ss] / h[k+1][k];
4164+
4165+ for (j = 0 ; j < k; j++)
4166+ {
4167+ delta = h[j][k];
4168+ h[j][k] = c[j] * delta + s[j] * h[j+1][k];
4169+ h[j+1][k] = -s[j] * delta + c[j] * h[j+1][k];
4170+ }
4171+ gamma = sqrt(h[k][k] * h[k][k] + h[k+1][k]*h[k+1][k]);
4172+ c[k] = h[k][k] / gamma;
4173+ s[k] = h[k+1][k] / gamma;
4174+ h[k][k] = gamma;
4175+ h[k+1][k] = 0.;
4176+ delta = g[k];
4177+ g[k] = c[k] * delta + s[k] * g[k+1];
4178+ g[k+1] = -s[k] * delta + c[k] * g[k+1];
4179+ resi[k] = fabs(g[k+1]);
4180+
4181+ if (resi[k] <= gmres_res)
4182+ {
4183+ completed = 1; //set the completed flag
4184+ break; //terminate gmres loop ASAP!
4185+ }
4186+
4187+ if (rst==mm-1 && k==k_end-1 && resi[k] > gmres_res && id==0){
4188+ cerr << "Failed to converge; please try a larger number of GMRES iterations or restarts\n";
4189+ exit(1); //non-zero exit status on failure
4190+ };
4191+
4192+ } //end of the main gmres loop
4193+
4194+ k--; //step k back so it indexes the last completed iteration
4195+ k_sum+=k;
4196+
4197+ //compute alpha
4198+ double *alpha = (double *)calloc(k+1 , sizeof(double) );
4199+ //solve backward
4200+ for (j = k ; j >= 0; j--)
4201+ {
4202+ alpha[j] = g[j]/h[j][j];
4203+ for (ss = (j+1) ; ss <= k; ss++)
4204+ alpha[j] -= (h[j][ss]/h[j][j] * alpha[ss]);
4205+ }
4206+ //compute zk
4207+ double *zk = (double *)calloc(nrowsI , sizeof(double) );
4208+ for (j = 0 ; j <= k; j++)
4209+ for (ss = 0 ; ss < nrowsI; ss++)
4210+ zk[ss] += alpha[j] * v[j][ss];
4211+
4212+ if(precond_flag )
4213+ for (i = 0 ; i < nrowsI; i++ )
4214+ if (diag[i] )
4215+ zk[i] /= diag[i];
4216+
4217+ //compute solution
4218+ for (ss = 0 ; ss < nrowsI; ss++){
4219+ x_local[ss] = x0[ss] + zk[ss];
4220+ }
4221+
4222+ //preparing for restart operation ...
4223+ for(i = 0; i < nrowsI; i++)
4224+ x0[i] = x_local[i]; //put last x_local into x0
4225+
4226+ //clean ups
4227+ free(alpha);
4228+ free(zk);
4229+ for (j = 0 ; j <= (k+1); j++)
4230+ {
4231+ free(v[j]);
4232+ free(h[j]);
4233+ }
4234+
4235+ if(completed)
4236+ break; //terminate restart loop ASAP!
4237+
4238+ } //end of restarting loop
4239+ //Sending local results for gather by master
4240+ m_tml_global_comm->send_gather(x_local,0);
4241+
4242+ free(u);
4243+ free(x0);
4244+ free(r0);
4245+ free(g);
4246+ free(c);
4247+ free(diag);
4248+ free(yk);
4249+ free(s);
4250+ free(resi);
4251+ free(v);
4252+ free(h);
4253+ free(cntowner);
4254+ free(cntsent);
4255+ for (j = 0 ; j < nproc; j++)
4256+ {
4257+ free(receive[j]);
4258+ free(to_be_sent[j]);
4259+ }
4260+ free(receive);
4261+ free(to_be_sent);
4262+}
4263+
4264+
4265+
4266+void GMRESSolverSlave::LocalMatrixSetting()
4267+{
4268+ //Setting local CRS matrix
4269+ int i, i_start, i_end, DI;
4270+
4271+ DI = Number/nproc;
4272+ int *col_at_row = (int *)calloc( DI , sizeof(int) );
4273+ i_start = id * DI;
4274+ i_end = (id+1) * DI;
4275+
4276+ int irow2,icol2;
4277+ for(i = 0; i < nnz; i++) {
4278+ irow2=irow[i]-1; //converting to zero-based
4279+ if ( (i_start <= irow2) && (irow2 < i_end) ) {
4280+ col_at_row[(irow2-i_start)]++;
4281+ };
4282+ }
4283+
4284+ //find my total number of non-zero items (my_nnz) and also update ia[]
4285+ ia.resize(DI+1);
4286+ my_nnz=0;
4287+ for(i= 0; i < DI; i++){
4288+ ia[i] = my_nnz;
4289+ my_nnz += col_at_row[i];
4290+ }
4291+ ia[i] = my_nnz;
4292+
4293+ //allocate A: one contiguous chunk of doubles for the local CRS value array
4294+ A.resize(my_nnz);
4295+ //allocate (*ja)
4296+ ja.resize(my_nnz);
4297+
4298+ //fill data matrix A[], ia[] and ja[]
4299+ for(i= 0; i < DI; i++)
4300+ col_at_row[i] = 0; //reset col_at_row[]
4301+
4302+ for( i = 0; i < nnz; i++){
4303+ irow2=irow[i]-1;
4304+ icol2=icol[i]-1;//converting to zero-based
4305+ if ((i_start <= irow2) && (irow2 < i_end) ){
4306+ irow2 -= i_start;
4307+ A[ia[irow2] + col_at_row[irow2]] = var[i];
4308+ ja[ia[irow2] + col_at_row[irow2]] = icol2;
4309+ col_at_row[irow2]++;
4310+ }
4311+ }
4312+
4313+ //setting local rhs
4314+ b.resize(nrowsI);
4315+ for(i = 0; i < Number; i++) {
4316+ if ( (i_start <= i) && (i < i_end) )
4317+ b[(i-i_start)] = B[i]; //put it in local [b]
4318+ }
4319+
4320+ //clean-up
4321+ free(col_at_row);
4322+}
4323+
4324+
4325+//finds the maximum of the given integer array with size "n"
4326+int GMRESSolverSlave::max_int_array(vector<int> input, int n)
4327+{
4328+ //locals
4329+ int i = 0;
4330+ int max = input[0];
4331+ for( i = 0; i < n ; i++)
4332+ if( input[i] > max)
4333+ max = input[i];
4334+ //returning the maximum value
4335+ return max;
4336+}
4337+
4338+
4339+void GMRESSolverSlave::localize(int ***receive, int **cntowner, int ***to_be_sent, int **cntsent)
4340+{
4341+ // locals
4342+ int i, j, indx, owner;
4343+ int phn = nrowsI;
4344+ int *tag = (int *)calloc(Number , sizeof(int) );
4345+ (*receive) = (int **)malloc(nproc * sizeof(int *));
4346+ (*to_be_sent) = (int **)malloc(nproc * sizeof(int *));
4347+ int **expect = (int **)malloc(nproc * sizeof(int *));
4348+ (*cntowner) = (int *)calloc(nproc , sizeof(int));
4349+ (*cntsent) = (int *)calloc(nproc , sizeof(int));
4350+
4351+ int *tmp_receive = NULL, *tmp_expect = NULL;
4352+ int msg_tag1 = 40, msg_tag2 = 60;
4353+ MPI_Status status1[nproc];
4354+ MPI_Request request1[nproc];
4355+ MPI_Status status2[nproc];
4356+ MPI_Request request2[nproc];
4357+
4358+ //initializing to NULL (safe)
4359+ for ( i = 0; i < nproc; i++){
4360+ (*receive)[i] = NULL;
4361+ expect[i] = NULL;
4362+ (*to_be_sent)[i] = NULL;
4363+ }
4364+
4365+ // main loop for localizing ja[]: rewrite global column indices into local ones
4366+ for ( i = 0; i < nrowsI; i++) { // i-local always starts from zero
4367+ for ( indx = ia[i]; indx < ia[i+1]; indx++) //ia[] always starts from zero for this process
4368+ {
4369+ j=ja[indx]; //find the global column (zero-based)
4370+ owner = (int)(j/nrowsI); //and the owner rank
4371+ if (owner == id)
4372+ ja[indx] -= (id*nrowsI); // NOTE: my_rank*nlocal=vect_start_indx for this process
4373+ else
4374+ {
4375+ if( !tag[j] ) //is it already localized? if not, do it
4376+ {
4377+ //updating receive list
4378+ tmp_receive = (int *) realloc ((*receive)[owner], ((*cntowner)[owner]+1)*sizeof(int));
4379+ if(tmp_receive == NULL) {
4380+ cout<<"can't realloc receive list on processor "<<id<<endl;
4381+ fflush(stdout);
4382+ exit(1);
4383+ } else
4384+ (*receive)[owner] = tmp_receive;
4385+
4386+ (*receive)[owner][(*cntowner)[owner]] = phn;
4387+ //updating expect list
4388+ tmp_expect = (int *) realloc (expect[owner], ((*cntowner)[owner]+1) * sizeof(int));
4389+ if(tmp_expect == NULL) {
4390+ cout<<"can't realloc expect list on processor "<<id<<endl;
4391+ fflush(stdout);
4392+ exit(0);
4393+ } else
4394+ expect[owner] = tmp_expect;
4395+
4396+ expect[owner][(*cntowner)[owner]] = j;
4397+ // updating counters
4398+ tag[j] = phn;
4399+ ((*cntowner)[owner])++;
4400+ phn++;
4401+ }
4402+ ja[indx] = tag[j];
4403+ }
4404+ }
4405+ }
4406+
4407+ // sending expect list to other processes
4408+ int rq_inc1 = 0, rq_inc2 = 0; //increments for requests
4409+ for( owner = 0; owner < nproc; owner++) //loop over processes to which we send the expect list
4410+ if ( owner == id) //don't send to myself
4411+ continue;
4412+ else
4413+ {
4414+ //sending the numbers of expected values to that process
4415+ MPI_Isend(&(*cntowner)[owner], 1, MPI_INT, owner, msg_tag1, *m_worker_comm, request1+rq_inc1++);
4416+ //sending expected values to that process
4417+ MPI_Isend(expect[owner], (*cntowner)[owner], MPI_INT, owner, msg_tag2, *m_worker_comm, request2+ rq_inc2++);
4418+ }
4419+
4420+ // receiving to_be_sent list from other processes
4421+ int st_inc1 = 0, st_inc2 = 0; //increments for status
4422+ for( owner = 0; owner < nproc; owner++) //loop over processes from which we receive
4423+ if ( owner == id)
4424+ continue;
4425+ else
4426+ {
4427+ //receiving the numbers of expected values for that process
4428+ MPI_Recv(&(*cntsent)[owner], 1, MPI_INT, owner, msg_tag1, *m_worker_comm, status1+st_inc1++ );
4429+ //once get the size of data, reallocating enough space
4430+ tmp_expect = (int *)realloc ((*to_be_sent)[owner], (*cntsent)[owner]*sizeof(int) );
4431+ if(tmp_expect == NULL){
4432+ printf("\ncan't realloc to_be_sent list!\n");
4433+ exit(1);
4434+ }
4435+ else {
4436+ (*to_be_sent)[owner] = tmp_expect;
4437+ }
4438+ // receiving data and putting it in place
4439+ MPI_Recv((*to_be_sent)[owner],(*cntsent)[owner], MPI_INT, owner, msg_tag2, *m_worker_comm, status2+st_inc2++ );
4440+ for ( i = 0; i < (*cntsent)[owner]; i++) //localizing data
4441+ (*to_be_sent)[owner][i] -= (id*nrowsI);
4442+ }
4443+
4444+ // wait until all sends and receives are complete
4445+ MPI_Waitall(nproc-1, request1, status1);
4446+ MPI_Waitall(nproc-1, request2, status2);
4447+
4448+ // clean-up
4449+ for( i = 0; i < nproc; i++)
4450+ if( i != id)
4451+ free(expect[i]);
4452+ free(expect);
4453+ free(tag);
4454+}
4455+
4456+
4457+
4458+ // gathers remote vector entries into the local halo slots and synchronizes them among processes
4459+void GMRESSolverSlave::gather_sync(int **receive, int *cntowner, int **to_be_sent, int *cntsent, double *qk)
4460+{
4461+ int i = 0;
4462+ int owner = 0;
4463+ int msg_tag = 40;
4464+ MPI_Status status[nproc];
4465+ MPI_Request request[nproc];
4466+ double **buff_snd = (double **)malloc(nproc * sizeof(double *));
4467+ double **buff_rcv = (double **)malloc(nproc * sizeof(double *));
4468+
4469+ // sending doubles to other processes
4470+ int rq_inc = 0; //increment for requests
4471+ for(owner = 0; owner < nproc; owner++) //loop over processes to which we send
4472+ if (owner == id) //don't send to myself
4473+ continue;
4474+ else
4475+ {
4476+ // first pack doubles into the send buffer
4477+ buff_snd[owner] = (double *)malloc(cntsent[owner] * sizeof(double));
4478+ for( i = 0; i < cntsent[owner]; i++)
4479+ buff_snd[owner][i] = qk[to_be_sent[owner][i]];
4480+
4481+ //sending expected values to that process
4482+ MPI_Isend(buff_snd[owner], cntsent[owner], MPI_DOUBLE, owner, msg_tag, *m_worker_comm, request+rq_inc++);
4483+ }
4484+
4485+ // receiving doubles from other processes and putting them in the right places
4486+ int st_inc = 0; //increment for status
4487+ for( owner = 0; owner < nproc; owner++) //loop over processes from which we receive
4488+ if ( owner == id)
4489+ continue;
4490+ else
4491+ {
4492+ // first allocate receive buffer
4493+ buff_rcv[owner] = (double *)malloc(cntowner[owner] * sizeof(double));
4494+ // receiving data and putting it in the receive buffer
4495+ MPI_Recv(buff_rcv[owner], cntowner[owner], MPI_DOUBLE, owner, msg_tag, *m_worker_comm, status+st_inc++ );
4496+ for ( i = 0; i < cntowner[owner]; i++) //putting in right place indicated by ja[]
4497+ qk[receive[owner][i]] = buff_rcv[owner][i];
4498+ }
4499+
4500+ // wait until all sends and receives are complete
4501+ MPI_Waitall(nproc-1, request, status);
4502+
4503+ //clean-ups
4504+ for( i = 0; i < nproc ; i++)
4505+ if( i != id) {
4506+ free(buff_snd[i]);
4507+ free(buff_rcv[i]);
4508+ }
4509+ free(buff_rcv);
4510+ free(buff_snd);
4511+}
4512+
4513+
4514+void GMRESSolverSlave::sparse_matmult(double *q, double *u)
4515+{
4516+ //locals
4517+ int r, j;
4518+ // reset u
4519+ for( r = 0 ; r < nrowsI; r++)
4520+ u[r] = 0.;
4521+
4522+ for( r = 0 ; r < nrowsI; r++)
4523+ for ( j = ia[r]; j < ia[r+1]; j++) {
4524+ u[r] += (A[j] * q[ja[j]]);
4525+ }
4526+}
4527+
4528+
4529+void GMRESSolverSlave::dotproduct(double *a, double *b, double *c)
4530+{
4531+ //locals
4532+ int i = 0;
4533+ double local_dot = 0.;
4534+ //perform local dot product
4535+ for( i = 0; i < nrowsI; i++)
4536+ local_dot += (a[i] * b[i]);
4537+
4538+ //reduceall to single values
4539+ MPI_Allreduce(&local_dot,c,1,MPI_DOUBLE,MPI_SUM,*m_worker_comm);
4540+}
4541+
4542+
4543+
4544
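
The parallel layout behind LocalMatrixSetting() and localize() above is a contiguous block-row partition: with nrowsI = Number/nproc, worker id owns global rows [id*nrowsI, (id+1)*nrowsI), and localize() remaps any off-rank column index to a halo slot appended after the owned entries. Note that the integer division assumes Number is an exact multiple of nproc; any remainder rows would be silently dropped, so the fluid cell count must divide evenly across the workers. A minimal sketch of the ownership rule (hypothetical helper, not part of the merge):

#include <cassert>

// Block-row ownership used throughout GMRESSolverSlave (zero-based rows).
int ownerOfRow(int globalRow, int Number, int nproc)
{
  assert(Number % nproc == 0);       // the solver's partition requires this
  const int nrowsI = Number / nproc; // rows per worker
  return globalRow / nrowsI;         // matches owner = (int)(j/nrowsI) in localize()
}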
4545=== added file 'Parallel/GMRESSolverSlave.h'
4546--- Parallel/GMRESSolverSlave.h 1970-01-01 00:00:00 +0000
4547+++ Parallel/GMRESSolverSlave.h 2019-07-28 14:55:57 +0000
4548@@ -0,0 +1,76 @@
4549+/////////////////////////////////////////////////////////////
4550+// //
4551+// Copyright (c) 2003-2014 by The University of Queensland //
4552+// Centre for Geoscience Computing //
4553+// http://earth.uq.edu.au/centre-geoscience-computing //
4554+// //
4555+// Primary Business: Brisbane, Queensland, Australia //
4556+// Licensed under the Open Software License version 3.0 //
4557+// http://www.apache.org/licenses/LICENSE-2.0 //
4558+// //
4559+/////////////////////////////////////////////////////////////
4560+
4561+
4562+#ifndef __GMRESSOLVERSLAVE_H
4563+#define __GMRESSOLVERSLAVE_H
4564+
4565+//--- STL includes ---
4566+#include <string>
4567+using std::string;
4568+
4569+// --- project includes ---
4570+#include "Foundation/console.h"
4571+#include "Foundation/vec3.h"
4572+
4573+// --- TML includes ---
4574+#include "tml/comm/comm.h"
4575+
4576+class TML_Comm;
4577+
4578+/*!
4579+ \class GMRESSolverSlave
4580+ \brief Slave class of the GMRES solver for sparse systems of linear equations
4581+
4582+ \author Qi Shao
4583+ $Revision$
4584+ $Date$
4585+*/
4586+class GMRESSolverSlave
4587+{
4588+ private:
4589+ int nrowsI,nproc,id,nnz;
4590+ int Nx,Ny,Nz,Number,my_nnz;
4591+ vector<int> irow,icol,ia,ja;
4592+ vector<double> var, A, B, b;
4593+
4594+ protected:
4595+ MPI_Comm *m_worker_comm;
4596+ TML_Comm *m_tml_global_comm;
4597+
4598+ void LocalMatrixSetting();
4599+ int max_int_array(vector<int>, int);
4600+ void localize(int ***, int **, int ***, int **);
4601+ void gather_sync(int **, int *, int **, int *, double *);
4602+ void sparse_matmult(double *, double *);
4603+ void dotproduct(double *, double *, double *);
4604+
4605+ public:
4606+ void LocalMatrixSolving();
4607+
4608+ GMRESSolverSlave(
4609+ MPI_Comm*,
4610+ TML_Comm*,
4611+ vector<double>,
4612+ vector<int>,
4613+ vector<int>,
4614+ vector<double>,
4615+ int,
4616+ int
4617+ );
4618+ ~GMRESSolverSlave();
4619+};
4620+
4621+#endif //__GMRESSOLVERSLAVE_H
4622+
4623+
4624+
4625
4626=== modified file 'Parallel/LatticeMaster.cpp'
4627--- Parallel/LatticeMaster.cpp 2019-03-12 16:42:13 +0000
4628+++ Parallel/LatticeMaster.cpp 2019-07-28 14:55:57 +0000
4629@@ -97,8 +97,12 @@
4630 m_center_id(0),
4631 m_total_time(0),
4632 m_t(0),
4633+ m_t_f(0), //fluid contents
4634+ m_t_old(0), //fluid contents
4635 m_dt(0),
4636+ m_dt_f(0), //fluid contents
4637 m_isInitialized(false),
4638+ m_fluidinitiated(false), //fluid contents
4639 m_particle_type(),
4640 m_preRunnableVector(),
4641 m_postRunnableVector(),
4642@@ -133,6 +137,291 @@
4643 console.Debug() << "CLatticeMaster::~CLatticeMaster: exit\n";
4644 }
4645
4646+/****fluid contents: begin****/
4647+/*!
4648+ add fluid to the sublattice (cellside sets the fluid cell size; flowrate, pressure, inflow and outflow set the boundary conditions; dt_f is the fluid time step)
4649+
4650+ \param Bw Bulk modulus of water
4651+ \param Bp Bulk modulus of particle
4652+ \param Mu Viscosity of water
4653+ \param alpha Adjusting factor between two time steps
4654+*/
4655+void CLatticeMaster::addFluidInteraction(double cellside,double Bw, double Bp, double Mu, double alpha, double flowrate, double pressure, Vec3 inflow, Vec3 outflow, double dt_f)
4656+{
4657+ console.Debug() << "CLatticeMaster::addFluidInteraction\n";
4658+
4659+ BroadcastCommand cmd(getGlobalRankAndComm(), CMD_ADDFLUID);
4660+
4661+ cmd.append(cellside);
4662+ cmd.append(Bw);
4663+ cmd.append(Bp);
4664+ cmd.append(Mu);
4665+ cmd.append(alpha);
4666+ cmd.append(flowrate);
4667+ cmd.append(pressure);
4668+ cmd.append(inflow);
4669+ cmd.append(outflow);
4670+ //send command to slave
4671+ cmd.broadcast();
4672+ m_dt_f=dt_f;
4673+ m_fluidinitiated=true;
4674+ console.Debug() << "end CLatticeMaster::addFluidInteraction() \n";
4675+}
4676+
4677+/*!
4678+ add fluid to the sublattice, with per-axis fluid cell dimensions given as a Vec3 (flowrate, pressure, inflow and outflow set the boundary conditions; dt_f is the fluid time step)
4679+
4680+ \param Bw Bulk modulus of water
4681+ \param Bp Bulk modulus of particle
4682+ \param Mu Viscosity of water
4683+ \param alpha Adjusting factor between two time steps
4684+*/
4685+void CLatticeMaster::addFluidInteractionVec3(Vec3 cellside,double Bw, double Bp, double Mu, double alpha, double flowrate, double pressure, Vec3 inflow, Vec3 outflow, double dt_f)
4686+{
4687+ console.Debug() << "CLatticeMaster::addFluidInteractionVec3\n";
4688+
4689+ BroadcastCommand cmd(getGlobalRankAndComm(), CMD_ADDFLUID_VEC3);
4690+
4691+ cmd.append(cellside);
4692+ cmd.append(Bw);
4693+ cmd.append(Bp);
4694+ cmd.append(Mu);
4695+ cmd.append(alpha);
4696+ cmd.append(flowrate);
4697+ cmd.append(pressure);
4698+ cmd.append(inflow);
4699+ cmd.append(outflow);
4700+ //send command to slave
4701+ cmd.broadcast();
4702+ m_dt_f=dt_f;
4703+ m_fluidinitiated=true;
4704+ console.Debug() << "end CLatticeMaster::addFluidInteractionVec3() \n";
4705+}
4706+
4707+
4708+/*!
4709+ add a scalar fluid field to the list of fields to be saved
4710+
4711+ \param filename the name of the file the field is saved into
4712+ \param fieldname the name of the field
4713+ \param savetype output file format
4714+ \param t_0 first timestep to be saved
4715+ \param t_end last timestep to be saved
4716+ \param dt timesteps between saves
4717+*/
4718+void CLatticeMaster::addScalarFluidSaveField(
4719+ const string& filename,
4720+ const string& fieldname,
4721+ const string& savetype,
4722+ int t_0,
4723+ int t_end,
4724+ int dt
4725+)
4726+{
4727+ console.Debug()
4728+ << "CLatticeMaster::addScalarFluidSaveField("
4729+ << filename
4730+ << ","
4731+ << fieldname
4732+ << ","
4733+ << t_0
4734+ << ","
4735+ << t_end
4736+ << ","
4737+ << dt
4738+ << ")\n";
4739+
4740+ CMPILCmdBuffer cmd_buffer(m_global_comm,m_global_rank);
4741+ CMPIBarrier barrier(m_global_comm);
4742+
4743+ //send cmd to slave
4744+ cmd_buffer.broadcast(CMD_ADD_SFF);
4745+
4746+ AFieldMaster* new_fm =
4747+ new ScalarFluidFieldMaster(
4748+ &m_tml_global_comm,
4749+ fieldname,
4750+ filename,
4751+ savetype,
4752+ t_0,
4753+ t_end,
4754+ dt
4755+ );
4756+ m_save_fields.push_back(new_fm);
4757+
4758+ barrier.wait("CLatticeMaster::addScalarFluidSaveField");
4759+
4760+ console.Debug() << "end CLatticeMaster::addScalarFluidSaveField()\n";
4761+}
4762+
4763+/*!
4764+ add a vector fluid field to the list of fields to be saved
4765+
4766+ \param filename the name of the file the field is saved into
4767+ \param fieldname the name of the field
4768+ \param savetype output file format
4769+ \param t_0 first timestep to be saved
4770+ \param t_end last timestep to be saved
4771+ \param dt timesteps between saves
4772+*/
4773+void CLatticeMaster::addVectorFluidSaveField(
4774+ const string& filename,
4775+ const string& fieldname,
4776+ const string& savetype,
4777+ int t_0,
4778+ int t_end,
4779+ int dt
4780+)
4781+{
4782+ console.Debug()
4783+ << "CLatticeMaster::addVectorFluidSaveField("
4784+ << filename
4785+ << ","
4786+ << fieldname
4787+ << ","
4788+ << t_0
4789+ << ","
4790+ << t_end
4791+ << ","
4792+ << dt
4793+ << ")\n";
4794+
4795+ CMPILCmdBuffer cmd_buffer(m_global_comm,m_global_rank);
4796+ CMPIBarrier barrier(m_global_comm);
4797+
4798+ //send cmd to slave
4799+ cmd_buffer.broadcast(CMD_ADD_VFF);
4800+
4801+ AFieldMaster* new_fm =
4802+ new VectorFluidFieldMaster(
4803+ &m_tml_global_comm,
4804+ fieldname,
4805+ filename,
4806+ savetype,
4807+ t_0,
4808+ t_end,
4809+ dt
4810+ );
4811+ m_save_fields.push_back(new_fm);
4812+
4813+ barrier.wait("CLatticeMaster::addVectorFluidSaveField");
4814+
4815+ console.Debug() << "end CLatticeMaster::addVectorFluidSaveField()\n";
4816+}
4817+
4818+/*!
4819+ add a scalar fluid interaction field to the list of fields to be saved
4820+
4821+ \param filename the name of the file the field is saved into
4822+ \param fieldname the name of the field
4823+ \param savetype output file format
4824+ \param t_0 first timestep to be saved
4825+ \param t_end last timestep to be saved
4826+ \param dt timesteps between saves
4827+*/
4828+void CLatticeMaster::addScalarFluidInteractionSaveField(
4829+ const string& filename,
4830+ const string& fieldname,
4831+ const string& savetype,
4832+ int t_0,
4833+ int t_end,
4834+ int dt
4835+)
4836+{
4837+ console.Debug()
4838+ << "CLatticeMaster::addScalarFluidInteractionSaveField("
4839+ << filename
4840+ << ","
4841+ << fieldname
4842+ << ","
4843+ << t_0
4844+ << ","
4845+ << t_end
4846+ << ","
4847+ << dt
4848+ << ")\n";
4849+
4850+ CMPILCmdBuffer cmd_buffer(m_global_comm,m_global_rank);
4851+ CMPIBarrier barrier(m_global_comm);
4852+
4853+ //send cmd to slave
4854+ cmd_buffer.broadcast(CMD_ADD_SFIF);
4855+
4856+ AFieldMaster* new_fm =
4857+ new ScalarFluidInteractionFieldMaster(
4858+ &m_tml_global_comm,
4859+ fieldname,
4860+ filename,
4861+ savetype,
4862+ t_0,
4863+ t_end,
4864+ dt
4865+ );
4866+ m_save_fields.push_back(new_fm);
4867+
4868+ barrier.wait("CLatticeMaster::addScalarFluidInteractionSaveField");
4869+
4870+ console.Debug() << "end CLatticeMaster::addScalarFluidInteractionSaveField()\n";
4871+}
4872+
4873+/*!
4874+ add a vector fluid interaction field to the list of fields to be saved
4875+
4876+ \param filename the name of the file the field is saved into
4877+ \param fieldname the name of the field
4878+ \param savetype output file format
4879+ \param t_0 first timestep to be saved
4880+ \param t_end last timestep to be saved
4881+ \param dt timesteps between saves
4882+*/
4883+void CLatticeMaster::addVectorFluidInteractionSaveField(
4884+ const string& filename,
4885+ const string& fieldname,
4886+ const string& savetype,
4887+ int t_0,
4888+ int t_end,
4889+ int dt
4890+)
4891+{
4892+ console.Debug()
4893+ << "CLatticeMaster::addVectorFluidInteractionSaveField("
4894+ << filename
4895+ << ","
4896+ << fieldname
4897+ << ","
4898+ << t_0
4899+ << ","
4900+ << t_end
4901+ << ","
4902+ << dt
4903+ << ")\n";
4904+
4905+ CMPILCmdBuffer cmd_buffer(m_global_comm,m_global_rank);
4906+ CMPIBarrier barrier(m_global_comm);
4907+
4908+ //send cmd to slave
4909+ cmd_buffer.broadcast(CMD_ADD_VFIF);
4910+
4911+ AFieldMaster* new_fm =
4912+ new VectorFluidInteractionFieldMaster(
4913+ &m_tml_global_comm,
4914+ fieldname,
4915+ filename,
4916+ savetype,
4917+ t_0,
4918+ t_end,
4919+ dt
4920+ );
4921+ m_save_fields.push_back(new_fm);
4922+
4923+ barrier.wait("CLatticeMaster::addVectorFluidInteractionSaveField");
4924+
4925+ console.Debug() << "end CLatticeMaster::addVectorFluidInteractionSaveField()\n";
4926+}
4927+
4928+/****fluid contents: end****/
4929+
4930+
4931 void CLatticeMaster::init()
4932 {
4933 MPI_Comm_size(MPI_COMM_WORLD, &m_global_size);
4934@@ -151,7 +440,7 @@
4935
4936 // -- local communicator = global comm - proc(0)
4937 // get global MPI_Group
4938- MPI_Group mpi_global_group;
4939+ MPI_Group mpi_global_group;
4940 MPI_Comm_group(MPI_COMM_WORLD,&mpi_global_group);
4941 // subtract id 0 from global group
4942 int id0=0;
4943@@ -163,12 +452,12 @@
4944 MPI_Group_free(&mpi_global_group);
4945
4946 m_tml_global_comm.barrier();
4947-
4948+
4949 if(m_global_size!=ns+1){
4950 cerr << "wrong number of processes !! aborting" << endl << flush;
4951 exit(255);
4952 // should send abort command to workers
4953- }
4954+ }
4955 initializeConsole("console.out",1000); // 1st barrier -> sync time
4956 }
4957
4958@@ -249,7 +538,7 @@
4959
4960 //send command to slave
4961 cmd_buffer.broadcast(CMD_MAKELATTICE);
4962-
4963+
4964 // send lattice parameters
4965
4966 CLatticeParam clp(ptype, nrange, alpha, getProcessDims());
4967@@ -305,7 +594,7 @@
4968
4969 /*!
4970 * Provides the initial minimum and maximum extents of all the particles read in from a geometry file.
4971- *
4972+ *
4973 * \param initMinPt Minimum extent of particles inside domain.
4974 * \param initMaxPt Maximum extent of particles inside domain.
4975 */
4976@@ -328,10 +617,10 @@
4977
4978 // check if domain has already been set
4979 if(m_geometry_is_initialized){
4980- console.Warning() << "Spatial Domain has already been set and can not be changed - this call does nothing.\n";
4981- } else {
4982- // set geometry info according to parameters
4983- // no need to set circular bc - defaults to (0,0,0) anyway
4984+ console.Warning() << "Spatial Domain has already been set and can not be changed - this call does nothing.\n";
4985+ } else {
4986+ // set geometry info according to parameters
4987+ // no need to set circular bc - defaults to (0,0,0) anyway
4988 m_geo_info.setBBox(minBBoxPt,maxBBoxPt);
4989 m_bbx_has_been_set=true;
4990 // none of the boundaries circular -> simple initLattice
4991@@ -367,23 +656,23 @@
4992
4993 // check if domain has already been set
4994 if(m_geometry_is_initialized){
4995- console.Warning() << "Spatial Domain has already been set and can not be changed - this call does nothing.\n";
4996+ console.Warning() << "Spatial Domain has already been set and can not be changed - this call does nothing.\n";
4997 } else {
4998 // check if we actually need the circular part or if the periodic dimensions are (false,false,false)
4999 if((circDimVector[0]!=0) || (circDimVector[1]!=0) || (circDimVector[2]!=0)) { // we have at least one periodic dimension
5000- // set geometry info according to parameters
The diff has been truncated for viewing.
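
To tie the fluid additions together, a hypothetical driver-side call sequence against the CLatticeMaster API added above (everything here is illustrative: the numeric values are placeholders, "P" is the pore-pressure field name accepted by CFluidCell::getScalarFieldFunction(), and the savetype string is an assumed label for one of the WRITE_TYPE_* formats added in Fields/field_const.h):

// Sketch (not part of the merge): initialise the Darcy solver, then register
// a pore-pressure saver on the master.
CLatticeMaster lm;
// cubic fluid cells of side 0.1; bulk moduli of water and particles; water
// viscosity; blending factor alpha; boundary flow rate and pressure; inflow
// and outflow boundary vectors; fluid time step dt_f -- placeholder values
lm.addFluidInteraction(0.1, 2.2e9, 45.0e9, 1.0e-3, 0.5,
                       0.0, 0.0, Vec3(0,0,1), Vec3(0,0,-1), 1.0e-4);
// save the scalar fluid field "P" every 100 steps between steps 0 and 10000
lm.addScalarFluidSaveField("pressure.dat", "P", "RAW_WITH_POS", 0, 10000, 100);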
