From 2c462e84cef8b3fddfc5c4f22a95d0bd5f60a6ea Mon Sep 17 00:00:00 2001
From: 1uc
Date: Mon, 23 Sep 2024 14:55:26 +0000
Subject: [PATCH] Deploying to gh-pages from @ BlueBrain/nmodl@1e0db745c82f78ca06bfbb18716eccda8cd5353a 🚀
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
 .doctrees/environment.pickle                  | Bin 148616 -> 148616 bytes
 .../notebooks/nmodl-kinetic-schemes.ipynb     |   56 +-
 .../notebooks/nmodl-python-tutorial.ipynb     |  160 +-
 .../notebooks/nmodl-sympy-conductance.ipynb   |   72 +-
 .../notebooks/nmodl-sympy-solver-cnexp.ipynb  |   64 +-
 .../nmodl-sympy-solver-derivimplicit.ipynb    |   32 +-
 .../notebooks/nmodl-sympy-solver-sparse.ipynb |   32 +-
 ...en_1_1_codegen_coreneuron_cpp_visitor.html |  168 +-
 ..._1_codegen_neuron_cpp_visitor-members.html |    2 +-
 ...odegen_1_1_codegen_neuron_cpp_visitor.html |  174 +-
 ...1codegen_1_1_codegen_neuron_cpp_visitor.js |    2 +-
 .../codegen__acc__visitor_8cpp_source.html    |    2 +-
 ..._coreneuron__cpp__visitor_8hpp_source.html |  168 +-
 ...gen__neuron__cpp__visitor_8hpp_source.html |  166 +-
 doxygen/functions_func_p.html                 |    4 +-
 doxygen/functions_p.html                      |    4 +-
 doxygen/group__codegen__backends.js           |    2 +-
 doxygen/namespacenmodl_1_1codegen.html        |    6 +-
 doxygen/navtreeindex16.js                     |    2 +-
 doxygen/search/all_f.js                       |    2 +-
 doxygen/search/functions_f.js                 |    2 +-
 ..._coreneuron__cpp__visitor_8cpp_source.html | 5271 ++++++++---------
 ...gen__neuron__cpp__visitor_8cpp_source.html | 4626 +++++++--------
 notebooks/nmodl-kinetic-schemes.ipynb         |   56 +-
 notebooks/nmodl-python-tutorial.ipynb         |  160 +-
 notebooks/nmodl-sympy-conductance.ipynb       |   72 +-
 notebooks/nmodl-sympy-solver-cnexp.ipynb      |   64 +-
 .../nmodl-sympy-solver-derivimplicit.ipynb    |   32 +-
 notebooks/nmodl-sympy-solver-sparse.ipynb     |   32 +-
 29 files changed, 5717 insertions(+), 5716 deletions(-)

diff --git a/.doctrees/environment.pickle b/.doctrees/environment.pickle
index 0b596a4b162d8aef3dbe4c01837342a30ae7f5f5..043ff638a22b3aea0c2cd5156ec6ea1026467bb9 100644
GIT binary patch
delta 9418
nmodl::codegen::CodegenCppVisitor.

-

Definition at line 493 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 492 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1325,7 +1325,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 479 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 478 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1383,7 +1383,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 640 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 639 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1518,7 +1518,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 529 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 528 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1557,7 +1557,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 534 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 533 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1608,7 +1608,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 810 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 809 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1639,7 +1639,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1032 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1031 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1676,7 +1676,7 @@

-

Definition at line 1635 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1634 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1726,7 +1726,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 864 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 863 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1776,7 +1776,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 854 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 853 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1834,7 +1834,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 828 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 827 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1866,7 +1866,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 504 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 503 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1899,7 +1899,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 512 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 511 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1968,7 +1968,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 746 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 745 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -1998,7 +1998,7 @@

Returns
The target code string
-

Definition at line 2372 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2371 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -2060,7 +2060,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 552 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 551 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -2092,7 +2092,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 564 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 563 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -2375,7 +2375,7 @@

Definition at line 1936 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1935 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -2491,7 +2491,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 3005 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 3004 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -2520,7 +2520,7 @@

Definition at line 2935 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2934 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -2551,7 +2551,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2974 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2973 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -2580,7 +2580,7 @@

Definition at line 948 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 947 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -2618,7 +2618,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2946 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2945 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -2687,7 +2687,7 @@

Todo:
Data is not derived.

Need to add instance into instance struct? data used here is wrong in AoS because, as in the original implementation, data is not incremented every iteration for AoS. May be better to derive actual variable names? [resolved now?] slist needs to be added as a local variable

-

Definition at line 2583 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2582 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -2811,7 +2811,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2862 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2861 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -2840,7 +2840,7 @@

Definition at line 686 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 685 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -2869,7 +2869,7 @@

Definition at line 694 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 693 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3061,7 +3061,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2965 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2964 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3090,7 +3090,7 @@

Definition at line 2377 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2376 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3139,7 +3139,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1810 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1809 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3201,7 +3201,7 @@

nmodl::codegen::CodegenAccVisitor.

-

Definition at line 1609 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1608 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3233,7 +3233,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1207 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1206 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3264,7 +3264,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2928 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2927 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3301,7 +3301,7 @@

Definition at line 1774 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1773 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3457,7 +3457,7 @@

Definition at line 1651 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1650 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3493,7 +3493,7 @@

Definition at line 1587 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1586 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3522,7 +3522,7 @@

Definition at line 1557 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1556 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3553,7 +3553,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1604 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1603 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3651,7 +3651,7 @@

Definition at line 726 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 725 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3693,7 +3693,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1060 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1059 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3731,7 +3731,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1515 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1514 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3771,7 +3771,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1314 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1313 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3800,7 +3800,7 @@

Definition at line 735 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 734 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3869,7 +3869,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2267 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2266 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3898,7 +3898,7 @@

Definition at line 2319 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2318 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -3998,7 +3998,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2240 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2239 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4027,7 +4027,7 @@

Definition at line 2542 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2541 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4056,7 +4056,7 @@

Definition at line 715 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 714 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4092,7 +4092,7 @@

Definition at line 2398 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2397 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4139,7 +4139,7 @@

Definition at line 2157 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2156 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4168,7 +4168,7 @@

Definition at line 2483 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2482 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4197,7 +4197,7 @@

Definition at line 2386 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2385 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4226,7 +4226,7 @@

Definition at line 2393 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2392 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4349,7 +4349,7 @@

Definition at line 2456 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2455 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4381,7 +4381,7 @@

nmodl::codegen::CodegenAccVisitor.

-

Definition at line 2444 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2443 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4413,7 +4413,7 @@

nmodl::codegen::CodegenAccVisitor.

-

Definition at line 2449 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2448 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4451,7 +4451,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2208 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2207 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4482,7 +4482,7 @@

nmodl::codegen::CodegenAccVisitor.

-

Definition at line 2672 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2671 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4513,7 +4513,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2014 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2013 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4544,7 +4544,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1990 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1989 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4575,7 +4575,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2891 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2890 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4614,7 +4614,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2755 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2754 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4652,7 +4652,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2826 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2825 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4746,7 +4746,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2792 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2791 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4785,7 +4785,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2737 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2736 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4816,7 +4816,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2002 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2001 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4852,7 +4852,7 @@

Definition at line 1861 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1860 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4884,7 +4884,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2682 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2681 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4913,7 +4913,7 @@

Definition at line 702 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 701 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -4974,7 +4974,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 981 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 980 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5003,7 +5003,7 @@

Definition at line 2351 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2350 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5032,7 +5032,7 @@

Definition at line 1614 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1613 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5063,7 +5063,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 937 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 936 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5098,7 +5098,7 @@

Definition at line 761 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 760 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5127,7 +5127,7 @@

Definition at line 1452 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 1451 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5187,7 +5187,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2953 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2952 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5219,7 +5219,7 @@

Todo:
Similar to neuron/coreneuron we are using first watch and ignoring rest.
-

Definition at line 2027 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2026 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5250,7 +5250,7 @@

Todo:
Similar to print_watch_activate, we are using only first watch.

need to verify with neuron/coreneuron about rest.

-

Definition at line 2075 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 2074 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5291,7 +5291,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 602 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 601 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5360,7 +5360,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 621 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 620 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5399,7 +5399,7 @@

Returns
The possibly replace variable name

if the function is defined in the same mod file then the arguments must contain the mechanism instance as well.

-

Definition at line 573 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 572 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5462,7 +5462,7 @@

nmodl::visitor::ConstVisitor.

-

Definition at line 3036 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 3035 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5494,7 +5494,7 @@

nmodl::visitor::ConstVisitor.

-

Definition at line 3044 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 3043 of file codegen_coreneuron_cpp_visitor.cpp.

@@ -5526,7 +5526,7 @@

nmodl::visitor::ConstVisitor.

-

Definition at line 3076 of file codegen_coreneuron_cpp_visitor.cpp.

+

Definition at line 3075 of file codegen_coreneuron_cpp_visitor.cpp.

diff --git a/doxygen/classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor-members.html b/doxygen/classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor-members.html index 3a77877e9..2c6dce254 100644 --- a/doxygen/classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor-members.html +++ b/doxygen/classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor-members.html @@ -188,7 +188,7 @@ print_atomic_reduction_pragma() overridenmodl::codegen::CodegenNeuronCppVisitorprotectedvirtual print_backend_info()nmodl::codegen::CodegenCppVisitorprotected print_callable_preamble_from_prop()nmodl::codegen::CodegenNeuronCppVisitorprotected - print_check_table_function_prototypes()nmodl::codegen::CodegenNeuronCppVisitorprotected + print_check_table_entrypoint()nmodl::codegen::CodegenNeuronCppVisitorprotected print_codegen_routines() overridenmodl::codegen::CodegenNeuronCppVisitorprotectedvirtual print_compute_functions() overridenmodl::codegen::CodegenNeuronCppVisitorprotectedvirtual print_data_structures(bool print_initializers) overridenmodl::codegen::CodegenNeuronCppVisitorprotectedvirtual diff --git a/doxygen/classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html b/doxygen/classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html index 084e9ea85..84ac337c8 100644 --- a/doxygen/classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html +++ b/doxygen/classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html @@ -599,9 +599,9 @@ void print_function_prototypes () override  Print function and procedures prototype declaration. More...
  -void print_check_table_function_prototypes () - Print all check_* function declarations. More...
-  +void print_check_table_entrypoint () + Print all check_* function declarations. More...
+  void print_function_or_procedure (const ast::Block &node, const std::string &name, const std::unordered_set< CppObjectSpecifier > &specifiers={ CppObjectSpecifier::Inline}) override  Print nmodl function or procedure (common code) More...
  @@ -1193,7 +1193,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 415 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 417 of file codegen_neuron_cpp_visitor.cpp.

@@ -1225,7 +1225,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 407 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 409 of file codegen_neuron_cpp_visitor.cpp.

@@ -1282,7 +1282,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 518 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 520 of file codegen_neuron_cpp_visitor.cpp.

@@ -1418,7 +1418,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 446 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 448 of file codegen_neuron_cpp_visitor.cpp.

@@ -1458,7 +1458,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 452 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 454 of file codegen_neuron_cpp_visitor.cpp.

@@ -1509,7 +1509,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 544 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 546 of file codegen_neuron_cpp_visitor.cpp.

@@ -1540,7 +1540,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 784 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 786 of file codegen_neuron_cpp_visitor.cpp.

@@ -1590,7 +1590,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 627 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 629 of file codegen_neuron_cpp_visitor.cpp.

@@ -1640,7 +1640,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 617 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 619 of file codegen_neuron_cpp_visitor.cpp.

@@ -1670,7 +1670,7 @@

Definition at line 482 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 484 of file codegen_neuron_cpp_visitor.cpp.

@@ -1700,7 +1700,7 @@

Definition at line 488 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 490 of file codegen_neuron_cpp_visitor.cpp.

@@ -1758,7 +1758,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 560 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 562 of file codegen_neuron_cpp_visitor.cpp.

@@ -1790,7 +1790,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 421 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 423 of file codegen_neuron_cpp_visitor.cpp.

@@ -1822,7 +1822,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 427 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 429 of file codegen_neuron_cpp_visitor.cpp.

@@ -1853,7 +1853,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 513 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 515 of file codegen_neuron_cpp_visitor.cpp.

@@ -1880,7 +1880,7 @@

-

Definition at line 2315 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2317 of file codegen_neuron_cpp_visitor.cpp.

@@ -1907,7 +1907,7 @@

-

Definition at line 1883 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1885 of file codegen_neuron_cpp_visitor.cpp.

@@ -1934,7 +1934,7 @@

-

Definition at line 1888 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1890 of file codegen_neuron_cpp_visitor.cpp.

@@ -1966,7 +1966,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 459 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 461 of file codegen_neuron_cpp_visitor.cpp.

@@ -1998,7 +1998,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 465 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 467 of file codegen_neuron_cpp_visitor.cpp.

@@ -2168,12 +2168,12 @@

Definition at line 1634 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1636 of file codegen_neuron_cpp_visitor.cpp.

- -

◆ print_check_table_function_prototypes()

+ +

◆ print_check_table_entrypoint()

@@ -2182,7 +2182,7 @@

- + @@ -2228,7 +2228,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2209 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2211 of file codegen_neuron_cpp_visitor.cpp.

@@ -2259,7 +2259,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2192 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2194 of file codegen_neuron_cpp_visitor.cpp.

@@ -2297,7 +2297,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2161 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2163 of file codegen_neuron_cpp_visitor.cpp.

@@ -2329,7 +2329,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2033 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2035 of file codegen_neuron_cpp_visitor.cpp.

@@ -2385,7 +2385,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 238 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 239 of file codegen_neuron_cpp_visitor.cpp.

@@ -2423,7 +2423,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 265 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 266 of file codegen_neuron_cpp_visitor.cpp.

@@ -2454,7 +2454,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 229 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 218 of file codegen_neuron_cpp_visitor.cpp.

@@ -2485,7 +2485,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2183 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2185 of file codegen_neuron_cpp_visitor.cpp.

@@ -2533,7 +2533,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1544 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1546 of file codegen_neuron_cpp_visitor.cpp.

@@ -2562,7 +2562,7 @@

Definition at line 2103 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2105 of file codegen_neuron_cpp_visitor.cpp.

@@ -2591,7 +2591,7 @@

Definition at line 985 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 987 of file codegen_neuron_cpp_visitor.cpp.

@@ -2620,7 +2620,7 @@

Definition at line 961 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 963 of file codegen_neuron_cpp_visitor.cpp.

@@ -2651,7 +2651,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 998 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1000 of file codegen_neuron_cpp_visitor.cpp.

@@ -2682,7 +2682,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2085 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2087 of file codegen_neuron_cpp_visitor.cpp.

@@ -2720,7 +2720,7 @@

-

Definition at line 280 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 281 of file codegen_neuron_cpp_visitor.cpp.

@@ -2747,7 +2747,7 @@

-

Definition at line 391 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 392 of file codegen_neuron_cpp_visitor.cpp.

@@ -2777,7 +2777,7 @@

Definition at line 1522 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1524 of file codegen_neuron_cpp_visitor.cpp.

@@ -2806,7 +2806,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2234 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2236 of file codegen_neuron_cpp_visitor.cpp.

@@ -2835,7 +2835,7 @@

Definition at line 2095 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2097 of file codegen_neuron_cpp_visitor.cpp.

@@ -2864,7 +2864,7 @@

Definition at line 1386 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1388 of file codegen_neuron_cpp_visitor.cpp.

@@ -2893,7 +2893,7 @@

Definition at line 1451 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1453 of file codegen_neuron_cpp_visitor.cpp.

@@ -2932,7 +2932,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 791 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 793 of file codegen_neuron_cpp_visitor.cpp.

@@ -2970,7 +2970,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1348 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1350 of file codegen_neuron_cpp_visitor.cpp.

@@ -3002,7 +3002,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1115 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1117 of file codegen_neuron_cpp_visitor.cpp.

@@ -3031,7 +3031,7 @@

Definition at line 2120 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2122 of file codegen_neuron_cpp_visitor.cpp.

@@ -3069,7 +3069,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2277 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2279 of file codegen_neuron_cpp_visitor.cpp.

@@ -3099,7 +3099,7 @@

Definition at line 2362 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2364 of file codegen_neuron_cpp_visitor.cpp.

@@ -3137,7 +3137,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2264 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2266 of file codegen_neuron_cpp_visitor.cpp.

@@ -3166,7 +3166,7 @@

Definition at line 2340 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2342 of file codegen_neuron_cpp_visitor.cpp.

@@ -3193,7 +3193,7 @@

-

Definition at line 2322 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2324 of file codegen_neuron_cpp_visitor.cpp.

@@ -3258,7 +3258,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2239 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2241 of file codegen_neuron_cpp_visitor.cpp.

@@ -3288,7 +3288,7 @@

Definition at line 1341 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1343 of file codegen_neuron_cpp_visitor.cpp.

@@ -3317,7 +3317,7 @@

Definition at line 732 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 734 of file codegen_neuron_cpp_visitor.cpp.

@@ -3345,7 +3345,7 @@

Definition at line 1437 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1439 of file codegen_neuron_cpp_visitor.cpp.

@@ -3376,7 +3376,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1696 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1698 of file codegen_neuron_cpp_visitor.cpp.

@@ -3407,7 +3407,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1659 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1661 of file codegen_neuron_cpp_visitor.cpp.

@@ -3434,7 +3434,7 @@

-

Definition at line 1653 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1655 of file codegen_neuron_cpp_visitor.cpp.

@@ -3466,7 +3466,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2039 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2041 of file codegen_neuron_cpp_visitor.cpp.

@@ -3505,7 +3505,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1928 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1930 of file codegen_neuron_cpp_visitor.cpp.

@@ -3543,7 +3543,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2000 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2002 of file codegen_neuron_cpp_visitor.cpp.

@@ -3575,7 +3575,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1966 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1968 of file codegen_neuron_cpp_visitor.cpp.

@@ -3614,7 +3614,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1910 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1912 of file codegen_neuron_cpp_visitor.cpp.

@@ -3645,7 +3645,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1677 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1679 of file codegen_neuron_cpp_visitor.cpp.

@@ -3672,7 +3672,7 @@

-

Definition at line 1673 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1675 of file codegen_neuron_cpp_visitor.cpp.

@@ -3708,7 +3708,7 @@

Definition at line 1567 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1569 of file codegen_neuron_cpp_visitor.cpp.

@@ -3737,7 +3737,7 @@

Definition at line 1600 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1602 of file codegen_neuron_cpp_visitor.cpp.

@@ -3769,7 +3769,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 1833 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1835 of file codegen_neuron_cpp_visitor.cpp.

@@ -3833,7 +3833,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 743 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 745 of file codegen_neuron_cpp_visitor.cpp.

@@ -3862,7 +3862,7 @@

Definition at line 186 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 187 of file codegen_neuron_cpp_visitor.cpp.

@@ -3893,7 +3893,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 716 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 718 of file codegen_neuron_cpp_visitor.cpp.

@@ -3922,7 +3922,7 @@

Definition at line 1307 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1309 of file codegen_neuron_cpp_visitor.cpp.

@@ -3944,7 +3944,7 @@

Definition at line 1490 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 1492 of file codegen_neuron_cpp_visitor.cpp.

@@ -3975,7 +3975,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 2171 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2173 of file codegen_neuron_cpp_visitor.cpp.

@@ -4015,7 +4015,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 471 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 473 of file codegen_neuron_cpp_visitor.cpp.

@@ -4045,7 +4045,7 @@

Definition at line 497 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 499 of file codegen_neuron_cpp_visitor.cpp.

@@ -4075,7 +4075,7 @@

Definition at line 503 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 505 of file codegen_neuron_cpp_visitor.cpp.

@@ -4107,7 +4107,7 @@

nmodl::codegen::CodegenCppVisitor.

-

Definition at line 477 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 479 of file codegen_neuron_cpp_visitor.cpp.

@@ -4214,7 +4214,7 @@

Definition at line 596 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 598 of file codegen_neuron_cpp_visitor.cpp.

@@ -4246,7 +4246,7 @@

nmodl::visitor::ConstVisitor.

-

Definition at line 2394 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2396 of file codegen_neuron_cpp_visitor.cpp.

@@ -4278,7 +4278,7 @@

nmodl::visitor::ConstVisitor.

-

Definition at line 2390 of file codegen_neuron_cpp_visitor.cpp.

+

Definition at line 2392 of file codegen_neuron_cpp_visitor.cpp.

diff --git a/doxygen/classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.js b/doxygen/classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.js index 4f5dd1adb..ce89bd864 100644 --- a/doxygen/classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.js +++ b/doxygen/classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.js @@ -28,7 +28,7 @@ var classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor = [ "position_of_int_var", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a8c348f1869653cec3dcc39b1866bab97", null ], [ "print_atomic_reduction_pragma", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a910f9f22e6fbdae4eefae177eb8910f9", null ], [ "print_callable_preamble_from_prop", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a47b63167dd62680e43439a45833b5666", null ], - [ "print_check_table_function_prototypes", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a4ad426a930f14bc970072c5a57fb2bc9", null ], + [ "print_check_table_entrypoint", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a389587ac35ce1f02ced547e3c8698241", null ], [ "print_codegen_routines", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a9f9d3aebd2dc5bbc05e0cb7e9a707c7f", null ], [ "print_compute_functions", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a94f4993226aa53f6095ad4a44fe47420", null ], [ "print_data_structures", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#aeb0635e7e6e488519882d930401b5c2b", null ], diff --git a/doxygen/codegen__acc__visitor_8cpp_source.html b/doxygen/codegen__acc__visitor_8cpp_source.html index 9b1b328e2..90c09ed9b 100644 --- a/doxygen/codegen__acc__visitor_8cpp_source.html +++ b/doxygen/codegen__acc__visitor_8cpp_source.html @@ -500,7 +500,7 @@
@ NetReceive
net_receive block
bool point_process
if mod file is point process
void print_dt_update_to_device() const override
update dt from host to device
-
std::string get_variable_name(const std::string &name, bool use_instance=true) const override
Determine variable name in the structure of mechanism properties.
+
std::string get_variable_name(const std::string &name, bool use_instance=true) const override
Determine variable name in the structure of mechanism properties.
void print_global_variable_device_update_annotation() override
update global variable from host to the device
bool electrode_current
if electrode current specified
void print_kernel_data_present_annotation_block_end() override
end of annotation like "acc enter data"
diff --git a/doxygen/codegen__coreneuron__cpp__visitor_8hpp_source.html b/doxygen/codegen__coreneuron__cpp__visitor_8hpp_source.html index f11b851d3..5a198773e 100644 --- a/doxygen/codegen__coreneuron__cpp__visitor_8hpp_source.html +++ b/doxygen/codegen__coreneuron__cpp__visitor_8hpp_source.html @@ -1092,152 +1092,152 @@
996 } // namespace nmodl
-
void print_coreneuron_includes()
Print includes from coreneuron.
-
virtual std::string net_receive_buffering_declaration()
Generate the target backend code for the net_receive_buffering function declaration.
+
void print_coreneuron_includes()
Print includes from coreneuron.
+
virtual std::string net_receive_buffering_declaration()
Generate the target backend code for the net_receive_buffering function delcaration.
virtual bool nrn_cur_reduction_loop_required()
Check if reduction block in nrn_cur required.
-
void print_net_move_call(const ast::FunctionCall &node) override
Print call to net_move.
-
void print_mechanism_range_var_structure(bool print_initializers) override
Print the structure that wraps all range and int variables required for the NMODL.
+
void print_net_move_call(const ast::FunctionCall &node) override
Print call to net_move.
+
void print_mechanism_range_var_structure(bool print_initializers) override
Print the structure that wraps all range and int variables required for the NMODL.
virtual void print_net_send_buf_count_update_to_host() const
Print the code to update NetSendBuffer_t count from device to host.
Visitor for printing C++ code compatible with legacy api of CoreNEURON
void print_function_prototypes() override
Print function and procedures prototype declaration.
-
void print_v_unused() const override
Set v_unused (voltage) for NRN_PRCELLSTATE feature.
-
void print_derivimplicit_kernel(const ast::Block &block)
Print derivative kernel when derivimplicit method is used.
+
void print_v_unused() const override
Set v_unused (voltage) for NRN_PRCELLSTATE feature.
+
void print_derivimplicit_kernel(const ast::Block &block)
Print derivative kernel when derivimplicit method is used.
virtual void print_device_stream_wait() const
Print the code to synchronise/wait on stream specific to NrnThread.
virtual void print_instance_struct_delete_from_device()
Delete the instance struct from the device.
virtual void print_net_send_buf_count_update_to_device() const
Print the code to update NetSendBuffer_t count from host to device.
codegen::CodegenInfo info
All ast information for code generation.
-
std::string global_variable_name(const SymbolType &symbol, bool use_instance=true) const override
Determine the variable name for a global variable given its symbol.
-
virtual void print_newtonspace_transfer_to_device() const
Print code block to transfer newtonspace structure to device.
+
std::string global_variable_name(const SymbolType &symbol, bool use_instance=true) const override
Determine the variable name for a global variable given its symbol.
+
virtual void print_newtonspace_transfer_to_device() const
Print code block to transfer newtonspace structure to device.
CodegenCppVisitor(std::string mod_filename, const std::string &output_dir, std::string float_type, const bool optimize_ionvar_copies, std::unique_ptr< nmodl::utils::Blame > blame)
Constructs the C++ code generator visitor.
int thread_data_index
thread_data_index indicates number of threads being allocated.
-
void print_net_send_call(const ast::FunctionCall &node) override
Print call to net_send.
-
virtual void print_net_receive_loop_begin()
Print the code for the main net_receive loop.
+
void print_net_send_call(const ast::FunctionCall &node) override
Print call to net_send.
+
virtual void print_net_receive_loop_begin()
Print the code for the main net_receive loop.
virtual void print_kernel_data_present_annotation_block_begin()
Print accelerator annotations indicating data presence on device.
-
void print_g_unused() const override
Set g_unused (conductance) for NRN_PRCELLSTATE feature.
- -
std::string int_variable_name(const IndexVariableInfo &symbol, const std::string &name, bool use_instance) const override
Determine the name of an int variable given its symbol.
-
void print_initial_block(const ast::InitialBlock *node)
Print initial block statements.
-
void print_net_receive_kernel()
Print net_receive kernel function definition.
-
void print_thread_getters()
Print the getter method for thread variables and ids.
+
void print_g_unused() const override
Set g_unused (conductance) for NRN_PRCELLSTATE feature.
+ +
std::string int_variable_name(const IndexVariableInfo &symbol, const std::string &name, bool use_instance) const override
Determine the name of an int variable given its symbol.
+
void print_initial_block(const ast::InitialBlock *node)
Print initial block statements.
+
void print_net_receive_kernel()
Print net_receive kernel function definition.
+
void print_thread_getters()
Print the getter method for thread variables and ids.
std::string simulator_name() override
Name of the simulator the code was generated for.
-
void print_net_send_buffering()
Print kernel for buffering net_send events.
-
virtual std::string namespace_name() override
Name of "our" namespace.
+
void print_net_send_buffering()
Print kernel for buffering net_send events.
+
virtual std::string namespace_name() override
Name of "our" namespace.
encapsulates code generation backend implementations
Definition: ast_common.hpp:26
int num_thread_objects() const noexcept
Determine the number of threads to allocate.
-
void print_fast_imem_calculation() override
Print fast membrane current calculation code.
+
void print_fast_imem_calculation() override
Print fast membrane current calculation code.
virtual void print_dt_update_to_device() const
Print the code to update dt from host to device.
virtual void print_net_init_acc_serial_annotation_block_end()
Print accelerator kernels end annotation for net_init kernel.
-
const ParamVector external_method_parameters(bool table=false) noexcept override
Parameters for functions in generated code that are called back from external code.
-
void print_net_init()
Print initial block in the net receive block.
-
void print_net_event_call(const ast::FunctionCall &node) override
Print call to net_event.
-
void print_nrn_destructor() override
Print nrn_destructor function definition.
-
void print_sdlists_init(bool print_initializers) override
+
const ParamVector external_method_parameters(bool table=false) noexcept override
Parameters for functions in generated code that are called back from external code.
+
void print_net_init()
Print initial block in the net receive block.
+
void print_net_event_call(const ast::FunctionCall &node) override
Print call to net_event.
+
void print_nrn_destructor() override
Print nrn_destructor function definition.
+
void print_sdlists_init(bool print_initializers) override
Implement classes for representing symbol table at block and file scope.
- -
void print_send_event_move()
Print send event move block used in net receive as well as watch.
-
void append_conc_write_statements(std::vector< ShadowUseStatement > &statements, const Ion &ion, const std::string &concentration) override
Generate Function call statement for nrn_wrote_conc.
+ +
void print_send_event_move()
Print send event move block used in net receive as well as watch.
+
void append_conc_write_statements(std::vector< ShadowUseStatement > &statements, const Ion &ion, const std::string &concentration) override
Generate Function call statement for nrn_wrote_conc.
void print_atomic_reduction_pragma() override
Print atomic update pragma for reduction statements.
virtual void print_instance_struct_copy_to_device()
Transfer the instance struct to the device.
-
void print_global_variables_for_hoc() override
Print byte arrays that register scalar and vector variables for hoc interface.
-
void print_net_receive_common_code(const ast::Block &node, bool need_mech_inst=true)
Print the common code section for net receive related methods.
-
void print_nrn_current(const ast::BreakpointBlock &node) override
Print the nrn_current kernel.
-
void print_first_pointer_var_index_getter()
Print the getter method for index position of first pointer variable.
-
void print_setup_range_variable()
Print the function that initialize range variable with different data type.
-
std::string internal_method_arguments() override
Arguments for functions that are defined and used internally.
+
void print_global_variables_for_hoc() override
Print byte arrays that register scalar and vector variables for hoc interface.
+
void print_net_receive_common_code(const ast::Block &node, bool need_mech_inst=true)
Print the common code section for net receive related methods.
+
void print_nrn_current(const ast::BreakpointBlock &node) override
Print the nrn_current kernel.
+
void print_first_pointer_var_index_getter()
Print the getter method for index position of first pointer variable.
+
void print_setup_range_variable()
Print the function that initializes range variables with different data types.
+
std::string internal_method_arguments() override
Arguments for functions that are defined and used internally.
virtual void print_instance_struct_transfer_routine_declarations()
Print declarations of the functions used by print_instance_struct_copy_to_device and print_instance_s...
-
std::string nrn_thread_arguments() const override
Arguments for "_threadargs_" macro in neuron implementation.
-
void print_net_receive_arg_size_getter()
Print the getter method for getting number of arguments for net_receive.
+
std::string nrn_thread_arguments() const override
Arguments for "_threadargs_" macro in neuron implementation.
+
void print_net_receive_arg_size_getter()
Print the getter method for getting number of arguments for net_receive.
void print_function_or_procedure(const ast::Block &node, const std::string &name, const std::unordered_set< CppObjectSpecifier > &specifiers={ CppObjectSpecifier::Inline}) override
Print nmodl function or procedure (common code)
Visitor for printing C++ code compatible with legacy api of CoreNEURON
Base class for all block scoped nodes.
Definition: block.hpp:41
Represents a INITIAL block in the NMODL.
virtual void print_nrn_cur_matrix_shadow_update()
Print the update to matrix elements with/without shadow vectors.
-
std::string register_mechanism_arguments() const override
Arguments for register_mech or point_register_mech function.
+
std::string register_mechanism_arguments() const override
Arguments for register_mech or point_register_mech function.
virtual void print_net_init_acc_serial_annotation_block_begin()
Print accelerator kernels begin annotation for net_init kernel.
virtual void print_deriv_advance_flag_transfer_to_device() const
Print the code to copy derivative advance flag to device.
-
void print_first_random_var_index_getter()
Print the getter method for index position of first RANDOM variable.
+
void print_first_random_var_index_getter()
Print the getter method for index position of first RANDOM variable.
Represent WATCH statement in NMODL.
-
virtual void print_before_after_block(const ast::Block *node, size_t block_id)
Print NMODL before / after block in target backend code.
+
virtual void print_before_after_block(const ast::Block *node, size_t block_id)
Print NMODL before / after block in target backend code.
Represents a BREAKPOINT block in NMODL.
-
void print_ion_variable() override
Print the ion variable struct.
+
void print_ion_variable() override
Print the ion variable struct.
Helper class for printing C/C++ code.
-
void print_ion_var_structure()
Print structure of ion variables used for local copies.
-
ParamVector internal_method_parameters() override
Parameters for internally defined functions.
+
void print_ion_var_structure()
Print structure of ion variables used for local copies.
+
ParamVector internal_method_parameters() override
Parameters for internally defined functions.
int position_of_int_var(const std::string &name) const override
Determine the position in the data array for a given int variable.
virtual void print_channel_iteration_block_parallel_hint(BlockType type, const ast::Block *block)
Print pragma annotations for channel iterations.
virtual bool is_constant_variable(const std::string &name) const
Check if variable is qualified as constant.
void print_function_procedure_helper(const ast::Block &node) override
Common helper function to help printing function or procedure blocks.
-
void visit_for_netcon(const ast::ForNetcon &node) override
visit node of type ast::ForNetcon
-
void print_compute_functions() override
Print all compute functions for every backend.
-
void print_nrn_cur_conductance_kernel(const ast::BreakpointBlock &node) override
Print the nrn_cur kernel with NMODL conductance keyword provisions.
+
void visit_for_netcon(const ast::ForNetcon &node) override
visit node of type ast::ForNetcon
+
void print_compute_functions() override
Print all compute functions for every backend.
+
void print_nrn_cur_conductance_kernel(const ast::BreakpointBlock &node) override
Print the nrn_cur kernel with NMODL conductance keyword provisions.
virtual void print_memory_allocation_routine() const
Print memory allocation routine.
void print_check_table_thread_function()
Print check_table functions.
std::string backend_name() const override
Name of the code generation backend.
virtual void print_rhs_d_shadow_variables()
Print the setup method for setting matrix shadow vectors.
-
std::string process_verbatim_text(std::string const &text) override
Process a verbatim block for possible variable renaming.
-
void print_net_receive()
Print net_receive function definition.
+
std::string process_verbatim_text(std::string const &text) override
Process a verbatim block for possible variable renaming.
+
void print_net_receive()
Print net_receive function definition.
bool vectorize
true if the mod file is vectorizable (which should always be true for coreneuron) But there are some bloc...
bool optimize_ion_variable_copies() const override
Check if ion variable copies should be avoided.
void print_top_verbatim_blocks()
Print top level (global scope) verbatim blocks.
Various types to store code generation specific information.
-
void print_mechanism_global_var_structure(bool print_initializers) override
Print the structure that wraps all global variables used in the NMODL.
-
void print_nrn_constructor() override
Print nrn_constructor function definition.
+
void print_mechanism_global_var_structure(bool print_initializers) override
Print the structure that wraps all global variables used in the NMODL.
+
void print_nrn_constructor() override
Print nrn_constructor function definition.
virtual void print_kernel_data_present_annotation_block_end()
Print matching block end of accelerator annotations for data presence on device.
virtual void print_instance_struct_transfer_routines(std::vector< std::string > const &)
Print the definitions of the functions used by print_instance_struct_copy_to_device and print_instanc...
-
void print_nrn_alloc() override
Print nrn_alloc function definition.
+
void print_nrn_alloc() override
Print nrn_alloc function definition.
virtual void print_nrn_cur_matrix_shadow_reduction()
Print the reduction to matrix elements from shadow vectors.
-
void print_memb_list_getter()
Print the getter method for returning membrane list from NrnThread.
+
void print_memb_list_getter()
Print the getter method for returning membrane list from NrnThread.
- -
void print_instance_variable_setup()
Print the function that initializes the instance structure.
-
void print_nrn_init(bool skip_init_check=true)
Print the nrn_init function definition.
-
std::string get_variable_name(const std::string &name, bool use_instance=true) const override
Determine variable name in the structure of mechanism properties.
+ +
void print_instance_variable_setup()
Print the function that initializes the instance structure.
+
void print_nrn_init(bool skip_init_check=true)
Print the nrn_init function definition.
+
std::string get_variable_name(const std::string &name, bool use_instance=true) const override
Determine variable name in the structure of mechanism properties.
virtual void print_abort_routine() const
Print backend specific abort routine.
-
void print_nrn_cur() override
Print nrn_cur / current update function definition.
-
std::string get_range_var_float_type(const SymbolType &symbol)
Returns floating point type for given range variable symbol.
-
virtual void print_net_send_buffering_cnt_update() const
Print the code related to the update of NetSendBuffer_t cnt.
-
void print_thread_memory_callbacks()
Print thread-related memory allocation and deallocation callbacks.
+
void print_nrn_cur() override
Print nrn_cur / current update function definition.
+
std::string get_range_var_float_type(const SymbolType &symbol)
Returns floating point type for given range variable symbol.
+
virtual void print_net_send_buffering_cnt_update() const
Print the code related to the update of NetSendBuffer_t cnt.
+
void print_thread_memory_callbacks()
Print thread-related memory allocation and deallocation callbacks.
Represents a callback to NEURON's derivimplicit solver.
-
void print_net_receive_buffering(bool need_mech_inst=true)
Print kernel for buffering net_receive events.
-
virtual void print_global_function_common_code(BlockType type, const std::string &function_name="") override
Print common code for global functions like nrn_init, nrn_cur and nrn_state.
-
ParamVector functor_params() override
The parameters of the Newton solver "functor".
+
void print_net_receive_buffering(bool need_mech_inst=true)
Print kernel for buffering net_receive events.
+
virtual void print_global_function_common_code(BlockType type, const std::string &function_name="") override
Print common code for global functions like nrn_init, nrn_cur and nrn_state.
+
ParamVector functor_params() override
The parameters of the Newton solver "functor".
Implement logger based on spdlog library.
parser::NmodlParser::symbol_type SymbolType
Definition: main_nmodl.cpp:33
BlockType
Helper to represent various block types.
-
void print_mechanism_register() override
Print the mechanism registration function.
+
void print_mechanism_register() override
Print the mechanism registration function.
virtual void print_global_method_annotation()
Print backend specific global method annotation.
-
void print_nrn_cur_kernel(const ast::BreakpointBlock &node) override
Print main body of nrn_cur function.
-
std::string nrn_thread_internal_arguments() override
Arguments for "_threadargs_" macro in neuron implementation.
+
void print_nrn_cur_kernel(const ast::BreakpointBlock &node) override
Print main body of nrn_cur function.
+
std::string nrn_thread_internal_arguments() override
Arguments for "_threadargs_" macro in neuron implementation.
virtual void print_device_atomic_capture_annotation() const
Print the pragma annotation for the atomic increment and capture of a variable.
-
void visit_derivimplicit_callback(const ast::DerivimplicitCallback &node) override
visit node of type ast::DerivimplicitCallback
+
void visit_derivimplicit_callback(const ast::DerivimplicitCallback &node) override
visit node of type ast::DerivimplicitCallback
Visitor for printing C++ code compatible with legacy api of CoreNEURON
-
virtual void print_get_memb_list()
Print the target backend code for defining and checking a local Memb_list variable.
-
void print_num_variable_getter()
Print the getter methods for float and integer variables count.
-
void print_standard_includes() override
Print standard C/C++ includes.
-
virtual void print_net_receive_loop_end()
Print the code for closing the main net_receive loop.
-
void print_mech_type_getter()
Print the getter method for returning mechtype.
-
std::string float_variable_name(const SymbolType &symbol, bool use_instance) const override
Determine the name of a float variable given its symbol.
-
virtual void print_global_variable_device_update_annotation()
Print the pragma annotation to update global variables from host to the device.
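For orientation, a hedged sketch of what such a host-to-device update annotation can look like in the OpenACC-based GPU backend; the variable name is a placeholder and the exact spelling emitted by the generated code may differ.

// Hedged OpenACC sketch: push an updated global from host to device.
// `celsius_example` is an invented placeholder, not a name from the generated code.
static double celsius_example = 6.3;
static void update_globals_on_device() {
    #pragma acc update device(celsius_example)
}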
+
virtual void print_get_memb_list()
Print the target backend code for defining and checking a local Memb_list variable.
+
void print_num_variable_getter()
Print the getter methods for float and integer variables count.
+
void print_standard_includes() override
Print standard C/C++ includes.
+
virtual void print_net_receive_loop_end()
Print the code for closing the main net_receive loop.
+
void print_mech_type_getter()
Print the getter method for returning mechtype.
+
std::string float_variable_name(const SymbolType &symbol, bool use_instance) const override
Determine the name of a float variable given its symbol.
+
virtual void print_global_variable_device_update_annotation()
Print the pragma annotation to update global variables from host to the device.
void print_function_tables(const ast::FunctionTableBlock &node)
Print NMODL function_table in target backend code.
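As a concrete illustration of what this member emits, here is a hedged sketch of the two wrappers for a hypothetical FUNCTION_TABLE tau(v) in a mechanism with suffix "_ex". The names and the stubbed hoc_* entry points are assumptions made so the sketch is self-contained; only the overall shape follows the printer calls shown in the source listing further down in this patch.

// Stand-ins for the NEURON interpreter entry points used by the generated
// wrappers; the real declarations live in NEURON headers and may differ.
static void* _ptable_tau_ex;
static double hoc_func_table(void* table, int n, double* args) { (void) table; (void) n; return args[0]; }
static void hoc_spec_table(void** table, int n) { (void) table; (void) n; }

// Assumed NMODL input: FUNCTION_TABLE tau(v) (ms) in a mechanism with suffix "_ex".
double tau_ex(double v) {
    double _arg[1];
    _arg[0] = v;
    return hoc_func_table(_ptable_tau_ex, 1, _arg);
}

double table_tau_ex() {
    hoc_spec_table(&_ptable_tau_ex, 1);
    return 0.;
}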
-
void add_variable_tqitem(std::vector< IndexVariableInfo > &variables) override
Add the variable tqitem during get_int_variables.
-
void visit_watch_statement(const ast::WatchStatement &node) override
visit node of type ast::WatchStatement
-
const std::string external_method_arguments() noexcept override
Arguments for external functions called from generated code.
-
void print_nrn_cur_non_conductance_kernel() override
Print the nrn_cur kernel without NMODL conductance keyword provisions.
+
void add_variable_tqitem(std::vector< IndexVariableInfo > &variables) override
Add the variable tqitem during get_int_variables.
+
void visit_watch_statement(const ast::WatchStatement &node) override
visit node of type ast::WatchStatement
+
const std::string external_method_arguments() noexcept override
Arguments for external functions called from generated code.
+
void print_nrn_cur_non_conductance_kernel() override
Print the nrn_cur kernel without NMODL conductance keyword provisions.
virtual void print_backend_includes()
Print backend specific includes (none needed for C++ backend)
std::string process_verbatim_token(const std::string &token)
Process a token in a verbatim block for possible variable renaming.
-
void print_nrn_state() override
Print nrn_state / state update function definition.
-
void add_variable_point_process(std::vector< IndexVariableInfo > &variables) override
Add the variable point_process during get_int_variables.
-
void print_codegen_routines() override
Print entry point to code generation.
- -
virtual void print_net_send_buffering_grow()
Print statement that grows NetSendBuffering_t structure if needed.
+
void print_nrn_state() override
Print nrn_state / state update function definition.
+
void add_variable_point_process(std::vector< IndexVariableInfo > &variables) override
Add the variable point_process during get_int_variables.
+
void print_codegen_routines() override
Print entry point to code generation.
+ +
virtual void print_net_send_buffering_grow()
Print statement that grows NetSendBuffering_t structure if needed.
virtual void print_net_send_buf_update_to_host() const
Print the code to update NetSendBuffer_t from device to host.
-
void print_data_structures(bool print_initializers) override
Print all classes.
-
std::string replace_if_verbatim_variable(std::string name)
Replace commonly used verbatim variables.
-
virtual void print_ion_var_constructor(const std::vector< std::string > &members)
Print constructor of ion variables.
+
void print_data_structures(bool print_initializers) override
Print all classes.
+
std::string replace_if_verbatim_variable(std::string name)
Replace commonly used verbatim variables.
+
virtual void print_ion_var_constructor(const std::vector< std::string > &members)
Print constructor of ion variables.
int position_of_float_var(const std::string &name) const override
Determine the position in the data array for a given float variable.
std::vector< std::tuple< std::string, std::string, std::string, std::string > > ParamVector
A vector of parameters represented by a 4-tuple of strings:
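The description above is truncated by the tooltip; judging from how ParamVector literals appear in the source listing later in this patch (for example {"const ", "Datum*", "", "indexes"}), the four strings appear to hold a type qualifier, the type, a name qualifier and the parameter name. Below is a minimal, self-contained sketch of how such a vector can be flattened into a C++ parameter list; the helper name and the field interpretation are assumptions, not the project's API.

#include <string>
#include <tuple>
#include <vector>

using ParamVector = std::vector<std::tuple<std::string, std::string, std::string, std::string>>;

// Join the 4-tuples into a parameter list such as "int id, const Datum* indexes".
std::string parameter_str(const ParamVector& params) {
    std::string result;
    for (const auto& [type_qualifier, type, name_qualifier, name]: params) {
        if (!result.empty()) {
            result += ", ";
        }
        result += type_qualifier + type + " " + name_qualifier + name;
    }
    return result;
}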
Concrete visitor for all AST classes.
diff --git a/doxygen/codegen__neuron__cpp__visitor_8hpp_source.html b/doxygen/codegen__neuron__cpp__visitor_8hpp_source.html index 62c33c807..b71d522c6 100644 --- a/doxygen/codegen__neuron__cpp__visitor_8hpp_source.html +++ b/doxygen/codegen__neuron__cpp__visitor_8hpp_source.html @@ -322,7 +322,7 @@
226  /**
227  * Print all `check_*` function declarations
228  */
- +
230 
231 
232  void print_function_or_procedure(const ast::Block& node,
@@ -823,128 +823,128 @@
727 } // namespace nmodl
-
void print_nrn_destructor() override
Print nrn_destructor function definition.
-
void print_callable_preamble_from_prop()
Print the set of common variables from a Prop only.
-
void print_global_var_external_access()
Print functions for EXTERNAL use.
-
void print_nrn_cur_non_conductance_kernel() override
Print the nrn_cur kernel without NMODL conductance keyword provisions.
-
std::string py_function_signature(const std::string &function_or_procedure_name) const
Get the signature of the npy <func_or_proc_name> function.
+
void print_nrn_destructor() override
Print nrn_destructor function definition.
+
void print_callable_preamble_from_prop()
Print the set of common variables from a Prop only.
+
void print_global_var_external_access()
Print functions for EXTERNAL use.
+
void print_nrn_cur_non_conductance_kernel() override
Print the nrn_cur kernel without NMODL conductance keyword provisions.
+
std::string py_function_signature(const std::string &function_or_procedure_name) const
Get the signature of the npy <func_or_proc_name> function.
Helper to represent information about index/int variables.
-
void print_nrn_cur() override
Print nrn_cur / current update function definition.
-
void print_macro_definitions()
Print all NEURON macros.
+
void print_nrn_cur() override
Print nrn_cur / current update function definition.
+
void print_macro_definitions()
Print all NEURON macros.
CodegenCppVisitor(std::string mod_filename, const std::string &output_dir, std::string float_type, const bool optimize_ionvar_copies, std::unique_ptr< nmodl::utils::Blame > blame)
Constructs the C++ code generator visitor.
-
void print_net_event_call(const ast::FunctionCall &node) override
Print call to net_event.
- -
void print_neuron_includes()
Print includes from NEURON.
-
void print_nrn_init(bool skip_init_check=true)
Print the nrn_init function definition.
-
void print_hoc_py_wrapper_function_body(const ast::Block *function_or_procedure_block, InterpreterWrapper wrapper_type)
-
std::string internal_method_arguments() override
Arguments for functions that are defined and used internally.
-
void print_nrn_alloc() override
Print nrn_alloc function definition.
+
void print_net_event_call(const ast::FunctionCall &node) override
Print call to net_event.
+ +
void print_neuron_includes()
Print includes from NEURON.
+
void print_nrn_init(bool skip_init_check=true)
Print the nrn_init function definition.
+
void print_hoc_py_wrapper_function_body(const ast::Block *function_or_procedure_block, InterpreterWrapper wrapper_type)
+
std::string internal_method_arguments() override
Arguments for functions that are defined and used internally.
+
void print_nrn_alloc() override
Print nrn_alloc function definition.
-
std::string register_mechanism_arguments() const override
Arguments for register_mech or point_register_mech function.
+
std::string register_mechanism_arguments() const override
Arguments for register_mech or point_register_mech function.
int position_of_int_var(const std::string &name) const override
Determine the position in the data array for a given int variable.
- +
std::string table_thread_function_name() const
Name of the threaded table checking function.
bool optimize_ion_variable_copies() const override
Check if ion variable copies should be avoided.
-
std::string hoc_function_name(const std::string &function_or_procedure_name) const
All functions and procedures need a hoc <func_or_proc_name> to be available to the HOC interpreter.
+
std::string hoc_function_name(const std::string &function_or_procedure_name) const
All functions and procedures need a hoc <func_or_proc_name> to be available to the HOC interpreter.
encapsulates code generation backend implementations
Definition: ast_common.hpp:26
-
std::string global_variable_name(const SymbolType &symbol, bool use_instance=true) const override
Determine the variable name for a global variable given its symbol.
+
std::string global_variable_name(const SymbolType &symbol, bool use_instance=true) const override
Determine the variable name for a global variable given its symbol.
Represents ions used in a mod file.
-
std::string nrn_thread_internal_arguments() override
Arguments for "_threadargs_" macro in neuron implementation.
+
std::string nrn_thread_internal_arguments() override
Arguments for "_threadargs_" macro in neuron implementation.
Implement classes for representing symbol table at block and file scope.
std::string backend_name() const override
Name of the code generation backend.
-
std::string process_verbatim_text(std::string const &text) override
Process a verbatim block for possible variable renaming.
- -
void print_global_param_default_values()
Print global struct with default value of RANGE PARAMETERs.
-
void print_g_unused() const override
Set g_unused (conductance) for NRN_PRCELLSTATE feature.
-
void visit_watch_statement(const ast::WatchStatement &node) override
TODO: Edit for NEURON.
-
void print_net_send_call(const ast::FunctionCall &node) override
Print call to net_send.
-
void print_net_move_call(const ast::FunctionCall &node) override
Print call to net_move.
-
std::string thread_variable_name(const ThreadVariableInfo &var_info, bool use_instance=true) const
Determine the C++ string to print for thread variables.
-
void print_nrn_state() override
Print nrn_state / state update function definition.
-
void print_initial_block(const ast::InitialBlock *node)
Print the initial block.
-
void print_global_function_common_code(BlockType type, const std::string &function_name="") override
Print common code for global functions like nrn_init, nrn_cur and nrn_state.
-
std::string float_variable_name(const SymbolType &symbol, bool use_instance) const override
Determine the name of a float variable given its symbol.
- -
void print_v_unused() const override
Set v_unused (voltage) for NRN_PRCELLSTATE feature.
- -
ParamVector functor_params() override
The parameters of the Newton solver "functor".
+
std::string process_verbatim_text(std::string const &text) override
Process a verbatim block for possible variable renaming.
+ +
void print_global_param_default_values()
Print global struct with default value of RANGE PARAMETERs.
+
void print_g_unused() const override
Set g_unused (conductance) for NRN_PRCELLSTATE feature.
+
void visit_watch_statement(const ast::WatchStatement &node) override
TODO: Edit for NEURON.
+
void print_net_send_call(const ast::FunctionCall &node) override
Print call to net_send.
+
void print_net_move_call(const ast::FunctionCall &node) override
Print call to net_move.
+
std::string thread_variable_name(const ThreadVariableInfo &var_info, bool use_instance=true) const
Determine the C++ string to print for thread variables.
+
void print_nrn_state() override
Print nrn_state / state update function definition.
+
void print_initial_block(const ast::InitialBlock *node)
Print the initial block.
+
void print_global_function_common_code(BlockType type, const std::string &function_name="") override
Print common code for global functions like nrn_init, nrn_cur and nrn_state.
+
std::string float_variable_name(const SymbolType &symbol, bool use_instance) const override
Determine the name of a float variable given its symbol.
+ +
void print_v_unused() const override
Set v_unused (voltage) for NRN_PRCELLSTATE feature.
+ +
ParamVector functor_params() override
The parameters of the Newton solver "functor".
Visitor for printing C++ code compatible with legacy api of CoreNEURON
Base class for all block scoped nodes.
Definition: block.hpp:41
Represents an INITIAL block in NMODL.
void print_point_process_function_definitions()
Print POINT_PROCESS related functions; wrap external NEURON functions related to POINT_PROCESS mechani...
- -
void print_net_init()
Print NET_RECEIVE{ INITIAL{ ...
+ +
void print_net_init()
Print NET_RECEIVE{ INITIAL{ ...
Represents a WATCH statement in NMODL.
-
void print_neuron_global_variable_declarations()
Print extern declarations for neuron global variables.
+
void print_neuron_global_variable_declarations()
Print extern declarations for neuron global variables.
Represents a BREAKPOINT block in NMODL.
-
void print_nrn_cur_conductance_kernel(const ast::BreakpointBlock &node) override
Print the nrn_cur kernel with NMODL conductance keyword provisions.
-
std::string int_variable_name(const IndexVariableInfo &symbol, const std::string &name, bool use_instance) const override
Determine the name of an int variable given its symbol.
+
void print_nrn_cur_conductance_kernel(const ast::BreakpointBlock &node) override
Print the nrn_cur kernel with NMODL conductance keyword provisions.
+
std::string int_variable_name(const IndexVariableInfo &symbol, const std::string &name, bool use_instance) const override
Determine the name of an int variable given its symbol.
InterpreterWrapper
Enum to switch between HOC and Python wrappers for functions and procedures defined in mechanisms.
-
void print_global_variables_for_hoc() override
Print byte arrays that register scalar and vector variables for hoc interface.
-
void print_standard_includes() override
Print standard C/C++ includes.
+
void print_global_variables_for_hoc() override
Print byte arrays that register scalar and vector variables for hoc interface.
+
void print_standard_includes() override
Print standard C/C++ includes.
std::vector< ThreadVariableInfo > codegen_thread_variables
GLOBAL variables in THREADSAFE MOD files that are not read-only are converted to thread variables.
-
void print_mechanism_range_var_structure(bool print_initializers) override
Print the structure that wraps all range and int variables required for the NMODL.
-
void add_variable_tqitem(std::vector< IndexVariableInfo > &variables) override
Add the variable tqitem during get_int_variables.
+
void print_mechanism_range_var_structure(bool print_initializers) override
Print the structure that wraps all range and int variables required for the NMODL.
+
void add_variable_tqitem(std::vector< IndexVariableInfo > &variables) override
Add the variable tqitem during get_int_variables.
Helper class for printing C/C++ code.
-
std::string hoc_function_signature(const std::string &function_or_procedure_name) const
Get the signature of the hoc <func_or_proc_name> function.
-
void print_thread_variables_structure(bool print_initializers)
Print the data structure used to access thread variables.
-
void print_compute_functions() override
Print all compute functions for every backend.
-
void print_thread_memory_callbacks()
Print thread variable (de-)initialization functions.
-
void print_nrn_cur_kernel(const ast::BreakpointBlock &node) override
Print main body of nrn_cur function.
-
void print_mechanism_register() override
Print the mechanism registration function.
-
void print_nrn_jacob()
Print nrn_jacob function definition.
- +
std::string hoc_function_signature(const std::string &function_or_procedure_name) const
Get the signature of the hoc <func_or_proc_name> function.
+
void print_thread_variables_structure(bool print_initializers)
Print the data structure used to access thread variables.
+
void print_compute_functions() override
Print all compute functions for every backend.
+
void print_thread_memory_callbacks()
Print thread variable (de-)initialization functions.
+
void print_nrn_cur_kernel(const ast::BreakpointBlock &node) override
Print main body of nrn_cur function.
+
void print_mechanism_register() override
Print the mechanism registration function.
+
void print_nrn_jacob()
Print nrn_jacob function definition.
+
Various types to store code generation specific information.
-
void print_fast_imem_calculation() override
Print fast membrane current calculation code.
-
void print_data_structures(bool print_initializers) override
Print all classes.
-
void print_sdlists_init(bool print_initializers) override
-
const std::string external_method_arguments() noexcept override
Arguments for external functions called from generated code.
-
std::string py_function_name(const std::string &function_or_procedure_name) const
In non-POINT_PROCESS mechanisms, all functions and procedures need a py <func_or_proc_name> to be avai...
+
void print_fast_imem_calculation() override
Print fast membrane current calculation code.
+
void print_data_structures(bool print_initializers) override
Print all classes.
+
void print_sdlists_init(bool print_initializers) override
+
const std::string external_method_arguments() noexcept override
Arguments for external functions called from generated code.
+
std::string py_function_name(const std::string &function_or_procedure_name) const
In non-POINT_PROCESS mechanisms, all functions and procedures need a py <func_or_proc_name> to be avai...
size_t offset
The global variables ahead of this one require offset doubles of storage.
-
void print_node_data_structure(bool print_initializers)
Print the structure that wraps all node variables required for the NMODL.
+
void print_node_data_structure(bool print_initializers)
Print the structure that wraps all node variables required for the NMODL.
-
void print_make_instance() const
Print make_*_instance.
+
void print_make_instance() const
Print make_*_instance.
void print_atomic_reduction_pragma() override
Print atomic update pragma for reduction statements.
-
void print_setdata_functions()
Print NEURON functions related to setting global variables of the mechanism.
+
void print_setdata_functions()
Print NEURON functions related to setting global variables of the mechanism.
std::string get_name(const std::shared_ptr< symtab::Symbol > &sym)
Represents an ion write statement during code generation.
-
void print_mechanism_variables_macros()
Print mechanism variables' related macros.
+
void print_mechanism_variables_macros()
Print mechanism variables' related macros.
Visitor for printing C++ code compatible with legacy api of NEURON
-
void print_nrn_constructor() override
Print nrn_constructor function definition.
-
void print_check_table_function_prototypes()
Print all check_* function declarations.
+
void print_nrn_constructor() override
Print nrn_constructor function definition.
Implement logger based on spdlog library.
parser::NmodlParser::symbol_type SymbolType
Definition: main_nmodl.cpp:33
BlockType
Helper to represent various block types.
size_t index
There are index global variables ahead of this one.
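A hedged reading of offset and index, with invented numbers: suppose a THREADSAFE mod file promotes three writable GLOBAL variables gA (a scalar), gB (an array of three doubles) and gC (a scalar) to thread variables, in that order. Under the two descriptions above, gC would then carry index = 2 (two variables precede it) and offset = 4 (gA and gB occupy 1 + 3 doubles before it). The struct below is a standalone illustration, not the project's type.

#include <cstddef>

struct ThreadVariableInfoSketch {
    std::size_t offset;  // doubles occupied by the thread variables ahead of this one
    std::size_t index;   // number of thread variables ahead of this one
};

// gC from the example above: preceded by 2 variables spanning 4 doubles.
constexpr ThreadVariableInfoSketch gC_info{4, 2};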
-
void print_headers_include() override
Print all includes.
-
const ParamVector external_method_parameters(bool table=false) noexcept override
Parameters for functions in generated code that are called back from external code.
-
void visit_for_netcon(const ast::ForNetcon &node) override
visit node of type ast::ForNetcon
-
void print_function_or_procedure(const ast::Block &node, const std::string &name, const std::unordered_set< CppObjectSpecifier > &specifiers={ CppObjectSpecifier::Inline}) override
Print nmodl function or procedure (common code)
+
void print_headers_include() override
Print all includes.
+
void print_check_table_entrypoint()
Print all check_* function declarations.
+
const ParamVector external_method_parameters(bool table=false) noexcept override
Parameters for functions in generated code that are called back from external code.
+
void visit_for_netcon(const ast::ForNetcon &node) override
visit node of type ast::ForNetcon
+
void print_function_or_procedure(const ast::Block &node, const std::string &name, const std::unordered_set< CppObjectSpecifier > &specifiers={ CppObjectSpecifier::Inline}) override
Print nmodl function or procedure (common code)
const std::shared_ptr< symtab::Symbol > symbol
-
void print_nrn_current(const ast::BreakpointBlock &node) override
Print the nrn_current kernel.
+
void print_nrn_current(const ast::BreakpointBlock &node) override
Print the nrn_current kernel.
int position_of_float_var(const std::string &name) const override
Determine the position in the data array for a given float variable.
-
std::string get_variable_name(const std::string &name, bool use_instance=true) const override
Determine variable name in the structure of mechanism properties.
+
std::string get_variable_name(const std::string &name, bool use_instance=true) const override
Determine variable name in the structure of mechanism properties.
Visitor for printing C++ code compatible with legacy api of CoreNEURON
-
void print_make_node_data() const
Print make_*_node_data.
+
void print_make_node_data() const
Print make_*_node_data.
std::string simulator_name() override
Name of the simulator the code was generated for.
-
void print_function_procedure_helper(const ast::Block &node) override
Common helper function to help printing function or procedure blocks.
-
void add_variable_point_process(std::vector< IndexVariableInfo > &variables) override
Add the variable point_process during get_int_variables.
-
std::string nrn_thread_arguments() const override
Arguments for "_threadargs_" macro in neuron implementation.
+
void print_function_procedure_helper(const ast::Block &node) override
Common helper function to help printing function or procedure blocks.
+
void add_variable_point_process(std::vector< IndexVariableInfo > &variables) override
Add the variable point_process during get_int_variables.
+
std::string nrn_thread_arguments() const override
Arguments for "_threadargs_" macro in neuron implementation.
void print_net_receive_registration()
Print code to register the call-back for the NET_RECEIVE block.
-
void print_codegen_routines() override
Print entry point to code generation.
-
void append_conc_write_statements(std::vector< ShadowUseStatement > &statements, const Ion &ion, const std::string &concentration) override
Generate the function call statement for nrn_wrote_conc.
-
ParamVector internal_method_parameters() override
Parameters for internally defined functions.
- -
void print_net_receive()
Print net_receive call-back.
-
void print_function_prototypes() override
Print function and procedure prototype declarations.
-
void print_mechanism_global_var_structure(bool print_initializers) override
Print the structure that wraps all global variables used in the NMODL.
-
std::string namespace_name() override
Name of "our" namespace.
-
void print_global_macros()
Print NEURON global variable macros.
+
void print_codegen_routines() override
Print entry point to code generation.
+
void append_conc_write_statements(std::vector< ShadowUseStatement > &statements, const Ion &ion, const std::string &concentration) override
Generate the function call statement for nrn_wrote_conc.
+
ParamVector internal_method_parameters() override
Parameters for internally defined functions.
+ +
void print_net_receive()
Print net_receive call-back.
+
void print_function_prototypes() override
Print function and procedure prototype declarations.
+
void print_mechanism_global_var_structure(bool print_initializers) override
Print the structure that wraps all global variables used in the NMODL.
+
std::string namespace_name() override
Name of "our" namespace.
+
void print_global_macros()
Print NEURON global variable macros.
std::vector< std::tuple< std::string, std::string, std::string, std::string > > ParamVector
A vector of parameters represented by a 4-tuple of strings:
Concrete visitor for all AST classes.
diff --git a/doxygen/functions_func_p.html b/doxygen/functions_func_p.html index 55c81c14c..a39c2af19 100644 --- a/doxygen/functions_func_p.html +++ b/doxygen/functions_func_p.html @@ -216,8 +216,8 @@

- p -

    : nmodl::codegen::CodegenAccVisitor , nmodl::codegen::CodegenCoreneuronCppVisitor -
  • print_check_table_function_prototypes() -: nmodl::codegen::CodegenNeuronCppVisitor +
  • print_check_table_entrypoint() +: nmodl::codegen::CodegenNeuronCppVisitor
  • print_check_table_thread_function() : nmodl::codegen::CodegenCoreneuronCppVisitor diff --git a/doxygen/functions_p.html b/doxygen/functions_p.html index 8341804a5..39329d6fb 100644 --- a/doxygen/functions_p.html +++ b/doxygen/functions_p.html @@ -271,8 +271,8 @@

    - p -

      : nmodl::codegen::CodegenAccVisitor , nmodl::codegen::CodegenCoreneuronCppVisitor -
    • print_check_table_function_prototypes() -: nmodl::codegen::CodegenNeuronCppVisitor +
    • print_check_table_entrypoint() +: nmodl::codegen::CodegenNeuronCppVisitor
    • print_check_table_thread_function() : nmodl::codegen::CodegenCoreneuronCppVisitor diff --git a/doxygen/group__codegen__backends.js b/doxygen/group__codegen__backends.js index c888b7a08..b05e2144e 100644 --- a/doxygen/group__codegen__backends.js +++ b/doxygen/group__codegen__backends.js @@ -392,7 +392,7 @@ var group__codegen__backends = [ "position_of_int_var", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a8c348f1869653cec3dcc39b1866bab97", null ], [ "print_atomic_reduction_pragma", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a910f9f22e6fbdae4eefae177eb8910f9", null ], [ "print_callable_preamble_from_prop", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a47b63167dd62680e43439a45833b5666", null ], - [ "print_check_table_function_prototypes", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a4ad426a930f14bc970072c5a57fb2bc9", null ], + [ "print_check_table_entrypoint", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a389587ac35ce1f02ced547e3c8698241", null ], [ "print_codegen_routines", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a9f9d3aebd2dc5bbc05e0cb7e9a707c7f", null ], [ "print_compute_functions", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a94f4993226aa53f6095ad4a44fe47420", null ], [ "print_data_structures", "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#aeb0635e7e6e488519882d930401b5c2b", null ], diff --git a/doxygen/namespacenmodl_1_1codegen.html b/doxygen/namespacenmodl_1_1codegen.html index c9cb951d6..eda9cbe39 100644 --- a/doxygen/namespacenmodl_1_1codegen.html +++ b/doxygen/namespacenmodl_1_1codegen.html @@ -328,7 +328,7 @@

      Definition at line 1264 of file codegen_coreneuron_cpp_visitor.cpp.

      +

      Definition at line 1263 of file codegen_coreneuron_cpp_visitor.cpp.

      @@ -374,7 +374,7 @@

      x[id] = _args[1];

So, the R in the AST needs to be renamed to _args[1].

      -

      Definition at line 2300 of file codegen_neuron_cpp_visitor.cpp.

      +

      Definition at line 2302 of file codegen_neuron_cpp_visitor.cpp.

      @@ -424,7 +424,7 @@

      (*R) = 1.0;

So, the R in the AST needs to be renamed to (*R).

      -

      Definition at line 2306 of file codegen_coreneuron_cpp_visitor.cpp.

      +

      Definition at line 2305 of file codegen_coreneuron_cpp_visitor.cpp.

      diff --git a/doxygen/navtreeindex16.js b/doxygen/navtreeindex16.js index 25a7322b8..4458aeaf5 100644 --- a/doxygen/navtreeindex16.js +++ b/doxygen/navtreeindex16.js @@ -169,6 +169,7 @@ var NAVTREEINDEX16 = "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a2142740635a801ade67d99228fd71ce2":[0,1,0,5,93], "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a24c55a8420d179cbbd7bc6968f3c12e3":[0,1,0,5,76], "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a2e35d75bdda2d2d46e097fc217e4ce9d":[0,1,0,5,82], +"classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a389587ac35ce1f02ced547e3c8698241":[0,1,0,5,28], "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a3af0b90beeeec4a0e17e84a11a76ecbc":[0,1,0,5,11], "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a3c5c869470655815c71464f45fc82560":[0,1,0,5,81], "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a3e4bdc70a4d2eb6169d656c1ec9c9fe1":[0,1,0,5,10], @@ -177,7 +178,6 @@ var NAVTREEINDEX16 = "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a46dadeb4b5354222f493742dba649924":[0,1,0,5,73], "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a47b63167dd62680e43439a45833b5666":[0,1,0,5,27], "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a480879d1374101ede3c9542951433543":[0,1,0,5,69], -"classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a4ad426a930f14bc970072c5a57fb2bc9":[0,1,0,5,28], "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a4fb4ca054d85c8bc2c0bd0445633710e":[0,1,0,5,7], "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a50026c205b223b41ffe682526735873c":[0,1,0,5,87], "classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a5429cb9990f7cbe91dc33522da5ad76d":[0,1,0,5,70], diff --git a/doxygen/search/all_f.js b/doxygen/search/all_f.js index 83ef091fd..78607e5a9 100644 --- a/doxygen/search/all_f.js +++ b/doxygen/search/all_f.js @@ -79,7 +79,7 @@ var searchData= ['print_5fbefore_5fafter_5fblock_1683',['print_before_after_block',['../classnmodl_1_1codegen_1_1_codegen_coreneuron_cpp_visitor.html#a9d1748ca9db1665902c25c32a2b458d6',1,'nmodl::codegen::CodegenCoreneuronCppVisitor']]], ['print_5fcallable_5fpreamble_5ffrom_5fprop_1684',['print_callable_preamble_from_prop',['../classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a47b63167dd62680e43439a45833b5666',1,'nmodl::codegen::CodegenNeuronCppVisitor']]], ['print_5fchannel_5fiteration_5fblock_5fparallel_5fhint_1685',['print_channel_iteration_block_parallel_hint',['../classnmodl_1_1codegen_1_1_codegen_acc_visitor.html#a0d8527584a257a10b5685356bea2911d',1,'nmodl::codegen::CodegenAccVisitor::print_channel_iteration_block_parallel_hint()'],['../classnmodl_1_1codegen_1_1_codegen_coreneuron_cpp_visitor.html#a2e6460f6c8b0f6a0ae0732a5a0a15c45',1,'nmodl::codegen::CodegenCoreneuronCppVisitor::print_channel_iteration_block_parallel_hint()']]], - ['print_5fcheck_5ftable_5ffunction_5fprototypes_1686',['print_check_table_function_prototypes',['../classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a4ad426a930f14bc970072c5a57fb2bc9',1,'nmodl::codegen::CodegenNeuronCppVisitor']]], + ['print_5fcheck_5ftable_5fentrypoint_1686',['print_check_table_entrypoint',['../classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a389587ac35ce1f02ced547e3c8698241',1,'nmodl::codegen::CodegenNeuronCppVisitor']]], 
['print_5fcheck_5ftable_5fthread_5ffunction_1687',['print_check_table_thread_function',['../classnmodl_1_1codegen_1_1_codegen_coreneuron_cpp_visitor.html#a45b1dc5e371bf956e045d40f19b2470c',1,'nmodl::codegen::CodegenCoreneuronCppVisitor']]], ['print_5fcodegen_5froutines_1688',['print_codegen_routines',['../classnmodl_1_1codegen_1_1_codegen_coreneuron_cpp_visitor.html#a92f46b65366a865118933f37c589d34e',1,'nmodl::codegen::CodegenCoreneuronCppVisitor::print_codegen_routines()'],['../classnmodl_1_1codegen_1_1_codegen_cpp_visitor.html#a20666add7b3f75937954a038c51f5b55',1,'nmodl::codegen::CodegenCppVisitor::print_codegen_routines()'],['../classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a9f9d3aebd2dc5bbc05e0cb7e9a707c7f',1,'nmodl::codegen::CodegenNeuronCppVisitor::print_codegen_routines()']]], ['print_5fcommon_5fgetters_1689',['print_common_getters',['../classnmodl_1_1codegen_1_1_codegen_coreneuron_cpp_visitor.html#abbcf6dbd38c9ded33c7b55d5fadf7e24',1,'nmodl::codegen::CodegenCoreneuronCppVisitor']]], diff --git a/doxygen/search/functions_f.js b/doxygen/search/functions_f.js index 16e7be443..3f53c7b37 100644 --- a/doxygen/search/functions_f.js +++ b/doxygen/search/functions_f.js @@ -35,7 +35,7 @@ var searchData= ['print_5fbefore_5fafter_5fblock_4139',['print_before_after_block',['../classnmodl_1_1codegen_1_1_codegen_coreneuron_cpp_visitor.html#a9d1748ca9db1665902c25c32a2b458d6',1,'nmodl::codegen::CodegenCoreneuronCppVisitor']]], ['print_5fcallable_5fpreamble_5ffrom_5fprop_4140',['print_callable_preamble_from_prop',['../classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a47b63167dd62680e43439a45833b5666',1,'nmodl::codegen::CodegenNeuronCppVisitor']]], ['print_5fchannel_5fiteration_5fblock_5fparallel_5fhint_4141',['print_channel_iteration_block_parallel_hint',['../classnmodl_1_1codegen_1_1_codegen_acc_visitor.html#a0d8527584a257a10b5685356bea2911d',1,'nmodl::codegen::CodegenAccVisitor::print_channel_iteration_block_parallel_hint()'],['../classnmodl_1_1codegen_1_1_codegen_coreneuron_cpp_visitor.html#a2e6460f6c8b0f6a0ae0732a5a0a15c45',1,'nmodl::codegen::CodegenCoreneuronCppVisitor::print_channel_iteration_block_parallel_hint()']]], - ['print_5fcheck_5ftable_5ffunction_5fprototypes_4142',['print_check_table_function_prototypes',['../classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a4ad426a930f14bc970072c5a57fb2bc9',1,'nmodl::codegen::CodegenNeuronCppVisitor']]], + ['print_5fcheck_5ftable_5fentrypoint_4142',['print_check_table_entrypoint',['../classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a389587ac35ce1f02ced547e3c8698241',1,'nmodl::codegen::CodegenNeuronCppVisitor']]], ['print_5fcheck_5ftable_5fthread_5ffunction_4143',['print_check_table_thread_function',['../classnmodl_1_1codegen_1_1_codegen_coreneuron_cpp_visitor.html#a45b1dc5e371bf956e045d40f19b2470c',1,'nmodl::codegen::CodegenCoreneuronCppVisitor']]], ['print_5fcodegen_5froutines_4144',['print_codegen_routines',['../classnmodl_1_1codegen_1_1_codegen_coreneuron_cpp_visitor.html#a92f46b65366a865118933f37c589d34e',1,'nmodl::codegen::CodegenCoreneuronCppVisitor::print_codegen_routines()'],['../classnmodl_1_1codegen_1_1_codegen_cpp_visitor.html#a20666add7b3f75937954a038c51f5b55',1,'nmodl::codegen::CodegenCppVisitor::print_codegen_routines()'],['../classnmodl_1_1codegen_1_1_codegen_neuron_cpp_visitor.html#a9f9d3aebd2dc5bbc05e0cb7e9a707c7f',1,'nmodl::codegen::CodegenNeuronCppVisitor::print_codegen_routines()']]], 
['print_5fcommon_5fgetters_4145',['print_common_getters',['../classnmodl_1_1codegen_1_1_codegen_coreneuron_cpp_visitor.html#abbcf6dbd38c9ded33c7b55d5fadf7e24',1,'nmodl::codegen::CodegenCoreneuronCppVisitor']]], diff --git a/doxygen/src_2codegen_2codegen__coreneuron__cpp__visitor_8cpp_source.html b/doxygen/src_2codegen_2codegen__coreneuron__cpp__visitor_8cpp_source.html index e7c338a9d..75d3ad8db 100644 --- a/doxygen/src_2codegen_2codegen__coreneuron__cpp__visitor_8cpp_source.html +++ b/doxygen/src_2codegen_2codegen__coreneuron__cpp__visitor_8cpp_source.html @@ -548,2644 +548,2643 @@
      452  for (const auto& i: p) {
      453  params.emplace_back("", "double", "", i->get_node_name());
      454  }
      -
      455  printer->fmt_line("double {}({})", method_name(name), get_parameter_str(params));
      -
      456  printer->push_block();
      -
      457  printer->fmt_line("double _arg[{}];", p.size());
      -
      458  for (size_t i = 0; i < p.size(); ++i) {
      -
      459  printer->fmt_line("_arg[{}] = {};", i, p[i]->get_node_name());
      -
      460  }
      -
      461  printer->fmt_line("return hoc_func_table({}, {}, _arg);",
      -
      462  get_variable_name(std::string("_ptable_" + name), true),
      -
      463  p.size());
      -
      464  printer->pop_block();
      -
      465 
      -
      466  printer->fmt_push_block("double table_{}()", method_name(name));
      -
      467  printer->fmt_line("hoc_spec_table(&{}, {});",
      -
      468  get_variable_name(std::string("_ptable_" + name)),
      -
      469  p.size());
      -
      470  printer->add_line("return 0.;");
      -
      471  printer->pop_block();
      -
      472 }
      +
      455  printer->fmt_push_block("double {}({})", method_name(name), get_parameter_str(params));
      +
      456  printer->fmt_line("double _arg[{}];", p.size());
      +
      457  for (size_t i = 0; i < p.size(); ++i) {
      +
      458  printer->fmt_line("_arg[{}] = {};", i, p[i]->get_node_name());
      +
      459  }
      +
      460  printer->fmt_line("return hoc_func_table({}, {}, _arg);",
      +
      461  get_variable_name(std::string("_ptable_" + name), true),
      +
      462  p.size());
      +
      463  printer->pop_block();
      +
      464 
      +
      465  printer->fmt_push_block("double table_{}()", method_name(name));
      +
      466  printer->fmt_line("hoc_spec_table(&{}, {});",
      +
      467  get_variable_name(std::string("_ptable_" + name)),
      +
      468  p.size());
      +
      469  printer->add_line("return 0.;");
      +
      470  printer->pop_block();
      +
      471 }
      +
      472 
      473 
      -
      474 
      -
      475 /****************************************************************************************/
      -
      476 /* Code-specific helper routines */
      -
      477 /****************************************************************************************/
      -
      478 
      -
      479 void CodegenCoreneuronCppVisitor::add_variable_tqitem(std::vector<IndexVariableInfo>& variables) {
      -
      480  // for non-artificial cell, when net_receive buffering is enabled
      -
      481  // then tqitem is an offset
      -
      482  if (info.net_send_used) {
      -
      483  if (info.artificial_cell) {
      -
      484  variables.emplace_back(make_symbol(naming::TQITEM_VARIABLE), true);
      -
      485  } else {
      -
      486  variables.emplace_back(make_symbol(naming::TQITEM_VARIABLE), false, false, true);
      -
      487  variables.back().is_constant = true;
      -
      488  }
      -
      489  info.tqitem_index = static_cast<int>(variables.size() - 1);
      -
      490  }
      -
      491 }
      -
      492 
      - -
      494  std::vector<IndexVariableInfo>& variables) {
      -
      495  /// note that this variable is not printed in neuron implementation
      -
      496  if (info.artificial_cell) {
      -
      497  variables.emplace_back(make_symbol(naming::POINT_PROCESS_VARIABLE), true);
      -
      498  } else {
      -
      499  variables.emplace_back(make_symbol(naming::POINT_PROCESS_VARIABLE), false, false, true);
      -
      500  variables.back().is_constant = true;
      -
      501  }
      -
      502 }
      -
      503 
      - -
      505  return get_arg_str(internal_method_parameters());
      -
      506 }
      -
      507 
      -
      508 
      -
      509 /**
      -
      510  * @todo: figure out how to correctly handle qualifiers
      -
      511  */
      - -
      513  ParamVector params = {{"", "int", "", "id"},
      -
      514  {"", "int", "", "pnodecount"},
      -
      515  {"", fmt::format("{}*", instance_struct()), "", "inst"}};
      -
      516  if (ion_variable_struct_required()) {
      -
      517  params.emplace_back("", "IonCurVar&", "", "ionvar");
      -
      518  }
      -
      519  ParamVector other_params = {{"", "double*", "", "data"},
      -
      520  {"const ", "Datum*", "", "indexes"},
      -
      521  {"", "ThreadDatum*", "", "thread"},
      -
      522  {"", "NrnThread*", "", "nt"},
      -
      523  {"", "double", "", "v"}};
      -
      524  params.insert(params.end(), other_params.begin(), other_params.end());
      -
      525  return params;
      -
      526 }
      +
      474 /****************************************************************************************/
      +
      475 /* Code-specific helper routines */
      +
      476 /****************************************************************************************/
      +
      477 
      +
      478 void CodegenCoreneuronCppVisitor::add_variable_tqitem(std::vector<IndexVariableInfo>& variables) {
      +
      479  // for non-artificial cell, when net_receive buffering is enabled
      +
      480  // then tqitem is an offset
      +
      481  if (info.net_send_used) {
      +
      482  if (info.artificial_cell) {
      +
      483  variables.emplace_back(make_symbol(naming::TQITEM_VARIABLE), true);
      +
      484  } else {
      +
      485  variables.emplace_back(make_symbol(naming::TQITEM_VARIABLE), false, false, true);
      +
      486  variables.back().is_constant = true;
      +
      487  }
      +
      488  info.tqitem_index = static_cast<int>(variables.size() - 1);
      +
      489  }
      +
      490 }
      +
      491 
      + +
      493  std::vector<IndexVariableInfo>& variables) {
      +
      494  /// note that this variable is not printed in neuron implementation
      +
      495  if (info.artificial_cell) {
      +
      496  variables.emplace_back(make_symbol(naming::POINT_PROCESS_VARIABLE), true);
      +
      497  } else {
      +
      498  variables.emplace_back(make_symbol(naming::POINT_PROCESS_VARIABLE), false, false, true);
      +
      499  variables.back().is_constant = true;
      +
      500  }
      +
      501 }
      +
      502 
      + +
      504  return get_arg_str(internal_method_parameters());
      +
      505 }
      +
      506 
      +
      507 
      +
      508 /**
      +
      509  * @todo: figure out how to correctly handle qualifiers
      +
      510  */
      + +
      512  ParamVector params = {{"", "int", "", "id"},
      +
      513  {"", "int", "", "pnodecount"},
      +
      514  {"", fmt::format("{}*", instance_struct()), "", "inst"}};
      +
      515  if (ion_variable_struct_required()) {
      +
      516  params.emplace_back("", "IonCurVar&", "", "ionvar");
      +
      517  }
      +
      518  ParamVector other_params = {{"", "double*", "", "data"},
      +
      519  {"const ", "Datum*", "", "indexes"},
      +
      520  {"", "ThreadDatum*", "", "thread"},
      +
      521  {"", "NrnThread*", "", "nt"},
      +
      522  {"", "double", "", "v"}};
      +
      523  params.insert(params.end(), other_params.begin(), other_params.end());
      +
      524  return params;
      +
      525 }
      +
      526 
      527 
      -
      528 
      - -
      530  return get_arg_str(external_method_parameters());
      -
      531 }
      + +
      529  return get_arg_str(external_method_parameters());
      +
      530 }
      +
      531 
      532 
      -
      533 
      - -
      535  bool table) noexcept {
      -
      536  ParamVector args = {{"", "int", "", "id"},
      -
      537  {"", "int", "", "pnodecount"},
      -
      538  {"", "double*", "", "data"},
      -
      539  {"", "Datum*", "", "indexes"},
      -
      540  {"", "ThreadDatum*", "", "thread"},
      -
      541  {"", "NrnThread*", "", "nt"},
      -
      542  {"", "Memb_list*", "", "ml"}};
      -
      543  if (table) {
      -
      544  args.emplace_back("", "int", "", "tml_id");
      -
      545  } else {
      -
      546  args.emplace_back("", "double", "", "v");
      -
      547  }
      -
      548  return args;
      -
      549 }
      + +
      534  bool table) noexcept {
      +
      535  ParamVector args = {{"", "int", "", "id"},
      +
      536  {"", "int", "", "pnodecount"},
      +
      537  {"", "double*", "", "data"},
      +
      538  {"", "Datum*", "", "indexes"},
      +
      539  {"", "ThreadDatum*", "", "thread"},
      +
      540  {"", "NrnThread*", "", "nt"},
      +
      541  {"", "Memb_list*", "", "ml"}};
      +
      542  if (table) {
      +
      543  args.emplace_back("", "int", "", "tml_id");
      +
      544  } else {
      +
      545  args.emplace_back("", "double", "", "v");
      +
      546  }
      +
      547  return args;
      +
      548 }
      +
      549 
      550 
      -
      551 
      - -
      553  if (ion_variable_struct_required()) {
      -
      554  return "id, pnodecount, ionvar, data, indexes, thread, nt, ml, v";
      -
      555  }
      -
      556  return "id, pnodecount, data, indexes, thread, nt, ml, v";
      -
      557 }
      -
      558 
      -
      559 
      -
      560 /**
      -
      561  * Function call arguments when function or procedure is defined in the
      -
      562  * same mod file itself
      -
      563  */
      - -
      565  return get_arg_str(internal_method_parameters());
      -
      566 }
      -
      567 
      -
      568 
      -
      569 /**
      -
      570  * Replace commonly used variables in the verbatim blocks into their corresponding
      -
      571  * variable name in the new code generation backend.
      -
      572  */
      - - -
      575  name = naming::VERBATIM_VARIABLES_MAPPING.at(name);
      -
      576  }
      -
      577 
      -
      578  /**
      -
      579  * if function is defined the same mod file then the arguments must
      -
      580  * contain mechanism instance as well.
      -
      581  */
      -
      582  if (name == naming::THREAD_ARGS) {
      -
      583  if (internal_method_call_encountered) {
      -
      584  name = nrn_thread_internal_arguments();
      -
      585  internal_method_call_encountered = false;
      -
      586  } else {
      -
      587  name = nrn_thread_arguments();
      -
      588  }
      -
      589  }
      -
      590  if (name == naming::THREAD_ARGS_PROTO) {
      -
      591  name = get_parameter_str(external_method_parameters());
      -
      592  }
      -
      593  return name;
      -
      594 }
      -
      595 
      -
      596 
      -
      597 /**
      -
      598  * Processing commonly used constructs in the verbatim blocks.
      -
      599  * @todo : this is still ad-hoc and requires re-implementation to
      -
      600  * handle it more elegantly.
      -
      601  */
      -
      602 std::string CodegenCoreneuronCppVisitor::process_verbatim_text(std::string const& text) {
      - -
      604  driver.scan_string(text);
      -
      605  auto tokens = driver.all_tokens();
      -
      606  std::string result;
      -
      607  for (size_t i = 0; i < tokens.size(); i++) {
      -
      608  auto token = tokens[i];
      -
      609 
      -
      610  // check if we have function call in the verbatim block where
      -
      611  // function is defined in the same mod file
      -
      612  if (program_symtab->is_method_defined(token) && tokens[i + 1] == "(") {
      -
      613  internal_method_call_encountered = true;
      -
      614  }
      -
      615  result += process_verbatim_token(token);
      -
      616  }
      -
      617  return result;
      -
      618 }
      + +
      552  if (ion_variable_struct_required()) {
      +
      553  return "id, pnodecount, ionvar, data, indexes, thread, nt, ml, v";
      +
      554  }
      +
      555  return "id, pnodecount, data, indexes, thread, nt, ml, v";
      +
      556 }
      +
      557 
      +
      558 
      +
      559 /**
      +
      560  * Function call arguments when function or procedure is defined in the
      +
      561  * same mod file itself
      +
      562  */
      + +
      564  return get_arg_str(internal_method_parameters());
      +
      565 }
      +
      566 
      +
      567 
      +
      568 /**
      +
      569  * Replace commonly used variables in the verbatim blocks into their corresponding
      +
      570  * variable name in the new code generation backend.
      +
      571  */
      + + +
      574  name = naming::VERBATIM_VARIABLES_MAPPING.at(name);
      +
      575  }
      +
      576 
      +
      577  /**
      +
      578  * if function is defined the same mod file then the arguments must
      +
      579  * contain mechanism instance as well.
      +
      580  */
      +
      581  if (name == naming::THREAD_ARGS) {
      +
      582  if (internal_method_call_encountered) {
      +
      583  name = nrn_thread_internal_arguments();
      +
      584  internal_method_call_encountered = false;
      +
      585  } else {
      +
      586  name = nrn_thread_arguments();
      +
      587  }
      +
      588  }
      +
      589  if (name == naming::THREAD_ARGS_PROTO) {
      +
      590  name = get_parameter_str(external_method_parameters());
      +
      591  }
      +
      592  return name;
      +
      593 }
      +
      594 
      +
      595 
      +
      596 /**
      +
      597  * Processing commonly used constructs in the verbatim blocks.
      +
      598  * @todo : this is still ad-hoc and requires re-implementation to
      +
      599  * handle it more elegantly.
      +
      600  */
      +
      601 std::string CodegenCoreneuronCppVisitor::process_verbatim_text(std::string const& text) {
      + +
      603  driver.scan_string(text);
      +
      604  auto tokens = driver.all_tokens();
      +
      605  std::string result;
      +
      606  for (size_t i = 0; i < tokens.size(); i++) {
      +
      607  auto token = tokens[i];
      +
      608 
      +
      609  // check if we have function call in the verbatim block where
      +
      610  // function is defined in the same mod file
      +
      611  if (program_symtab->is_method_defined(token) && tokens[i + 1] == "(") {
      +
      612  internal_method_call_encountered = true;
      +
      613  }
      +
      614  result += process_verbatim_token(token);
      +
      615  }
      +
      616  return result;
      +
      617 }
      +
      618 
      619 
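To make the effect of the two routines above concrete, here is a hedged sketch (the `_threadargs_` spelling of the THREAD_ARGS token and the mod-file procedure name `rates` are assumptions, and any renaming of the call itself by process_verbatim_token is not shown). A VERBATIM call to a procedure defined in the same mod file, such as

    VERBATIM
    rates(_threadargs_);
    ENDVERBATIM

sets internal_method_call_encountered when the `rates` token is seen followed by "(", so the subsequent THREAD_ARGS token is expanded to the internal argument list rather than the external one:

    rates(id, pnodecount, data, indexes, thread, nt, ml, v);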
      -
      620 
      - -
      622  auto nrn_channel_info_var_name = get_channel_info_var_name();
      -
      623  auto nrn_cur = nrn_cur_required() ? method_name(naming::NRN_CUR_METHOD) : "nullptr";
      -
      624  auto nrn_state = nrn_state_required() ? method_name(naming::NRN_STATE_METHOD) : "nullptr";
      -
      625  auto nrn_alloc = method_name(naming::NRN_ALLOC_METHOD);
      -
      626  auto nrn_init = method_name(naming::NRN_INIT_METHOD);
      -
      627  auto const nrn_private_constructor = method_name(naming::NRN_PRIVATE_CONSTRUCTOR_METHOD);
      -
      628  auto const nrn_private_destructor = method_name(naming::NRN_PRIVATE_DESTRUCTOR_METHOD);
      -
      629  return fmt::format("{}, {}, {}, nullptr, {}, {}, {}, {}, first_pointer_var_index()",
      -
      630  nrn_channel_info_var_name,
      -
      631  nrn_alloc,
      -
      632  nrn_cur,
      -
      633  nrn_state,
      -
      634  nrn_init,
      -
      635  nrn_private_constructor,
      -
      636  nrn_private_destructor);
      -
      637 }
      + +
      621  auto nrn_channel_info_var_name = get_channel_info_var_name();
      +
      622  auto nrn_cur = nrn_cur_required() ? method_name(naming::NRN_CUR_METHOD) : "nullptr";
      +
      623  auto nrn_state = nrn_state_required() ? method_name(naming::NRN_STATE_METHOD) : "nullptr";
      +
      624  auto nrn_alloc = method_name(naming::NRN_ALLOC_METHOD);
      +
      625  auto nrn_init = method_name(naming::NRN_INIT_METHOD);
      +
      626  auto const nrn_private_constructor = method_name(naming::NRN_PRIVATE_CONSTRUCTOR_METHOD);
      +
      627  auto const nrn_private_destructor = method_name(naming::NRN_PRIVATE_DESTRUCTOR_METHOD);
      +
      628  return fmt::format("{}, {}, {}, nullptr, {}, {}, {}, {}, first_pointer_var_index()",
      +
      629  nrn_channel_info_var_name,
      +
      630  nrn_alloc,
      +
      631  nrn_cur,
      +
      632  nrn_state,
      +
      633  nrn_init,
      +
      634  nrn_private_constructor,
      +
      635  nrn_private_destructor);
      +
      636 }
      +
      637 
      638 
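As an illustration only (the mechanism suffix `hh`, the method_name convention, and the channel info variable name `mechanism_info` are assumptions; nrn_cur/nrn_state collapse to "nullptr" when not required), the argument string assembled above could look like:

    mechanism_info, nrn_alloc_hh, nrn_cur_hh, nullptr, nrn_state_hh, nrn_init_hh,
    nrn_private_constructor_hh, nrn_private_destructor_hh, first_pointer_var_index()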
      -
      639 
      - -
      641  std::vector<ShadowUseStatement>& statements,
      -
      642  const Ion& ion,
      -
      643  const std::string& concentration) {
      -
      644  int index = 0;
      -
      645  if (ion.is_intra_cell_conc(concentration)) {
      -
      646  index = 1;
      -
      647  } else if (ion.is_extra_cell_conc(concentration)) {
      -
      648  index = 2;
      -
      649  } else {
      -
      650  /// \todo Unhandled case in neuron implementation
      -
      651  throw std::logic_error(fmt::format("codegen error for {} ion", ion.name));
      -
      652  }
      -
      653  auto ion_type_name = fmt::format("{}_type", ion.name);
      -
      654  auto lhs = fmt::format("int {}", ion_type_name);
      -
      655  auto op = "=";
      -
      656  auto rhs = get_variable_name(ion_type_name);
      -
      657  statements.push_back(ShadowUseStatement{lhs, op, rhs});
      -
      658 
      -
      659  auto ion_name = ion.name;
      -
      660  auto conc_var_name = get_variable_name(naming::ION_VARNAME_PREFIX + concentration);
      -
      661  auto style_var_name = get_variable_name("style_" + ion_name);
      -
      662  auto statement = fmt::format(
      -
      663  "nrn_wrote_conc({}_type,"
      -
      664  " &({}),"
      + +
      640  std::vector<ShadowUseStatement>& statements,
      +
      641  const Ion& ion,
      +
      642  const std::string& concentration) {
      +
      643  int index = 0;
      +
      644  if (ion.is_intra_cell_conc(concentration)) {
      +
      645  index = 1;
      +
      646  } else if (ion.is_extra_cell_conc(concentration)) {
      +
      647  index = 2;
      +
      648  } else {
      +
      649  /// \todo Unhandled case in neuron implementation
      +
      650  throw std::logic_error(fmt::format("codegen error for {} ion", ion.name));
      +
      651  }
      +
      652  auto ion_type_name = fmt::format("{}_type", ion.name);
      +
      653  auto lhs = fmt::format("int {}", ion_type_name);
      +
      654  auto op = "=";
      +
      655  auto rhs = get_variable_name(ion_type_name);
      +
      656  statements.push_back(ShadowUseStatement{lhs, op, rhs});
      +
      657 
      +
      658  auto ion_name = ion.name;
      +
      659  auto conc_var_name = get_variable_name(naming::ION_VARNAME_PREFIX + concentration);
      +
      660  auto style_var_name = get_variable_name("style_" + ion_name);
      +
      661  auto statement = fmt::format(
      +
      662  "nrn_wrote_conc({}_type,"
      +
      663  " &({}),"
      +
      664  " {},"
      665  " {},"
      -
      666  " {},"
      -
      667  " nrn_ion_global_map,"
      -
      668  " {},"
      -
      669  " nt->_ml_list[{}_type]->_nodecount_padded)",
      -
      670  ion_name,
      -
      671  conc_var_name,
      -
      672  index,
      -
      673  style_var_name,
      -
      674  get_variable_name(naming::CELSIUS_VARIABLE),
      -
      675  ion_name);
      -
      676 
      -
      677  statements.push_back(ShadowUseStatement{statement, "", ""});
      -
      678 }
      +
      666  " nrn_ion_global_map,"
      +
      667  " {},"
      +
      668  " nt->_ml_list[{}_type]->_nodecount_padded)",
      +
      669  ion_name,
      +
      670  conc_var_name,
      +
      671  index,
      +
      672  style_var_name,
      +
      673  get_variable_name(naming::CELSIUS_VARIABLE),
      +
      674  ion_name);
      +
      675 
      +
      676  statements.push_back(ShadowUseStatement{statement, "", ""});
      +
      677 }
      +
      678 
      679 
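A rough sketch of the two shadow statements this generates for a hypothetical calcium ion that writes the intracellular concentration cai (index 1); every name below except nrn_ion_global_map and the _nodecount_padded access is illustrative, since the real names come from get_variable_name():

    int ca_type = hh_global.ca_type;
    nrn_wrote_conc(ca_type, &(ion_cai), 1, style_ca,
                   nrn_ion_global_map, celsius,
                   nt->_ml_list[ca_type]->_nodecount_padded);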
      -
      680 
      -
      681 /****************************************************************************************/
      -
      682 /* Code-specific printing routines for code generation */
      -
      683 /****************************************************************************************/
      +
      680 /****************************************************************************************/
      +
      681 /* Code-specific printing routines for code generation */
      +
      682 /****************************************************************************************/
      +
      683 
      684 
      -
      685 
      - -
      687  printer->add_newline(2);
      -
      688  printer->push_block("static inline int first_pointer_var_index()");
      -
      689  printer->fmt_line("return {};", info.first_pointer_var_index);
      -
      690  printer->pop_block();
      -
      691 }
      + +
      686  printer->add_newline(2);
      +
      687  printer->push_block("static inline int first_pointer_var_index()");
      +
      688  printer->fmt_line("return {};", info.first_pointer_var_index);
      +
      689  printer->pop_block();
      +
      690 }
      +
      691 
      692 
      -
      693 
      - -
      695  printer->add_newline(2);
      -
      696  printer->push_block("static inline int first_random_var_index()");
      -
      697  printer->fmt_line("return {};", info.first_random_var_index);
      -
      698  printer->pop_block();
      -
      699 }
      + +
      694  printer->add_newline(2);
      +
      695  printer->push_block("static inline int first_random_var_index()");
      +
      696  printer->fmt_line("return {};", info.first_random_var_index);
      +
      697  printer->pop_block();
      +
      698 }
      +
      699 
      700 
      -
      701 
      - -
      703  printer->add_newline(2);
      -
      704  printer->push_block("static inline int float_variables_size()");
      -
      705  printer->fmt_line("return {};", float_variables_size());
      -
      706  printer->pop_block();
      -
      707 
      -
      708  printer->add_newline(2);
      -
      709  printer->push_block("static inline int int_variables_size()");
      -
      710  printer->fmt_line("return {};", int_variables_size());
      -
      711  printer->pop_block();
      -
      712 }
      + +
      702  printer->add_newline(2);
      +
      703  printer->push_block("static inline int float_variables_size()");
      +
      704  printer->fmt_line("return {};", float_variables_size());
      +
      705  printer->pop_block();
      +
      706 
      +
      707  printer->add_newline(2);
      +
      708  printer->push_block("static inline int int_variables_size()");
      +
      709  printer->fmt_line("return {};", int_variables_size());
      +
      710  printer->pop_block();
      +
      711 }
      +
      712 
      713 
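The emitted getters have this shape (the returned counts are invented for illustration):

    static inline int float_variables_size() {
        return 7;
    }

    static inline int int_variables_size() {
        return 4;
    }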
      -
      714 
      - -
      716  if (!net_receive_exist()) {
      -
      717  return;
      -
      718  }
      -
      719  printer->add_newline(2);
      -
      720  printer->push_block("static inline int num_net_receive_args()");
      -
      721  printer->fmt_line("return {};", info.num_net_receive_parameters);
      -
      722  printer->pop_block();
      -
      723 }
      + +
      715  if (!net_receive_exist()) {
      +
      716  return;
      +
      717  }
      +
      718  printer->add_newline(2);
      +
      719  printer->push_block("static inline int num_net_receive_args()");
      +
      720  printer->fmt_line("return {};", info.num_net_receive_parameters);
      +
      721  printer->pop_block();
      +
      722 }
      +
      723 
      724 
      -
      725 
      - -
      727  printer->add_newline(2);
      -
      728  printer->push_block("static inline int get_mech_type()");
      -
      729  // false => get it from the host-only global struct, not the instance structure
      -
      730  printer->fmt_line("return {};", get_variable_name("mech_type", false));
      -
      731  printer->pop_block();
      -
      732 }
      + +
      726  printer->add_newline(2);
      +
      727  printer->push_block("static inline int get_mech_type()");
      +
      728  // false => get it from the host-only global struct, not the instance structure
      +
      729  printer->fmt_line("return {};", get_variable_name("mech_type", false));
      +
      730  printer->pop_block();
      +
      731 }
      +
      732 
      733 
      -
      734 
      - -
      736  printer->add_newline(2);
      -
      737  printer->push_block("static inline Memb_list* get_memb_list(NrnThread* nt)");
      -
      738  printer->push_block("if (!nt->_ml_list)");
      -
      739  printer->add_line("return nullptr;");
      -
      740  printer->pop_block();
      -
      741  printer->add_line("return nt->_ml_list[get_mech_type()];");
      -
      742  printer->pop_block();
      -
      743 }
      + +
      735  printer->add_newline(2);
      +
      736  printer->push_block("static inline Memb_list* get_memb_list(NrnThread* nt)");
      +
      737  printer->push_block("if (!nt->_ml_list)");
      +
      738  printer->add_line("return nullptr;");
      +
      739  printer->pop_block();
      +
      740  printer->add_line("return nt->_ml_list[get_mech_type()];");
      +
      741  printer->pop_block();
      +
      742 }
      +
      743 
      744 
      -
      745 
      - -
      747  return "coreneuron";
      -
      748 }
      -
      749 
      -
      750 /**
      -
      751  * \details There are three types of thread variables currently considered:
      -
      752  * - top local thread variables
      -
      753  * - thread variables in the mod file
      -
      754  * - thread variables for solver
      -
      755  *
      -
      756  * These variables are allocated into different thread structures and have
      -
757  * corresponding thread ids. Thread ids start from 0. In the mod2c implementation,
      -
      758  * thread_data_index is increased at various places and it is used to
      -
      759  * decide the index of thread.
      -
      760  */
      - -
      762  if (info.vectorize && info.derivimplicit_used()) {
      -
      763  int tid = info.derivimplicit_var_thread_id;
      -
      764  int list = info.derivimplicit_list_num;
      -
      765 
      -
      766  // clang-format off
      -
      767  printer->add_newline(2);
      -
      768  printer->add_line("/** thread specific helper routines for derivimplicit */");
      -
      769 
      -
      770  printer->add_newline(1);
      -
      771  printer->fmt_push_block("static inline int* deriv{}_advance(ThreadDatum* thread)", list);
      -
      772  printer->fmt_line("return &(thread[{}].i);", tid);
      -
      773  printer->pop_block();
      -
      774  printer->add_newline();
      -
      775 
      -
      776  printer->fmt_push_block("static inline int dith{}()", list);
      -
      777  printer->fmt_line("return {};", tid+1);
      -
      778  printer->pop_block();
      -
      779  printer->add_newline();
      -
      780 
      -
      781  printer->fmt_push_block("static inline void** newtonspace{}(ThreadDatum* thread)", list);
      -
      782  printer->fmt_line("return &(thread[{}]._pvoid);", tid+2);
      -
      783  printer->pop_block();
      -
      784  }
      -
      785 
      -
      786  if (info.vectorize && !info.thread_variables.empty()) {
      -
      787  printer->add_newline(2);
      -
      788  printer->add_line("/** tid for thread variables */");
      -
      789  printer->push_block("static inline int thread_var_tid()");
      -
      790  printer->fmt_line("return {};", info.thread_var_thread_id);
      -
      791  printer->pop_block();
      -
      792  }
      -
      793 
      -
      794  if (info.vectorize && !info.top_local_variables.empty()) {
      -
      795  printer->add_newline(2);
      -
796  printer->add_line("/** tid for top local thread variables */");
      -
      797  printer->push_block("static inline int top_local_var_tid()");
      -
      798  printer->fmt_line("return {};", info.top_local_thread_id);
      -
      799  printer->pop_block();
      -
      800  }
      -
      801  // clang-format on
      -
      802 }
      + +
      746  return "coreneuron";
      +
      747 }
      +
      748 
      +
      749 /**
      +
      750  * \details There are three types of thread variables currently considered:
      +
      751  * - top local thread variables
      +
      752  * - thread variables in the mod file
      +
      753  * - thread variables for solver
      +
      754  *
      +
      755  * These variables are allocated into different thread structures and have
      +
756  * corresponding thread ids. Thread ids start from 0. In the mod2c implementation,
      +
      757  * thread_data_index is increased at various places and it is used to
      +
      758  * decide the index of thread.
      +
      759  */
      + +
      761  if (info.vectorize && info.derivimplicit_used()) {
      +
      762  int tid = info.derivimplicit_var_thread_id;
      +
      763  int list = info.derivimplicit_list_num;
      +
      764 
      +
      765  // clang-format off
      +
      766  printer->add_newline(2);
      +
      767  printer->add_line("/** thread specific helper routines for derivimplicit */");
      +
      768 
      +
      769  printer->add_newline(1);
      +
      770  printer->fmt_push_block("static inline int* deriv{}_advance(ThreadDatum* thread)", list);
      +
      771  printer->fmt_line("return &(thread[{}].i);", tid);
      +
      772  printer->pop_block();
      +
      773  printer->add_newline();
      +
      774 
      +
      775  printer->fmt_push_block("static inline int dith{}()", list);
      +
      776  printer->fmt_line("return {};", tid+1);
      +
      777  printer->pop_block();
      +
      778  printer->add_newline();
      +
      779 
      +
      780  printer->fmt_push_block("static inline void** newtonspace{}(ThreadDatum* thread)", list);
      +
      781  printer->fmt_line("return &(thread[{}]._pvoid);", tid+2);
      +
      782  printer->pop_block();
      +
      783  }
      +
      784 
      +
      785  if (info.vectorize && !info.thread_variables.empty()) {
      +
      786  printer->add_newline(2);
      +
      787  printer->add_line("/** tid for thread variables */");
      +
      788  printer->push_block("static inline int thread_var_tid()");
      +
      789  printer->fmt_line("return {};", info.thread_var_thread_id);
      +
      790  printer->pop_block();
      +
      791  }
      +
      792 
      +
      793  if (info.vectorize && !info.top_local_variables.empty()) {
      +
      794  printer->add_newline(2);
      +
795  printer->add_line("/** tid for top local thread variables */");
      +
      796  printer->push_block("static inline int top_local_var_tid()");
      +
      797  printer->fmt_line("return {};", info.top_local_thread_id);
      +
      798  printer->pop_block();
      +
      799  }
      +
      800  // clang-format on
      +
      801 }
      +
      802 
      803 
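For a mechanism that uses derivimplicit, and assuming tid = 0 and list = 1 (both values depend on the mod file), the emitted helpers look like:

    /** thread specific helper routines for derivimplicit */

    static inline int* deriv1_advance(ThreadDatum* thread) {
        return &(thread[0].i);
    }

    static inline int dith1() {
        return 1;
    }

    static inline void** newtonspace1(ThreadDatum* thread) {
        return &(thread[2]._pvoid);
    }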
      -
      804 
      -
      805 /****************************************************************************************/
      -
      806 /* Routines for returning variable name */
      -
      807 /****************************************************************************************/
      +
      804 /****************************************************************************************/
      +
      805 /* Routines for returning variable name */
      +
      806 /****************************************************************************************/
      +
      807 
      808 
      -
      809 
      - -
      811  bool use_instance) const {
      -
      812  auto name = symbol->get_name();
      -
      813  auto dimension = symbol->get_length();
      -
      814  auto position = position_of_float_var(name);
      -
      815  if (symbol->is_array()) {
      -
      816  if (use_instance) {
      -
      817  return fmt::format("(inst->{}+id*{})", name, dimension);
      -
      818  }
      -
      819  return fmt::format("(data + {}*pnodecount + id*{})", position, dimension);
      -
      820  }
      -
      821  if (use_instance) {
      -
      822  return fmt::format("inst->{}[id]", name);
      -
      823  }
      -
      824  return fmt::format("data[{}*pnodecount + id]", position);
      -
      825 }
      + +
      810  bool use_instance) const {
      +
      811  auto name = symbol->get_name();
      +
      812  auto dimension = symbol->get_length();
      +
      813  auto position = position_of_float_var(name);
      +
      814  if (symbol->is_array()) {
      +
      815  if (use_instance) {
      +
      816  return fmt::format("(inst->{}+id*{})", name, dimension);
      +
      817  }
      +
      818  return fmt::format("(data + {}*pnodecount + id*{})", position, dimension);
      +
      819  }
      +
      820  if (use_instance) {
      +
      821  return fmt::format("inst->{}[id]", name);
      +
      822  }
      +
      823  return fmt::format("data[{}*pnodecount + id]", position);
      +
      824 }
      +
      825 
      826 
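Illustrative resolutions (names, positions and lengths are hypothetical): a scalar float variable m at position 0 and an array variable z of length 3 at position 5 print as

    inst->m[id]                       // scalar, use_instance == true
    data[0*pnodecount + id]           // scalar, use_instance == false
    (inst->z+id*3)                    // array, use_instance == true
    (data + 5*pnodecount + id*3)      // array, use_instance == false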
      -
      827 
      - -
      829  const std::string& name,
      -
      830  bool use_instance) const {
      -
      831  auto position = position_of_int_var(name);
      -
      832  // clang-format off
      -
      833  if (symbol.is_index) {
      -
      834  if (use_instance) {
      -
      835  return fmt::format("inst->{}[{}]", name, position);
      -
      836  }
      -
      837  return fmt::format("indexes[{}]", position);
      -
      838  }
      -
      839  if (symbol.is_integer) {
      -
      840  if (use_instance) {
      -
      841  return fmt::format("inst->{}[{}*pnodecount+id]", name, position);
      -
      842  }
      -
      843  return fmt::format("indexes[{}*pnodecount+id]", position);
      -
      844  }
      -
      845  if (use_instance) {
      -
      846  return fmt::format("inst->{}[indexes[{}*pnodecount + id]]", name, position);
      -
      847  }
      -
      848  auto data = symbol.is_vdata ? "_vdata" : "_data";
      -
      849  return fmt::format("nt->{}[indexes[{}*pnodecount + id]]", data, position);
      -
      850  // clang-format on
      -
      851 }
      + +
      828  const std::string& name,
      +
      829  bool use_instance) const {
      +
      830  auto position = position_of_int_var(name);
      +
      831  // clang-format off
      +
      832  if (symbol.is_index) {
      +
      833  if (use_instance) {
      +
      834  return fmt::format("inst->{}[{}]", name, position);
      +
      835  }
      +
      836  return fmt::format("indexes[{}]", position);
      +
      837  }
      +
      838  if (symbol.is_integer) {
      +
      839  if (use_instance) {
      +
      840  return fmt::format("inst->{}[{}*pnodecount+id]", name, position);
      +
      841  }
      +
      842  return fmt::format("indexes[{}*pnodecount+id]", position);
      +
      843  }
      +
      844  if (use_instance) {
      +
      845  return fmt::format("inst->{}[indexes[{}*pnodecount + id]]", name, position);
      +
      846  }
      +
      847  auto data = symbol.is_vdata ? "_vdata" : "_data";
      +
      848  return fmt::format("nt->{}[indexes[{}*pnodecount + id]]", data, position);
      +
      849  // clang-format on
      +
      850 }
      +
      851 
      852 
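Likewise for integer (dparam) variables, with a hypothetical entry node_area at position 2, the branches above yield for example:

    inst->node_area[2]                           // is_index, use_instance == true
    indexes[2*pnodecount+id]                     // is_integer, use_instance == false
    inst->node_area[indexes[2*pnodecount + id]]  // default, use_instance == true
    nt->_data[indexes[2*pnodecount + id]]        // default, use_instance == false, not vdata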
      -
      853 
      - -
      855  bool use_instance) const {
      -
      856  if (use_instance) {
      -
      857  return fmt::format("inst->{}->{}", naming::INST_GLOBAL_MEMBER, symbol->get_name());
      -
      858  } else {
      -
      859  return fmt::format("{}.{}", global_struct_instance(), symbol->get_name());
      -
      860  }
      -
      861 }
      + +
      854  bool use_instance) const {
      +
      855  if (use_instance) {
      +
      856  return fmt::format("inst->{}->{}", naming::INST_GLOBAL_MEMBER, symbol->get_name());
      +
      857  } else {
      +
      858  return fmt::format("{}.{}", global_struct_instance(), symbol->get_name());
      +
      859  }
      +
      860 }
      +
      861 
      862 
      -
      863 
      -
      864 std::string CodegenCoreneuronCppVisitor::get_variable_name(const std::string& name,
      -
      865  bool use_instance) const {
      -
      866  const std::string& varname = update_if_ion_variable_name(name);
      -
      867 
      -
      868  // clang-format off
      -
      869  auto symbol_comparator = [&varname](const SymbolType& sym) {
      -
      870  return varname == sym->get_name();
      -
      871  };
      -
      872 
      -
      873  auto index_comparator = [&varname](const IndexVariableInfo& var) {
      -
      874  return varname == var.symbol->get_name();
      -
      875  };
      -
      876  // clang-format on
      -
      877 
      -
      878  // float variable
      -
      879  auto f = std::find_if(codegen_float_variables.begin(),
      -
      880  codegen_float_variables.end(),
      -
      881  symbol_comparator);
      -
      882  if (f != codegen_float_variables.end()) {
      -
      883  return float_variable_name(*f, use_instance);
      -
      884  }
      -
      885 
      -
      886  // integer variable
      -
      887  auto i =
      -
      888  std::find_if(codegen_int_variables.begin(), codegen_int_variables.end(), index_comparator);
      -
      889  if (i != codegen_int_variables.end()) {
      -
      890  return int_variable_name(*i, varname, use_instance);
      -
      891  }
      -
      892 
      -
      893  // global variable
      -
      894  auto g = std::find_if(codegen_global_variables.begin(),
      -
      895  codegen_global_variables.end(),
      -
      896  symbol_comparator);
      -
      897  if (g != codegen_global_variables.end()) {
      -
      898  return global_variable_name(*g, use_instance);
      -
      899  }
      -
      900 
      -
      901  if (varname == naming::NTHREAD_DT_VARIABLE) {
      -
      902  return std::string("nt->_") + naming::NTHREAD_DT_VARIABLE;
      -
      903  }
      -
      904 
      -
905  // t in the net_receive method is a function argument and hence it should
-
906  // be used instead of nt->_t, which is the current time of the thread
      -
      907  if (varname == naming::NTHREAD_T_VARIABLE && !printing_net_receive) {
      -
      908  return std::string("nt->_") + naming::NTHREAD_T_VARIABLE;
      -
      909  }
      -
      910 
      -
      911  auto const iter =
      -
      912  std::find_if(info.neuron_global_variables.begin(),
      -
      913  info.neuron_global_variables.end(),
      -
      914  [&varname](auto const& entry) { return entry.first->get_name() == varname; });
      -
      915  if (iter != info.neuron_global_variables.end()) {
      -
      916  std::string ret;
      -
      917  if (use_instance) {
      -
      918  ret = "*(inst->";
      -
      919  }
      -
      920  ret.append(varname);
      -
      921  if (use_instance) {
      -
      922  ret.append(")");
      -
      923  }
      -
      924  return ret;
      -
      925  }
      -
      926 
      -
      927  // otherwise return original name
      -
      928  return varname;
      -
      929 }
      +
      863 std::string CodegenCoreneuronCppVisitor::get_variable_name(const std::string& name,
      +
      864  bool use_instance) const {
      +
      865  const std::string& varname = update_if_ion_variable_name(name);
      +
      866 
      +
      867  // clang-format off
      +
      868  auto symbol_comparator = [&varname](const SymbolType& sym) {
      +
      869  return varname == sym->get_name();
      +
      870  };
      +
      871 
      +
      872  auto index_comparator = [&varname](const IndexVariableInfo& var) {
      +
      873  return varname == var.symbol->get_name();
      +
      874  };
      +
      875  // clang-format on
      +
      876 
      +
      877  // float variable
      +
      878  auto f = std::find_if(codegen_float_variables.begin(),
      +
      879  codegen_float_variables.end(),
      +
      880  symbol_comparator);
      +
      881  if (f != codegen_float_variables.end()) {
      +
      882  return float_variable_name(*f, use_instance);
      +
      883  }
      +
      884 
      +
      885  // integer variable
      +
      886  auto i =
      +
      887  std::find_if(codegen_int_variables.begin(), codegen_int_variables.end(), index_comparator);
      +
      888  if (i != codegen_int_variables.end()) {
      +
      889  return int_variable_name(*i, varname, use_instance);
      +
      890  }
      +
      891 
      +
      892  // global variable
      +
      893  auto g = std::find_if(codegen_global_variables.begin(),
      +
      894  codegen_global_variables.end(),
      +
      895  symbol_comparator);
      +
      896  if (g != codegen_global_variables.end()) {
      +
      897  return global_variable_name(*g, use_instance);
      +
      898  }
      +
      899 
      +
      900  if (varname == naming::NTHREAD_DT_VARIABLE) {
      +
      901  return std::string("nt->_") + naming::NTHREAD_DT_VARIABLE;
      +
      902  }
      +
      903 
      +
904  // t in the net_receive method is a function argument and hence it should
+
905  // be used instead of nt->_t, which is the current time of the thread
      +
      906  if (varname == naming::NTHREAD_T_VARIABLE && !printing_net_receive) {
      +
      907  return std::string("nt->_") + naming::NTHREAD_T_VARIABLE;
      +
      908  }
      +
      909 
      +
      910  auto const iter =
      +
      911  std::find_if(info.neuron_global_variables.begin(),
      +
      912  info.neuron_global_variables.end(),
      +
      913  [&varname](auto const& entry) { return entry.first->get_name() == varname; });
      +
      914  if (iter != info.neuron_global_variables.end()) {
      +
      915  std::string ret;
      +
      916  if (use_instance) {
      +
      917  ret = "*(inst->";
      +
      918  }
      +
      919  ret.append(varname);
      +
      920  if (use_instance) {
      +
      921  ret.append(")");
      +
      922  }
      +
      923  return ret;
      +
      924  }
      +
      925 
      +
      926  // otherwise return original name
      +
      927  return varname;
      +
      928 }
      +
      929 
      930 
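A few hedged examples of the resulting lookup (positions, the usual "dt" naming constant, and the global struct instance name are illustrative):

    get_variable_name("m")                  // float variable  -> inst->m[id]
    get_variable_name("mech_type", false)   // global variable -> hh_global.mech_type
    get_variable_name("dt")                 // time step       -> nt->_dt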
      -
      931 
      -
      932 /****************************************************************************************/
      -
      933 /* Main printing routines for code generation */
      -
      934 /****************************************************************************************/
      +
      931 /****************************************************************************************/
      +
      932 /* Main printing routines for code generation */
      +
      933 /****************************************************************************************/
      +
      934 
      935 
      -
      936 
      - -
      938  printer->add_newline();
      -
      939  printer->add_multi_line(R"CODE(
      -
      940  #include <math.h>
      -
      941  #include <stdio.h>
      -
      942  #include <stdlib.h>
      -
      943  #include <string.h>
      -
      944  )CODE");
      -
      945 }
      + +
      937  printer->add_newline();
      +
      938  printer->add_multi_line(R"CODE(
      +
      939  #include <math.h>
      +
      940  #include <stdio.h>
      +
      941  #include <stdlib.h>
      +
      942  #include <string.h>
      +
      943  )CODE");
      +
      944 }
      +
      945 
      946 
      -
      947 
      - -
      949  printer->add_newline();
      -
      950  printer->add_multi_line(R"CODE(
      -
      951  #include <coreneuron/gpu/nrn_acc_manager.hpp>
      -
      952  #include <coreneuron/mechanism/mech/mod2c_core_thread.hpp>
      -
      953  #include <coreneuron/mechanism/register_mech.hpp>
      -
      954  #include <coreneuron/nrnconf.h>
      -
      955  #include <coreneuron/nrniv/nrniv_decl.h>
      -
      956  #include <coreneuron/sim/multicore.hpp>
      -
      957  #include <coreneuron/sim/scopmath/newton_thread.hpp>
      -
      958  #include <coreneuron/utils/ivocvect.hpp>
      -
      959  #include <coreneuron/utils/nrnoc_aux.hpp>
      -
      960  #include <coreneuron/utils/randoms/nrnran123.h>
      -
      961  )CODE");
      -
      962  if (info.eigen_newton_solver_exist) {
      -
      963  printer->add_multi_line(nmodl::solvers::newton_hpp);
      -
      964  }
      -
      965  if (info.eigen_linear_solver_exist) {
      -
      966  if (std::accumulate(info.state_vars.begin(),
      -
      967  info.state_vars.end(),
      -
      968  0,
      -
      969  [](int l, const SymbolType& variable) {
      -
      970  return l += variable->get_length();
      -
      971  }) > 4) {
      -
      972  printer->add_multi_line(nmodl::solvers::crout_hpp);
      -
      973  } else {
      -
      974  printer->add_line("#include <Eigen/Dense>");
      -
      975  printer->add_line("#include <Eigen/LU>");
      -
      976  }
      -
      977  }
      -
      978 }
      + +
      948  printer->add_newline();
      +
      949  printer->add_multi_line(R"CODE(
      +
      950  #include <coreneuron/gpu/nrn_acc_manager.hpp>
      +
      951  #include <coreneuron/mechanism/mech/mod2c_core_thread.hpp>
      +
      952  #include <coreneuron/mechanism/register_mech.hpp>
      +
      953  #include <coreneuron/nrnconf.h>
      +
      954  #include <coreneuron/nrniv/nrniv_decl.h>
      +
      955  #include <coreneuron/sim/multicore.hpp>
      +
      956  #include <coreneuron/sim/scopmath/newton_thread.hpp>
      +
      957  #include <coreneuron/utils/ivocvect.hpp>
      +
      958  #include <coreneuron/utils/nrnoc_aux.hpp>
      +
      959  #include <coreneuron/utils/randoms/nrnran123.h>
      +
      960  )CODE");
      +
      961  if (info.eigen_newton_solver_exist) {
      +
      962  printer->add_multi_line(nmodl::solvers::newton_hpp);
      +
      963  }
      +
      964  if (info.eigen_linear_solver_exist) {
      +
      965  if (std::accumulate(info.state_vars.begin(),
      +
      966  info.state_vars.end(),
      +
      967  0,
      +
      968  [](int l, const SymbolType& variable) {
      +
      969  return l += variable->get_length();
      +
      970  }) > 4) {
      +
      971  printer->add_multi_line(nmodl::solvers::crout_hpp);
      +
      972  } else {
      +
      973  printer->add_line("#include <Eigen/Dense>");
      +
      974  printer->add_line("#include <Eigen/LU>");
      +
      975  }
      +
      976  }
      +
      977 }
      +
      978 
      979 
      -
      980 
      -
      981 void CodegenCoreneuronCppVisitor::print_sdlists_init(bool print_initializers) {
      -
      982  if (info.primes_size == 0) {
      -
      983  return;
      -
      984  }
      -
      985  const auto count_prime_variables = [](auto size, const SymbolType& symbol) {
      -
      986  return size += symbol->get_length();
      -
      987  };
      -
      988  const auto prime_variables_by_order_size =
      -
      989  std::accumulate(info.prime_variables_by_order.begin(),
      -
      990  info.prime_variables_by_order.end(),
      -
      991  0,
      -
      992  count_prime_variables);
      -
      993  if (info.primes_size != prime_variables_by_order_size) {
      -
      994  throw std::runtime_error{
      -
      995  fmt::format("primes_size = {} differs from prime_variables_by_order.size() = {}, "
      -
      996  "this should not happen.",
      -
      997  info.primes_size,
      -
      998  info.prime_variables_by_order.size())};
      -
      999  }
      -
      1000  auto const initializer_list = [&](auto const& primes, const char* prefix) -> std::string {
      -
      1001  if (!print_initializers) {
      -
      1002  return {};
      -
      1003  }
      -
      1004  std::string list{"{"};
      -
      1005  for (auto iter = primes.begin(); iter != primes.end(); ++iter) {
      -
      1006  auto const& prime = *iter;
      -
      1007  list.append(std::to_string(position_of_float_var(prefix + prime->get_name())));
      -
      1008  if (std::next(iter) != primes.end()) {
      -
      1009  list.append(", ");
      -
      1010  }
      -
      1011  }
      -
      1012  list.append("}");
      -
      1013  return list;
      -
      1014  };
      -
      1015  printer->fmt_line("int slist1[{}]{};",
      -
      1016  info.primes_size,
      -
      1017  initializer_list(info.prime_variables_by_order, ""));
      -
      1018  printer->fmt_line("int dlist1[{}]{};",
      -
      1019  info.primes_size,
      -
      1020  initializer_list(info.prime_variables_by_order, "D"));
      -
      1021  codegen_global_variables.push_back(make_symbol("slist1"));
      -
      1022  codegen_global_variables.push_back(make_symbol("dlist1"));
      -
      1023  // additional list for derivimplicit method
      -
      1024  if (info.derivimplicit_used()) {
      -
      1025  auto primes = program_symtab->get_variables_with_properties(NmodlType::prime_name);
      -
      1026  printer->fmt_line("int slist2[{}]{};", info.primes_size, initializer_list(primes, ""));
      -
      1027  codegen_global_variables.push_back(make_symbol("slist2"));
      -
      1028  }
      -
      1029 }
      +
      980 void CodegenCoreneuronCppVisitor::print_sdlists_init(bool print_initializers) {
      +
      981  if (info.primes_size == 0) {
      +
      982  return;
      +
      983  }
      +
      984  const auto count_prime_variables = [](auto size, const SymbolType& symbol) {
      +
      985  return size += symbol->get_length();
      +
      986  };
      +
      987  const auto prime_variables_by_order_size =
      +
      988  std::accumulate(info.prime_variables_by_order.begin(),
      +
      989  info.prime_variables_by_order.end(),
      +
      990  0,
      +
      991  count_prime_variables);
      +
      992  if (info.primes_size != prime_variables_by_order_size) {
      +
      993  throw std::runtime_error{
      +
      994  fmt::format("primes_size = {} differs from prime_variables_by_order.size() = {}, "
      +
      995  "this should not happen.",
      +
      996  info.primes_size,
      +
      997  info.prime_variables_by_order.size())};
      +
      998  }
      +
      999  auto const initializer_list = [&](auto const& primes, const char* prefix) -> std::string {
      +
      1000  if (!print_initializers) {
      +
      1001  return {};
      +
      1002  }
      +
      1003  std::string list{"{"};
      +
      1004  for (auto iter = primes.begin(); iter != primes.end(); ++iter) {
      +
      1005  auto const& prime = *iter;
      +
      1006  list.append(std::to_string(position_of_float_var(prefix + prime->get_name())));
      +
      1007  if (std::next(iter) != primes.end()) {
      +
      1008  list.append(", ");
      +
      1009  }
      +
      1010  }
      +
      1011  list.append("}");
      +
      1012  return list;
      +
      1013  };
      +
      1014  printer->fmt_line("int slist1[{}]{};",
      +
      1015  info.primes_size,
      +
      1016  initializer_list(info.prime_variables_by_order, ""));
      +
      1017  printer->fmt_line("int dlist1[{}]{};",
      +
      1018  info.primes_size,
      +
      1019  initializer_list(info.prime_variables_by_order, "D"));
      +
      1020  codegen_global_variables.push_back(make_symbol("slist1"));
      +
      1021  codegen_global_variables.push_back(make_symbol("dlist1"));
      +
      1022  // additional list for derivimplicit method
      +
      1023  if (info.derivimplicit_used()) {
      +
      1024  auto primes = program_symtab->get_variables_with_properties(NmodlType::prime_name);
      +
      1025  printer->fmt_line("int slist2[{}]{};", info.primes_size, initializer_list(primes, ""));
      +
      1026  codegen_global_variables.push_back(make_symbol("slist2"));
      +
      1027  }
      +
      1028 }
      +
      1029 
      1030 
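For example (float positions invented), a mechanism with two primes whose state variables sit at positions 1 and 2 and whose D variables sit at positions 5 and 6 would get

    int slist1[2]{1, 2};
    int dlist1[2]{5, 6};

plus an extra slist2 of the same size when derivimplicit is used.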
      -
      1031 
      - -
      1033  return ParamVector{{"", "NrnThread*", "", "nt"},
      -
      1034  {"", fmt::format("{}*", instance_struct()), "", "inst"},
      -
      1035  {"", "int", "", "id"},
      -
      1036  {"", "int", "", "pnodecount"},
      -
      1037  {"", "double", "", "v"},
      -
      1038  {"const ", "Datum*", "", "indexes"},
      -
      1039  {"", "double*", "", "data"},
      -
      1040  {"", "ThreadDatum*", "", "thread"}};
      -
      1041 }
      -
      1042 
      -
      1043 
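When rendered as a parameter string (for example for the THREAD_ARGS_PROTO replacement earlier in this file), this ParamVector becomes, roughly (the instance struct name is mechanism specific and assumed here):

    NrnThread* nt, hh_Instance* inst, int id, int pnodecount, double v,
    const Datum* indexes, double* data, ThreadDatum* thread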
      -
      1044 /**
      -
      1045  * \details Variables required for type of ion, type of point process etc. are
      -
      1046  * of static int type. For the C++ backend type, it's ok to have
      -
      1047  * these variables as file scoped static variables.
      -
      1048  *
      -
      1049  * Initial values of state variables (h0) are also defined as static
      -
1050  * variables. Note that the state could be an ion variable and it could
-
1051  * also be a range variable. Hence look it up in the symbol table first.
      -
      1052  *
      -
      1053  * When model is not vectorized (shouldn't be the case in coreneuron)
      -
      1054  * the top local variables become static variables.
      -
      1055  *
      -
      1056  * Note that static variables are already initialized to 0. We do the
      -
      1057  * same for some variables to keep same code as neuron.
      -
      1058  */
      -
      1059 // NOLINTNEXTLINE(readability-function-cognitive-complexity)
      - -
      1061  const auto value_initialize = print_initializers ? "{}" : "";
      -
      1062 
      -
      1063  auto float_type = default_float_data_type();
      -
      1064  printer->add_newline(2);
      -
      1065  printer->add_line("/** all global variables */");
      -
      1066  printer->fmt_push_block("struct {}", global_struct());
      -
      1067 
      -
      1068  for (const auto& ion: info.ions) {
      -
      1069  auto name = fmt::format("{}_type", ion.name);
      -
      1070  printer->fmt_line("int {}{};", name, value_initialize);
      -
      1071  codegen_global_variables.push_back(make_symbol(name));
      -
      1072  }
      -
      1073 
      -
      1074  if (info.point_process) {
      -
      1075  printer->fmt_line("int point_type{};", value_initialize);
      -
      1076  codegen_global_variables.push_back(make_symbol("point_type"));
      -
      1077  }
      -
      1078 
      -
      1079  for (const auto& var: info.state_vars) {
      -
      1080  auto name = var->get_name() + "0";
      -
      1081  auto symbol = program_symtab->lookup(name);
      -
      1082  if (symbol == nullptr) {
      -
      1083  printer->fmt_line("{} {}{};", float_type, name, value_initialize);
      -
      1084  codegen_global_variables.push_back(make_symbol(name));
      -
      1085  }
      -
      1086  }
      -
      1087 
      -
1088  // Neuron and CoreNEURON add "v" to global variables when vectorize
-
1089  // is false. But as v is always a local variable and passed as an argument,
-
1090  // we don't need to use the global variable v
      -
      1091 
      -
      1092  auto& top_locals = info.top_local_variables;
      -
      1093  if (!info.vectorize && !top_locals.empty()) {
      -
      1094  for (const auto& var: top_locals) {
      -
      1095  auto name = var->get_name();
      -
      1096  auto length = var->get_length();
      -
      1097  if (var->is_array()) {
      -
      1098  printer->fmt_line("{} {}[{}] /* TODO init top-local-array */;",
      -
      1099  float_type,
      -
      1100  name,
      -
      1101  length);
      -
      1102  } else {
      -
      1103  printer->fmt_line("{} {} /* TODO init top-local */;", float_type, name);
      -
      1104  }
      -
      1105  codegen_global_variables.push_back(var);
      -
      1106  }
      -
      1107  }
      -
      1108 
      -
      1109  if (!info.thread_variables.empty()) {
      -
      1110  printer->fmt_line("int thread_data_in_use{};", value_initialize);
      -
      1111  printer->fmt_line("{} thread_data[{}] /* TODO init thread_data */;",
      -
      1112  float_type,
      -
      1113  info.thread_var_data_size);
      -
      1114  codegen_global_variables.push_back(make_symbol("thread_data_in_use"));
      -
      1115  auto symbol = make_symbol("thread_data");
      -
      1116  symbol->set_as_array(info.thread_var_data_size);
      -
      1117  codegen_global_variables.push_back(symbol);
      -
      1118  }
      -
      1119 
      -
      1120  // TODO: remove this entirely?
      -
      1121  printer->fmt_line("int reset{};", value_initialize);
      -
      1122  codegen_global_variables.push_back(make_symbol("reset"));
      -
      1123 
      -
      1124  printer->fmt_line("int mech_type{};", value_initialize);
      -
      1125  codegen_global_variables.push_back(make_symbol("mech_type"));
      -
      1126 
      -
      1127  for (const auto& var: info.global_variables) {
      -
      1128  auto name = var->get_name();
      -
      1129  auto length = var->get_length();
      -
      1130  if (var->is_array()) {
      -
      1131  printer->fmt_line("{} {}[{}] /* TODO init const-array */;", float_type, name, length);
      -
      1132  } else {
      -
      1133  double value{};
      -
      1134  if (auto const& value_ptr = var->get_value()) {
      -
      1135  value = *value_ptr;
      -
      1136  }
      -
      1137  printer->fmt_line("{} {}{};",
      -
      1138  float_type,
      -
      1139  name,
      -
      1140  print_initializers ? fmt::format("{{{:g}}}", value) : std::string{});
      -
      1141  }
      -
      1142  codegen_global_variables.push_back(var);
      -
      1143  }
      -
      1144 
      -
      1145  for (const auto& var: info.constant_variables) {
      -
      1146  auto const name = var->get_name();
      -
      1147  auto* const value_ptr = var->get_value().get();
      -
      1148  double const value{value_ptr ? *value_ptr : 0};
      -
      1149  printer->fmt_line("{} {}{};",
      -
      1150  float_type,
      -
      1151  name,
      -
      1152  print_initializers ? fmt::format("{{{:g}}}", value) : std::string{});
      -
      1153  codegen_global_variables.push_back(var);
      -
      1154  }
      -
      1155 
      -
      1156  print_sdlists_init(print_initializers);
      -
      1157 
      -
      1158  if (info.table_count > 0) {
      -
      1159  printer->fmt_line("double usetable{};", print_initializers ? "{1}" : "");
      -
      1160  codegen_global_variables.push_back(make_symbol(naming::USE_TABLE_VARIABLE));
      -
      1161 
      -
      1162  for (const auto& block: info.functions_with_table) {
      -
      1163  const auto& name = block->get_node_name();
      -
      1164  printer->fmt_line("{} tmin_{}{};", float_type, name, value_initialize);
      -
      1165  printer->fmt_line("{} mfac_{}{};", float_type, name, value_initialize);
      -
      1166  codegen_global_variables.push_back(make_symbol("tmin_" + name));
      -
      1167  codegen_global_variables.push_back(make_symbol("mfac_" + name));
      -
      1168  }
      -
      1169 
      -
      1170  for (const auto& variable: info.table_statement_variables) {
      -
      1171  auto const name = "t_" + variable->get_name();
      -
      1172  auto const num_values = variable->get_num_values();
      -
      1173  if (variable->is_array()) {
      -
      1174  int array_len = variable->get_length();
      -
      1175  printer->fmt_line(
      -
      1176  "{} {}[{}][{}]{};", float_type, name, array_len, num_values, value_initialize);
      -
      1177  } else {
      -
      1178  printer->fmt_line("{} {}[{}]{};", float_type, name, num_values, value_initialize);
      -
      1179  }
      -
      1180  codegen_global_variables.push_back(make_symbol(name));
      -
      1181  }
      -
      1182  }
      -
      1183 
      -
      1184  for (const auto& f: info.function_tables) {
      -
      1185  printer->fmt_line("void* _ptable_{}{{}};", f->get_node_name());
      -
      1186  codegen_global_variables.push_back(make_symbol("_ptable_" + f->get_node_name()));
      -
      1187  }
      -
      1188 
      -
      1189  if (info.vectorize && info.thread_data_index) {
      -
      1190  printer->fmt_line("ThreadDatum ext_call_thread[{}]{};",
      -
      1191  info.thread_data_index,
      -
      1192  value_initialize);
      -
      1193  codegen_global_variables.push_back(make_symbol("ext_call_thread"));
      -
      1194  }
      -
      1195 
      -
      1196  printer->pop_block(";");
      -
      1197 
      -
      1198  print_global_var_struct_assertions();
      -
      1199  print_global_var_struct_decl();
      -
      1200 }
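A trimmed sketch of the struct this prints for a hypothetical sodium/potassium channel (struct name, members and default values are illustrative; TABLE, function-table and thread entries are omitted):

    /** all global variables */
    struct hh_Store {
        int na_type{};
        int k_type{};
        double m0{};
        double h0{};
        int reset{};
        int mech_type{};
        double gl{0.0003};
        int slist1[2]{1, 2};
        int dlist1[2]{5, 6};
    };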
      -
      1201 
      -
      1202 
      -
      1203 /**
      -
      1204  * Print structs that encapsulate information about scalar and
      -
      1205  * vector elements of type global and thread variables.
      -
      1206  */
      - -
      1208  auto variable_printer =
      -
      1209  [&](const std::vector<SymbolType>& variables, bool if_array, bool if_vector) {
      -
      1210  for (const auto& variable: variables) {
      -
      1211  if (variable->is_array() == if_array) {
      -
      1212  // false => do not use the instance struct, which is not
      -
      1213  // defined in the global declaration that we are printing
      -
      1214  auto name = get_variable_name(variable->get_name(), false);
      -
      1215  auto ename = add_escape_quote(variable->get_name() + "_" + info.mod_suffix);
      -
      1216  auto length = variable->get_length();
      -
      1217  if (if_vector) {
      -
      1218  printer->fmt_line("{{{}, {}, {}}},", ename, name, length);
      -
      1219  } else {
      -
      1220  printer->fmt_line("{{{}, &{}}},", ename, name);
      -
      1221  }
      -
      1222  }
      -
      1223  }
      -
      1224  };
      -
      1225 
      -
      1226  auto globals = info.global_variables;
      -
      1227  auto thread_vars = info.thread_variables;
      -
      1228 
      -
      1229  if (info.table_count > 0) {
      -
      1230  globals.push_back(make_symbol(naming::USE_TABLE_VARIABLE));
      -
      1231  }
      -
      1232 
      -
      1233  printer->add_newline(2);
      -
      1234  printer->add_line("/** connect global (scalar) variables to hoc -- */");
      -
      1235  printer->add_line("static DoubScal hoc_scalar_double[] = {");
      -
      1236  printer->increase_indent();
      -
      1237  variable_printer(globals, false, false);
      -
      1238  variable_printer(thread_vars, false, false);
      -
      1239  printer->add_line("{nullptr, nullptr}");
      -
      1240  printer->decrease_indent();
      -
      1241  printer->add_line("};");
      -
      1242 
      -
      1243  printer->add_newline(2);
      -
      1244  printer->add_line("/** connect global (array) variables to hoc -- */");
      -
      1245  printer->add_line("static DoubVec hoc_vector_double[] = {");
      -
      1246  printer->increase_indent();
      -
      1247  variable_printer(globals, true, true);
      -
      1248  variable_printer(thread_vars, true, true);
      -
      1249  printer->add_line("{nullptr, nullptr, 0}");
      -
      1250  printer->decrease_indent();
      -
      1251  printer->add_line("};");
      -
      1252 }
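Its output has this shape (the suffix `hh`, the global-struct instance name and the array length are illustrative):

    /** connect global (scalar) variables to hoc -- */
    static DoubScal hoc_scalar_double[] = {
        {"usetable_hh", &hh_global.usetable},
        {nullptr, nullptr}
    };

    /** connect global (array) variables to hoc -- */
    static DoubVec hoc_vector_double[] = {
        {"rates_hh", hh_global.rates, 4},
        {nullptr, nullptr, 0}
    };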
      -
      1253 
      -
      1254 
      -
      1255 /**
      -
      1256  * Return registration type for a given BEFORE/AFTER block
      -
1257  * \param block A BEFORE/AFTER block being registered
      -
      1258  *
      -
      1259  * Depending on a block type i.e. BEFORE or AFTER and also type
      -
1260  * of its associated block i.e. BREAKPOINT, INITIAL, SOLVE and
      -
      1261  * STEP, the registration type (as an integer) is calculated.
      -
      1262  * These values are then interpreted by CoreNEURON internally.
      -
      1263  */
      -
      1264 static std::string get_register_type_for_ba_block(const ast::Block* block) {
      -
      1265  std::string register_type{};
      -
      1266  BAType ba_type{};
      -
1267  /// before blocks have value 10 and after blocks 20
      -
      1268  if (block->is_before_block()) {
      -
      1269  // NOLINTNEXTLINE(cppcoreguidelines-avoid-magic-numbers,readability-magic-numbers)
      -
      1270  register_type = "BAType::Before";
      -
      1271  ba_type =
      -
      1272  dynamic_cast<const ast::BeforeBlock*>(block)->get_bablock()->get_type()->get_value();
      -
      1273  } else {
      -
      1274  // NOLINTNEXTLINE(cppcoreguidelines-avoid-magic-numbers,readability-magic-numbers)
      -
      1275  register_type = "BAType::After";
      -
      1276  ba_type =
      -
      1277  dynamic_cast<const ast::AfterBlock*>(block)->get_bablock()->get_type()->get_value();
      -
      1278  }
      -
      1279 
      -
      1280  /// associated blocks have different values (1 to 4) based on type.
      -
      1281  /// These values are based on neuron/coreneuron implementation details.
      -
      1282  if (ba_type == BATYPE_BREAKPOINT) {
      -
      1283  register_type += " + BAType::Breakpoint";
      -
      1284  } else if (ba_type == BATYPE_SOLVE) {
      -
      1285  register_type += " + BAType::Solve";
      -
      1286  } else if (ba_type == BATYPE_INITIAL) {
      -
      1287  register_type += " + BAType::Initial";
      -
      1288  } else if (ba_type == BATYPE_STEP) {
      -
      1289  register_type += " + BAType::Step";
      -
      1290  } else {
      -
      1291  throw std::runtime_error("Unhandled Before/After type encountered during code generation");
      -
      1292  }
      -
      1293  return register_type;
      -
      1294 }
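For instance, a BEFORE BREAKPOINT block makes this function return "BAType::Before + BAType::Breakpoint", while an AFTER SOLVE block yields "BAType::After + BAType::Solve":

    get_register_type_for_ba_block(before_breakpoint_block);  // -> "BAType::Before + BAType::Breakpoint"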
      -
      1295 
      -
      1296 
      -
      1297 /**
      -
      1298  * \details Every mod file has register function to connect with the simulator.
      -
      1299  * Various information about mechanism and callbacks get registered with
      -
      1300  * the simulator using suffix_reg() function.
      -
      1301  *
      -
      1302  * Here are details:
      -
      1303  * - We should exclude that callback based on the solver, watch statements.
      -
1304  * - If nrn_get_mechtype returns < -1, it means that the mechanism is not used in the
      -
      1305  * context of neuron execution and hence could be ignored in coreneuron
      -
      1306  * execution.
      -
      1307  * - Ions are internally defined and their types can be queried similar to
      -
      1308  * other mechanisms.
      -
      1309  * - hoc_register_var may not be needed in the context of coreneuron
      -
      1310  * - We assume net receive buffer is on. This is because generated code is
      -
      1311  * compatible for cpu as well as gpu target.
      -
      1312  */
      -
      1313 // NOLINTNEXTLINE(readability-function-cognitive-complexity)
      - -
      1315  printer->add_newline(2);
      -
      1316  printer->add_line("/** register channel with the simulator */");
      -
      1317  printer->fmt_push_block("void _{}_reg()", info.mod_file);
      -
      1318 
      -
      1319  // type related information
      -
      1320  auto suffix = add_escape_quote(info.mod_suffix);
      -
      1321  printer->add_newline();
      -
      1322  printer->fmt_line("int mech_type = nrn_get_mechtype({});", suffix);
      -
      1323  printer->fmt_line("{} = mech_type;", get_variable_name("mech_type", false));
      -
      1324  printer->push_block("if (mech_type == -1)");
      -
      1325  printer->add_line("return;");
      -
      1326  printer->pop_block();
      -
      1327 
      -
      1328  printer->add_newline();
      -
      1329  printer->add_line("_nrn_layout_reg(mech_type, 0);"); // 0 for SoA
      -
      1330 
      -
      1331  // register mechanism
      -
      1332  const auto mech_arguments = register_mechanism_arguments();
      -
      1333  const auto number_of_thread_objects = num_thread_objects();
      -
      1334  if (info.point_process) {
      -
      1335  printer->fmt_line("point_register_mech({}, {}, {}, {});",
      -
      1336  mech_arguments,
      -
      1337  info.constructor_node ? method_name(naming::NRN_CONSTRUCTOR_METHOD)
      -
      1338  : "nullptr",
      -
      1339  info.destructor_node ? method_name(naming::NRN_DESTRUCTOR_METHOD)
      -
      1340  : "nullptr",
      -
      1341  number_of_thread_objects);
      -
      1342  } else {
      -
      1343  printer->fmt_line("register_mech({}, {});", mech_arguments, number_of_thread_objects);
      -
      1344  if (info.constructor_node) {
      -
      1345  printer->fmt_line("register_constructor({});",
      -
      1346  method_name(naming::NRN_CONSTRUCTOR_METHOD));
      -
      1347  }
      -
      1348  }
      -
      1349 
      -
      1350  // types for ion
      -
      1351  for (const auto& ion: info.ions) {
      -
      1352  printer->fmt_line("{} = nrn_get_mechtype({});",
      -
      1353  get_variable_name(ion.name + "_type", false),
      -
      1354  add_escape_quote(ion.name + "_ion"));
      -
      1355  }
      -
      1356  printer->add_newline();
      -
      1357 
      -
      1358  /*
      -
      1359  * Register callbacks for thread allocation and cleanup. Note that thread_data_index
      -
      1360  * represent total number of thread used minus 1 (i.e. index of last thread).
      -
      1361  */
      -
      1362  if (info.vectorize && (info.thread_data_index != 0)) {
      -
      1363  // false to avoid getting the copy from the instance structure
      -
      1364  printer->fmt_line("thread_mem_init({});", get_variable_name("ext_call_thread", false));
      -
      1365  }
      -
      1366 
      -
      1367  if (!info.thread_variables.empty()) {
      -
      1368  printer->fmt_line("{} = 0;", get_variable_name("thread_data_in_use"));
      -
      1369  }
      -
      1370 
      -
      1371  if (info.thread_callback_register) {
      -
      1372  printer->add_line("_nrn_thread_reg0(mech_type, thread_mem_cleanup);");
      -
      1373  printer->add_line("_nrn_thread_reg1(mech_type, thread_mem_init);");
      -
      1374  }
      -
      1375 
      -
      1376  if (info.emit_table_thread()) {
      -
      1377  auto name = method_name("check_table_thread");
      -
      1378  printer->fmt_line("_nrn_thread_table_reg(mech_type, {});", name);
      -
      1379  }
      -
      1380 
      -
      1381  // register read/write callbacks for pointers
      -
      1382  if (info.bbcore_pointer_used) {
      -
      1383  printer->add_line("hoc_reg_bbcore_read(mech_type, bbcore_read);");
      -
      1384  printer->add_line("hoc_reg_bbcore_write(mech_type, bbcore_write);");
      -
      1385  }
      -
      1386 
      -
      1387  // register size of double and int elements
      -
      1388  // clang-format off
      -
      1389  printer->add_line("hoc_register_prop_size(mech_type, float_variables_size(), int_variables_size());");
      -
      1390  // clang-format on
      -
      1391 
      -
      1392  // register semantics for index variables
      -
      1393  for (auto& semantic: info.semantics) {
      -
      1394  auto args =
      -
      1395  fmt::format("mech_type, {}, {}", semantic.index, add_escape_quote(semantic.name));
      -
      1396  printer->fmt_line("hoc_register_dparam_semantics({});", args);
      -
      1397  }
      -
      1398 
      -
      1399  if (info.is_watch_used()) {
      -
      1400  auto watch_fun = compute_method_name(BlockType::Watch);
      -
      1401  printer->fmt_line("hoc_register_watch_check({}, mech_type);", watch_fun);
      -
      1402  }
      -
      1403 
      -
      1404  if (info.write_concentration) {
      -
      1405  printer->add_line("nrn_writes_conc(mech_type, 0);");
      -
      1406  }
      -
      1407 
      -
      1408  // register various information for point process type
      -
      1409  if (info.net_event_used) {
      -
      1410  printer->add_line("add_nrn_has_net_event(mech_type);");
      -
      1411  }
      -
      1412  if (info.artificial_cell) {
      -
      1413  printer->fmt_line("add_nrn_artcell(mech_type, {});", info.tqitem_index);
      -
      1414  }
      -
      1415  if (net_receive_buffering_required()) {
      -
      1416  printer->fmt_line("hoc_register_net_receive_buffering({}, mech_type);",
      -
      1417  method_name("net_buf_receive"));
      -
      1418  }
      -
      1419  if (info.num_net_receive_parameters != 0) {
      -
      1420  auto net_recv_init_arg = "nullptr";
      -
      1421  if (info.net_receive_initial_node != nullptr) {
      -
      1422  net_recv_init_arg = "net_init";
      -
      1423  }
      -
      1424  printer->fmt_line("set_pnt_receive(mech_type, {}, {}, num_net_receive_args());",
      -
      1425  method_name("net_receive"),
      -
      1426  net_recv_init_arg);
      -
      1427  }
      -
      1428  if (info.for_netcon_used) {
      -
      1429  const auto index = position_of_int_var(naming::FOR_NETCON_VARIABLE);
      -
      1430  printer->fmt_line("add_nrn_fornetcons(mech_type, {});", index);
      -
      1431  }
      -
      1432 
      -
      1433  if (info.net_event_used || info.net_send_used) {
      -
      1434  printer->add_line("hoc_register_net_send_buffering(mech_type);");
      -
      1435  }
      -
      1436 
      -
      1437  /// register all before/after blocks
      -
      1438  for (size_t i = 0; i < info.before_after_blocks.size(); i++) {
      -
      1439  // register type and associated function name for the block
      -
      1440  const auto& block = info.before_after_blocks[i];
      -
      1441  std::string register_type = get_register_type_for_ba_block(block);
      -
      1442  std::string function_name = method_name(fmt::format("nrn_before_after_{}", i));
      -
      1443  printer->fmt_line("hoc_reg_ba(mech_type, {}, {});", function_name, register_type);
      -
      1444  }
      -
      1445 
      -
      1446  // register variables for hoc
      -
      1447  printer->add_line("hoc_register_var(hoc_scalar_double, hoc_vector_double, NULL);");
      -
      1448  printer->pop_block();
      -
      1449 }
      + +
      1032  return ParamVector{{"", "NrnThread*", "", "nt"},
      +
      1033  {"", fmt::format("{}*", instance_struct()), "", "inst"},
      +
      1034  {"", "int", "", "id"},
      +
      1035  {"", "int", "", "pnodecount"},
      +
      1036  {"", "double", "", "v"},
      +
      1037  {"const ", "Datum*", "", "indexes"},
      +
      1038  {"", "double*", "", "data"},
      +
      1039  {"", "ThreadDatum*", "", "thread"}};
      +
      1040 }
      +
      1041 
      +
      1042 
      +
      1043 /**
      +
      1044  * \details Variables required for type of ion, type of point process etc. are
      +
1045  * of static int type. For the C++ backend, it's fine to have
+
1046  * these variables as file-scoped static variables.
      +
      1047  *
      +
      1048  * Initial values of state variables (h0) are also defined as static
      +
1049  * variables. Note that the state could be an ion variable and it could
+
1050  * also be a range variable. Hence we look it up in the symbol table first.
      +
      1051  *
      +
1052  * When the model is not vectorized (which shouldn't be the case in coreneuron),
+
1053  * the top local variables become static variables.
      +
      1054  *
      +
      1055  * Note that static variables are already initialized to 0. We do the
      +
1056  * same for some variables to keep the same code as neuron.
      +
      1057  */
      +
      1058 // NOLINTNEXTLINE(readability-function-cognitive-complexity)
      + +
      1060  const auto value_initialize = print_initializers ? "{}" : "";
      +
      1061 
      +
      1062  auto float_type = default_float_data_type();
      +
      1063  printer->add_newline(2);
      +
      1064  printer->add_line("/** all global variables */");
      +
      1065  printer->fmt_push_block("struct {}", global_struct());
      +
      1066 
      +
      1067  for (const auto& ion: info.ions) {
      +
      1068  auto name = fmt::format("{}_type", ion.name);
      +
      1069  printer->fmt_line("int {}{};", name, value_initialize);
      +
      1070  codegen_global_variables.push_back(make_symbol(name));
      +
      1071  }
      +
      1072 
      +
      1073  if (info.point_process) {
      +
      1074  printer->fmt_line("int point_type{};", value_initialize);
      +
      1075  codegen_global_variables.push_back(make_symbol("point_type"));
      +
      1076  }
      +
      1077 
      +
      1078  for (const auto& var: info.state_vars) {
      +
      1079  auto name = var->get_name() + "0";
      +
      1080  auto symbol = program_symtab->lookup(name);
      +
      1081  if (symbol == nullptr) {
      +
      1082  printer->fmt_line("{} {}{};", float_type, name, value_initialize);
      +
      1083  codegen_global_variables.push_back(make_symbol(name));
      +
      1084  }
      +
      1085  }
      +
      1086 
      +
1087  // Neuron and Coreneuron add "v" to the global variables when vectorize
+
1088  // is false. But since v is always a local variable passed as an argument,
+
1089  // we don't need a global variable v.
      +
      1090 
      +
      1091  auto& top_locals = info.top_local_variables;
      +
      1092  if (!info.vectorize && !top_locals.empty()) {
      +
      1093  for (const auto& var: top_locals) {
      +
      1094  auto name = var->get_name();
      +
      1095  auto length = var->get_length();
      +
      1096  if (var->is_array()) {
      +
      1097  printer->fmt_line("{} {}[{}] /* TODO init top-local-array */;",
      +
      1098  float_type,
      +
      1099  name,
      +
      1100  length);
      +
      1101  } else {
      +
      1102  printer->fmt_line("{} {} /* TODO init top-local */;", float_type, name);
      +
      1103  }
      +
      1104  codegen_global_variables.push_back(var);
      +
      1105  }
      +
      1106  }
      +
      1107 
      +
      1108  if (!info.thread_variables.empty()) {
      +
      1109  printer->fmt_line("int thread_data_in_use{};", value_initialize);
      +
      1110  printer->fmt_line("{} thread_data[{}] /* TODO init thread_data */;",
      +
      1111  float_type,
      +
      1112  info.thread_var_data_size);
      +
      1113  codegen_global_variables.push_back(make_symbol("thread_data_in_use"));
      +
      1114  auto symbol = make_symbol("thread_data");
      +
      1115  symbol->set_as_array(info.thread_var_data_size);
      +
      1116  codegen_global_variables.push_back(symbol);
      +
      1117  }
      +
      1118 
      +
      1119  // TODO: remove this entirely?
      +
      1120  printer->fmt_line("int reset{};", value_initialize);
      +
      1121  codegen_global_variables.push_back(make_symbol("reset"));
      +
      1122 
      +
      1123  printer->fmt_line("int mech_type{};", value_initialize);
      +
      1124  codegen_global_variables.push_back(make_symbol("mech_type"));
      +
      1125 
      +
      1126  for (const auto& var: info.global_variables) {
      +
      1127  auto name = var->get_name();
      +
      1128  auto length = var->get_length();
      +
      1129  if (var->is_array()) {
      +
      1130  printer->fmt_line("{} {}[{}] /* TODO init const-array */;", float_type, name, length);
      +
      1131  } else {
      +
      1132  double value{};
      +
      1133  if (auto const& value_ptr = var->get_value()) {
      +
      1134  value = *value_ptr;
      +
      1135  }
      +
      1136  printer->fmt_line("{} {}{};",
      +
      1137  float_type,
      +
      1138  name,
      +
      1139  print_initializers ? fmt::format("{{{:g}}}", value) : std::string{});
      +
      1140  }
      +
      1141  codegen_global_variables.push_back(var);
      +
      1142  }
      +
      1143 
      +
      1144  for (const auto& var: info.constant_variables) {
      +
      1145  auto const name = var->get_name();
      +
      1146  auto* const value_ptr = var->get_value().get();
      +
      1147  double const value{value_ptr ? *value_ptr : 0};
      +
      1148  printer->fmt_line("{} {}{};",
      +
      1149  float_type,
      +
      1150  name,
      +
      1151  print_initializers ? fmt::format("{{{:g}}}", value) : std::string{});
      +
      1152  codegen_global_variables.push_back(var);
      +
      1153  }
      +
      1154 
      +
      1155  print_sdlists_init(print_initializers);
      +
      1156 
      +
      1157  if (info.table_count > 0) {
      +
      1158  printer->fmt_line("double usetable{};", print_initializers ? "{1}" : "");
      +
      1159  codegen_global_variables.push_back(make_symbol(naming::USE_TABLE_VARIABLE));
      +
      1160 
      +
      1161  for (const auto& block: info.functions_with_table) {
      +
      1162  const auto& name = block->get_node_name();
      +
      1163  printer->fmt_line("{} tmin_{}{};", float_type, name, value_initialize);
      +
      1164  printer->fmt_line("{} mfac_{}{};", float_type, name, value_initialize);
      +
      1165  codegen_global_variables.push_back(make_symbol("tmin_" + name));
      +
      1166  codegen_global_variables.push_back(make_symbol("mfac_" + name));
      +
      1167  }
      +
      1168 
      +
      1169  for (const auto& variable: info.table_statement_variables) {
      +
      1170  auto const name = "t_" + variable->get_name();
      +
      1171  auto const num_values = variable->get_num_values();
      +
      1172  if (variable->is_array()) {
      +
      1173  int array_len = variable->get_length();
      +
      1174  printer->fmt_line(
      +
      1175  "{} {}[{}][{}]{};", float_type, name, array_len, num_values, value_initialize);
      +
      1176  } else {
      +
      1177  printer->fmt_line("{} {}[{}]{};", float_type, name, num_values, value_initialize);
      +
      1178  }
      +
      1179  codegen_global_variables.push_back(make_symbol(name));
      +
      1180  }
      +
      1181  }
      +
      1182 
      +
      1183  for (const auto& f: info.function_tables) {
      +
      1184  printer->fmt_line("void* _ptable_{}{{}};", f->get_node_name());
      +
      1185  codegen_global_variables.push_back(make_symbol("_ptable_" + f->get_node_name()));
      +
      1186  }
      +
      1187 
      +
      1188  if (info.vectorize && info.thread_data_index) {
      +
      1189  printer->fmt_line("ThreadDatum ext_call_thread[{}]{};",
      +
      1190  info.thread_data_index,
      +
      1191  value_initialize);
      +
      1192  codegen_global_variables.push_back(make_symbol("ext_call_thread"));
      +
      1193  }
      +
      1194 
      +
      1195  printer->pop_block(";");
      +
      1196 
      +
      1197  print_global_var_struct_assertions();
      +
      1198  print_global_var_struct_decl();
      +
      1199 }
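/* Illustrative sketch only (not taken from the generated sources): for a
   hypothetical density mechanism with suffix "ca_ex" that reads the "ca" ion,
   has one state variable "m" and a GLOBAL variable "gmax = 0.01", the struct
   printed above comes out roughly as

       struct ca_ex_Store {
           int ca_type{};
           double m0{};
           int reset{};
           int mech_type{};
           double gmax{0.01};
       };

   followed by the assertions from print_global_var_struct_assertions() and the
   file-scoped instance declared by print_global_var_struct_decl(). The struct
   and member names depend on global_struct() and on the particular mod file. */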
      +
      1200 
      +
      1201 
      +
      1202 /**
      +
      1203  * Print structs that encapsulate information about scalar and
      +
      1204  * vector elements of type global and thread variables.
      +
      1205  */
      + +
      1207  auto variable_printer =
      +
      1208  [&](const std::vector<SymbolType>& variables, bool if_array, bool if_vector) {
      +
      1209  for (const auto& variable: variables) {
      +
      1210  if (variable->is_array() == if_array) {
      +
      1211  // false => do not use the instance struct, which is not
      +
      1212  // defined in the global declaration that we are printing
      +
      1213  auto name = get_variable_name(variable->get_name(), false);
      +
      1214  auto ename = add_escape_quote(variable->get_name() + "_" + info.mod_suffix);
      +
      1215  auto length = variable->get_length();
      +
      1216  if (if_vector) {
      +
      1217  printer->fmt_line("{{{}, {}, {}}},", ename, name, length);
      +
      1218  } else {
      +
      1219  printer->fmt_line("{{{}, &{}}},", ename, name);
      +
      1220  }
      +
      1221  }
      +
      1222  }
      +
      1223  };
      +
      1224 
      +
      1225  auto globals = info.global_variables;
      +
      1226  auto thread_vars = info.thread_variables;
      +
      1227 
      +
      1228  if (info.table_count > 0) {
      +
      1229  globals.push_back(make_symbol(naming::USE_TABLE_VARIABLE));
      +
      1230  }
      +
      1231 
      +
      1232  printer->add_newline(2);
      +
      1233  printer->add_line("/** connect global (scalar) variables to hoc -- */");
      +
      1234  printer->add_line("static DoubScal hoc_scalar_double[] = {");
      +
      1235  printer->increase_indent();
      +
      1236  variable_printer(globals, false, false);
      +
      1237  variable_printer(thread_vars, false, false);
      +
      1238  printer->add_line("{nullptr, nullptr}");
      +
      1239  printer->decrease_indent();
      +
      1240  printer->add_line("};");
      +
      1241 
      +
      1242  printer->add_newline(2);
      +
      1243  printer->add_line("/** connect global (array) variables to hoc -- */");
      +
      1244  printer->add_line("static DoubVec hoc_vector_double[] = {");
      +
      1245  printer->increase_indent();
      +
      1246  variable_printer(globals, true, true);
      +
      1247  variable_printer(thread_vars, true, true);
      +
      1248  printer->add_line("{nullptr, nullptr, 0}");
      +
      1249  printer->decrease_indent();
      +
      1250  printer->add_line("};");
      +
      1251 }
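/* Illustrative sketch only (hypothetical "ca_ex" mechanism with a GLOBAL scalar
   "gmax" and no thread or array variables): the two arrays printed here come
   out roughly as

       static DoubScal hoc_scalar_double[] = {
           {"gmax_ca_ex", &ca_ex_global.gmax},
           {nullptr, nullptr}
       };

       static DoubVec hoc_vector_double[] = {
           {nullptr, nullptr, 0}
       };

   where "ca_ex_global.gmax" stands for whatever get_variable_name("gmax", false)
   resolves to for the mechanism at hand. */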
      +
      1252 
      +
      1253 
      +
      1254 /**
      +
      1255  * Return registration type for a given BEFORE/AFTER block
      +
1256  * \param block A BEFORE/AFTER block being registered
      +
      1257  *
      +
1258  * Depending on the block type, i.e. BEFORE or AFTER, and also the type
+
1259  * of its associated block, i.e. BREAKPOINT, INITIAL, SOLVE or
+
1260  * STEP, the registration type (as an integer) is calculated.
      +
      1261  * These values are then interpreted by CoreNEURON internally.
      +
      1262  */
      +
      1263 static std::string get_register_type_for_ba_block(const ast::Block* block) {
      +
      1264  std::string register_type{};
      +
      1265  BAType ba_type{};
      +
1266  /// before blocks have value 10 and after blocks 20
      +
      1267  if (block->is_before_block()) {
      +
      1268  // NOLINTNEXTLINE(cppcoreguidelines-avoid-magic-numbers,readability-magic-numbers)
      +
      1269  register_type = "BAType::Before";
      +
      1270  ba_type =
      +
      1271  dynamic_cast<const ast::BeforeBlock*>(block)->get_bablock()->get_type()->get_value();
      +
      1272  } else {
      +
      1273  // NOLINTNEXTLINE(cppcoreguidelines-avoid-magic-numbers,readability-magic-numbers)
      +
      1274  register_type = "BAType::After";
      +
      1275  ba_type =
      +
      1276  dynamic_cast<const ast::AfterBlock*>(block)->get_bablock()->get_type()->get_value();
      +
      1277  }
      +
      1278 
      +
      1279  /// associated blocks have different values (1 to 4) based on type.
      +
      1280  /// These values are based on neuron/coreneuron implementation details.
      +
      1281  if (ba_type == BATYPE_BREAKPOINT) {
      +
      1282  register_type += " + BAType::Breakpoint";
      +
      1283  } else if (ba_type == BATYPE_SOLVE) {
      +
      1284  register_type += " + BAType::Solve";
      +
      1285  } else if (ba_type == BATYPE_INITIAL) {
      +
      1286  register_type += " + BAType::Initial";
      +
      1287  } else if (ba_type == BATYPE_STEP) {
      +
      1288  register_type += " + BAType::Step";
      +
      1289  } else {
      +
      1290  throw std::runtime_error("Unhandled Before/After type encountered during code generation");
      +
      1291  }
      +
      1292  return register_type;
      +
      1293 }
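/* Illustrative sketch only: for a "BEFORE BREAKPOINT { ... }" block in the mod
   file, the helper above returns the string "BAType::Before + BAType::Breakpoint".
   The caller in the register function then emits something roughly like

       hoc_reg_ba(mech_type, nrn_before_after_0_ca_ex, BAType::Before + BAType::Breakpoint);

   where the callback name "nrn_before_after_0_ca_ex" is hypothetical and comes
   from method_name("nrn_before_after_0"). */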
      +
      1294 
      +
      1295 
      +
      1296 /**
      +
1297  * \details Every mod file has a register function to connect with the simulator.
+
1298  * Various information about the mechanism and its callbacks gets registered with
+
1299  * the simulator using the suffix_reg() function.
      +
      1300  *
      +
      1301  * Here are details:
      +
      1302  * - We should exclude that callback based on the solver, watch statements.
      +
1303  * - If nrn_get_mechtype is < -1, it means that the mechanism is not used in the
+
1304  * context of neuron execution and hence can be ignored in coreneuron
      +
      1305  * execution.
      +
      1306  * - Ions are internally defined and their types can be queried similar to
      +
      1307  * other mechanisms.
      +
      1308  * - hoc_register_var may not be needed in the context of coreneuron
      +
      1309  * - We assume net receive buffer is on. This is because generated code is
      +
1310  * compatible with cpu as well as gpu targets.
      +
      1311  */
      +
      1312 // NOLINTNEXTLINE(readability-function-cognitive-complexity)
      + +
      1314  printer->add_newline(2);
      +
      1315  printer->add_line("/** register channel with the simulator */");
      +
      1316  printer->fmt_push_block("void _{}_reg()", info.mod_file);
      +
      1317 
      +
      1318  // type related information
      +
      1319  auto suffix = add_escape_quote(info.mod_suffix);
      +
      1320  printer->add_newline();
      +
      1321  printer->fmt_line("int mech_type = nrn_get_mechtype({});", suffix);
      +
      1322  printer->fmt_line("{} = mech_type;", get_variable_name("mech_type", false));
      +
      1323  printer->push_block("if (mech_type == -1)");
      +
      1324  printer->add_line("return;");
      +
      1325  printer->pop_block();
      +
      1326 
      +
      1327  printer->add_newline();
      +
      1328  printer->add_line("_nrn_layout_reg(mech_type, 0);"); // 0 for SoA
      +
      1329 
      +
      1330  // register mechanism
      +
      1331  const auto mech_arguments = register_mechanism_arguments();
      +
      1332  const auto number_of_thread_objects = num_thread_objects();
      +
      1333  if (info.point_process) {
      +
      1334  printer->fmt_line("point_register_mech({}, {}, {}, {});",
      +
      1335  mech_arguments,
      +
      1336  info.constructor_node ? method_name(naming::NRN_CONSTRUCTOR_METHOD)
      +
      1337  : "nullptr",
      +
      1338  info.destructor_node ? method_name(naming::NRN_DESTRUCTOR_METHOD)
      +
      1339  : "nullptr",
      +
      1340  number_of_thread_objects);
      +
      1341  } else {
      +
      1342  printer->fmt_line("register_mech({}, {});", mech_arguments, number_of_thread_objects);
      +
      1343  if (info.constructor_node) {
      +
      1344  printer->fmt_line("register_constructor({});",
      +
      1345  method_name(naming::NRN_CONSTRUCTOR_METHOD));
      +
      1346  }
      +
      1347  }
      +
      1348 
      +
      1349  // types for ion
      +
      1350  for (const auto& ion: info.ions) {
      +
      1351  printer->fmt_line("{} = nrn_get_mechtype({});",
      +
      1352  get_variable_name(ion.name + "_type", false),
      +
      1353  add_escape_quote(ion.name + "_ion"));
      +
      1354  }
      +
      1355  printer->add_newline();
      +
      1356 
      +
      1357  /*
      +
      1358  * Register callbacks for thread allocation and cleanup. Note that thread_data_index
      +
1359  * represents the total number of threads used minus 1 (i.e. the index of the last thread).
      +
      1360  */
      +
      1361  if (info.vectorize && (info.thread_data_index != 0)) {
      +
      1362  // false to avoid getting the copy from the instance structure
      +
      1363  printer->fmt_line("thread_mem_init({});", get_variable_name("ext_call_thread", false));
      +
      1364  }
      +
      1365 
      +
      1366  if (!info.thread_variables.empty()) {
      +
      1367  printer->fmt_line("{} = 0;", get_variable_name("thread_data_in_use"));
      +
      1368  }
      +
      1369 
      +
      1370  if (info.thread_callback_register) {
      +
      1371  printer->add_line("_nrn_thread_reg0(mech_type, thread_mem_cleanup);");
      +
      1372  printer->add_line("_nrn_thread_reg1(mech_type, thread_mem_init);");
      +
      1373  }
      +
      1374 
      +
      1375  if (info.emit_table_thread()) {
      +
      1376  auto name = method_name("check_table_thread");
      +
      1377  printer->fmt_line("_nrn_thread_table_reg(mech_type, {});", name);
      +
      1378  }
      +
      1379 
      +
      1380  // register read/write callbacks for pointers
      +
      1381  if (info.bbcore_pointer_used) {
      +
      1382  printer->add_line("hoc_reg_bbcore_read(mech_type, bbcore_read);");
      +
      1383  printer->add_line("hoc_reg_bbcore_write(mech_type, bbcore_write);");
      +
      1384  }
      +
      1385 
      +
      1386  // register size of double and int elements
      +
      1387  // clang-format off
      +
      1388  printer->add_line("hoc_register_prop_size(mech_type, float_variables_size(), int_variables_size());");
      +
      1389  // clang-format on
      +
      1390 
      +
      1391  // register semantics for index variables
      +
      1392  for (auto& semantic: info.semantics) {
      +
      1393  auto args =
      +
      1394  fmt::format("mech_type, {}, {}", semantic.index, add_escape_quote(semantic.name));
      +
      1395  printer->fmt_line("hoc_register_dparam_semantics({});", args);
      +
      1396  }
      +
      1397 
      +
      1398  if (info.is_watch_used()) {
      +
      1399  auto watch_fun = compute_method_name(BlockType::Watch);
      +
      1400  printer->fmt_line("hoc_register_watch_check({}, mech_type);", watch_fun);
      +
      1401  }
      +
      1402 
      +
      1403  if (info.write_concentration) {
      +
      1404  printer->add_line("nrn_writes_conc(mech_type, 0);");
      +
      1405  }
      +
      1406 
      +
      1407  // register various information for point process type
      +
      1408  if (info.net_event_used) {
      +
      1409  printer->add_line("add_nrn_has_net_event(mech_type);");
      +
      1410  }
      +
      1411  if (info.artificial_cell) {
      +
      1412  printer->fmt_line("add_nrn_artcell(mech_type, {});", info.tqitem_index);
      +
      1413  }
      +
      1414  if (net_receive_buffering_required()) {
      +
      1415  printer->fmt_line("hoc_register_net_receive_buffering({}, mech_type);",
      +
      1416  method_name("net_buf_receive"));
      +
      1417  }
      +
      1418  if (info.num_net_receive_parameters != 0) {
      +
      1419  auto net_recv_init_arg = "nullptr";
      +
      1420  if (info.net_receive_initial_node != nullptr) {
      +
      1421  net_recv_init_arg = "net_init";
      +
      1422  }
      +
      1423  printer->fmt_line("set_pnt_receive(mech_type, {}, {}, num_net_receive_args());",
      +
      1424  method_name("net_receive"),
      +
      1425  net_recv_init_arg);
      +
      1426  }
      +
      1427  if (info.for_netcon_used) {
      +
      1428  const auto index = position_of_int_var(naming::FOR_NETCON_VARIABLE);
      +
      1429  printer->fmt_line("add_nrn_fornetcons(mech_type, {});", index);
      +
      1430  }
      +
      1431 
      +
      1432  if (info.net_event_used || info.net_send_used) {
      +
      1433  printer->add_line("hoc_register_net_send_buffering(mech_type);");
      +
      1434  }
      +
      1435 
      +
      1436  /// register all before/after blocks
      +
      1437  for (size_t i = 0; i < info.before_after_blocks.size(); i++) {
      +
      1438  // register type and associated function name for the block
      +
      1439  const auto& block = info.before_after_blocks[i];
      +
      1440  std::string register_type = get_register_type_for_ba_block(block);
      +
      1441  std::string function_name = method_name(fmt::format("nrn_before_after_{}", i));
      +
      1442  printer->fmt_line("hoc_reg_ba(mech_type, {}, {});", function_name, register_type);
      +
      1443  }
      +
      1444 
      +
      1445  // register variables for hoc
      +
      1446  printer->add_line("hoc_register_var(hoc_scalar_double, hoc_vector_double, NULL);");
      +
      1447  printer->pop_block();
      +
      1448 }
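/* Illustrative sketch only, assuming a hypothetical density mechanism from
   "ca_ex.mod" that reads the "ca" ion and has no thread data, tables, WATCH or
   NET_RECEIVE blocks. The registration function printed above then expands to
   roughly

       void _ca_ex_reg() {
           int mech_type = nrn_get_mechtype("ca_ex");
           ca_ex_global.mech_type = mech_type;
           if (mech_type == -1) {
               return;
           }

           _nrn_layout_reg(mech_type, 0);
           register_mech(mech_args, 0);  // mech_args from register_mechanism_arguments()

           ca_ex_global.ca_type = nrn_get_mechtype("ca_ion");

           hoc_register_prop_size(mech_type, float_variables_size(), int_variables_size());
           hoc_register_dparam_semantics(mech_type, 0, "ca_ion");
           hoc_register_var(hoc_scalar_double, hoc_vector_double, NULL);
       }

   The argument list of register_mech() and the dparam semantics depend on the
   mod file; "mech_args" and the "ca_ex_*" names are placeholders. */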
      +
      1449 
      1450 
      -
      1451 
      - -
      1453  if (!info.thread_callback_register) {
      -
      1454  return;
      -
      1455  }
      -
      1456 
      -
      1457  // thread_mem_init callback
      -
      1458  printer->add_newline(2);
      -
      1459  printer->add_line("/** thread memory allocation callback */");
      -
      1460  printer->push_block("static void thread_mem_init(ThreadDatum* thread) ");
      -
      1461 
      -
      1462  if (info.vectorize && info.derivimplicit_used()) {
      -
      1463  printer->fmt_line("thread[dith{}()].pval = nullptr;", info.derivimplicit_list_num);
      -
      1464  }
      -
      1465  if (info.vectorize && (info.top_local_thread_size != 0)) {
      -
      1466  auto length = info.top_local_thread_size;
      -
      1467  auto allocation = fmt::format("(double*)mem_alloc({}, sizeof(double))", length);
      -
      1468  printer->fmt_line("thread[top_local_var_tid()].pval = {};", allocation);
      -
      1469  }
      -
      1470  if (info.thread_var_data_size != 0) {
      -
      1471  auto length = info.thread_var_data_size;
      -
      1472  auto thread_data = get_variable_name("thread_data");
      -
      1473  auto thread_data_in_use = get_variable_name("thread_data_in_use");
      -
      1474  auto allocation = fmt::format("(double*)mem_alloc({}, sizeof(double))", length);
      -
      1475  printer->fmt_push_block("if ({})", thread_data_in_use);
      -
      1476  printer->fmt_line("thread[thread_var_tid()].pval = {};", allocation);
      -
      1477  printer->chain_block("else");
      -
      1478  printer->fmt_line("thread[thread_var_tid()].pval = {};", thread_data);
      -
      1479  printer->fmt_line("{} = 1;", thread_data_in_use);
      -
      1480  printer->pop_block();
      -
      1481  }
      -
      1482  printer->pop_block();
      -
      1483  printer->add_newline(2);
      + +
      1452  if (!info.thread_callback_register) {
      +
      1453  return;
      +
      1454  }
      +
      1455 
      +
      1456  // thread_mem_init callback
      +
      1457  printer->add_newline(2);
      +
      1458  printer->add_line("/** thread memory allocation callback */");
      +
      1459  printer->push_block("static void thread_mem_init(ThreadDatum* thread) ");
      +
      1460 
      +
      1461  if (info.vectorize && info.derivimplicit_used()) {
      +
      1462  printer->fmt_line("thread[dith{}()].pval = nullptr;", info.derivimplicit_list_num);
      +
      1463  }
      +
      1464  if (info.vectorize && (info.top_local_thread_size != 0)) {
      +
      1465  auto length = info.top_local_thread_size;
      +
      1466  auto allocation = fmt::format("(double*)mem_alloc({}, sizeof(double))", length);
      +
      1467  printer->fmt_line("thread[top_local_var_tid()].pval = {};", allocation);
      +
      1468  }
      +
      1469  if (info.thread_var_data_size != 0) {
      +
      1470  auto length = info.thread_var_data_size;
      +
      1471  auto thread_data = get_variable_name("thread_data");
      +
      1472  auto thread_data_in_use = get_variable_name("thread_data_in_use");
      +
      1473  auto allocation = fmt::format("(double*)mem_alloc({}, sizeof(double))", length);
      +
      1474  printer->fmt_push_block("if ({})", thread_data_in_use);
      +
      1475  printer->fmt_line("thread[thread_var_tid()].pval = {};", allocation);
      +
      1476  printer->chain_block("else");
      +
      1477  printer->fmt_line("thread[thread_var_tid()].pval = {};", thread_data);
      +
      1478  printer->fmt_line("{} = 1;", thread_data_in_use);
      +
      1479  printer->pop_block();
      +
      1480  }
      +
      1481  printer->pop_block();
      +
      1482  printer->add_newline(2);
      +
      1483 
      1484 
      -
      1485 
      -
      1486  // thread_mem_cleanup callback
      -
      1487  printer->add_line("/** thread memory cleanup callback */");
      -
      1488  printer->push_block("static void thread_mem_cleanup(ThreadDatum* thread) ");
      -
      1489 
      -
      1490  // clang-format off
      -
      1491  if (info.vectorize && info.derivimplicit_used()) {
      -
      1492  int n = info.derivimplicit_list_num;
      -
      1493  printer->fmt_line("free(thread[dith{}()].pval);", n);
      -
      1494  printer->fmt_line("nrn_destroy_newtonspace(static_cast<NewtonSpace*>(*newtonspace{}(thread)));", n);
      -
      1495  }
      -
      1496  // clang-format on
      -
      1497 
      -
      1498  if (info.top_local_thread_size != 0) {
      -
      1499  auto line = "free(thread[top_local_var_tid()].pval);";
      -
      1500  printer->add_line(line);
      -
      1501  }
      -
      1502  if (info.thread_var_data_size != 0) {
      -
      1503  auto thread_data = get_variable_name("thread_data");
      -
      1504  auto thread_data_in_use = get_variable_name("thread_data_in_use");
      -
      1505  printer->fmt_push_block("if (thread[thread_var_tid()].pval == {})", thread_data);
      -
      1506  printer->fmt_line("{} = 0;", thread_data_in_use);
      -
      1507  printer->chain_block("else");
      -
      1508  printer->add_line("free(thread[thread_var_tid()].pval);");
      -
      1509  printer->pop_block();
      -
      1510  }
      -
      1511  printer->pop_block();
      -
      1512 }
      +
      1485  // thread_mem_cleanup callback
      +
      1486  printer->add_line("/** thread memory cleanup callback */");
      +
      1487  printer->push_block("static void thread_mem_cleanup(ThreadDatum* thread) ");
      +
      1488 
      +
      1489  // clang-format off
      +
      1490  if (info.vectorize && info.derivimplicit_used()) {
      +
      1491  int n = info.derivimplicit_list_num;
      +
      1492  printer->fmt_line("free(thread[dith{}()].pval);", n);
      +
      1493  printer->fmt_line("nrn_destroy_newtonspace(static_cast<NewtonSpace*>(*newtonspace{}(thread)));", n);
      +
      1494  }
      +
      1495  // clang-format on
      +
      1496 
      +
      1497  if (info.top_local_thread_size != 0) {
      +
      1498  auto line = "free(thread[top_local_var_tid()].pval);";
      +
      1499  printer->add_line(line);
      +
      1500  }
      +
      1501  if (info.thread_var_data_size != 0) {
      +
      1502  auto thread_data = get_variable_name("thread_data");
      +
      1503  auto thread_data_in_use = get_variable_name("thread_data_in_use");
      +
      1504  printer->fmt_push_block("if (thread[thread_var_tid()].pval == {})", thread_data);
      +
      1505  printer->fmt_line("{} = 0;", thread_data_in_use);
      +
      1506  printer->chain_block("else");
      +
      1507  printer->add_line("free(thread[thread_var_tid()].pval);");
      +
      1508  printer->pop_block();
      +
      1509  }
      +
      1510  printer->pop_block();
      +
      1511 }
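/* Illustrative sketch only (hypothetical "ca_ex" mechanism with
   info.thread_var_data_size == 3 and no top-local or derivimplicit data): the
   two callbacks printed above come out roughly as

       static void thread_mem_init(ThreadDatum* thread) {
           if (ca_ex_global.thread_data_in_use) {
               thread[thread_var_tid()].pval = (double*)mem_alloc(3, sizeof(double));
           } else {
               thread[thread_var_tid()].pval = ca_ex_global.thread_data;
               ca_ex_global.thread_data_in_use = 1;
           }
       }

       static void thread_mem_cleanup(ThreadDatum* thread) {
           if (thread[thread_var_tid()].pval == ca_ex_global.thread_data) {
               ca_ex_global.thread_data_in_use = 0;
           } else {
               free(thread[thread_var_tid()].pval);
           }
       }

   The "ca_ex_*" names stand for whatever get_variable_name() resolves to. */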
      +
      1512 
      1513 
      -
      1514 
      - -
      1516  auto const value_initialize = print_initializers ? "{}" : "";
      -
      1517  auto int_type = default_int_data_type();
      -
      1518  printer->add_newline(2);
      -
      1519  printer->add_line("/** all mechanism instance variables and global variables */");
      -
      1520  printer->fmt_push_block("struct {} ", instance_struct());
      -
      1521 
      -
      1522  for (auto const& [var, type]: info.neuron_global_variables) {
      -
      1523  auto const name = var->get_name();
      -
      1524  printer->fmt_line("{}* {}{};",
      -
      1525  type,
      -
      1526  name,
      -
      1527  print_initializers ? fmt::format("{{&coreneuron::{}}}", name)
      -
      1528  : std::string{});
      -
      1529  }
      -
      1530  for (auto& var: codegen_float_variables) {
      -
      1531  const auto& name = var->get_name();
      -
      1532  auto type = get_range_var_float_type(var);
      -
      1533  auto qualifier = is_constant_variable(name) ? "const " : "";
      -
      1534  printer->fmt_line("{}{}* {}{};", qualifier, type, name, value_initialize);
      -
      1535  }
      -
      1536  for (auto& var: codegen_int_variables) {
      -
      1537  const auto& name = var.symbol->get_name();
      -
      1538  if (var.is_index || var.is_integer) {
      -
      1539  auto qualifier = var.is_constant ? "const " : "";
      -
      1540  printer->fmt_line("{}{}* {}{};", qualifier, int_type, name, value_initialize);
      -
      1541  } else {
      -
      1542  auto qualifier = var.is_constant ? "const " : "";
      -
      1543  auto type = var.is_vdata ? "void*" : default_float_data_type();
      -
      1544  printer->fmt_line("{}{}* {}{};", qualifier, type, name, value_initialize);
      -
      1545  }
      -
      1546  }
      -
      1547 
      -
      1548  printer->fmt_line("{}* {}{};",
      -
      1549  global_struct(),
      - -
      1551  print_initializers ? fmt::format("{{&{}}}", global_struct_instance())
      -
      1552  : std::string{});
      -
      1553  printer->pop_block(";");
      -
      1554 }
      + +
      1515  auto const value_initialize = print_initializers ? "{}" : "";
      +
      1516  auto int_type = default_int_data_type();
      +
      1517  printer->add_newline(2);
      +
      1518  printer->add_line("/** all mechanism instance variables and global variables */");
      +
      1519  printer->fmt_push_block("struct {} ", instance_struct());
      +
      1520 
      +
      1521  for (auto const& [var, type]: info.neuron_global_variables) {
      +
      1522  auto const name = var->get_name();
      +
      1523  printer->fmt_line("{}* {}{};",
      +
      1524  type,
      +
      1525  name,
      +
      1526  print_initializers ? fmt::format("{{&coreneuron::{}}}", name)
      +
      1527  : std::string{});
      +
      1528  }
      +
      1529  for (auto& var: codegen_float_variables) {
      +
      1530  const auto& name = var->get_name();
      +
      1531  auto type = get_range_var_float_type(var);
      +
      1532  auto qualifier = is_constant_variable(name) ? "const " : "";
      +
      1533  printer->fmt_line("{}{}* {}{};", qualifier, type, name, value_initialize);
      +
      1534  }
      +
      1535  for (auto& var: codegen_int_variables) {
      +
      1536  const auto& name = var.symbol->get_name();
      +
      1537  if (var.is_index || var.is_integer) {
      +
      1538  auto qualifier = var.is_constant ? "const " : "";
      +
      1539  printer->fmt_line("{}{}* {}{};", qualifier, int_type, name, value_initialize);
      +
      1540  } else {
      +
      1541  auto qualifier = var.is_constant ? "const " : "";
      +
      1542  auto type = var.is_vdata ? "void*" : default_float_data_type();
      +
      1543  printer->fmt_line("{}{}* {}{};", qualifier, type, name, value_initialize);
      +
      1544  }
      +
      1545  }
      +
      1546 
      +
      1547  printer->fmt_line("{}* {}{};",
      +
      1548  global_struct(),
      + +
      1550  print_initializers ? fmt::format("{{&{}}}", global_struct_instance())
      +
      1551  : std::string{});
      +
      1552  printer->pop_block(";");
      +
      1553 }
      +
      1554 
      1555 
      -
      1556 
      - -
      1558  if (!ion_variable_struct_required()) {
      -
      1559  return;
      -
      1560  }
      -
      1561  printer->add_newline(2);
      -
      1562  printer->add_line("/** ion write variables */");
      -
      1563  printer->push_block("struct IonCurVar");
      -
      1564 
      -
      1565  std::string float_type = default_float_data_type();
      -
      1566  std::vector<std::string> members;
      -
      1567 
      -
      1568  for (auto& ion: info.ions) {
      -
      1569  for (auto& var: ion.writes) {
      -
      1570  printer->fmt_line("{} {};", float_type, var);
      -
      1571  members.push_back(var);
      -
      1572  }
      -
      1573  }
      -
      1574  for (auto& var: info.currents) {
      -
      1575  if (!info.is_ion_variable(var)) {
      -
      1576  printer->fmt_line("{} {};", float_type, var);
      -
      1577  members.push_back(var);
      -
      1578  }
      -
      1579  }
      -
      1580 
      -
      1581  print_ion_var_constructor(members);
      -
      1582 
      -
      1583  printer->pop_block(";");
      -
      1584 }
      + +
      1557  if (!ion_variable_struct_required()) {
      +
      1558  return;
      +
      1559  }
      +
      1560  printer->add_newline(2);
      +
      1561  printer->add_line("/** ion write variables */");
      +
      1562  printer->push_block("struct IonCurVar");
      +
      1563 
      +
      1564  std::string float_type = default_float_data_type();
      +
      1565  std::vector<std::string> members;
      +
      1566 
      +
      1567  for (auto& ion: info.ions) {
      +
      1568  for (auto& var: ion.writes) {
      +
      1569  printer->fmt_line("{} {};", float_type, var);
      +
      1570  members.push_back(var);
      +
      1571  }
      +
      1572  }
      +
      1573  for (auto& var: info.currents) {
      +
      1574  if (!info.is_ion_variable(var)) {
      +
      1575  printer->fmt_line("{} {};", float_type, var);
      +
      1576  members.push_back(var);
      +
      1577  }
      +
      1578  }
      +
      1579 
      +
      1580  print_ion_var_constructor(members);
      +
      1581 
      +
      1582  printer->pop_block(";");
      +
      1583 }
      +
      1584 
      1585 
      -
      1586 
      - -
      1588  const std::vector<std::string>& members) {
      -
      1589  // constructor
      -
      1590  printer->add_newline();
      -
      1591  printer->add_indent();
      -
      1592  printer->add_text("IonCurVar() : ");
      -
      1593  for (int i = 0; i < members.size(); i++) {
      -
      1594  printer->fmt_text("{}(0)", members[i]);
      -
      1595  if (i + 1 < members.size()) {
      -
      1596  printer->add_text(", ");
      -
      1597  }
      -
      1598  }
      -
      1599  printer->add_text(" {}");
      -
      1600  printer->add_newline();
      -
      1601 }
      + +
      1587  const std::vector<std::string>& members) {
      +
      1588  // constructor
      +
      1589  printer->add_newline();
      +
      1590  printer->add_indent();
      +
      1591  printer->add_text("IonCurVar() : ");
      +
      1592  for (int i = 0; i < members.size(); i++) {
      +
      1593  printer->fmt_text("{}(0)", members[i]);
      +
      1594  if (i + 1 < members.size()) {
      +
      1595  printer->add_text(", ");
      +
      1596  }
      +
      1597  }
      +
      1598  printer->add_text(" {}");
      +
      1599  printer->add_newline();
      +
      1600 }
      +
      1601 
      1602 
      -
      1603 
      - -
      1605  printer->add_line("IonCurVar ionvar;");
      -
      1606 }
      + +
      1604  printer->add_line("IonCurVar ionvar;");
      +
      1605 }
      +
      1606 
      1607 
      -
      1608 
      - -
      1610  // nothing for cpu
      -
      1611 }
      + +
      1609  // nothing for cpu
      +
      1610 }
      +
      1611 
      1612 
      -
      1613 
      - -
      1615  auto type = float_data_type();
      -
      1616  printer->add_newline(2);
      -
      1617  printer->add_line("/** allocate and setup array for range variable */");
      -
      1618  printer->fmt_push_block("static inline {}* setup_range_variable(double* variable, int n)",
      -
      1619  type);
      -
      1620  printer->fmt_line("{0}* data = ({0}*) mem_alloc(n, sizeof({0}));", type);
      -
      1621  printer->push_block("for(size_t i = 0; i < n; i++)");
      -
      1622  printer->add_line("data[i] = variable[i];");
      -
      1623  printer->pop_block();
      -
      1624  printer->add_line("return data;");
      -
      1625  printer->pop_block();
      -
      1626 }
      -
      1627 
      -
      1628 
      -
      1629 /**
      -
1630  * \details If a floating point type like "float" is specified on the command line, then
-
1631  * we can't convert all variables to the new type. This is because certain variables
-
1632  * are pointers to internal variables (e.g. ions). Hence, we check whether a given
-
1633  * variable can be safely converted to the new type; if so, we return the new type.
      -
      1634  */
      - -
      1636  // clang-format off
      -
      1637  auto with = NmodlType::read_ion_var
      -
      1638  | NmodlType::write_ion_var
      -
      1639  | NmodlType::pointer_var
      -
      1640  | NmodlType::bbcore_pointer_var
      -
      1641  | NmodlType::extern_neuron_variable;
      -
      1642  // clang-format on
      -
      1643  bool need_default_type = symbol->has_any_property(with);
      -
      1644  if (need_default_type) {
      -
      1645  return default_float_data_type();
      -
      1646  }
      -
      1647  return float_data_type();
      -
      1648 }
      + +
      1614  auto type = float_data_type();
      +
      1615  printer->add_newline(2);
      +
      1616  printer->add_line("/** allocate and setup array for range variable */");
      +
      1617  printer->fmt_push_block("static inline {}* setup_range_variable(double* variable, int n)",
      +
      1618  type);
      +
      1619  printer->fmt_line("{0}* data = ({0}*) mem_alloc(n, sizeof({0}));", type);
      +
      1620  printer->push_block("for(size_t i = 0; i < n; i++)");
      +
      1621  printer->add_line("data[i] = variable[i];");
      +
      1622  printer->pop_block();
      +
      1623  printer->add_line("return data;");
      +
      1624  printer->pop_block();
      +
      1625 }
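/* Illustrative sketch only: when a narrower floating point type (e.g. "float")
   is requested on the command line, the helper printed above expands to

       static inline float* setup_range_variable(double* variable, int n) {
           float* data = (float*)mem_alloc(n, sizeof(float));
           for (size_t i = 0; i < n; i++) {
               data[i] = variable[i];
           }
           return data;
       }
*/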
      +
      1626 
      +
      1627 
      +
      1628 /**
      +
1629  * \details If a floating point type like "float" is specified on the command line, then
+
1630  * we can't convert all variables to the new type. This is because certain variables
+
1631  * are pointers to internal variables (e.g. ions). Hence, we check whether a given
+
1632  * variable can be safely converted to the new type; if so, we return the new type.
      +
      1633  */
      + +
      1635  // clang-format off
      +
      1636  auto with = NmodlType::read_ion_var
      +
      1637  | NmodlType::write_ion_var
      +
      1638  | NmodlType::pointer_var
      +
      1639  | NmodlType::bbcore_pointer_var
      +
      1640  | NmodlType::extern_neuron_variable;
      +
      1641  // clang-format on
      +
      1642  bool need_default_type = symbol->has_any_property(with);
      +
      1643  if (need_default_type) {
      +
      1644  return default_float_data_type();
      +
      1645  }
      +
      1646  return float_data_type();
      +
      1647 }
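/* Illustrative example only (hypothetical variable names): with a narrower
   float type requested, an ordinary RANGE variable keeps the requested type,
   while ion and pointer variables stay at the default double because they alias
   simulator-owned storage, e.g. in the instance struct

       float* gbar{};   // plain RANGE variable, converted to float_data_type()
       double* ica{};   // write_ion_var, kept at default_float_data_type()
*/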
      +
      1648 
      1649 
      -
      1650 
      - -
      1652  if (range_variable_setup_required()) {
      -
      1653  print_setup_range_variable();
      -
      1654  }
      -
      1655 
      -
      1656  printer->add_newline();
      -
      1657  printer->add_line("// Allocate instance structure");
      -
      1658  printer->fmt_push_block("static void {}(NrnThread* nt, Memb_list* ml, int type)",
      - -
      1660  printer->add_line("assert(!ml->instance);");
      -
      1661  printer->add_line("assert(!ml->global_variables);");
      -
      1662  printer->add_line("assert(ml->global_variables_size == 0);");
      -
      1663  printer->fmt_line("auto* const inst = new {}{{}};", instance_struct());
      -
      1664  printer->fmt_line("assert(inst->{} == &{});",
      - -
      1666  global_struct_instance());
      -
      1667  printer->add_line("ml->instance = inst;");
      -
      1668  printer->fmt_line("ml->global_variables = inst->{};", naming::INST_GLOBAL_MEMBER);
      -
      1669  printer->fmt_line("ml->global_variables_size = sizeof({});", global_struct());
      -
      1670  printer->pop_block();
      -
      1671  printer->add_newline();
      -
      1672 
      -
      1673  auto const cast_inst_and_assert_validity = [&]() {
      -
      1674  printer->fmt_line("auto* const inst = static_cast<{}*>(ml->instance);", instance_struct());
      -
      1675  printer->add_line("assert(inst);");
      -
      1676  printer->fmt_line("assert(inst->{});", naming::INST_GLOBAL_MEMBER);
      -
      1677  printer->fmt_line("assert(inst->{} == &{});",
      - -
      1679  global_struct_instance());
      -
      1680  printer->fmt_line("assert(inst->{} == ml->global_variables);", naming::INST_GLOBAL_MEMBER);
      -
      1681  printer->fmt_line("assert(ml->global_variables_size == sizeof({}));", global_struct());
      -
      1682  };
      -
      1683 
      -
      1684  // Must come before print_instance_struct_copy_to_device and
      -
      1685  // print_instance_struct_delete_from_device
      -
      1686  print_instance_struct_transfer_routine_declarations();
      -
      1687 
      -
      1688  printer->add_line("// Deallocate the instance structure");
      -
      1689  printer->fmt_push_block("static void {}(NrnThread* nt, Memb_list* ml, int type)",
      - -
      1691  cast_inst_and_assert_validity();
      -
      1692 
      -
      1693  // delete random streams
      -
      1694  if (info.random_variables.size()) {
      -
      1695  printer->add_line("int pnodecount = ml->_nodecount_padded;");
      -
      1696  printer->add_line("int nodecount = ml->nodecount;");
      -
      1697  printer->add_line("Datum* indexes = ml->pdata;");
      -
      1698  printer->push_block("for (int id = 0; id < nodecount; id++)");
      -
      1699  for (const auto& var: info.random_variables) {
      -
      1700  const auto& name = get_variable_name(var->get_name());
      -
      1701  printer->fmt_line("nrnran123_deletestream((nrnran123_State*){});", name);
      -
      1702  }
      -
      1703  printer->pop_block();
      -
      1704  }
      -
      1705  print_instance_struct_delete_from_device();
      -
      1706  printer->add_multi_line(R"CODE(
      -
      1707  delete inst;
      -
      1708  ml->instance = nullptr;
      -
      1709  ml->global_variables = nullptr;
      -
      1710  ml->global_variables_size = 0;
      -
      1711  )CODE");
      -
      1712  printer->pop_block();
      -
      1713  printer->add_newline();
      + +
      1651  if (range_variable_setup_required()) {
      +
      1652  print_setup_range_variable();
      +
      1653  }
      +
      1654 
      +
      1655  printer->add_newline();
      +
      1656  printer->add_line("// Allocate instance structure");
      +
      1657  printer->fmt_push_block("static void {}(NrnThread* nt, Memb_list* ml, int type)",
      + +
      1659  printer->add_line("assert(!ml->instance);");
      +
      1660  printer->add_line("assert(!ml->global_variables);");
      +
      1661  printer->add_line("assert(ml->global_variables_size == 0);");
      +
      1662  printer->fmt_line("auto* const inst = new {}{{}};", instance_struct());
      +
      1663  printer->fmt_line("assert(inst->{} == &{});",
      + +
      1665  global_struct_instance());
      +
      1666  printer->add_line("ml->instance = inst;");
      +
      1667  printer->fmt_line("ml->global_variables = inst->{};", naming::INST_GLOBAL_MEMBER);
      +
      1668  printer->fmt_line("ml->global_variables_size = sizeof({});", global_struct());
      +
      1669  printer->pop_block();
      +
      1670  printer->add_newline();
      +
      1671 
      +
      1672  auto const cast_inst_and_assert_validity = [&]() {
      +
      1673  printer->fmt_line("auto* const inst = static_cast<{}*>(ml->instance);", instance_struct());
      +
      1674  printer->add_line("assert(inst);");
      +
      1675  printer->fmt_line("assert(inst->{});", naming::INST_GLOBAL_MEMBER);
      +
      1676  printer->fmt_line("assert(inst->{} == &{});",
      + +
      1678  global_struct_instance());
      +
      1679  printer->fmt_line("assert(inst->{} == ml->global_variables);", naming::INST_GLOBAL_MEMBER);
      +
      1680  printer->fmt_line("assert(ml->global_variables_size == sizeof({}));", global_struct());
      +
      1681  };
      +
      1682 
      +
      1683  // Must come before print_instance_struct_copy_to_device and
      +
      1684  // print_instance_struct_delete_from_device
      +
      1685  print_instance_struct_transfer_routine_declarations();
      +
      1686 
      +
      1687  printer->add_line("// Deallocate the instance structure");
      +
      1688  printer->fmt_push_block("static void {}(NrnThread* nt, Memb_list* ml, int type)",
      + +
      1690  cast_inst_and_assert_validity();
      +
      1691 
      +
      1692  // delete random streams
      +
      1693  if (info.random_variables.size()) {
      +
      1694  printer->add_line("int pnodecount = ml->_nodecount_padded;");
      +
      1695  printer->add_line("int nodecount = ml->nodecount;");
      +
      1696  printer->add_line("Datum* indexes = ml->pdata;");
      +
      1697  printer->push_block("for (int id = 0; id < nodecount; id++)");
      +
      1698  for (const auto& var: info.random_variables) {
      +
      1699  const auto& name = get_variable_name(var->get_name());
      +
      1700  printer->fmt_line("nrnran123_deletestream((nrnran123_State*){});", name);
      +
      1701  }
      +
      1702  printer->pop_block();
      +
      1703  }
      +
      1704  print_instance_struct_delete_from_device();
      +
      1705  printer->add_multi_line(R"CODE(
      +
      1706  delete inst;
      +
      1707  ml->instance = nullptr;
      +
      1708  ml->global_variables = nullptr;
      +
      1709  ml->global_variables_size = 0;
      +
      1710  )CODE");
      +
      1711  printer->pop_block();
      +
      1712  printer->add_newline();
      +
      1713 
      1714 
      -
      1715 
      -
      1716  printer->add_line("/** initialize mechanism instance variables */");
      -
      1717  printer->push_block("static inline void setup_instance(NrnThread* nt, Memb_list* ml)");
      -
      1718  cast_inst_and_assert_validity();
      -
      1719 
      -
      1720  std::string stride;
      -
      1721  printer->add_line("int pnodecount = ml->_nodecount_padded;");
      -
      1722  stride = "*pnodecount";
      -
      1723 
      -
      1724  printer->add_line("Datum* indexes = ml->pdata;");
      -
      1725 
      -
      1726  auto const float_type = default_float_data_type();
      -
      1727 
      -
      1728  int id = 0;
      -
      1729  std::vector<std::string> ptr_members{naming::INST_GLOBAL_MEMBER};
      -
      1730  for (auto const& [var, type]: info.neuron_global_variables) {
      -
      1731  ptr_members.push_back(var->get_name());
      -
      1732  }
      -
      1733  ptr_members.reserve(ptr_members.size() + codegen_float_variables.size() +
      -
      1734  codegen_int_variables.size());
      -
      1735  for (auto& var: codegen_float_variables) {
      -
      1736  auto name = var->get_name();
      -
      1737  auto range_var_type = get_range_var_float_type(var);
      -
      1738  if (float_type == range_var_type) {
      -
      1739  auto const variable = fmt::format("ml->data+{}{}", id, stride);
      -
      1740  printer->fmt_line("inst->{} = {};", name, variable);
      -
      1741  } else {
      -
      1742  // TODO what MOD file exercises this?
      -
      1743  printer->fmt_line("inst->{} = setup_range_variable(ml->data+{}{}, pnodecount);",
      -
      1744  name,
      -
      1745  id,
      -
      1746  stride);
      -
      1747  }
      -
      1748  ptr_members.push_back(std::move(name));
      -
      1749  id += var->get_length();
      -
      1750  }
      -
      1751 
      -
      1752  for (auto& var: codegen_int_variables) {
      -
      1753  auto name = var.symbol->get_name();
      -
      1754  auto const variable = [&var]() {
      -
      1755  if (var.is_index || var.is_integer) {
      -
      1756  return "ml->pdata";
      -
      1757  } else if (var.is_vdata) {
      -
      1758  return "nt->_vdata";
      -
      1759  } else {
      -
      1760  return "nt->_data";
      -
      1761  }
      -
      1762  }();
      -
      1763  printer->fmt_line("inst->{} = {};", name, variable);
      -
      1764  ptr_members.push_back(std::move(name));
      -
      1765  }
      -
      1766  print_instance_struct_copy_to_device();
      -
      1767  printer->pop_block(); // setup_instance
      -
      1768  printer->add_newline();
      -
      1769 
      -
      1770  print_instance_struct_transfer_routines(ptr_members);
      -
      1771 }
      +
      1715  printer->add_line("/** initialize mechanism instance variables */");
      +
      1716  printer->push_block("static inline void setup_instance(NrnThread* nt, Memb_list* ml)");
      +
      1717  cast_inst_and_assert_validity();
      +
      1718 
      +
      1719  std::string stride;
      +
      1720  printer->add_line("int pnodecount = ml->_nodecount_padded;");
      +
      1721  stride = "*pnodecount";
      +
      1722 
      +
      1723  printer->add_line("Datum* indexes = ml->pdata;");
      +
      1724 
      +
      1725  auto const float_type = default_float_data_type();
      +
      1726 
      +
      1727  int id = 0;
      +
      1728  std::vector<std::string> ptr_members{naming::INST_GLOBAL_MEMBER};
      +
      1729  for (auto const& [var, type]: info.neuron_global_variables) {
      +
      1730  ptr_members.push_back(var->get_name());
      +
      1731  }
      +
      1732  ptr_members.reserve(ptr_members.size() + codegen_float_variables.size() +
      +
      1733  codegen_int_variables.size());
      +
      1734  for (auto& var: codegen_float_variables) {
      +
      1735  auto name = var->get_name();
      +
      1736  auto range_var_type = get_range_var_float_type(var);
      +
      1737  if (float_type == range_var_type) {
      +
      1738  auto const variable = fmt::format("ml->data+{}{}", id, stride);
      +
      1739  printer->fmt_line("inst->{} = {};", name, variable);
      +
      1740  } else {
      +
      1741  // TODO what MOD file exercises this?
      +
      1742  printer->fmt_line("inst->{} = setup_range_variable(ml->data+{}{}, pnodecount);",
      +
      1743  name,
      +
      1744  id,
      +
      1745  stride);
      +
      1746  }
      +
      1747  ptr_members.push_back(std::move(name));
      +
      1748  id += var->get_length();
      +
      1749  }
      +
      1750 
      +
      1751  for (auto& var: codegen_int_variables) {
      +
      1752  auto name = var.symbol->get_name();
      +
      1753  auto const variable = [&var]() {
      +
      1754  if (var.is_index || var.is_integer) {
      +
      1755  return "ml->pdata";
      +
      1756  } else if (var.is_vdata) {
      +
      1757  return "nt->_vdata";
      +
      1758  } else {
      +
      1759  return "nt->_data";
      +
      1760  }
      +
      1761  }();
      +
      1762  printer->fmt_line("inst->{} = {};", name, variable);
      +
      1763  ptr_members.push_back(std::move(name));
      +
      1764  }
      +
      1765  print_instance_struct_copy_to_device();
      +
      1766  printer->pop_block(); // setup_instance
      +
      1767  printer->add_newline();
      +
      1768 
      +
      1769  print_instance_struct_transfer_routines(ptr_members);
      +
      1770 }
      +
      1771 
      1772 
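For orientation, the setup_instance printer above emits CoreNEURON code of roughly the following shape; the member names m, h and ion_ena_index are illustrative placeholders, and the prologue produced by cast_inst_and_assert_validity() is omitted:

    static inline void setup_instance(NrnThread* nt, Memb_list* ml) {
        // ... instance allocation / validity check elided ...
        int pnodecount = ml->_nodecount_padded;
        Datum* indexes = ml->pdata;
        inst->m = ml->data+0*pnodecount;    // first float (RANGE) variable
        inst->h = ml->data+1*pnodecount;    // second float (RANGE) variable
        inst->ion_ena_index = ml->pdata;    // integer/index variable
    }

This is only a sketch of the emitted text, not a compilable unit on its own.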
      -
      1773 
      - -
      1775  if (info.artificial_cell) {
      -
      1776  printer->add_line("double v = 0.0;");
      -
      1777  } else {
      -
      1778  printer->add_line("int node_id = node_index[id];");
      -
      1779  printer->add_line("double v = voltage[node_id];");
      -
      1780  print_v_unused();
      -
      1781  }
      -
      1782 
      -
      1783  if (ion_variable_struct_required()) {
      -
      1784  printer->add_line("IonCurVar ionvar;");
      -
      1785  }
      -
      1786 
      -
      1787  // read ion statements
      -
      1788  auto read_statements = ion_read_statements(BlockType::Initial);
      -
      1789  for (auto& statement: read_statements) {
      -
      1790  printer->add_line(statement);
      -
      1791  }
      -
      1792 
      -
      1793  print_rename_state_vars();
      -
      1794 
      -
      1795  // initial block
      -
      1796  if (node != nullptr) {
      -
      1797  const auto& block = node->get_statement_block();
      -
      1798  print_statement_block(*block, false, false);
      -
      1799  }
      -
      1800 
      -
      1801  // write ion statements
      -
      1802  auto write_statements = ion_write_statements(BlockType::Initial);
      -
      1803  for (auto& statement: write_statements) {
      -
      1804  auto text = process_shadow_update_statement(statement, BlockType::Initial);
      -
      1805  printer->add_line(text);
      -
      1806  }
      -
      1807 }
      + +
      1774  if (info.artificial_cell) {
      +
      1775  printer->add_line("double v = 0.0;");
      +
      1776  } else {
      +
      1777  printer->add_line("int node_id = node_index[id];");
      +
      1778  printer->add_line("double v = voltage[node_id];");
      +
      1779  print_v_unused();
      +
      1780  }
      +
      1781 
      +
      1782  if (ion_variable_struct_required()) {
      +
      1783  printer->add_line("IonCurVar ionvar;");
      +
      1784  }
      +
      1785 
      +
      1786  // read ion statements
      +
      1787  auto read_statements = ion_read_statements(BlockType::Initial);
      +
      1788  for (auto& statement: read_statements) {
      +
      1789  printer->add_line(statement);
      +
      1790  }
      +
      1791 
      +
      1792  print_rename_state_vars();
      +
      1793 
      +
      1794  // initial block
      +
      1795  if (node != nullptr) {
      +
      1796  const auto& block = node->get_statement_block();
      +
      1797  print_statement_block(*block, false, false);
      +
      1798  }
      +
      1799 
      +
      1800  // write ion statements
      +
      1801  auto write_statements = ion_write_statements(BlockType::Initial);
      +
      1802  for (auto& statement: write_statements) {
      +
      1803  auto text = process_shadow_update_statement(statement, BlockType::Initial);
      +
      1804  printer->add_line(text);
      +
      1805  }
      +
      1806 }
      +
      1807 
      1808 
      -
      1809 
      - -
      1811  BlockType type,
      -
      1812  const std::string& function_name) {
      -
      1813  std::string method;
      -
      1814  if (function_name.empty()) {
      -
      1815  method = compute_method_name(type);
      -
      1816  } else {
      -
      1817  method = function_name;
      -
      1818  }
      -
      1819  auto args = "NrnThread* nt, Memb_list* ml, int type";
      -
      1820 
      -
1821  // the watch statement function doesn't have a type argument
      -
      1822  if (type == BlockType::Watch) {
      -
      1823  args = "NrnThread* nt, Memb_list* ml";
      -
      1824  }
      -
      1825 
      -
      1826  print_global_method_annotation();
      -
      1827  printer->fmt_push_block("void {}({})", method, args);
      -
      1828  if (type != BlockType::Destructor && type != BlockType::Constructor) {
      -
      1829  // We do not (currently) support DESTRUCTOR and CONSTRUCTOR blocks
      -
      1830  // running anything on the GPU.
      -
      1831  print_kernel_data_present_annotation_block_begin();
      -
      1832  } else {
      -
1833  /// TODO: Remove this when the code generation is properly done
      -
      1834  /// Related to https://github.com/BlueBrain/nmodl/issues/692
      -
      1835  printer->add_line("#ifndef CORENEURON_BUILD");
      -
      1836  }
      -
      1837  printer->add_multi_line(R"CODE(
      -
      1838  int nodecount = ml->nodecount;
      -
      1839  int pnodecount = ml->_nodecount_padded;
      -
      1840  const int* node_index = ml->nodeindices;
      -
      1841  double* data = ml->data;
      -
      1842  const double* voltage = nt->_actual_v;
      -
      1843  )CODE");
      -
      1844 
      -
      1845  if (type == BlockType::Equation) {
      -
      1846  printer->add_line("double* vec_rhs = nt->_actual_rhs;");
      -
      1847  printer->add_line("double* vec_d = nt->_actual_d;");
      -
      1848  print_rhs_d_shadow_variables();
      -
      1849  }
      -
      1850  printer->add_line("Datum* indexes = ml->pdata;");
      -
      1851  printer->add_line("ThreadDatum* thread = ml->_thread;");
      -
      1852 
      -
      1853  if (type == BlockType::Initial) {
      -
      1854  printer->add_newline();
      -
      1855  printer->add_line("setup_instance(nt, ml);");
      -
      1856  }
      -
      1857  printer->fmt_line("auto* const inst = static_cast<{}*>(ml->instance);", instance_struct());
      -
      1858  printer->add_newline(1);
      -
      1859 }
      -
      1860 
      - -
      1862  printer->add_newline(2);
      -
      1863  printer->add_line("/** initialize channel */");
      -
      1864 
      -
      1865  print_global_function_common_code(BlockType::Initial);
      -
      1866  if (info.derivimplicit_used()) {
      -
      1867  printer->add_newline();
      -
      1868  int nequation = info.num_equations;
      -
      1869  int list_num = info.derivimplicit_list_num;
      -
      1870  // clang-format off
      -
      1871  printer->fmt_line("int& deriv_advance_flag = *deriv{}_advance(thread);", list_num);
      -
      1872  printer->add_line("deriv_advance_flag = 0;");
      -
      1873  print_deriv_advance_flag_transfer_to_device();
      -
      1874  printer->fmt_line("auto ns = newtonspace{}(thread);", list_num);
      -
      1875  printer->fmt_line("auto& th = thread[dith{}()];", list_num);
      -
      1876  printer->push_block("if (*ns == nullptr)");
      -
      1877  printer->fmt_line("int vec_size = 2*{}*pnodecount*sizeof(double);", nequation);
      -
      1878  printer->fmt_line("double* vec = makevector(vec_size);", nequation);
      -
      1879  printer->fmt_line("th.pval = vec;", list_num);
      -
      1880  printer->fmt_line("*ns = nrn_cons_newtonspace({}, pnodecount);", nequation);
      -
      1881  print_newtonspace_transfer_to_device();
      -
      1882  printer->pop_block();
      -
      1883  // clang-format on
      -
      1884  }
      -
      1885 
      -
1887  // update global variables as those might be updated via python/hoc API
      -
      1887  // NOTE: CoreNEURON has enough information to do this on its own, which
      -
      1888  // would be neater.
      -
      1889  print_global_variable_device_update_annotation();
      -
      1890 
      -
      1891  if (skip_init_check) {
      -
      1892  printer->push_block("if (_nrn_skip_initmodel == 0)");
      -
      1893  }
      -
      1894 
      -
      1895  if (!info.changed_dt.empty()) {
      -
      1896  printer->fmt_line("double _save_prev_dt = {};",
      -
      1897  get_variable_name(naming::NTHREAD_DT_VARIABLE));
      -
      1898  printer->fmt_line("{} = {};",
      -
      1899  get_variable_name(naming::NTHREAD_DT_VARIABLE),
      -
      1900  info.changed_dt);
      -
      1901  print_dt_update_to_device();
      -
      1902  }
      -
      1903 
      -
      1904  print_channel_iteration_block_parallel_hint(BlockType::Initial, info.initial_node);
      -
      1905  printer->push_block("for (int id = 0; id < nodecount; id++)");
      -
      1906 
      -
      1907  if (info.net_receive_node != nullptr) {
      -
      1908  printer->fmt_line("{} = -1e20;", get_variable_name("tsave"));
      -
      1909  }
      -
      1910 
      -
      1911  print_initial_block(info.initial_node);
      -
      1912  printer->pop_block();
      -
      1913 
      -
      1914  if (!info.changed_dt.empty()) {
      -
      1915  printer->fmt_line("{} = _save_prev_dt;", get_variable_name(naming::NTHREAD_DT_VARIABLE));
      -
      1916  print_dt_update_to_device();
      -
      1917  }
      -
      1918 
      -
      1919  printer->pop_block();
      -
      1920 
      -
      1921  if (info.derivimplicit_used()) {
      -
      1922  printer->add_line("deriv_advance_flag = 1;");
      -
      1923  print_deriv_advance_flag_transfer_to_device();
      -
      1924  }
      -
      1925 
      -
      1926  if (info.net_send_used && !info.artificial_cell) {
      -
      1927  print_send_event_move();
      -
      1928  }
      -
      1929 
      -
      1930  print_kernel_data_present_annotation_block_end();
      -
      1931  if (skip_init_check) {
      -
      1932  printer->pop_block();
      -
      1933  }
      -
      1934 }
      -
      1935 
      - -
      1937  size_t block_id) {
      -
      1938  std::string ba_type;
      -
      1939  std::shared_ptr<ast::BABlock> ba_block;
      -
      1940 
      -
      1941  if (node->is_before_block()) {
      -
      1942  ba_block = dynamic_cast<const ast::BeforeBlock*>(node)->get_bablock();
      -
      1943  ba_type = "BEFORE";
      -
      1944  } else {
      -
      1945  ba_block = dynamic_cast<const ast::AfterBlock*>(node)->get_bablock();
      -
      1946  ba_type = "AFTER";
      -
      1947  }
      -
      1948 
      -
      1949  std::string ba_block_type = ba_block->get_type()->eval();
      -
      1950 
      -
      1951  /// name of the before/after function
      -
      1952  std::string function_name = method_name(fmt::format("nrn_before_after_{}", block_id));
      -
      1953 
      -
      1954  /// print common function code like init/state/current
      -
      1955  printer->add_newline(2);
      -
      1956  printer->fmt_line("/** {} of block type {} # {} */", ba_type, ba_block_type, block_id);
      -
      1957  print_global_function_common_code(BlockType::BeforeAfter, function_name);
      -
      1958 
      -
      1959  print_channel_iteration_block_parallel_hint(BlockType::BeforeAfter, node);
      -
      1960  printer->push_block("for (int id = 0; id < nodecount; id++)");
      -
      1961 
      -
      1962  printer->add_line("int node_id = node_index[id];");
      -
      1963  printer->add_line("double v = voltage[node_id];");
      -
      1964  print_v_unused();
      -
      1965 
      -
      1966  // read ion statements
      -
      1967  const auto& read_statements = ion_read_statements(BlockType::Equation);
      -
      1968  for (auto& statement: read_statements) {
      -
      1969  printer->add_line(statement);
      -
      1970  }
      -
      1971 
      -
      1972  /// print main body
      -
      1973  printer->add_indent();
      -
      1974  print_statement_block(*ba_block->get_statement_block());
      -
      1975  printer->add_newline();
      -
      1976 
      -
      1977  // write ion statements
      -
      1978  const auto& write_statements = ion_write_statements(BlockType::Equation);
      -
      1979  for (auto& statement: write_statements) {
      -
      1980  auto text = process_shadow_update_statement(statement, BlockType::Equation);
      -
      1981  printer->add_line(text);
      -
      1982  }
      -
      1983 
      -
      1984  /// loop end including data annotation block
      -
      1985  printer->pop_block();
      -
      1986  printer->pop_block();
      -
      1987  print_kernel_data_present_annotation_block_end();
      -
      1988 }
      -
      1989 
      - -
      1991  printer->add_newline(2);
      -
      1992  print_global_function_common_code(BlockType::Constructor);
      -
      1993  if (info.constructor_node != nullptr) {
      -
      1994  const auto& block = info.constructor_node->get_statement_block();
      -
      1995  print_statement_block(*block, false, false);
      -
      1996  }
      -
      1997  printer->add_line("#endif");
      -
      1998  printer->pop_block();
      -
      1999 }
      + +
      1810  BlockType type,
      +
      1811  const std::string& function_name) {
      +
      1812  std::string method;
      +
      1813  if (function_name.empty()) {
      +
      1814  method = compute_method_name(type);
      +
      1815  } else {
      +
      1816  method = function_name;
      +
      1817  }
      +
      1818  auto args = "NrnThread* nt, Memb_list* ml, int type";
      +
      1819 
      +
1820  // the watch statement function doesn't have a type argument
      +
      1821  if (type == BlockType::Watch) {
      +
      1822  args = "NrnThread* nt, Memb_list* ml";
      +
      1823  }
      +
      1824 
      +
      1825  print_global_method_annotation();
      +
      1826  printer->fmt_push_block("void {}({})", method, args);
      +
      1827  if (type != BlockType::Destructor && type != BlockType::Constructor) {
      +
      1828  // We do not (currently) support DESTRUCTOR and CONSTRUCTOR blocks
      +
      1829  // running anything on the GPU.
      +
      1830  print_kernel_data_present_annotation_block_begin();
      +
      1831  } else {
      +
1832  /// TODO: Remove this when the code generation is properly done
      +
      1833  /// Related to https://github.com/BlueBrain/nmodl/issues/692
      +
      1834  printer->add_line("#ifndef CORENEURON_BUILD");
      +
      1835  }
      +
      1836  printer->add_multi_line(R"CODE(
      +
      1837  int nodecount = ml->nodecount;
      +
      1838  int pnodecount = ml->_nodecount_padded;
      +
      1839  const int* node_index = ml->nodeindices;
      +
      1840  double* data = ml->data;
      +
      1841  const double* voltage = nt->_actual_v;
      +
      1842  )CODE");
      +
      1843 
      +
      1844  if (type == BlockType::Equation) {
      +
      1845  printer->add_line("double* vec_rhs = nt->_actual_rhs;");
      +
      1846  printer->add_line("double* vec_d = nt->_actual_d;");
      +
      1847  print_rhs_d_shadow_variables();
      +
      1848  }
      +
      1849  printer->add_line("Datum* indexes = ml->pdata;");
      +
      1850  printer->add_line("ThreadDatum* thread = ml->_thread;");
      +
      1851 
      +
      1852  if (type == BlockType::Initial) {
      +
      1853  printer->add_newline();
      +
      1854  printer->add_line("setup_instance(nt, ml);");
      +
      1855  }
      +
      1856  printer->fmt_line("auto* const inst = static_cast<{}*>(ml->instance);", instance_struct());
      +
      1857  printer->add_newline(1);
      +
      1858 }
      +
      1859 
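For reference, the common prologue emitted by this function looks roughly as follows for the INITIAL kernel of a hypothetical mechanism ExampleMech (names are illustrative; the Equation variant additionally declares vec_rhs/vec_d as shown above):

    void nrn_init_ExampleMech(NrnThread* nt, Memb_list* ml, int type) {
        int nodecount = ml->nodecount;
        int pnodecount = ml->_nodecount_padded;
        const int* node_index = ml->nodeindices;
        double* data = ml->data;
        const double* voltage = nt->_actual_v;
        Datum* indexes = ml->pdata;
        ThreadDatum* thread = ml->_thread;

        setup_instance(nt, ml);  // only emitted for BlockType::Initial
        auto* const inst = static_cast<ExampleMech_Instance*>(ml->instance);
        // ... kernel body emitted by the caller follows here ...
    }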
      + +
      1861  printer->add_newline(2);
      +
      1862  printer->add_line("/** initialize channel */");
      +
      1863 
      +
      1864  print_global_function_common_code(BlockType::Initial);
      +
      1865  if (info.derivimplicit_used()) {
      +
      1866  printer->add_newline();
      +
      1867  int nequation = info.num_equations;
      +
      1868  int list_num = info.derivimplicit_list_num;
      +
      1869  // clang-format off
      +
      1870  printer->fmt_line("int& deriv_advance_flag = *deriv{}_advance(thread);", list_num);
      +
      1871  printer->add_line("deriv_advance_flag = 0;");
      +
      1872  print_deriv_advance_flag_transfer_to_device();
      +
      1873  printer->fmt_line("auto ns = newtonspace{}(thread);", list_num);
      +
      1874  printer->fmt_line("auto& th = thread[dith{}()];", list_num);
      +
      1875  printer->push_block("if (*ns == nullptr)");
      +
      1876  printer->fmt_line("int vec_size = 2*{}*pnodecount*sizeof(double);", nequation);
      +
      1877  printer->fmt_line("double* vec = makevector(vec_size);", nequation);
      +
      1878  printer->fmt_line("th.pval = vec;", list_num);
      +
      1879  printer->fmt_line("*ns = nrn_cons_newtonspace({}, pnodecount);", nequation);
      +
      1880  print_newtonspace_transfer_to_device();
      +
      1881  printer->pop_block();
      +
      1882  // clang-format on
      +
      1883  }
      +
      1884 
      +
1886  // update global variables as those might be updated via python/hoc API
      +
      1886  // NOTE: CoreNEURON has enough information to do this on its own, which
      +
      1887  // would be neater.
      +
      1888  print_global_variable_device_update_annotation();
      +
      1889 
      +
      1890  if (skip_init_check) {
      +
      1891  printer->push_block("if (_nrn_skip_initmodel == 0)");
      +
      1892  }
      +
      1893 
      +
      1894  if (!info.changed_dt.empty()) {
      +
      1895  printer->fmt_line("double _save_prev_dt = {};",
      +
      1896  get_variable_name(naming::NTHREAD_DT_VARIABLE));
      +
      1897  printer->fmt_line("{} = {};",
      +
      1898  get_variable_name(naming::NTHREAD_DT_VARIABLE),
      +
      1899  info.changed_dt);
      +
      1900  print_dt_update_to_device();
      +
      1901  }
      +
      1902 
      +
      1903  print_channel_iteration_block_parallel_hint(BlockType::Initial, info.initial_node);
      +
      1904  printer->push_block("for (int id = 0; id < nodecount; id++)");
      +
      1905 
      +
      1906  if (info.net_receive_node != nullptr) {
      +
      1907  printer->fmt_line("{} = -1e20;", get_variable_name("tsave"));
      +
      1908  }
      +
      1909 
      +
      1910  print_initial_block(info.initial_node);
      +
      1911  printer->pop_block();
      +
      1912 
      +
      1913  if (!info.changed_dt.empty()) {
      +
      1914  printer->fmt_line("{} = _save_prev_dt;", get_variable_name(naming::NTHREAD_DT_VARIABLE));
      +
      1915  print_dt_update_to_device();
      +
      1916  }
      +
      1917 
      +
      1918  printer->pop_block();
      +
      1919 
      +
      1920  if (info.derivimplicit_used()) {
      +
      1921  printer->add_line("deriv_advance_flag = 1;");
      +
      1922  print_deriv_advance_flag_transfer_to_device();
      +
      1923  }
      +
      1924 
      +
      1925  if (info.net_send_used && !info.artificial_cell) {
      +
      1926  print_send_event_move();
      +
      1927  }
      +
      1928 
      +
      1929  print_kernel_data_present_annotation_block_end();
      +
      1930  if (skip_init_check) {
      +
      1931  printer->pop_block();
      +
      1932  }
      +
      1933 }
      +
      1934 
      + +
      1936  size_t block_id) {
      +
      1937  std::string ba_type;
      +
      1938  std::shared_ptr<ast::BABlock> ba_block;
      +
      1939 
      +
      1940  if (node->is_before_block()) {
      +
      1941  ba_block = dynamic_cast<const ast::BeforeBlock*>(node)->get_bablock();
      +
      1942  ba_type = "BEFORE";
      +
      1943  } else {
      +
      1944  ba_block = dynamic_cast<const ast::AfterBlock*>(node)->get_bablock();
      +
      1945  ba_type = "AFTER";
      +
      1946  }
      +
      1947 
      +
      1948  std::string ba_block_type = ba_block->get_type()->eval();
      +
      1949 
      +
      1950  /// name of the before/after function
      +
      1951  std::string function_name = method_name(fmt::format("nrn_before_after_{}", block_id));
      +
      1952 
      +
      1953  /// print common function code like init/state/current
      +
      1954  printer->add_newline(2);
      +
      1955  printer->fmt_line("/** {} of block type {} # {} */", ba_type, ba_block_type, block_id);
      +
      1956  print_global_function_common_code(BlockType::BeforeAfter, function_name);
      +
      1957 
      +
      1958  print_channel_iteration_block_parallel_hint(BlockType::BeforeAfter, node);
      +
      1959  printer->push_block("for (int id = 0; id < nodecount; id++)");
      +
      1960 
      +
      1961  printer->add_line("int node_id = node_index[id];");
      +
      1962  printer->add_line("double v = voltage[node_id];");
      +
      1963  print_v_unused();
      +
      1964 
      +
      1965  // read ion statements
      +
      1966  const auto& read_statements = ion_read_statements(BlockType::Equation);
      +
      1967  for (auto& statement: read_statements) {
      +
      1968  printer->add_line(statement);
      +
      1969  }
      +
      1970 
      +
      1971  /// print main body
      +
      1972  printer->add_indent();
      +
      1973  print_statement_block(*ba_block->get_statement_block());
      +
      1974  printer->add_newline();
      +
      1975 
      +
      1976  // write ion statements
      +
      1977  const auto& write_statements = ion_write_statements(BlockType::Equation);
      +
      1978  for (auto& statement: write_statements) {
      +
      1979  auto text = process_shadow_update_statement(statement, BlockType::Equation);
      +
      1980  printer->add_line(text);
      +
      1981  }
      +
      1982 
      +
      1983  /// loop end including data annotation block
      +
      1984  printer->pop_block();
      +
      1985  printer->pop_block();
      +
      1986  print_kernel_data_present_annotation_block_end();
      +
      1987 }
      +
      1988 
      + +
      1990  printer->add_newline(2);
      +
      1991  print_global_function_common_code(BlockType::Constructor);
      +
      1992  if (info.constructor_node != nullptr) {
      +
      1993  const auto& block = info.constructor_node->get_statement_block();
      +
      1994  print_statement_block(*block, false, false);
      +
      1995  }
      +
      1996  printer->add_line("#endif");
      +
      1997  printer->pop_block();
      +
      1998 }
      +
      1999 
      2000 
      -
      2001 
      - -
      2003  printer->add_newline(2);
      -
      2004  print_global_function_common_code(BlockType::Destructor);
      -
      2005  if (info.destructor_node != nullptr) {
      -
      2006  const auto& block = info.destructor_node->get_statement_block();
      -
      2007  print_statement_block(*block, false, false);
      -
      2008  }
      -
      2009  printer->add_line("#endif");
      -
      2010  printer->pop_block();
      -
      2011 }
      + +
      2002  printer->add_newline(2);
      +
      2003  print_global_function_common_code(BlockType::Destructor);
      +
      2004  if (info.destructor_node != nullptr) {
      +
      2005  const auto& block = info.destructor_node->get_statement_block();
      +
      2006  print_statement_block(*block, false, false);
      +
      2007  }
      +
      2008  printer->add_line("#endif");
      +
      2009  printer->pop_block();
      +
      2010 }
      +
      2011 
      2012 
      -
      2013 
      - -
      2015  printer->add_newline(2);
      -
      2016  auto method = method_name(naming::NRN_ALLOC_METHOD);
      -
      2017  printer->fmt_push_block("static void {}(double* data, Datum* indexes, int type)", method);
      -
      2018  printer->add_line("// do nothing");
      -
      2019  printer->pop_block();
      -
      2020 }
      -
      2021 
      -
      2022 /**
      -
2023  * \todo Number of watches could be more than the number of statements
      -
      2024  * according to grammar. Check if this is correctly handled in neuron
      -
      2025  * and coreneuron.
      -
      2026  */
      - -
      2028  if (info.watch_statements.empty()) {
      -
      2029  return;
      -
      2030  }
      -
      2031 
      -
      2032  printer->add_newline(2);
      -
      2033  auto inst = fmt::format("{}* inst", instance_struct());
      -
      2034 
      -
      2035  printer->fmt_push_block(
      -
      2036  "static void nrn_watch_activate({}, int id, int pnodecount, int watch_id, "
      -
      2037  "double v, bool &watch_remove)",
      -
      2038  inst);
      -
      2039 
      -
      2040  // initialize all variables only during first watch statement
      -
      2041  printer->push_block("if (watch_remove == false)");
      -
      2042  for (int i = 0; i < info.watch_count; i++) {
      -
      2043  auto name = get_variable_name(fmt::format("watch{}", i + 1));
      -
      2044  printer->fmt_line("{} = 0;", name);
      -
      2045  }
      -
      2046  printer->add_line("watch_remove = true;");
      -
      2047  printer->pop_block();
      -
      2048 
      -
      2049  /**
      -
      2050  * \todo Similar to neuron/coreneuron we are using
      -
2051  * the first watch and ignoring the rest.
      -
      2052  */
      -
      2053  for (int i = 0; i < info.watch_statements.size(); i++) {
      -
      2054  auto statement = info.watch_statements[i];
      -
      2055  printer->fmt_push_block("if (watch_id == {})", i);
      -
      2056 
      -
      2057  auto varname = get_variable_name(fmt::format("watch{}", i + 1));
      -
      2058  printer->add_indent();
      -
      2059  printer->fmt_text("{} = 2 + (", varname);
      -
      2060  auto watch = statement->get_statements().front();
      -
      2061  watch->get_expression()->visit_children(*this);
      -
      2062  printer->add_text(");");
      -
      2063  printer->add_newline();
      -
      2064 
      -
      2065  printer->pop_block();
      -
      2066  }
      -
      2067  printer->pop_block();
      -
      2068 }
      -
      2069 
      -
      2070 
      -
      2071 /**
      -
      2072  * \todo Similar to print_watch_activate, we are using only
      -
2073  * the first watch. Need to verify with neuron/coreneuron about the rest.
      -
      2074  */
      - -
      2076  if (info.watch_statements.empty()) {
      -
      2077  return;
      -
      2078  }
      -
      2079 
      -
      2080  printer->add_newline(2);
      -
      2081  printer->add_line("/** routine to check watch activation */");
      -
      2082  print_global_function_common_code(BlockType::Watch);
      -
      2083 
      -
2084  // WATCH statements appear in NET_RECEIVE block and while printing
      -
      2085  // net_receive function we already check if it contains any MUTEX/PROTECT
      -
2086  // constructs. As WATCH is not a top level block but a list of statements,
      -
      2087  // we don't need to have ivdep pragma related check
      -
      2088  print_channel_iteration_block_parallel_hint(BlockType::Watch, nullptr);
      -
      2089 
      -
      2090  printer->push_block("for (int id = 0; id < nodecount; id++)");
      -
      2091 
      -
      2092  if (info.is_voltage_used_by_watch_statements()) {
      -
      2093  printer->add_line("int node_id = node_index[id];");
      -
      2094  printer->add_line("double v = voltage[node_id];");
      -
      2095  print_v_unused();
      -
      2096  }
      -
      2097 
      -
2098  // flag to make sure only one WATCH statement can be triggered at a time
      -
      2099  printer->add_line("bool watch_untriggered = true;");
      -
      2100 
      -
      2101  for (int i = 0; i < info.watch_statements.size(); i++) {
      -
      2102  auto statement = info.watch_statements[i];
      -
      2103  const auto& watch = statement->get_statements().front();
      -
      2104  const auto& varname = get_variable_name(fmt::format("watch{}", i + 1));
      -
      2105 
      -
      2106  // start block 1
      -
      2107  printer->fmt_push_block("if ({}&2 && watch_untriggered)", varname);
      -
      2108 
      -
      2109  // start block 2
      -
      2110  printer->add_indent();
      -
      2111  printer->add_text("if (");
      -
      2112  watch->get_expression()->accept(*this);
      -
      2113  printer->add_text(") {");
      -
      2114  printer->add_newline();
      -
      2115  printer->increase_indent();
      -
      2116 
      -
      2117  // start block 3
      -
      2118  printer->fmt_push_block("if (({}&1) == 0)", varname);
      -
      2119 
      -
      2120  printer->add_line("watch_untriggered = false;");
      -
      2121 
      -
      2122  const auto& tqitem = get_variable_name("tqitem");
      -
      2123  const auto& point_process = get_variable_name("point_process");
      -
      2124  printer->add_indent();
      -
      2125  printer->add_text("net_send_buffering(");
      -
      2126  const auto& t = get_variable_name("t");
      -
      2127  printer->fmt_text("nt, ml->_net_send_buffer, 0, {}, -1, {}, {}+0.0, ",
      -
      2128  tqitem,
      -
      2129  point_process,
      -
      2130  t);
      -
      2131  watch->get_value()->accept(*this);
      -
      2132  printer->add_text(");");
      -
      2133  printer->add_newline();
      -
      2134  printer->pop_block();
      -
      2135 
      -
      2136  printer->add_line(varname, " = 3;");
      -
      2137  // end block 3
      -
      2138 
      -
      2139  // start block 3
      -
      2140  printer->decrease_indent();
      -
      2141  printer->push_block("} else");
      -
      2142  printer->add_line(varname, " = 2;");
      -
      2143  printer->pop_block();
      -
      2144  // end block 3
      -
      2145 
      -
      2146  printer->pop_block();
      -
      2147  // end block 1
      -
      2148  }
      -
      2149 
      -
      2150  printer->pop_block();
      -
      2151  print_send_event_move();
      -
      2152  print_kernel_data_present_annotation_block_end();
      -
      2153  printer->pop_block();
      -
      2154 }
      + +
      2014  printer->add_newline(2);
      +
      2015  auto method = method_name(naming::NRN_ALLOC_METHOD);
      +
      2016  printer->fmt_push_block("static void {}(double* data, Datum* indexes, int type)", method);
      +
      2017  printer->add_line("// do nothing");
      +
      2018  printer->pop_block();
      +
      2019 }
      +
      2020 
      +
      2021 /**
      +
2022  * \todo Number of watches could be more than the number of statements
      +
      2023  * according to grammar. Check if this is correctly handled in neuron
      +
      2024  * and coreneuron.
      +
      2025  */
      + +
      2027  if (info.watch_statements.empty()) {
      +
      2028  return;
      +
      2029  }
      +
      2030 
      +
      2031  printer->add_newline(2);
      +
      2032  auto inst = fmt::format("{}* inst", instance_struct());
      +
      2033 
      +
      2034  printer->fmt_push_block(
      +
      2035  "static void nrn_watch_activate({}, int id, int pnodecount, int watch_id, "
      +
      2036  "double v, bool &watch_remove)",
      +
      2037  inst);
      +
      2038 
      +
      2039  // initialize all variables only during first watch statement
      +
      2040  printer->push_block("if (watch_remove == false)");
      +
      2041  for (int i = 0; i < info.watch_count; i++) {
      +
      2042  auto name = get_variable_name(fmt::format("watch{}", i + 1));
      +
      2043  printer->fmt_line("{} = 0;", name);
      +
      2044  }
      +
      2045  printer->add_line("watch_remove = true;");
      +
      2046  printer->pop_block();
      +
      2047 
      +
      2048  /**
      +
      2049  * \todo Similar to neuron/coreneuron we are using
      +
2050  * the first watch and ignoring the rest.
      +
      2051  */
      +
      2052  for (int i = 0; i < info.watch_statements.size(); i++) {
      +
      2053  auto statement = info.watch_statements[i];
      +
      2054  printer->fmt_push_block("if (watch_id == {})", i);
      +
      2055 
      +
      2056  auto varname = get_variable_name(fmt::format("watch{}", i + 1));
      +
      2057  printer->add_indent();
      +
      2058  printer->fmt_text("{} = 2 + (", varname);
      +
      2059  auto watch = statement->get_statements().front();
      +
      2060  watch->get_expression()->visit_children(*this);
      +
      2061  printer->add_text(");");
      +
      2062  printer->add_newline();
      +
      2063 
      +
      2064  printer->pop_block();
      +
      2065  }
      +
      2066  printer->pop_block();
      +
      2067 }
      +
      2068 
      +
      2069 
      +
      2070 /**
      +
      2071  * \todo Similar to print_watch_activate, we are using only
      +
2072  * the first watch. Need to verify with neuron/coreneuron about the rest.
      +
      2073  */
      + +
      2075  if (info.watch_statements.empty()) {
      +
      2076  return;
      +
      2077  }
      +
      2078 
      +
      2079  printer->add_newline(2);
      +
      2080  printer->add_line("/** routine to check watch activation */");
      +
      2081  print_global_function_common_code(BlockType::Watch);
      +
      2082 
      +
2083  // WATCH statements appear in NET_RECEIVE block and while printing
      +
      2084  // net_receive function we already check if it contains any MUTEX/PROTECT
      +
2085  // constructs. As WATCH is not a top level block but a list of statements,
      +
      2086  // we don't need to have ivdep pragma related check
      +
      2087  print_channel_iteration_block_parallel_hint(BlockType::Watch, nullptr);
      +
      2088 
      +
      2089  printer->push_block("for (int id = 0; id < nodecount; id++)");
      +
      2090 
      +
      2091  if (info.is_voltage_used_by_watch_statements()) {
      +
      2092  printer->add_line("int node_id = node_index[id];");
      +
      2093  printer->add_line("double v = voltage[node_id];");
      +
      2094  print_v_unused();
      +
      2095  }
      +
      2096 
      +
2097  // flag to make sure only one WATCH statement can be triggered at a time
      +
      2098  printer->add_line("bool watch_untriggered = true;");
      +
      2099 
      +
      2100  for (int i = 0; i < info.watch_statements.size(); i++) {
      +
      2101  auto statement = info.watch_statements[i];
      +
      2102  const auto& watch = statement->get_statements().front();
      +
      2103  const auto& varname = get_variable_name(fmt::format("watch{}", i + 1));
      +
      2104 
      +
      2105  // start block 1
      +
      2106  printer->fmt_push_block("if ({}&2 && watch_untriggered)", varname);
      +
      2107 
      +
      2108  // start block 2
      +
      2109  printer->add_indent();
      +
      2110  printer->add_text("if (");
      +
      2111  watch->get_expression()->accept(*this);
      +
      2112  printer->add_text(") {");
      +
      2113  printer->add_newline();
      +
      2114  printer->increase_indent();
      +
      2115 
      +
      2116  // start block 3
      +
      2117  printer->fmt_push_block("if (({}&1) == 0)", varname);
      +
      2118 
      +
      2119  printer->add_line("watch_untriggered = false;");
      +
      2120 
      +
      2121  const auto& tqitem = get_variable_name("tqitem");
      +
      2122  const auto& point_process = get_variable_name("point_process");
      +
      2123  printer->add_indent();
      +
      2124  printer->add_text("net_send_buffering(");
      +
      2125  const auto& t = get_variable_name("t");
      +
      2126  printer->fmt_text("nt, ml->_net_send_buffer, 0, {}, -1, {}, {}+0.0, ",
      +
      2127  tqitem,
      +
      2128  point_process,
      +
      2129  t);
      +
      2130  watch->get_value()->accept(*this);
      +
      2131  printer->add_text(");");
      +
      2132  printer->add_newline();
      +
      2133  printer->pop_block();
      +
      2134 
      +
      2135  printer->add_line(varname, " = 3;");
      +
      2136  // end block 3
      +
      2137 
      +
      2138  // start block 3
      +
      2139  printer->decrease_indent();
      +
      2140  printer->push_block("} else");
      +
      2141  printer->add_line(varname, " = 2;");
      +
      2142  printer->pop_block();
      +
      2143  // end block 3
      +
      2144 
      +
      2145  printer->pop_block();
      +
      2146  // end block 1
      +
      2147  }
      +
      2148 
      +
      2149  printer->pop_block();
      +
      2150  print_send_event_move();
      +
      2151  print_kernel_data_present_annotation_block_end();
      +
      2152  printer->pop_block();
      +
      2153 }
      +
      2154 
      2155 
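Putting the pieces together, each WATCH statement expands inside the generated nrn_watch_check loop into code of roughly this shape (variable references are abbreviated here; in the generated file they go through get_variable_name(), and the condition and flag come from the MOD file):

    if (watch1&2 && watch_untriggered) {
        if (/* WATCH condition from the MOD file */) {
            if ((watch1&1) == 0) {
                watch_untriggered = false;
                net_send_buffering(nt, ml->_net_send_buffer, 0, tqitem, -1,
                                   point_process, t+0.0, /* WATCH flag */);
            }
            watch1 = 3;
        } else {
            watch1 = 2;
        }
    }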
      -
      2156 
      - -
      2158  bool need_mech_inst) {
      -
      2159  printer->add_multi_line(R"CODE(
      -
      2160  int tid = pnt->_tid;
      -
      2161  int id = pnt->_i_instance;
      -
      2162  double v = 0;
      -
      2163  )CODE");
      -
      2164 
      -
      2165  if (info.artificial_cell || node.is_initial_block()) {
      -
      2166  printer->add_line("NrnThread* nt = nrn_threads + tid;");
      -
      2167  printer->add_line("Memb_list* ml = nt->_ml_list[pnt->_type];");
      -
      2168  }
      -
      2169  if (node.is_initial_block()) {
      -
      2170  print_kernel_data_present_annotation_block_begin();
      -
      2171  }
      -
      2172 
      -
      2173  printer->add_multi_line(R"CODE(
      -
      2174  int nodecount = ml->nodecount;
      -
      2175  int pnodecount = ml->_nodecount_padded;
      -
      2176  double* data = ml->data;
      -
      2177  double* weights = nt->weights;
      -
      2178  Datum* indexes = ml->pdata;
      -
      2179  ThreadDatum* thread = ml->_thread;
      -
      2180  )CODE");
      -
      2181  if (need_mech_inst) {
      -
      2182  printer->fmt_line("auto* const inst = static_cast<{0}*>(ml->instance);", instance_struct());
      -
      2183  }
      -
      2184 
      -
      2185  if (node.is_initial_block()) {
      -
      2186  print_net_init_acc_serial_annotation_block_begin();
      -
      2187  }
      -
      2188 
      -
      2189  // rename variables but need to see if they are actually used
      -
      2190  auto parameters = info.net_receive_node->get_parameters();
      -
      2191  if (!parameters.empty()) {
      -
      2192  int i = 0;
      -
      2193  printer->add_newline();
      -
      2194  for (auto& parameter: parameters) {
      -
      2195  auto name = parameter->get_node_name();
      -
      2196  bool var_used = VarUsageVisitor().variable_used(node, "(*" + name + ")");
      -
      2197  if (var_used) {
      -
      2198  printer->fmt_line("double* {} = weights + weight_index + {};", name, i);
      -
      2199  RenameVisitor vr(name, "*" + name);
      -
      2200  node.visit_children(vr);
      -
      2201  }
      -
      2202  i++;
      -
      2203  }
      -
      2204  }
      -
      2205 }
      + +
      2157  bool need_mech_inst) {
      +
      2158  printer->add_multi_line(R"CODE(
      +
      2159  int tid = pnt->_tid;
      +
      2160  int id = pnt->_i_instance;
      +
      2161  double v = 0;
      +
      2162  )CODE");
      +
      2163 
      +
      2164  if (info.artificial_cell || node.is_initial_block()) {
      +
      2165  printer->add_line("NrnThread* nt = nrn_threads + tid;");
      +
      2166  printer->add_line("Memb_list* ml = nt->_ml_list[pnt->_type];");
      +
      2167  }
      +
      2168  if (node.is_initial_block()) {
      +
      2169  print_kernel_data_present_annotation_block_begin();
      +
      2170  }
      +
      2171 
      +
      2172  printer->add_multi_line(R"CODE(
      +
      2173  int nodecount = ml->nodecount;
      +
      2174  int pnodecount = ml->_nodecount_padded;
      +
      2175  double* data = ml->data;
      +
      2176  double* weights = nt->weights;
      +
      2177  Datum* indexes = ml->pdata;
      +
      2178  ThreadDatum* thread = ml->_thread;
      +
      2179  )CODE");
      +
      2180  if (need_mech_inst) {
      +
      2181  printer->fmt_line("auto* const inst = static_cast<{0}*>(ml->instance);", instance_struct());
      +
      2182  }
      +
      2183 
      +
      2184  if (node.is_initial_block()) {
      +
      2185  print_net_init_acc_serial_annotation_block_begin();
      +
      2186  }
      +
      2187 
      +
      2188  // rename variables but need to see if they are actually used
      +
      2189  auto parameters = info.net_receive_node->get_parameters();
      +
      2190  if (!parameters.empty()) {
      +
      2191  int i = 0;
      +
      2192  printer->add_newline();
      +
      2193  for (auto& parameter: parameters) {
      +
      2194  auto name = parameter->get_node_name();
      +
      2195  bool var_used = VarUsageVisitor().variable_used(node, "(*" + name + ")");
      +
      2196  if (var_used) {
      +
      2197  printer->fmt_line("double* {} = weights + weight_index + {};", name, i);
      +
      2198  RenameVisitor vr(name, "*" + name);
      +
      2199  node.visit_children(vr);
      +
      2200  }
      +
      2201  i++;
      +
      2202  }
      +
      2203  }
      +
      2204 }
      +
      2205 
      2206 
      -
      2207 
      - -
      2209  auto const& arguments = node.get_arguments();
      -
      2210  const auto& tqitem = get_variable_name("tqitem");
      -
      2211  std::string weight_index = "weight_index";
      -
      2212  std::string pnt = "pnt";
      -
      2213 
      -
      2214  // for functions not generated from NET_RECEIVE blocks (i.e. top level INITIAL block)
      -
      2215  // the weight_index argument is 0.
      -
      2216  if (!printing_net_receive && !printing_net_init) {
      -
      2217  weight_index = "0";
      -
      2218  auto var = get_variable_name("point_process");
      -
      2219  if (info.artificial_cell) {
      -
      2220  pnt = "(Point_process*)" + var;
      -
      2221  }
      -
      2222  }
      -
      2223 
      -
      2224  // artificial cells don't use spike buffering
      -
      2225  // clang-format off
      -
      2226  if (info.artificial_cell) {
      -
      2227  printer->fmt_text("artcell_net_send(&{}, {}, {}, nt->_t+", tqitem, weight_index, pnt);
      -
      2228  } else {
      -
      2229  const auto& point_process = get_variable_name("point_process");
      -
      2230  const auto& t = get_variable_name("t");
      -
      2231  printer->add_text("net_send_buffering(");
      -
      2232  printer->fmt_text("nt, ml->_net_send_buffer, 0, {}, {}, {}, {}+", tqitem, weight_index, point_process, t);
      -
      2233  }
      -
      2234  // clang-format off
      -
      2235  print_vector_elements(arguments, ", ");
      -
      2236  printer->add_text(')');
      -
      2237 }
      + +
      2208  auto const& arguments = node.get_arguments();
      +
      2209  const auto& tqitem = get_variable_name("tqitem");
      +
      2210  std::string weight_index = "weight_index";
      +
      2211  std::string pnt = "pnt";
      +
      2212 
      +
      2213  // for functions not generated from NET_RECEIVE blocks (i.e. top level INITIAL block)
      +
      2214  // the weight_index argument is 0.
      +
      2215  if (!printing_net_receive && !printing_net_init) {
      +
      2216  weight_index = "0";
      +
      2217  auto var = get_variable_name("point_process");
      +
      2218  if (info.artificial_cell) {
      +
      2219  pnt = "(Point_process*)" + var;
      +
      2220  }
      +
      2221  }
      +
      2222 
      +
      2223  // artificial cells don't use spike buffering
      +
      2224  // clang-format off
      +
      2225  if (info.artificial_cell) {
      +
      2226  printer->fmt_text("artcell_net_send(&{}, {}, {}, nt->_t+", tqitem, weight_index, pnt);
      +
      2227  } else {
      +
      2228  const auto& point_process = get_variable_name("point_process");
      +
      2229  const auto& t = get_variable_name("t");
      +
      2230  printer->add_text("net_send_buffering(");
      +
      2231  printer->fmt_text("nt, ml->_net_send_buffer, 0, {}, {}, {}, {}+", tqitem, weight_index, point_process, t);
      +
      2232  }
      +
      2233  // clang-format off
      +
      2234  print_vector_elements(arguments, ", ");
      +
      2235  printer->add_text(')');
      +
      2236 }
      +
      2237 
      2238 
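The two branches above therefore produce calls of roughly the following form; the trailing delay argument is completed by print_vector_elements() from the NET_SEND arguments in the MOD file:

    // ARTIFICIAL_CELL: events go straight to the event queue
    artcell_net_send(&tqitem, weight_index, pnt, nt->_t + /* delay */);

    // regular (non-artificial) mechanism: events are buffered and flushed
    // later by the net_buf_receive kernel
    net_send_buffering(nt, ml->_net_send_buffer, 0, tqitem, weight_index,
                       point_process, t + /* delay */);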
      -
      2239 
      - -
      2241  if (!printing_net_receive && !printing_net_init) {
      -
      2242  throw std::runtime_error("Error : net_move only allowed in NET_RECEIVE block");
      -
      2243  }
      -
      2244 
      -
      2245  auto const& arguments = node.get_arguments();
      -
      2246  const auto& tqitem = get_variable_name("tqitem");
      -
      2247  std::string weight_index = "-1";
      -
      2248  std::string pnt = "pnt";
      -
      2249 
      -
      2250  // artificial cells don't use spike buffering
      -
      2251  // clang-format off
      -
      2252  if (info.artificial_cell) {
      -
      2253  printer->fmt_text("artcell_net_move(&{}, {}, ", tqitem, pnt);
      -
      2254  print_vector_elements(arguments, ", ");
      -
      2255  printer->add_text(")");
      -
      2256  } else {
      -
      2257  const auto& point_process = get_variable_name("point_process");
      -
      2258  printer->add_text("net_send_buffering(");
      -
      2259  printer->fmt_text("nt, ml->_net_send_buffer, 2, {}, {}, {}, ", tqitem, weight_index, point_process);
      -
      2260  print_vector_elements(arguments, ", ");
      -
      2261  printer->add_text(", 0.0");
      -
      2262  printer->add_text(")");
      -
      2263  }
      -
      2264 }
      + +
      2240  if (!printing_net_receive && !printing_net_init) {
      +
      2241  throw std::runtime_error("Error : net_move only allowed in NET_RECEIVE block");
      +
      2242  }
      +
      2243 
      +
      2244  auto const& arguments = node.get_arguments();
      +
      2245  const auto& tqitem = get_variable_name("tqitem");
      +
      2246  std::string weight_index = "-1";
      +
      2247  std::string pnt = "pnt";
      +
      2248 
      +
      2249  // artificial cells don't use spike buffering
      +
      2250  // clang-format off
      +
      2251  if (info.artificial_cell) {
      +
      2252  printer->fmt_text("artcell_net_move(&{}, {}, ", tqitem, pnt);
      +
      2253  print_vector_elements(arguments, ", ");
      +
      2254  printer->add_text(")");
      +
      2255  } else {
      +
      2256  const auto& point_process = get_variable_name("point_process");
      +
      2257  printer->add_text("net_send_buffering(");
      +
      2258  printer->fmt_text("nt, ml->_net_send_buffer, 2, {}, {}, {}, ", tqitem, weight_index, point_process);
      +
      2259  print_vector_elements(arguments, ", ");
      +
      2260  printer->add_text(", 0.0");
      +
      2261  printer->add_text(")");
      +
      2262  }
      +
      2263 }
      +
      2264 
      2265 
      -
      2266 
      - -
      2268  const auto& arguments = node.get_arguments();
      -
      2269  if (info.artificial_cell) {
      -
      2270  printer->add_text("net_event(pnt, ");
      -
      2271  print_vector_elements(arguments, ", ");
      -
      2272  } else {
      -
      2273  const auto& point_process = get_variable_name("point_process");
      -
      2274  printer->add_text("net_send_buffering(");
      -
      2275  printer->fmt_text("nt, ml->_net_send_buffer, 1, -1, -1, {}, ", point_process);
      -
      2276  print_vector_elements(arguments, ", ");
      -
      2277  printer->add_text(", 0.0");
      -
      2278  }
      -
      2279  printer->add_text(")");
      -
      2280 }
      -
      2281 
      -
      2282 /**
      -
      2283  * Rename arguments to NET_RECEIVE block with corresponding pointer variable
      -
      2284  *
      -
      2285  * Arguments to NET_RECEIVE block are packed and passed via weight vector. These
      -
2286  * variables need to be replaced with the corresponding pointer variables. For example,
      -
2287  * if the mod file looks like
      -
      2288  *
      -
      2289  * \code{.mod}
      -
      2290  * NET_RECEIVE (weight, R){
      -
      2291  * INITIAL {
      -
      2292  * R=1
      -
      2293  * }
      -
      2294  * }
      -
      2295  * \endcode
      -
      2296  *
      -
2297  * then the generated code for the initial block should be:
      -
      2298  *
      -
      2299  * \code{.cpp}
      -
      2300  * double* R = weights + weight_index + 0;
      -
      2301  * (*R) = 1.0;
      -
      2302  * \endcode
      -
      2303  *
      -
      2304  * So, the `R` in AST needs to be renamed with `(*R)`.
      -
      2305  */
      -
      2306 static void rename_net_receive_arguments(const ast::NetReceiveBlock& net_receive_node, const ast::Node& node) {
      -
      2307  const auto& parameters = net_receive_node.get_parameters();
      -
      2308  for (auto& parameter: parameters) {
      -
      2309  const auto& name = parameter->get_node_name();
      -
      2310  auto var_used = VarUsageVisitor().variable_used(node, name);
      -
      2311  if (var_used) {
      -
      2312  RenameVisitor vr(name, "(*" + name + ")");
      -
      2313  node.get_statement_block()->visit_children(vr);
      -
      2314  }
      -
      2315  }
      -
      2316 }
      + +
      2267  const auto& arguments = node.get_arguments();
      +
      2268  if (info.artificial_cell) {
      +
      2269  printer->add_text("net_event(pnt, ");
      +
      2270  print_vector_elements(arguments, ", ");
      +
      2271  } else {
      +
      2272  const auto& point_process = get_variable_name("point_process");
      +
      2273  printer->add_text("net_send_buffering(");
      +
      2274  printer->fmt_text("nt, ml->_net_send_buffer, 1, -1, -1, {}, ", point_process);
      +
      2275  print_vector_elements(arguments, ", ");
      +
      2276  printer->add_text(", 0.0");
      +
      2277  }
      +
      2278  printer->add_text(")");
      +
      2279 }
      +
      2280 
      +
      2281 /**
      +
      2282  * Rename arguments to NET_RECEIVE block with corresponding pointer variable
      +
      2283  *
      +
      2284  * Arguments to NET_RECEIVE block are packed and passed via weight vector. These
      +
2285  * variables need to be replaced with the corresponding pointer variables. For example,
      +
2286  * if the mod file looks like
      +
      2287  *
      +
      2288  * \code{.mod}
      +
      2289  * NET_RECEIVE (weight, R){
      +
      2290  * INITIAL {
      +
      2291  * R=1
      +
      2292  * }
      +
      2293  * }
      +
      2294  * \endcode
      +
      2295  *
      +
2296  * then the generated code for the initial block should be:
      +
      2297  *
      +
      2298  * \code{.cpp}
      +
      2299  * double* R = weights + weight_index + 0;
      +
      2300  * (*R) = 1.0;
      +
      2301  * \endcode
      +
      2302  *
      +
      2303  * So, the `R` in AST needs to be renamed with `(*R)`.
      +
      2304  */
      +
      2305 static void rename_net_receive_arguments(const ast::NetReceiveBlock& net_receive_node, const ast::Node& node) {
      +
      2306  const auto& parameters = net_receive_node.get_parameters();
      +
      2307  for (auto& parameter: parameters) {
      +
      2308  const auto& name = parameter->get_node_name();
      +
      2309  auto var_used = VarUsageVisitor().variable_used(node, name);
      +
      2310  if (var_used) {
      +
      2311  RenameVisitor vr(name, "(*" + name + ")");
      +
      2312  node.get_statement_block()->visit_children(vr);
      +
      2313  }
      +
      2314  }
      +
      2315 }
      +
      2316 
      2317 
      -
      2318 
      - -
      2320  const auto node = info.net_receive_initial_node;
      -
      2321  if (node == nullptr) {
      -
      2322  return;
      -
      2323  }
      -
      2324 
      -
      2325  // rename net_receive arguments used in the initial block of net_receive
      -
      2326  rename_net_receive_arguments(*info.net_receive_node, *node);
      -
      2327 
      -
      2328  printing_net_init = true;
      -
      2329  auto args = "Point_process* pnt, int weight_index, double flag";
      -
      2330  printer->add_newline(2);
      -
      2331  printer->add_line("/** initialize block for net receive */");
      -
      2332  printer->fmt_push_block("static void net_init({})", args);
      -
      2333  auto block = node->get_statement_block().get();
      -
      2334  if (block->get_statements().empty()) {
      -
      2335  printer->add_line("// do nothing");
      -
      2336  } else {
      -
      2337  print_net_receive_common_code(*node);
      -
      2338  print_statement_block(*block, false, false);
      -
      2339  if (node->is_initial_block()) {
      -
      2340  print_net_init_acc_serial_annotation_block_end();
      -
      2341  print_kernel_data_present_annotation_block_end();
      -
      2342  printer->add_line("auto& nsb = ml->_net_send_buffer;");
      -
      2343  print_net_send_buf_update_to_host();
      -
      2344  }
      -
      2345  }
      -
      2346  printer->pop_block();
      -
      2347  printing_net_init = false;
      -
      2348 }
      + +
      2319  const auto node = info.net_receive_initial_node;
      +
      2320  if (node == nullptr) {
      +
      2321  return;
      +
      2322  }
      +
      2323 
      +
      2324  // rename net_receive arguments used in the initial block of net_receive
      +
      2325  rename_net_receive_arguments(*info.net_receive_node, *node);
      +
      2326 
      +
      2327  printing_net_init = true;
      +
      2328  auto args = "Point_process* pnt, int weight_index, double flag";
      +
      2329  printer->add_newline(2);
      +
      2330  printer->add_line("/** initialize block for net receive */");
      +
      2331  printer->fmt_push_block("static void net_init({})", args);
      +
      2332  auto block = node->get_statement_block().get();
      +
      2333  if (block->get_statements().empty()) {
      +
      2334  printer->add_line("// do nothing");
      +
      2335  } else {
      +
      2336  print_net_receive_common_code(*node);
      +
      2337  print_statement_block(*block, false, false);
      +
      2338  if (node->is_initial_block()) {
      +
      2339  print_net_init_acc_serial_annotation_block_end();
      +
      2340  print_kernel_data_present_annotation_block_end();
      +
      2341  printer->add_line("auto& nsb = ml->_net_send_buffer;");
      +
      2342  print_net_send_buf_update_to_host();
      +
      2343  }
      +
      2344  }
      +
      2345  printer->pop_block();
      +
      2346  printing_net_init = false;
      +
      2347 }
      +
      2348 
      2349 
      -
      2350 
      - -
      2352  printer->add_newline();
      -
      2353  printer->add_line("NetSendBuffer_t* nsb = ml->_net_send_buffer;");
      -
      2354  print_net_send_buf_update_to_host();
      -
      2355  printer->push_block("for (int i=0; i < nsb->_cnt; i++)");
      -
      2356  printer->add_multi_line(R"CODE(
      -
      2357  int type = nsb->_sendtype[i];
      -
      2358  int tid = nt->id;
      -
      2359  double t = nsb->_nsb_t[i];
      -
      2360  double flag = nsb->_nsb_flag[i];
      -
      2361  int vdata_index = nsb->_vdata_index[i];
      -
      2362  int weight_index = nsb->_weight_index[i];
      -
      2363  int point_index = nsb->_pnt_index[i];
      -
      2364  net_sem_from_gpu(type, vdata_index, weight_index, tid, point_index, t, flag);
      -
      2365  )CODE");
      -
      2366  printer->pop_block();
      -
      2367  printer->add_line("nsb->_cnt = 0;");
      -
      2368  print_net_send_buf_count_update_to_device();
      -
      2369 }
      + +
      2351  printer->add_newline();
      +
      2352  printer->add_line("NetSendBuffer_t* nsb = ml->_net_send_buffer;");
      +
      2353  print_net_send_buf_update_to_host();
      +
      2354  printer->push_block("for (int i=0; i < nsb->_cnt; i++)");
      +
      2355  printer->add_multi_line(R"CODE(
      +
      2356  int type = nsb->_sendtype[i];
      +
      2357  int tid = nt->id;
      +
      2358  double t = nsb->_nsb_t[i];
      +
      2359  double flag = nsb->_nsb_flag[i];
      +
      2360  int vdata_index = nsb->_vdata_index[i];
      +
      2361  int weight_index = nsb->_weight_index[i];
      +
      2362  int point_index = nsb->_pnt_index[i];
      +
      2363  net_sem_from_gpu(type, vdata_index, weight_index, tid, point_index, t, flag);
      +
      2364  )CODE");
      +
      2365  printer->pop_block();
      +
      2366  printer->add_line("nsb->_cnt = 0;");
      +
      2367  print_net_send_buf_count_update_to_device();
      +
      2368 }
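The text produced by print_send_event_move() on the plain CPU backend reads roughly as below; it is assembled from the add_line/add_multi_line calls above, assuming the print_net_send_buf_*_to_* hooks emit nothing for this backend.

    NetSendBuffer_t* nsb = ml->_net_send_buffer;
    for (int i=0; i < nsb->_cnt; i++) {
        int type = nsb->_sendtype[i];
        int tid = nt->id;
        double t = nsb->_nsb_t[i];
        double flag = nsb->_nsb_flag[i];
        int vdata_index = nsb->_vdata_index[i];
        int weight_index = nsb->_weight_index[i];
        int point_index = nsb->_pnt_index[i];
        net_sem_from_gpu(type, vdata_index, weight_index, tid, point_index, t, flag);
    }
    nsb->_cnt = 0;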
      +
      2369 
      2370 
      -
      2371 
      - -
      2373  return fmt::format("void {}(NrnThread* nt)", method_name("net_buf_receive"));
      -
      2374 }
      + +
      2372  return fmt::format("void {}(NrnThread* nt)", method_name("net_buf_receive"));
      +
      2373 }
      +
      2374 
      2375 
      -
      2376 
      - -
      2378  printer->add_line("Memb_list* ml = get_memb_list(nt);");
      -
      2379  printer->push_block("if (!ml)");
      -
      2380  printer->add_line("return;");
      -
      2381  printer->pop_block();
      -
      2382  printer->add_newline();
      -
      2383 }
      + +
      2377  printer->add_line("Memb_list* ml = get_memb_list(nt);");
      +
      2378  printer->push_block("if (!ml)");
      +
      2379  printer->add_line("return;");
      +
      2380  printer->pop_block();
      +
      2381  printer->add_newline();
      +
      2382 }
      +
      2383 
      2384 
      -
      2385 
      - -
      2387  printer->add_line("int count = nrb->_displ_cnt;");
      -
      2388  print_channel_iteration_block_parallel_hint(BlockType::NetReceive, info.net_receive_node);
      -
      2389  printer->push_block("for (int i = 0; i < count; i++)");
      -
      2390 }
      + +
      2386  printer->add_line("int count = nrb->_displ_cnt;");
      +
      2387  print_channel_iteration_block_parallel_hint(BlockType::NetReceive, info.net_receive_node);
      +
      2388  printer->push_block("for (int i = 0; i < count; i++)");
      +
      2389 }
      +
      2390 
      2391 
      -
      2392 
      - -
      2394  printer->pop_block();
      -
      2395 }
      + +
      2393  printer->pop_block();
      +
      2394 }
      +
      2395 
      2396 
      -
      2397 
-
2398 void CodegenCoreneuronCppVisitor::print_net_receive_buffering(bool need_mech_inst) {
-
      2399  if (!net_receive_required() || info.artificial_cell) {
      -
      2400  return;
      -
      2401  }
      -
      2402  printer->add_newline(2);
      -
      2403  printer->push_block(net_receive_buffering_declaration());
      -
      2404 
      -
      2405  print_get_memb_list();
      -
      2406 
      -
      2407  const auto& net_receive = method_name("net_receive_kernel");
      -
      2408 
      -
      2409  print_kernel_data_present_annotation_block_begin();
      -
      2410 
      -
      2411  printer->add_line("NetReceiveBuffer_t* nrb = ml->_net_receive_buffer;");
      -
      2412  if (need_mech_inst) {
      -
      2413  printer->fmt_line("auto* const inst = static_cast<{0}*>(ml->instance);", instance_struct());
      -
      2414  }
      -
      2415  print_net_receive_loop_begin();
      -
      2416  printer->add_line("int start = nrb->_displ[i];");
      -
      2417  printer->add_line("int end = nrb->_displ[i+1];");
      -
      2418  printer->push_block("for (int j = start; j < end; j++)");
      -
      2419  printer->add_multi_line(R"CODE(
      -
      2420  int index = nrb->_nrb_index[j];
      -
      2421  int offset = nrb->_pnt_index[index];
      -
      2422  double t = nrb->_nrb_t[index];
      -
      2423  int weight_index = nrb->_weight_index[index];
      -
      2424  double flag = nrb->_nrb_flag[index];
      -
      2425  Point_process* point_process = nt->pntprocs + offset;
      -
      2426  )CODE");
      -
      2427  printer->add_line(net_receive, "(t, point_process, inst, nt, ml, weight_index, flag);");
      -
      2428  printer->pop_block();
      -
      2429  print_net_receive_loop_end();
      -
      2430 
      -
      2431  print_device_stream_wait();
      -
      2432  printer->add_line("nrb->_displ_cnt = 0;");
      -
      2433  printer->add_line("nrb->_cnt = 0;");
      -
      2434 
      -
      2435  if (info.net_send_used || info.net_event_used) {
      -
      2436  print_send_event_move();
      -
      2437  }
      -
      2438 
      -
      2439  print_kernel_data_present_annotation_block_end();
      -
      2440  printer->pop_block();
      -
      2441 }
+
2397 void CodegenCoreneuronCppVisitor::print_net_receive_buffering(bool need_mech_inst) {
+
      2398  if (!net_receive_required() || info.artificial_cell) {
      +
      2399  return;
      +
      2400  }
      +
      2401  printer->add_newline(2);
      +
      2402  printer->push_block(net_receive_buffering_declaration());
      +
      2403 
      +
      2404  print_get_memb_list();
      +
      2405 
      +
      2406  const auto& net_receive = method_name("net_receive_kernel");
      +
      2407 
      +
      2408  print_kernel_data_present_annotation_block_begin();
      +
      2409 
      +
      2410  printer->add_line("NetReceiveBuffer_t* nrb = ml->_net_receive_buffer;");
      +
      2411  if (need_mech_inst) {
      +
      2412  printer->fmt_line("auto* const inst = static_cast<{0}*>(ml->instance);", instance_struct());
      +
      2413  }
      +
      2414  print_net_receive_loop_begin();
      +
      2415  printer->add_line("int start = nrb->_displ[i];");
      +
      2416  printer->add_line("int end = nrb->_displ[i+1];");
      +
      2417  printer->push_block("for (int j = start; j < end; j++)");
      +
      2418  printer->add_multi_line(R"CODE(
      +
      2419  int index = nrb->_nrb_index[j];
      +
      2420  int offset = nrb->_pnt_index[index];
      +
      2421  double t = nrb->_nrb_t[index];
      +
      2422  int weight_index = nrb->_weight_index[index];
      +
      2423  double flag = nrb->_nrb_flag[index];
      +
      2424  Point_process* point_process = nt->pntprocs + offset;
      +
      2425  )CODE");
      +
      2426  printer->add_line(net_receive, "(t, point_process, inst, nt, ml, weight_index, flag);");
      +
      2427  printer->pop_block();
      +
      2428  print_net_receive_loop_end();
      +
      2429 
      +
      2430  print_device_stream_wait();
      +
      2431  printer->add_line("nrb->_displ_cnt = 0;");
      +
      2432  printer->add_line("nrb->_cnt = 0;");
      +
      2433 
      +
      2434  if (info.net_send_used || info.net_event_used) {
      +
      2435  print_send_event_move();
      +
      2436  }
      +
      2437 
      +
      2438  print_kernel_data_present_annotation_block_end();
      +
      2439  printer->pop_block();
      +
      2440 }
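A sketch of the net_buf_receive function generated by print_net_receive_buffering(), reconstructed from the printer calls above for a hypothetical mechanism with suffix ExpSyn2; the suffixed function and instance-struct names are illustrative, the annotation/device-wait hooks are assumed to print nothing on the CPU backend, and the send-event drain shown earlier is appended when net_send or net_event is used.

    void net_buf_receive_ExpSyn2(NrnThread* nt) {
        Memb_list* ml = get_memb_list(nt);
        if (!ml) {
            return;
        }

        NetReceiveBuffer_t* nrb = ml->_net_receive_buffer;
        auto* const inst = static_cast<ExpSyn2_Instance*>(ml->instance);
        int count = nrb->_displ_cnt;
        for (int i = 0; i < count; i++) {
            int start = nrb->_displ[i];
            int end = nrb->_displ[i+1];
            for (int j = start; j < end; j++) {
                int index = nrb->_nrb_index[j];
                int offset = nrb->_pnt_index[index];
                double t = nrb->_nrb_t[index];
                int weight_index = nrb->_weight_index[index];
                double flag = nrb->_nrb_flag[index];
                Point_process* point_process = nt->pntprocs + offset;
                net_receive_kernel_ExpSyn2(t, point_process, inst, nt, ml, weight_index, flag);
            }
        }
        nrb->_displ_cnt = 0;
        nrb->_cnt = 0;
    }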
      +
      2441 
      2442 
      -
      2443 
      - -
      2445  printer->add_line("i = nsb->_cnt++;");
      -
      2446 }
      + +
      2444  printer->add_line("i = nsb->_cnt++;");
      +
      2445 }
      +
      2446 
      2447 
      -
      2448 
      - -
      2450  printer->push_block("if (i >= nsb->_size)");
      -
      2451  printer->add_line("nsb->grow();");
      -
      2452  printer->pop_block();
      -
      2453 }
      + +
      2449  printer->push_block("if (i >= nsb->_size)");
      +
      2450  printer->add_line("nsb->grow();");
      +
      2451  printer->pop_block();
      +
      2452 }
      +
      2453 
      2454 
      -
      2455 
-
2456 void CodegenCoreneuronCppVisitor::print_net_send_buffering() {
-
      2457  if (!net_send_buffer_required()) {
      -
      2458  return;
      -
      2459  }
      -
      2460 
      -
      2461  printer->add_newline(2);
      -
      2462  auto args =
      -
      2463  "const NrnThread* nt, NetSendBuffer_t* nsb, int type, int vdata_index, "
      -
      2464  "int weight_index, int point_index, double t, double flag";
      -
      2465  printer->fmt_push_block("static inline void net_send_buffering({})", args);
      -
      2466  printer->add_line("int i = 0;");
      -
      2467  print_net_send_buffering_cnt_update();
      -
      2468  print_net_send_buffering_grow();
      -
      2469  printer->push_block("if (i < nsb->_size)");
      -
      2470  printer->add_multi_line(R"CODE(
      -
      2471  nsb->_sendtype[i] = type;
      -
      2472  nsb->_vdata_index[i] = vdata_index;
      -
      2473  nsb->_weight_index[i] = weight_index;
      -
      2474  nsb->_pnt_index[i] = point_index;
      -
      2475  nsb->_nsb_t[i] = t;
      -
      2476  nsb->_nsb_flag[i] = flag;
      -
      2477  )CODE");
+
2455 void CodegenCoreneuronCppVisitor::print_net_send_buffering() {
+
      2456  if (!net_send_buffer_required()) {
      +
      2457  return;
      +
      2458  }
      +
      2459 
      +
      2460  printer->add_newline(2);
      +
      2461  auto args =
      +
      2462  "const NrnThread* nt, NetSendBuffer_t* nsb, int type, int vdata_index, "
      +
      2463  "int weight_index, int point_index, double t, double flag";
      +
      2464  printer->fmt_push_block("static inline void net_send_buffering({})", args);
      +
      2465  printer->add_line("int i = 0;");
      +
      2466  print_net_send_buffering_cnt_update();
      +
      2467  print_net_send_buffering_grow();
      +
      2468  printer->push_block("if (i < nsb->_size)");
      +
      2469  printer->add_multi_line(R"CODE(
      +
      2470  nsb->_sendtype[i] = type;
      +
      2471  nsb->_vdata_index[i] = vdata_index;
      +
      2472  nsb->_weight_index[i] = weight_index;
      +
      2473  nsb->_pnt_index[i] = point_index;
      +
      2474  nsb->_nsb_t[i] = t;
      +
      2475  nsb->_nsb_flag[i] = flag;
      +
      2476  )CODE");
      +
      2477  printer->pop_block();
      2478  printer->pop_block();
      -
      2479  printer->pop_block();
      -
      2480 }
      +
      2479 }
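Putting print_net_send_buffering() together with the two small helpers above (print_net_send_buffering_cnt_update() and print_net_send_buffering_grow()), the emitted CPU-side helper looks roughly like this; only the wrapping of the parameter list is guessed, the rest follows the printer calls.

    static inline void net_send_buffering(const NrnThread* nt, NetSendBuffer_t* nsb, int type,
                                          int vdata_index, int weight_index, int point_index,
                                          double t, double flag) {
        int i = 0;
        i = nsb->_cnt++;
        if (i >= nsb->_size) {
            nsb->grow();
        }
        if (i < nsb->_size) {
            nsb->_sendtype[i] = type;
            nsb->_vdata_index[i] = vdata_index;
            nsb->_weight_index[i] = weight_index;
            nsb->_pnt_index[i] = point_index;
            nsb->_nsb_t[i] = t;
            nsb->_nsb_flag[i] = flag;
        }
    }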
      +
      2480 
      2481 
      -
      2482 
-
2483 void CodegenCoreneuronCppVisitor::print_net_receive_kernel() {
-
      2484  if (!net_receive_required()) {
      -
      2485  return;
      -
      2486  }
      -
      2487 
      -
      2488  printing_net_receive = true;
      -
      2489  const auto node = info.net_receive_node;
      -
      2490 
      -
      2491  // rename net_receive arguments used in the block itself
      -
      2492  rename_net_receive_arguments(*info.net_receive_node, *node);
      -
      2493 
      -
      2494  std::string name;
      -
      2495  ParamVector params;
      -
      2496  if (!info.artificial_cell) {
      -
      2497  name = method_name("net_receive_kernel");
      -
      2498  params.emplace_back("", "double", "", "t");
      -
      2499  params.emplace_back("", "Point_process*", "", "pnt");
      -
      2500  params.emplace_back("", fmt::format("{}*", instance_struct()),
      -
      2501  "", "inst");
      -
      2502  params.emplace_back("", "NrnThread*", "", "nt");
      -
      2503  params.emplace_back("", "Memb_list*", "", "ml");
      -
      2504  params.emplace_back("", "int", "", "weight_index");
      -
      2505  params.emplace_back("", "double", "", "flag");
      -
      2506  } else {
      -
      2507  name = method_name("net_receive");
      -
      2508  params.emplace_back("", "Point_process*", "", "pnt");
      -
      2509  params.emplace_back("", "int", "", "weight_index");
      -
      2510  params.emplace_back("", "double", "", "flag");
      -
      2511  }
      -
      2512 
      -
      2513  printer->add_newline(2);
      -
      2514  printer->fmt_push_block("static inline void {}({})", name, get_parameter_str(params));
      -
      2515  print_net_receive_common_code(*node, info.artificial_cell);
      -
      2516  if (info.artificial_cell) {
      -
      2517  printer->add_line("double t = nt->_t;");
      -
      2518  }
      -
      2519 
      -
      2520  // set voltage variable if it is used in the block (e.g. for WATCH statement)
      -
      2521  auto v_used = VarUsageVisitor().variable_used(*node->get_statement_block(), "v");
      -
      2522  if (v_used) {
      -
      2523  printer->add_line("int node_id = ml->nodeindices[id];");
      -
      2524  printer->add_line("v = nt->_actual_v[node_id];");
      -
      2525  }
      -
      2526 
      -
      2527  printer->fmt_line("{} = t;", get_variable_name("tsave"));
      -
      2528 
      -
      2529  if (info.is_watch_used()) {
      -
      2530  printer->add_line("bool watch_remove = false;");
      -
      2531  }
      -
      2532 
      -
      2533  printer->add_indent();
      -
      2534  node->get_statement_block()->accept(*this);
      -
      2535  printer->add_newline();
      -
      2536  printer->pop_block();
      -
      2537 
      -
      2538  printing_net_receive = false;
      -
      2539 }
+
2482 void CodegenCoreneuronCppVisitor::print_net_receive_kernel() {
+
      2483  if (!net_receive_required()) {
      +
      2484  return;
      +
      2485  }
      +
      2486 
      +
      2487  printing_net_receive = true;
      +
      2488  const auto node = info.net_receive_node;
      +
      2489 
      +
      2490  // rename net_receive arguments used in the block itself
      +
      2491  rename_net_receive_arguments(*info.net_receive_node, *node);
      +
      2492 
      +
      2493  std::string name;
      +
      2494  ParamVector params;
      +
      2495  if (!info.artificial_cell) {
      +
      2496  name = method_name("net_receive_kernel");
      +
      2497  params.emplace_back("", "double", "", "t");
      +
      2498  params.emplace_back("", "Point_process*", "", "pnt");
      +
      2499  params.emplace_back("", fmt::format("{}*", instance_struct()),
      +
      2500  "", "inst");
      +
      2501  params.emplace_back("", "NrnThread*", "", "nt");
      +
      2502  params.emplace_back("", "Memb_list*", "", "ml");
      +
      2503  params.emplace_back("", "int", "", "weight_index");
      +
      2504  params.emplace_back("", "double", "", "flag");
      +
      2505  } else {
      +
      2506  name = method_name("net_receive");
      +
      2507  params.emplace_back("", "Point_process*", "", "pnt");
      +
      2508  params.emplace_back("", "int", "", "weight_index");
      +
      2509  params.emplace_back("", "double", "", "flag");
      +
      2510  }
      +
      2511 
      +
      2512  printer->add_newline(2);
      +
      2513  printer->fmt_push_block("static inline void {}({})", name, get_parameter_str(params));
      +
      2514  print_net_receive_common_code(*node, info.artificial_cell);
      +
      2515  if (info.artificial_cell) {
      +
      2516  printer->add_line("double t = nt->_t;");
      +
      2517  }
      +
      2518 
      +
      2519  // set voltage variable if it is used in the block (e.g. for WATCH statement)
      +
      2520  auto v_used = VarUsageVisitor().variable_used(*node->get_statement_block(), "v");
      +
      2521  if (v_used) {
      +
      2522  printer->add_line("int node_id = ml->nodeindices[id];");
      +
      2523  printer->add_line("v = nt->_actual_v[node_id];");
      +
      2524  }
      +
      2525 
      +
      2526  printer->fmt_line("{} = t;", get_variable_name("tsave"));
      +
      2527 
      +
      2528  if (info.is_watch_used()) {
      +
      2529  printer->add_line("bool watch_remove = false;");
      +
      2530  }
      +
      2531 
      +
      2532  printer->add_indent();
      +
      2533  node->get_statement_block()->accept(*this);
      +
      2534  printer->add_newline();
      +
      2535  printer->pop_block();
      +
      2536 
      +
      2537  printing_net_receive = false;
      +
      2538 }
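For a non-artificial cell, the kernel signature built from the ParamVector above looks like the sketch below (the suffix ExpSyn2 and the instance-struct name are illustrative); the body is only indicated with comments, because the common prologue comes from print_net_receive_common_code(), which is outside this excerpt.

    static inline void net_receive_kernel_ExpSyn2(double t, Point_process* pnt,
                                                  ExpSyn2_Instance* inst, NrnThread* nt,
                                                  Memb_list* ml, int weight_index, double flag) {
        // prologue (id, pnodecount, data, ...) from print_net_receive_common_code()
        // int node_id = ml->nodeindices[id];   emitted only when the block reads v (e.g. WATCH)
        // v = nt->_actual_v[node_id];
        // <tsave> = t;                         name resolved via get_variable_name("tsave")
        // bool watch_remove = false;           only when WATCH statements are present
        // ... translated NET_RECEIVE statement block ...
    }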
      +
      2539 
      2540 
      -
      2541 
-
2542 void CodegenCoreneuronCppVisitor::print_net_receive() {
-
      2543  if (!net_receive_required()) {
      -
      2544  return;
      -
      2545  }
      -
      2546 
      -
      2547  printing_net_receive = true;
      -
      2548  if (!info.artificial_cell) {
      -
      2549  const auto& name = method_name("net_receive");
      -
      2550  ParamVector params = {
      -
      2551  {"", "Point_process*", "", "pnt"},
      -
      2552  {"", "int", "", "weight_index"},
      -
      2553  {"", "double", "", "flag"}};
      -
      2554  printer->add_newline(2);
      -
      2555  printer->fmt_push_block("static void {}({})", name, get_parameter_str(params));
      -
      2556  printer->add_line("NrnThread* nt = nrn_threads + pnt->_tid;");
      -
      2557  printer->add_line("Memb_list* ml = get_memb_list(nt);");
      -
      2558  printer->add_line("NetReceiveBuffer_t* nrb = ml->_net_receive_buffer;");
      -
      2559  printer->push_block("if (nrb->_cnt >= nrb->_size)");
      -
      2560  printer->add_line("realloc_net_receive_buffer(nt, ml);");
      -
      2561  printer->pop_block();
      -
      2562  printer->add_multi_line(R"CODE(
      -
      2563  int id = nrb->_cnt;
      -
      2564  nrb->_pnt_index[id] = pnt-nt->pntprocs;
      -
      2565  nrb->_weight_index[id] = weight_index;
      -
      2566  nrb->_nrb_t[id] = nt->_t;
      -
      2567  nrb->_nrb_flag[id] = flag;
      -
      2568  nrb->_cnt++;
      -
      2569  )CODE");
      -
      2570  printer->pop_block();
      -
      2571  }
      -
      2572  printing_net_receive = false;
      -
      2573 }
      -
      2574 
      -
      2575 
      -
      2576 /**
      -
      2577  * \todo Data is not derived. Need to add instance into instance struct?
      -
2578  * data used here is wrong in AoS because, as in the original implementation,
-
2579  * data is not incremented every iteration for AoS. It may be better to derive
-
2580  * the actual variable names? [resolved now?]
-
2581  * slist needs to be added as a local variable
      -
      2582  */
-
2583 void CodegenCoreneuronCppVisitor::print_derivimplicit_kernel(const ast::Block& block) {
-
      2584  auto ext_args = external_method_arguments();
      -
      2585  auto ext_params = get_parameter_str(external_method_parameters());
      -
      2586  auto suffix = info.mod_suffix;
      -
      2587  auto list_num = info.derivimplicit_list_num;
      -
      2588  auto block_name = block.get_node_name();
      -
      2589  auto primes_size = info.primes_size;
      -
      2590  auto stride = "*pnodecount+id";
      -
      2591 
      -
      2592  printer->add_newline(2);
      -
      2593 
      -
      2594  printer->push_block("namespace");
      -
      2595  printer->fmt_push_block("struct _newton_{}_{}", block_name, info.mod_suffix);
      -
      2596  printer->fmt_push_block("int operator()({}) const", get_parameter_str(external_method_parameters()));
      -
      2597  auto const instance = fmt::format("auto* const inst = static_cast<{0}*>(ml->instance);",
      -
      2598  instance_struct());
      -
      2599  auto const slist1 = fmt::format("auto const& slist{} = {};",
      -
      2600  list_num,
      -
      2601  get_variable_name(fmt::format("slist{}", list_num)));
      -
      2602  auto const slist2 = fmt::format("auto& slist{} = {};",
      -
      2603  list_num + 1,
      -
      2604  get_variable_name(fmt::format("slist{}", list_num + 1)));
      -
      2605  auto const dlist1 = fmt::format("auto const& dlist{} = {};",
      -
      2606  list_num,
      -
      2607  get_variable_name(fmt::format("dlist{}", list_num)));
      -
      2608  auto const dlist2 = fmt::format(
      -
      2609  "double* dlist{} = static_cast<double*>(thread[dith{}()].pval) + ({}*pnodecount);",
      -
      2610  list_num + 1,
      -
      2611  list_num,
      -
      2612  info.primes_size);
      -
      2613  printer->add_line(instance);
      -
      2614  if (ion_variable_struct_required()) {
      -
      2615  print_ion_variable();
      -
      2616  }
      -
      2617  printer->fmt_line("double* savstate{} = static_cast<double*>(thread[dith{}()].pval);",
      -
      2618  list_num,
      -
      2619  list_num);
      -
      2620  printer->add_line(slist1);
      -
      2621  printer->add_line(dlist1);
      -
      2622  printer->add_line(dlist2);
      -
      2623 
      -
      2624  print_statement_block(*block.get_statement_block(), false, false);
      -
      2625 
      -
      2626  printer->add_line("int counter = -1;");
      -
      2627  printer->fmt_push_block("for (int i=0; i<{}; i++)", info.num_primes);
      -
      2628  printer->fmt_push_block("if (*deriv{}_advance(thread))", list_num);
      -
      2629  printer->fmt_line(
      -
      2630  "dlist{0}[(++counter){1}] = "
      -
      2631  "data[dlist{2}[i]{1}]-(data[slist{2}[i]{1}]-savstate{2}[i{1}])/nt->_dt;",
      -
      2632  list_num + 1,
      -
      2633  stride,
      -
      2634  list_num);
      -
      2635  printer->chain_block("else");
      -
      2636  printer->fmt_line("dlist{0}[(++counter){1}] = data[slist{2}[i]{1}]-savstate{2}[i{1}];",
      -
      2637  list_num + 1,
      -
      2638  stride,
      -
      2639  list_num);
+
2541 void CodegenCoreneuronCppVisitor::print_net_receive() {
+
      2542  if (!net_receive_required()) {
      +
      2543  return;
      +
      2544  }
      +
      2545 
      +
      2546  printing_net_receive = true;
      +
      2547  if (!info.artificial_cell) {
      +
      2548  const auto& name = method_name("net_receive");
      +
      2549  ParamVector params = {
      +
      2550  {"", "Point_process*", "", "pnt"},
      +
      2551  {"", "int", "", "weight_index"},
      +
      2552  {"", "double", "", "flag"}};
      +
      2553  printer->add_newline(2);
      +
      2554  printer->fmt_push_block("static void {}({})", name, get_parameter_str(params));
      +
      2555  printer->add_line("NrnThread* nt = nrn_threads + pnt->_tid;");
      +
      2556  printer->add_line("Memb_list* ml = get_memb_list(nt);");
      +
      2557  printer->add_line("NetReceiveBuffer_t* nrb = ml->_net_receive_buffer;");
      +
      2558  printer->push_block("if (nrb->_cnt >= nrb->_size)");
      +
      2559  printer->add_line("realloc_net_receive_buffer(nt, ml);");
      +
      2560  printer->pop_block();
      +
      2561  printer->add_multi_line(R"CODE(
      +
      2562  int id = nrb->_cnt;
      +
      2563  nrb->_pnt_index[id] = pnt-nt->pntprocs;
      +
      2564  nrb->_weight_index[id] = weight_index;
      +
      2565  nrb->_nrb_t[id] = nt->_t;
      +
      2566  nrb->_nrb_flag[id] = flag;
      +
      2567  nrb->_cnt++;
      +
      2568  )CODE");
      +
      2569  printer->pop_block();
      +
      2570  }
      +
      2571  printing_net_receive = false;
      +
      2572 }
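For a non-artificial cell the routine above emits a small collector that only appends the incoming event to the NetReceiveBuffer; the reconstruction below follows the printer calls, with the suffixed name illustrative. The buffered events are drained later by the net_buf_receive function sketched further up.

    static void net_receive_ExpSyn2(Point_process* pnt, int weight_index, double flag) {
        NrnThread* nt = nrn_threads + pnt->_tid;
        Memb_list* ml = get_memb_list(nt);
        NetReceiveBuffer_t* nrb = ml->_net_receive_buffer;
        if (nrb->_cnt >= nrb->_size) {
            realloc_net_receive_buffer(nt, ml);
        }
        int id = nrb->_cnt;
        nrb->_pnt_index[id] = pnt-nt->pntprocs;
        nrb->_weight_index[id] = weight_index;
        nrb->_nrb_t[id] = nt->_t;
        nrb->_nrb_flag[id] = flag;
        nrb->_cnt++;
    }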
      +
      2573 
      +
      2574 
      +
      2575 /**
      +
      2576  * \todo Data is not derived. Need to add instance into instance struct?
      +
2577  * data used here is wrong in AoS because, as in the original implementation,
+
2578  * data is not incremented every iteration for AoS. It may be better to derive
+
2579  * the actual variable names? [resolved now?]
+
2580  * slist needs to be added as a local variable
      +
      2581  */
+
2582 void CodegenCoreneuronCppVisitor::print_derivimplicit_kernel(const ast::Block& block) {
+
      2583  auto ext_args = external_method_arguments();
      +
      2584  auto ext_params = get_parameter_str(external_method_parameters());
      +
      2585  auto suffix = info.mod_suffix;
      +
      2586  auto list_num = info.derivimplicit_list_num;
      +
      2587  auto block_name = block.get_node_name();
      +
      2588  auto primes_size = info.primes_size;
      +
      2589  auto stride = "*pnodecount+id";
      +
      2590 
      +
      2591  printer->add_newline(2);
      +
      2592 
      +
      2593  printer->push_block("namespace");
      +
      2594  printer->fmt_push_block("struct _newton_{}_{}", block_name, info.mod_suffix);
      +
      2595  printer->fmt_push_block("int operator()({}) const", get_parameter_str(external_method_parameters()));
      +
      2596  auto const instance = fmt::format("auto* const inst = static_cast<{0}*>(ml->instance);",
      +
      2597  instance_struct());
      +
      2598  auto const slist1 = fmt::format("auto const& slist{} = {};",
      +
      2599  list_num,
      +
      2600  get_variable_name(fmt::format("slist{}", list_num)));
      +
      2601  auto const slist2 = fmt::format("auto& slist{} = {};",
      +
      2602  list_num + 1,
      +
      2603  get_variable_name(fmt::format("slist{}", list_num + 1)));
      +
      2604  auto const dlist1 = fmt::format("auto const& dlist{} = {};",
      +
      2605  list_num,
      +
      2606  get_variable_name(fmt::format("dlist{}", list_num)));
      +
      2607  auto const dlist2 = fmt::format(
      +
      2608  "double* dlist{} = static_cast<double*>(thread[dith{}()].pval) + ({}*pnodecount);",
      +
      2609  list_num + 1,
      +
      2610  list_num,
      +
      2611  info.primes_size);
      +
      2612  printer->add_line(instance);
      +
      2613  if (ion_variable_struct_required()) {
      +
      2614  print_ion_variable();
      +
      2615  }
      +
      2616  printer->fmt_line("double* savstate{} = static_cast<double*>(thread[dith{}()].pval);",
      +
      2617  list_num,
      +
      2618  list_num);
      +
      2619  printer->add_line(slist1);
      +
      2620  printer->add_line(dlist1);
      +
      2621  printer->add_line(dlist2);
      +
      2622 
      +
      2623  print_statement_block(*block.get_statement_block(), false, false);
      +
      2624 
      +
      2625  printer->add_line("int counter = -1;");
      +
      2626  printer->fmt_push_block("for (int i=0; i<{}; i++)", info.num_primes);
      +
      2627  printer->fmt_push_block("if (*deriv{}_advance(thread))", list_num);
      +
      2628  printer->fmt_line(
      +
      2629  "dlist{0}[(++counter){1}] = "
      +
      2630  "data[dlist{2}[i]{1}]-(data[slist{2}[i]{1}]-savstate{2}[i{1}])/nt->_dt;",
      +
      2631  list_num + 1,
      +
      2632  stride,
      +
      2633  list_num);
      +
      2634  printer->chain_block("else");
      +
      2635  printer->fmt_line("dlist{0}[(++counter){1}] = data[slist{2}[i]{1}]-savstate{2}[i{1}];",
      +
      2636  list_num + 1,
      +
      2637  stride,
      +
      2638  list_num);
      +
      2639  printer->pop_block();
      2640  printer->pop_block();
      -
      2641  printer->pop_block();
      -
      2642  printer->add_line("return 0;");
      -
      2643  printer->pop_block(); // operator()
      -
      2644  printer->pop_block(";"); // struct
      -
      2645  printer->pop_block(); // namespace
      -
      2646  printer->add_newline();
      -
      2647  printer->fmt_push_block("int {}_{}({})", block_name, suffix, ext_params);
      -
      2648  printer->add_line(instance);
      -
      2649  printer->fmt_line("double* savstate{} = (double*) thread[dith{}()].pval;", list_num, list_num);
      -
      2650  printer->add_line(slist1);
      -
      2651  printer->add_line(slist2);
      -
      2652  printer->add_line(dlist2);
      -
      2653  printer->fmt_push_block("for (int i=0; i<{}; i++)", info.num_primes);
      -
      2654  printer->fmt_line("savstate{}[i{}] = data[slist{}[i]{}];", list_num, stride, list_num, stride);
      -
      2655  printer->pop_block();
      -
      2656  printer->fmt_line(
      -
      2657  "int reset = nrn_newton_thread(static_cast<NewtonSpace*>(*newtonspace{}(thread)), {}, "
      -
      2658  "slist{}, _newton_{}_{}{{}}, dlist{}, {});",
      -
      2659  list_num,
      -
      2660  primes_size,
      -
      2661  list_num + 1,
      -
      2662  block_name,
      -
      2663  suffix,
      -
      2664  list_num + 1,
      -
      2665  ext_args);
      -
      2666  printer->add_line("return reset;");
      -
      2667  printer->pop_block();
      -
      2668  printer->add_newline(2);
      -
      2669 }
      +
      2641  printer->add_line("return 0;");
      +
      2642  printer->pop_block(); // operator()
      +
      2643  printer->pop_block(";"); // struct
      +
      2644  printer->pop_block(); // namespace
      +
      2645  printer->add_newline();
      +
      2646  printer->fmt_push_block("int {}_{}({})", block_name, suffix, ext_params);
      +
      2647  printer->add_line(instance);
      +
      2648  printer->fmt_line("double* savstate{} = (double*) thread[dith{}()].pval;", list_num, list_num);
      +
      2649  printer->add_line(slist1);
      +
      2650  printer->add_line(slist2);
      +
      2651  printer->add_line(dlist2);
      +
      2652  printer->fmt_push_block("for (int i=0; i<{}; i++)", info.num_primes);
      +
      2653  printer->fmt_line("savstate{}[i{}] = data[slist{}[i]{}];", list_num, stride, list_num, stride);
      +
      2654  printer->pop_block();
      +
      2655  printer->fmt_line(
      +
      2656  "int reset = nrn_newton_thread(static_cast<NewtonSpace*>(*newtonspace{}(thread)), {}, "
      +
      2657  "slist{}, _newton_{}_{}{{}}, dlist{}, {});",
      +
      2658  list_num,
      +
      2659  primes_size,
      +
      2660  list_num + 1,
      +
      2661  block_name,
      +
      2662  suffix,
      +
      2663  list_num + 1,
      +
      2664  ext_args);
      +
      2665  printer->add_line("return reset;");
      +
      2666  printer->pop_block();
      +
      2667  printer->add_newline(2);
      +
      2668 }
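The residual that the generated Newton functor hands to nrn_newton_thread() is an implicit-Euler update: for every prime it compares the derivative against (state - saved state)/dt. With list_num assumed to be 1 and the stride literal expanded, the two fmt_line calls above produce text of this shape:

    // while the solver advances: residual = y' - (y - y_saved) / dt
    if (*deriv1_advance(thread)) {
        dlist2[(++counter)*pnodecount+id] =
            data[dlist1[i]*pnodecount+id] - (data[slist1[i]*pnodecount+id] - savstate1[i*pnodecount+id]) / nt->_dt;
    } else {
        // otherwise: residual = y - y_saved
        dlist2[(++counter)*pnodecount+id] = data[slist1[i]*pnodecount+id] - savstate1[i*pnodecount+id];
    }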
      +
      2669 
      2670 
      -
      2671 
      - -
      2673  // nothing to do on cpu
      -
      2674 }
      + +
      2672  // nothing to do on cpu
      +
      2673 }
      +
      2674 
      2675 
      -
      2676 
      -
      2677 /****************************************************************************************/
      -
      2678 /* Print nrn_state routine */
      -
      2679 /****************************************************************************************/
      +
      2676 /****************************************************************************************/
      +
      2677 /* Print nrn_state routine */
      +
      2678 /****************************************************************************************/
      +
      2679 
      2680 
      -
      2681 
-
2682 void CodegenCoreneuronCppVisitor::print_nrn_state() {
-
      2683  if (!nrn_state_required()) {
      -
      2684  return;
      -
      2685  }
      -
      2686 
      -
      2687  printer->add_newline(2);
      -
      2688  printer->add_line("/** update state */");
      -
      2689  print_global_function_common_code(BlockType::State);
      -
      2690  print_channel_iteration_block_parallel_hint(BlockType::State, info.nrn_state_block);
      -
      2691  printer->push_block("for (int id = 0; id < nodecount; id++)");
      -
      2692 
      -
      2693  printer->add_line("int node_id = node_index[id];");
      -
      2694  printer->add_line("double v = voltage[node_id];");
      -
      2695  print_v_unused();
      -
      2696 
      -
      2697  /**
      -
      2698  * \todo Eigen solver node also emits IonCurVar variable in the functor
      -
      2699  * but that shouldn't update ions in derivative block
      -
      2700  */
      -
      2701  if (ion_variable_struct_required()) {
      -
      2702  print_ion_variable();
      -
      2703  }
      -
      2704 
      -
      2705  auto read_statements = ion_read_statements(BlockType::State);
      -
      2706  for (auto& statement: read_statements) {
      -
      2707  printer->add_line(statement);
      -
      2708  }
      -
      2709 
      -
      2710  if (info.nrn_state_block) {
      -
      2711  info.nrn_state_block->visit_children(*this);
      -
      2712  }
      -
      2713 
      -
      2714  if (info.currents.empty() && info.breakpoint_node != nullptr) {
      -
      2715  auto block = info.breakpoint_node->get_statement_block();
      -
      2716  print_statement_block(*block, false, false);
      -
      2717  }
      -
      2718 
      -
      2719  const auto& write_statements = ion_write_statements(BlockType::State);
      -
      2720  for (auto& statement: write_statements) {
      -
      2721  const auto& text = process_shadow_update_statement(statement, BlockType::State);
      -
      2722  printer->add_line(text);
      -
      2723  }
      -
      2724  printer->pop_block();
      -
      2725 
      -
      2726  print_kernel_data_present_annotation_block_end();
      -
      2727 
      -
      2728  printer->pop_block();
      -
      2729 }
+
2681 void CodegenCoreneuronCppVisitor::print_nrn_state() {
+
      2682  if (!nrn_state_required()) {
      +
      2683  return;
      +
      2684  }
      +
      2685 
      +
      2686  printer->add_newline(2);
      +
      2687  printer->add_line("/** update state */");
      +
      2688  print_global_function_common_code(BlockType::State);
      +
      2689  print_channel_iteration_block_parallel_hint(BlockType::State, info.nrn_state_block);
      +
      2690  printer->push_block("for (int id = 0; id < nodecount; id++)");
      +
      2691 
      +
      2692  printer->add_line("int node_id = node_index[id];");
      +
      2693  printer->add_line("double v = voltage[node_id];");
      +
      2694  print_v_unused();
      +
      2695 
      +
      2696  /**
      +
      2697  * \todo Eigen solver node also emits IonCurVar variable in the functor
      +
      2698  * but that shouldn't update ions in derivative block
      +
      2699  */
      +
      2700  if (ion_variable_struct_required()) {
      +
      2701  print_ion_variable();
      +
      2702  }
      +
      2703 
      +
      2704  auto read_statements = ion_read_statements(BlockType::State);
      +
      2705  for (auto& statement: read_statements) {
      +
      2706  printer->add_line(statement);
      +
      2707  }
      +
      2708 
      +
      2709  if (info.nrn_state_block) {
      +
      2710  info.nrn_state_block->visit_children(*this);
      +
      2711  }
      +
      2712 
      +
      2713  if (info.currents.empty() && info.breakpoint_node != nullptr) {
      +
      2714  auto block = info.breakpoint_node->get_statement_block();
      +
      2715  print_statement_block(*block, false, false);
      +
      2716  }
      +
      2717 
      +
      2718  const auto& write_statements = ion_write_statements(BlockType::State);
      +
      2719  for (auto& statement: write_statements) {
      +
      2720  const auto& text = process_shadow_update_statement(statement, BlockType::State);
      +
      2721  printer->add_line(text);
      +
      2722  }
      +
      2723  printer->pop_block();
      +
      2724 
      +
      2725  print_kernel_data_present_annotation_block_end();
      +
      2726 
      +
      2727  printer->pop_block();
      +
      2728 }
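A skeleton of the generated nrn_state for an illustrative suffix ExpSyn2; the function header and the prologue variables (nodecount, node_index, voltage, inst, ...) come from print_global_function_common_code(BlockType::State), which is outside this excerpt, so they are assumptions.

    /** update state */
    void nrn_state_ExpSyn2(NrnThread* nt, Memb_list* ml, int type) {
        // prologue assumed from print_global_function_common_code(BlockType::State)
        for (int id = 0; id < nodecount; id++) {
            int node_id = node_index[id];
            double v = voltage[node_id];
            // ion reads, translated DERIVATIVE/solver statements, ion writes (shadow-aware)
        }
    }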
      +
      2729 
      2730 
      -
      2731 
      -
      2732 /****************************************************************************************/
      -
      2733 /* Print nrn_cur related routines */
      -
      2734 /****************************************************************************************/
      +
      2731 /****************************************************************************************/
      +
      2732 /* Print nrn_cur related routines */
      +
      2733 /****************************************************************************************/
      +
      2734 
      2735 
      -
      2736 
-
2737 void CodegenCoreneuronCppVisitor::print_nrn_current(const ast::BreakpointBlock& node) {
-
      2738  const auto& args = internal_method_parameters();
      -
      2739  const auto& block = node.get_statement_block();
      -
      2740  printer->add_newline(2);
      -
      2741  printer->fmt_push_block("inline double nrn_current_{}({})",
      -
      2742  info.mod_suffix,
      -
      2743  get_parameter_str(args));
      -
      2744  printer->add_line("double current = 0.0;");
      -
      2745  print_statement_block(*block, false, false);
      -
      2746  for (auto& current: info.currents) {
      -
      2747  const auto& name = get_variable_name(current);
      -
      2748  printer->fmt_line("current += {};", name);
      -
      2749  }
      -
      2750  printer->add_line("return current;");
      -
      2751  printer->pop_block();
      -
      2752 }
+
2736 void CodegenCoreneuronCppVisitor::print_nrn_current(const ast::BreakpointBlock& node) {
+
      2737  const auto& args = internal_method_parameters();
      +
      2738  const auto& block = node.get_statement_block();
      +
      2739  printer->add_newline(2);
      +
      2740  printer->fmt_push_block("inline double nrn_current_{}({})",
      +
      2741  info.mod_suffix,
      +
      2742  get_parameter_str(args));
      +
      2743  printer->add_line("double current = 0.0;");
      +
      2744  print_statement_block(*block, false, false);
      +
      2745  for (auto& current: info.currents) {
      +
      2746  const auto& name = get_variable_name(current);
      +
      2747  printer->fmt_line("current += {};", name);
      +
      2748  }
      +
      2749  printer->add_line("return current;");
      +
      2750  printer->pop_block();
      +
      2751 }
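The helper generated here evaluates the BREAKPOINT block once and sums every current variable of the mechanism; sketched for an illustrative suffix ExpSyn2 with a single current named i, and with the parameter pack elided because internal_method_parameters() is defined elsewhere.

    inline double nrn_current_ExpSyn2(/* internal_method_parameters(), ending in double v */) {
        double current = 0.0;
        // translated BREAKPOINT statement block
        current += i;    // one "current += <name>;" line per entry in info.currents
        return current;
    }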
      +
      2752 
      2753 
      -
      2754 
      - -
      2756  const auto& block = node.get_statement_block();
      -
      2757  print_statement_block(*block, false, false);
      -
      2758  if (!info.currents.empty()) {
      -
      2759  std::string sum;
      -
      2760  for (const auto& current: info.currents) {
      -
      2761  auto var = breakpoint_current(current);
      -
      2762  sum += get_variable_name(var);
      -
      2763  if (&current != &info.currents.back()) {
      -
      2764  sum += "+";
      -
      2765  }
      -
      2766  }
      -
      2767  printer->fmt_line("double rhs = {};", sum);
      -
      2768  }
      -
      2769 
      -
      2770  std::string sum;
      -
      2771  for (const auto& conductance: info.conductances) {
      -
      2772  auto var = breakpoint_current(conductance.variable);
      -
      2773  sum += get_variable_name(var);
      -
      2774  if (&conductance != &info.conductances.back()) {
      -
      2775  sum += "+";
      -
      2776  }
      -
      2777  }
      -
      2778  printer->fmt_line("double g = {};", sum);
      -
      2779 
      -
      2780  for (const auto& conductance: info.conductances) {
      -
      2781  if (!conductance.ion.empty()) {
      -
      2782  const auto& lhs = std::string(naming::ION_VARNAME_PREFIX) + "di" + conductance.ion + "dv";
      -
      2783  const auto& rhs = get_variable_name(conductance.variable);
      -
      2784  const ShadowUseStatement statement{lhs, "+=", rhs};
      -
      2785  const auto& text = process_shadow_update_statement(statement, BlockType::Equation);
      -
      2786  printer->add_line(text);
      -
      2787  }
      -
      2788  }
      -
      2789 }
      + +
      2755  const auto& block = node.get_statement_block();
      +
      2756  print_statement_block(*block, false, false);
      +
      2757  if (!info.currents.empty()) {
      +
      2758  std::string sum;
      +
      2759  for (const auto& current: info.currents) {
      +
      2760  auto var = breakpoint_current(current);
      +
      2761  sum += get_variable_name(var);
      +
      2762  if (&current != &info.currents.back()) {
      +
      2763  sum += "+";
      +
      2764  }
      +
      2765  }
      +
      2766  printer->fmt_line("double rhs = {};", sum);
      +
      2767  }
      +
      2768 
      +
      2769  std::string sum;
      +
      2770  for (const auto& conductance: info.conductances) {
      +
      2771  auto var = breakpoint_current(conductance.variable);
      +
      2772  sum += get_variable_name(var);
      +
      2773  if (&conductance != &info.conductances.back()) {
      +
      2774  sum += "+";
      +
      2775  }
      +
      2776  }
      +
      2777  printer->fmt_line("double g = {};", sum);
      +
      2778 
      +
      2779  for (const auto& conductance: info.conductances) {
      +
      2780  if (!conductance.ion.empty()) {
      +
      2781  const auto& lhs = std::string(naming::ION_VARNAME_PREFIX) + "di" + conductance.ion + "dv";
      +
      2782  const auto& rhs = get_variable_name(conductance.variable);
      +
      2783  const ShadowUseStatement statement{lhs, "+=", rhs};
      +
      2784  const auto& text = process_shadow_update_statement(statement, BlockType::Equation);
      +
      2785  printer->add_line(text);
      +
      2786  }
      +
      2787  }
      +
      2788 }
      +
      2789 
      2790 
      -
      2791 
      - -
      2793  printer->fmt_line("double g = nrn_current_{}({}+0.001);",
      -
      2794  info.mod_suffix,
      -
      2795  internal_method_arguments());
      -
      2796  for (auto& ion: info.ions) {
      -
      2797  for (auto& var: ion.writes) {
      -
      2798  if (ion.is_ionic_current(var)) {
      -
      2799  const auto& name = get_variable_name(var);
      -
      2800  printer->fmt_line("double di{} = {};", ion.name, name);
      -
      2801  }
      -
      2802  }
      -
      2803  }
      -
      2804  printer->fmt_line("double rhs = nrn_current_{}({});",
      -
      2805  info.mod_suffix,
      -
      2806  internal_method_arguments());
      -
      2807  printer->add_line("g = (g-rhs)/0.001;");
      -
      2808  for (auto& ion: info.ions) {
      -
      2809  for (auto& var: ion.writes) {
      -
      2810  if (ion.is_ionic_current(var)) {
      -
      2811  const auto& lhs = std::string(naming::ION_VARNAME_PREFIX) + "di" + ion.name + "dv";
      -
      2812  auto rhs = fmt::format("(di{}-{})/0.001", ion.name, get_variable_name(var));
      -
      2813  if (info.point_process) {
      -
      2814  auto area = get_variable_name(naming::NODE_AREA_VARIABLE);
      -
      2815  rhs += fmt::format("*1.e2/{}", area);
      -
      2816  }
      -
      2817  const ShadowUseStatement statement{lhs, "+=", rhs};
      -
      2818  const auto& text = process_shadow_update_statement(statement, BlockType::Equation);
      -
      2819  printer->add_line(text);
      -
      2820  }
      -
      2821  }
      -
      2822  }
      -
      2823 }
      + +
      2792  printer->fmt_line("double g = nrn_current_{}({}+0.001);",
      +
      2793  info.mod_suffix,
      +
      2794  internal_method_arguments());
      +
      2795  for (auto& ion: info.ions) {
      +
      2796  for (auto& var: ion.writes) {
      +
      2797  if (ion.is_ionic_current(var)) {
      +
      2798  const auto& name = get_variable_name(var);
      +
      2799  printer->fmt_line("double di{} = {};", ion.name, name);
      +
      2800  }
      +
      2801  }
      +
      2802  }
      +
      2803  printer->fmt_line("double rhs = nrn_current_{}({});",
      +
      2804  info.mod_suffix,
      +
      2805  internal_method_arguments());
      +
      2806  printer->add_line("g = (g-rhs)/0.001;");
      +
      2807  for (auto& ion: info.ions) {
      +
      2808  for (auto& var: ion.writes) {
      +
      2809  if (ion.is_ionic_current(var)) {
      +
      2810  const auto& lhs = std::string(naming::ION_VARNAME_PREFIX) + "di" + ion.name + "dv";
      +
      2811  auto rhs = fmt::format("(di{}-{})/0.001", ion.name, get_variable_name(var));
      +
      2812  if (info.point_process) {
      +
      2813  auto area = get_variable_name(naming::NODE_AREA_VARIABLE);
      +
      2814  rhs += fmt::format("*1.e2/{}", area);
      +
      2815  }
      +
      2816  const ShadowUseStatement statement{lhs, "+=", rhs};
      +
      2817  const auto& text = process_shadow_update_statement(statement, BlockType::Equation);
      +
      2818  printer->add_line(text);
      +
      2819  }
      +
      2820  }
      +
      2821  }
      +
      2822 }
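Without CONDUCTANCE hints the conductance is obtained numerically: the total current is evaluated once at v+0.001 and once at v, and g is the difference divided by 0.001; the same finite difference is applied to each written ionic current. Assuming the argument pack of internal_method_arguments() ends in v, an ion named na, and an ion_ variable prefix, the emitted lines look roughly like this:

    double g = nrn_current_ExpSyn2(/* internal_method_arguments() with */ v+0.001);
    double dina = ina;                 // snapshot of each written ionic current at v+0.001
    double rhs = nrn_current_ExpSyn2(/* internal_method_arguments() with */ v);
    g = (g-rhs)/0.001;
    ion_dinadv += (dina-ina)/0.001;    // scaled by *1.e2/<node area> for point processes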
      +
      2823 
      2824 
      -
      2825 
      - -
      2827  printer->add_line("int node_id = node_index[id];");
      -
      2828  printer->add_line("double v = voltage[node_id];");
      -
      2829  print_v_unused();
      -
      2830  if (ion_variable_struct_required()) {
      -
      2831  print_ion_variable();
      -
      2832  }
      -
      2833 
      -
      2834  const auto& read_statements = ion_read_statements(BlockType::Equation);
      -
      2835  for (auto& statement: read_statements) {
      -
      2836  printer->add_line(statement);
      -
      2837  }
      -
      2838 
      -
      2839  if (info.conductances.empty()) {
      -
      2840  print_nrn_cur_non_conductance_kernel();
      -
      2841  } else {
      -
      2842  print_nrn_cur_conductance_kernel(node);
      -
      2843  }
      -
      2844 
      -
      2845  const auto& write_statements = ion_write_statements(BlockType::Equation);
      -
      2846  for (auto& statement: write_statements) {
      -
      2847  auto text = process_shadow_update_statement(statement, BlockType::Equation);
      -
      2848  printer->add_line(text);
      -
      2849  }
      -
      2850 
      -
      2851  if (info.point_process) {
      -
      2852  const auto& area = get_variable_name(naming::NODE_AREA_VARIABLE);
      -
      2853  printer->fmt_line("double mfactor = 1.e2/{};", area);
      -
      2854  printer->add_line("g = g*mfactor;");
      -
      2855  printer->add_line("rhs = rhs*mfactor;");
      -
      2856  }
      -
      2857 
      -
      2858  print_g_unused();
      -
      2859 }
      + +
      2826  printer->add_line("int node_id = node_index[id];");
      +
      2827  printer->add_line("double v = voltage[node_id];");
      +
      2828  print_v_unused();
      +
      2829  if (ion_variable_struct_required()) {
      +
      2830  print_ion_variable();
      +
      2831  }
      +
      2832 
      +
      2833  const auto& read_statements = ion_read_statements(BlockType::Equation);
      +
      2834  for (auto& statement: read_statements) {
      +
      2835  printer->add_line(statement);
      +
      2836  }
      +
      2837 
      +
      2838  if (info.conductances.empty()) {
      +
      2839  print_nrn_cur_non_conductance_kernel();
      +
      2840  } else {
      +
      2841  print_nrn_cur_conductance_kernel(node);
      +
      2842  }
      +
      2843 
      +
      2844  const auto& write_statements = ion_write_statements(BlockType::Equation);
      +
      2845  for (auto& statement: write_statements) {
      +
      2846  auto text = process_shadow_update_statement(statement, BlockType::Equation);
      +
      2847  printer->add_line(text);
      +
      2848  }
      +
      2849 
      +
      2850  if (info.point_process) {
      +
      2851  const auto& area = get_variable_name(naming::NODE_AREA_VARIABLE);
      +
      2852  printer->fmt_line("double mfactor = 1.e2/{};", area);
      +
      2853  printer->add_line("g = g*mfactor;");
      +
      2854  printer->add_line("rhs = rhs*mfactor;");
      +
      2855  }
      +
      2856 
      +
      2857  print_g_unused();
      +
      2858 }
      +
      2859 
      2860 
      -
      2861 
-
2862 void CodegenCoreneuronCppVisitor::print_fast_imem_calculation() {
-
      2863  if (!info.electrode_current) {
      -
      2864  return;
      -
      2865  }
      -
      2866  std::string rhs, d;
      -
      2867  auto rhs_op = operator_for_rhs();
      -
      2868  auto d_op = operator_for_d();
      -
      2869  if (info.point_process) {
      -
      2870  rhs = "shadow_rhs[id]";
      -
      2871  d = "shadow_d[id]";
      -
      2872  } else {
      -
      2873  rhs = "rhs";
      -
      2874  d = "g";
      -
      2875  }
      -
      2876 
      -
      2877  printer->push_block("if (nt->nrn_fast_imem)");
      -
      2878  if (nrn_cur_reduction_loop_required()) {
      -
      2879  printer->push_block("for (int id = 0; id < nodecount; id++)");
      -
      2880  printer->add_line("int node_id = node_index[id];");
      -
      2881  }
      -
      2882  printer->fmt_line("nt->nrn_fast_imem->nrn_sav_rhs[node_id] {} {};", rhs_op, rhs);
      -
      2883  printer->fmt_line("nt->nrn_fast_imem->nrn_sav_d[node_id] {} {};", d_op, d);
      -
      2884  if (nrn_cur_reduction_loop_required()) {
      -
      2885  printer->pop_block();
      -
      2886  }
      -
      2887  printer->pop_block();
      -
      2888 }
+
2861 void CodegenCoreneuronCppVisitor::print_fast_imem_calculation() {
+
      2862  if (!info.electrode_current) {
      +
      2863  return;
      +
      2864  }
      +
      2865  std::string rhs, d;
      +
      2866  auto rhs_op = operator_for_rhs();
      +
      2867  auto d_op = operator_for_d();
      +
      2868  if (info.point_process) {
      +
      2869  rhs = "shadow_rhs[id]";
      +
      2870  d = "shadow_d[id]";
      +
      2871  } else {
      +
      2872  rhs = "rhs";
      +
      2873  d = "g";
      +
      2874  }
      +
      2875 
      +
      2876  printer->push_block("if (nt->nrn_fast_imem)");
      +
      2877  if (nrn_cur_reduction_loop_required()) {
      +
      2878  printer->push_block("for (int id = 0; id < nodecount; id++)");
      +
      2879  printer->add_line("int node_id = node_index[id];");
      +
      2880  }
      +
      2881  printer->fmt_line("nt->nrn_fast_imem->nrn_sav_rhs[node_id] {} {};", rhs_op, rhs);
      +
      2882  printer->fmt_line("nt->nrn_fast_imem->nrn_sav_d[node_id] {} {};", d_op, d);
      +
      2883  if (nrn_cur_reduction_loop_required()) {
      +
      2884  printer->pop_block();
      +
      2885  }
      +
      2886  printer->pop_block();
      +
      2887 }
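When fast membrane current recording is enabled, everything is guarded by the nt->nrn_fast_imem pointer; sketched below for a non-point-process mechanism with the separate reduction loop required, and with the update operators from operator_for_rhs()/operator_for_d() shown as -= and += purely as an assumption.

    if (nt->nrn_fast_imem) {
        for (int id = 0; id < nodecount; id++) {
            int node_id = node_index[id];
            nt->nrn_fast_imem->nrn_sav_rhs[node_id] -= rhs;
            nt->nrn_fast_imem->nrn_sav_d[node_id] += g;
        }
    }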
      +
      2888 
      2889 
      -
      2890 
-
2891 void CodegenCoreneuronCppVisitor::print_nrn_cur() {
-
      2892  if (!nrn_cur_required()) {
      -
      2893  return;
      -
      2894  }
      -
      2895 
      -
      2896  if (info.conductances.empty()) {
      -
      2897  print_nrn_current(*info.breakpoint_node);
      -
      2898  }
      -
      2899 
      -
      2900  printer->add_newline(2);
      -
      2901  printer->add_line("/** update current */");
      -
      2902  print_global_function_common_code(BlockType::Equation);
      -
      2903  print_channel_iteration_block_parallel_hint(BlockType::Equation, info.breakpoint_node);
      -
      2904  printer->push_block("for (int id = 0; id < nodecount; id++)");
      -
      2905  print_nrn_cur_kernel(*info.breakpoint_node);
      -
      2906  print_nrn_cur_matrix_shadow_update();
      -
      2907  if (!nrn_cur_reduction_loop_required()) {
      -
      2908  print_fast_imem_calculation();
      -
      2909  }
      -
      2910  printer->pop_block();
      -
      2911 
      -
      2912  if (nrn_cur_reduction_loop_required()) {
      -
      2913  printer->push_block("for (int id = 0; id < nodecount; id++)");
      -
      2914  print_nrn_cur_matrix_shadow_reduction();
      -
      2915  printer->pop_block();
      -
      2916  print_fast_imem_calculation();
      -
      2917  }
      -
      2918 
      -
      2919  print_kernel_data_present_annotation_block_end();
      -
      2920  printer->pop_block();
      -
      2921 }
+
2890 void CodegenCoreneuronCppVisitor::print_nrn_cur() {
+
      2891  if (!nrn_cur_required()) {
      +
      2892  return;
      +
      2893  }
      +
      2894 
      +
      2895  if (info.conductances.empty()) {
      +
      2896  print_nrn_current(*info.breakpoint_node);
      +
      2897  }
      +
      2898 
      +
      2899  printer->add_newline(2);
      +
      2900  printer->add_line("/** update current */");
      +
      2901  print_global_function_common_code(BlockType::Equation);
      +
      2902  print_channel_iteration_block_parallel_hint(BlockType::Equation, info.breakpoint_node);
      +
      2903  printer->push_block("for (int id = 0; id < nodecount; id++)");
      +
      2904  print_nrn_cur_kernel(*info.breakpoint_node);
      +
      2905  print_nrn_cur_matrix_shadow_update();
      +
      2906  if (!nrn_cur_reduction_loop_required()) {
      +
      2907  print_fast_imem_calculation();
      +
      2908  }
      +
      2909  printer->pop_block();
      +
      2910 
      +
      2911  if (nrn_cur_reduction_loop_required()) {
      +
      2912  printer->push_block("for (int id = 0; id < nodecount; id++)");
      +
      2913  print_nrn_cur_matrix_shadow_reduction();
      +
      2914  printer->pop_block();
      +
      2915  print_fast_imem_calculation();
      +
      2916  }
      +
      2917 
      +
      2918  print_kernel_data_present_annotation_block_end();
      +
      2919  printer->pop_block();
      +
      2920 }
      +
      2921 
      2922 
      -
      2923 
      -
      2924 /****************************************************************************************/
      -
      2925 /* Main code printing entry points */
      -
      2926 /****************************************************************************************/
      -
      2927 
      - -
      2929  print_standard_includes();
      -
      2930  print_backend_includes();
      -
      2931  print_coreneuron_includes();
      -
      2932 }
      +
      2923 /****************************************************************************************/
      +
      2924 /* Main code printing entry points */
      +
      2925 /****************************************************************************************/
      +
      2926 
      + +
      2928  print_standard_includes();
      +
      2929  print_backend_includes();
      +
      2930  print_coreneuron_includes();
      +
      2931 }
      +
      2932 
      2933 
      -
      2934 
      - -
      2936  print_first_pointer_var_index_getter();
      -
      2937  print_first_random_var_index_getter();
      -
      2938  print_net_receive_arg_size_getter();
      -
      2939  print_thread_getters();
      -
      2940  print_num_variable_getter();
      -
      2941  print_mech_type_getter();
      -
      2942  print_memb_list_getter();
      -
      2943 }
      + +
      2935  print_first_pointer_var_index_getter();
      +
      2936  print_first_random_var_index_getter();
      +
      2937  print_net_receive_arg_size_getter();
      +
      2938  print_thread_getters();
      +
      2939  print_num_variable_getter();
      +
      2940  print_mech_type_getter();
      +
      2941  print_memb_list_getter();
      +
      2942 }
      +
      2943 
      2944 
      -
      2945 
      - -
      2947  print_mechanism_global_var_structure(print_initializers);
      -
      2948  print_mechanism_range_var_structure(print_initializers);
      -
      2949  print_ion_var_structure();
      -
      2950 }
      + +
      2946  print_mechanism_global_var_structure(print_initializers);
      +
      2947  print_mechanism_range_var_structure(print_initializers);
      +
      2948  print_ion_var_structure();
      +
      2949 }
      +
      2950 
      2951 
      -
      2952 
      - -
      2954  if (!info.vectorize) {
      -
      2955  return;
      -
      2956  }
      -
      2957  printer->add_multi_line(R"CODE(
      -
      2958  #if NRN_PRCELLSTATE
      -
      2959  inst->v_unused[id] = v;
      -
      2960  #endif
      -
      2961  )CODE");
      -
      2962 }
      + +
      2953  if (!info.vectorize) {
      +
      2954  return;
      +
      2955  }
      +
      2956  printer->add_multi_line(R"CODE(
      +
      2957  #if NRN_PRCELLSTATE
      +
      2958  inst->v_unused[id] = v;
      +
      2959  #endif
      +
      2960  )CODE");
      +
      2961 }
      +
      2962 
      2963 
      -
      2964 
      - -
      2966  printer->add_multi_line(R"CODE(
      -
      2967  #if NRN_PRCELLSTATE
      -
      2968  inst->g_unused[id] = g;
      -
      2969  #endif
      -
      2970  )CODE");
      -
      2971 }
      + +
      2965  printer->add_multi_line(R"CODE(
      +
      2966  #if NRN_PRCELLSTATE
      +
      2967  inst->g_unused[id] = g;
      +
      2968  #endif
      +
      2969  )CODE");
      +
      2970 }
      +
      2971 
      2972 
      -
      2973 
      - -
      2975  print_top_verbatim_blocks();
      -
      2976  for (const auto& procedure: info.procedures) {
      -
      2977  print_procedure(*procedure);
      -
      2978  }
      -
      2979  for (const auto& function: info.functions) {
      -
      2980  print_function(*function);
      -
      2981  }
      -
      2982  for (const auto& function: info.function_tables) {
      -
      2983  print_function_tables(*function);
      -
      2984  }
      -
      2985  for (size_t i = 0; i < info.before_after_blocks.size(); i++) {
      -
      2986  print_before_after_block(info.before_after_blocks[i], i);
      -
      2987  }
      -
      2988  for (const auto& callback: info.derivimplicit_callbacks) {
      -
      2989  const auto& block = *callback->get_node_to_solve();
      -
      2990  print_derivimplicit_kernel(block);
      -
      2991  }
      -
      2992  print_net_send_buffering();
      -
      2993  print_net_init();
      -
      2994  print_watch_activate();
      -
      2995  print_watch_check();
      -
      2996  print_net_receive_kernel();
      -
      2997  print_net_receive();
      -
      2998  print_net_receive_buffering();
      -
      2999  print_nrn_init();
      -
      3000  print_nrn_cur();
      -
      3001  print_nrn_state();
      -
      3002 }
      + +
      2974  print_top_verbatim_blocks();
      +
      2975  for (const auto& procedure: info.procedures) {
      +
      2976  print_procedure(*procedure);
      +
      2977  }
      +
      2978  for (const auto& function: info.functions) {
      +
      2979  print_function(*function);
      +
      2980  }
      +
      2981  for (const auto& function: info.function_tables) {
      +
      2982  print_function_tables(*function);
      +
      2983  }
      +
      2984  for (size_t i = 0; i < info.before_after_blocks.size(); i++) {
      +
      2985  print_before_after_block(info.before_after_blocks[i], i);
      +
      2986  }
      +
      2987  for (const auto& callback: info.derivimplicit_callbacks) {
      +
      2988  const auto& block = *callback->get_node_to_solve();
      +
      2989  print_derivimplicit_kernel(block);
      +
      2990  }
      +
      2991  print_net_send_buffering();
      +
      2992  print_net_init();
      +
      2993  print_watch_activate();
      +
      2994  print_watch_check();
      +
      2995  print_net_receive_kernel();
      +
      2996  print_net_receive();
      +
      2997  print_net_receive_buffering();
      +
      2998  print_nrn_init();
      +
      2999  print_nrn_cur();
      +
      3000  print_nrn_state();
      +
      3001 }
      +
      3002 
      3003 
      -
      3004 
      - -
      3006  print_backend_info();
      -
      3007  print_headers_include();
      -
      3008  print_namespace_start();
      -
      3009  print_nmodl_constants();
      -
      3010  print_prcellstate_macros();
      -
      3011  print_mechanism_info();
      -
      3012  print_data_structures(true);
      -
      3013  print_global_variables_for_hoc();
      -
      3014  print_common_getters();
      -
      3015  print_memory_allocation_routine();
      -
      3016  print_abort_routine();
      -
      3017  print_thread_memory_callbacks();
      -
      3018  print_instance_variable_setup();
      -
      3019  print_nrn_alloc();
      -
      3020  print_nrn_constructor();
      -
      3021  print_nrn_destructor();
      -
      3022  print_function_prototypes();
      -
      3023  print_functors_definitions();
      -
      3024  print_compute_functions();
      -
      3025  print_check_table_thread_function();
      -
      3026  print_mechanism_register();
      -
      3027  print_namespace_stop();
      -
      3028 }
      + +
      3005  print_backend_info();
      +
      3006  print_headers_include();
      +
      3007  print_namespace_start();
      +
      3008  print_nmodl_constants();
      +
      3009  print_prcellstate_macros();
      +
      3010  print_mechanism_info();
      +
      3011  print_data_structures(true);
      +
      3012  print_global_variables_for_hoc();
      +
      3013  print_common_getters();
      +
      3014  print_memory_allocation_routine();
      +
      3015  print_abort_routine();
      +
      3016  print_thread_memory_callbacks();
      +
      3017  print_instance_variable_setup();
      +
      3018  print_nrn_alloc();
      +
      3019  print_nrn_constructor();
      +
      3020  print_nrn_destructor();
      +
      3021  print_function_prototypes();
      +
      3022  print_functors_definitions();
      +
      3023  print_compute_functions();
      +
      3024  print_check_table_thread_function();
      +
      3025  print_mechanism_register();
      +
      3026  print_namespace_stop();
      +
      3027 }
      +
      3028 
      3029 
      -
      3030 
      -
      3031 /****************************************************************************************/
      -
      3032 /* Overloaded visitor routines */
      -
      3033 /****************************************************************************************/
      +
      3030 /****************************************************************************************/
      +
      3031 /* Overloaded visitor routines */
      +
      3032 /****************************************************************************************/
      +
      3033 
      3034 
      -
      3035 
      - -
      3037  printer->fmt_line("{}_{}({});",
      -
      3038  node.get_node_to_solve()->get_node_name(),
      -
      3039  info.mod_suffix,
      -
      3040  external_method_arguments());
      -
      3041 }
      + +
      3036  printer->fmt_line("{}_{}({});",
      +
      3037  node.get_node_to_solve()->get_node_name(),
      +
      3038  info.mod_suffix,
      +
      3039  external_method_arguments());
      +
      3040 }
      +
      3041 
      3042 
      -
      3043 
      - -
      3045  // For_netcon should take the same arguments as net_receive and apply the operations
      -
      3046  // in the block to the weights of the netcons. Since all the weights are on the same vector,
      -
      3047  // weights, we have a mask of operations that we apply iteratively, advancing the offset
      -
      3048  // to the next netcon.
      -
      3049  const auto& args = node.get_parameters();
      -
      3050  RenameVisitor v;
      -
      3051  const auto& statement_block = node.get_statement_block();
      -
      3052  for (size_t i_arg = 0; i_arg < args.size(); ++i_arg) {
      -
      3053  // sanitize node_name since we want to substitute names like (*w) as they are
      -
      3054  auto old_name =
      -
      3055  std::regex_replace(args[i_arg]->get_node_name(), regex_special_chars, R"(\$&)");
      -
      3056  const auto& new_name = fmt::format("weights[{} + nt->_fornetcon_weight_perm[i]]", i_arg);
      -
      3057  v.set(old_name, new_name);
      -
      3058  statement_block->accept(v);
      -
      3059  }
      -
      3060 
      -
      3061  const auto index = position_of_int_var(naming::FOR_NETCON_VARIABLE);
      -
      3062 
      -
      3063  printer->fmt_text("const size_t offset = {}*pnodecount + id;", index);
      -
      3064  printer->add_newline();
      -
      3065  printer->add_line(
      -
      3066  "const size_t for_netcon_start = nt->_fornetcon_perm_indices[indexes[offset]];");
      -
      3067  printer->add_line(
      -
      3068  "const size_t for_netcon_end = nt->_fornetcon_perm_indices[indexes[offset] + 1];");
      -
      3069 
      -
      3070  printer->push_block("for (auto i = for_netcon_start; i < for_netcon_end; ++i)");
      -
      3071  print_statement_block(*statement_block, false, false);
      -
      3072  printer->pop_block();
      -
      3073 }
      + +
      3044  // For_netcon should take the same arguments as net_receive and apply the operations
      +
      3045  // in the block to the weights of the netcons. Since all the weights are on the same vector,
      +
      3046  // weights, we have a mask of operations that we apply iteratively, advancing the offset
      +
      3047  // to the next netcon.
      +
      3048  const auto& args = node.get_parameters();
      +
      3049  RenameVisitor v;
      +
      3050  const auto& statement_block = node.get_statement_block();
      +
      3051  for (size_t i_arg = 0; i_arg < args.size(); ++i_arg) {
      +
      3052  // sanitize node_name since we want to substitute names like (*w) as they are
      +
      3053  auto old_name =
      +
      3054  std::regex_replace(args[i_arg]->get_node_name(), regex_special_chars, R"(\$&)");
      +
      3055  const auto& new_name = fmt::format("weights[{} + nt->_fornetcon_weight_perm[i]]", i_arg);
      +
      3056  v.set(old_name, new_name);
      +
      3057  statement_block->accept(v);
      +
      3058  }
      +
      3059 
      +
      3060  const auto index = position_of_int_var(naming::FOR_NETCON_VARIABLE);
      +
      3061 
      +
      3062  printer->fmt_text("const size_t offset = {}*pnodecount + id;", index);
      +
      3063  printer->add_newline();
      +
      3064  printer->add_line(
      +
      3065  "const size_t for_netcon_start = nt->_fornetcon_perm_indices[indexes[offset]];");
      +
      3066  printer->add_line(
      +
      3067  "const size_t for_netcon_end = nt->_fornetcon_perm_indices[indexes[offset] + 1];");
      +
      3068 
      +
      3069  printer->push_block("for (auto i = for_netcon_start; i < for_netcon_end; ++i)");
      +
      3070  print_statement_block(*statement_block, false, false);
      +
      3071  printer->pop_block();
      +
      3072 }
      +
      3073 
      3074 
      -
      3075 
      - -
      3077  printer->add_text(fmt::format("nrn_watch_activate(inst, id, pnodecount, {}, v, watch_remove)",
      -
      3078  current_watch_statement++));
      -
      3079 }
      -
      3080 
      -
      3081 } // namespace codegen
      -
      3082 } // namespace nmodl
      + +
      3076  printer->add_text(fmt::format("nrn_watch_activate(inst, id, pnodecount, {}, v, watch_remove)",
      +
      3077  current_watch_statement++));
      +
      3078 }
      +
      3079 
      +
      3080 } // namespace codegen
      +
      3081 } // namespace nmodl
      Represents a BEFORE block in NMODL.
      -
      void print_coreneuron_includes()
      Print includes from coreneuron.
      -
      virtual std::string net_receive_buffering_declaration()
Generate the target backend code for the net_receive_buffering function declaration.
      +
      void print_coreneuron_includes()
      Print includes from coreneuron.
      +
      virtual std::string net_receive_buffering_declaration()
Generate the target backend code for the net_receive_buffering function declaration.
      std::shared_ptr< Block > get_node_to_solve() const noexcept
      Getter for member variable DerivimplicitCallback::node_to_solve.
      virtual bool nrn_cur_reduction_loop_required()
      Check if reduction block in nrn_cur required.
      -
      void print_net_move_call(const ast::FunctionCall &node) override
      Print call to net_move.
      +
      void print_net_move_call(const ast::FunctionCall &node) override
      Print call to net_move.
      Base class for all AST node.
      Definition: node.hpp:40
      -
      void print_mechanism_range_var_structure(bool print_initializers) override
      Print the structure that wraps all range and int variables required for the NMODL.
      +
      void print_mechanism_range_var_structure(bool print_initializers) override
      Print the structure that wraps all range and int variables required for the NMODL.
      bool is_index
      if this is pure index (e.g.
      static constexpr char FOR_NETCON_VARIABLE[]
name of the integer variable to store FOR_NETCON info.
      static constexpr char CELSIUS_VARIABLE[]
      global temperature variable
      @@ -3193,55 +3192,55 @@
      void print_function_prototypes() override
      Print function and procedures prototype declaration.
      Helper to represent information about index/int variables.
      -
      void print_v_unused() const override
      Set v_unused (voltage) for NRN_PRCELLSTATE feature.
      +
      void print_v_unused() const override
      Set v_unused (voltage) for NRN_PRCELLSTATE feature.
      DUState
      Represent a state in Def-Use chain.
      -
      void print_derivimplicit_kernel(const ast::Block &block)
      Print derivative kernel when derivimplicit method is used.
      +
      void print_derivimplicit_kernel(const ast::Block &block)
      Print derivative kernel when derivimplicit method is used.
      virtual void print_device_stream_wait() const
      Print the code to synchronise/wait on stream specific to NrnThread.
      @ Destructor
      destructor block
      @ MUTEX_UNLOCK
      type of ast::MutexUnlock
      virtual void print_net_send_buf_count_update_to_device() const
      Print the code to update NetSendBuffer_t count from host to device.
      BAType
      enum type to distinguish BEFORE or AFTER blocks
      Definition: ast_common.hpp:80
      -
      std::string global_variable_name(const SymbolType &symbol, bool use_instance=true) const override
      Determine the variable name for a global variable given its symbol.
      +
      std::string global_variable_name(const SymbolType &symbol, bool use_instance=true) const override
      Determine the variable name for a global variable given its symbol.
      -
      virtual void print_newtonspace_transfer_to_device() const
      Print code block to transfer newtonspace structure to device.
      +
      virtual void print_newtonspace_transfer_to_device() const
      Print code block to transfer newtonspace structure to device.
      constexpr char NRN_PRIVATE_DESTRUCTOR_METHOD[]
      nrn_private_destructor method in generated code
      -
      void print_net_send_call(const ast::FunctionCall &node) override
      Print call to net_send.
      +
      void print_net_send_call(const ast::FunctionCall &node) override
      Print call to net_send.
      static constexpr char POINT_PROCESS_VARIABLE[]
      inbuilt neuron variable for point process
      -
      virtual void print_net_receive_loop_begin()
      Print the code for the main net_receive loop.
      +
      virtual void print_net_receive_loop_begin()
      Print the code for the main net_receive loop.
      const ArgumentVector & get_parameters() const noexcept override
      Getter for member variable FunctionTableBlock::parameters.
      static constexpr char NTHREAD_D_SHADOW[]
      shadow d variable in neuron thread structure
      std::shared_ptr< symtab::Symbol > SymbolType
      virtual void print_kernel_data_present_annotation_block_begin()
      Print accelerator annotations indicating data presence on device.
      -
      void print_g_unused() const override
      Set g_unused (conductance) for NRN_PRCELLSTATE feature.
      +
      void print_g_unused() const override
      Set g_unused (conductance) for NRN_PRCELLSTATE feature.
      const ArgumentVector & get_parameters() const noexcept override
      Getter for member variable NetReceiveBlock::parameters.
      @ MUTEX_LOCK
      type of ast::MutexLock
      - -
      std::string int_variable_name(const IndexVariableInfo &symbol, const std::string &name, bool use_instance) const override
      Determine the name of an int variable given its symbol.
      -
      void print_initial_block(const ast::InitialBlock *node)
      Print initial block statements.
      -
      void print_net_receive_kernel()
      Print net_receive kernel function definition.
      -
      void print_thread_getters()
      Print the getter method for thread variables and ids.
      + +
      std::string int_variable_name(const IndexVariableInfo &symbol, const std::string &name, bool use_instance) const override
      Determine the name of an int variable given its symbol.
      +
      void print_initial_block(const ast::InitialBlock *node)
      Print initial block statements.
      +
      void print_net_receive_kernel()
      Print net_receive kernel function definition.
      +
      void print_thread_getters()
      Print the getter method for thread variables and ids.
      std::string simulator_name() override
      Name of the simulator the code was generated for.
      -
      void print_net_send_buffering()
      Print kernel for buffering net_send events.
      -
      virtual std::string namespace_name() override
      Name of "our" namespace.
      -
      static std::string get_register_type_for_ba_block(const ast::Block *block)
Return registration type for a given BEFORE/AFTER block \param block A BEFORE/AFTER block being regis...
      +
      void print_net_send_buffering()
      Print kernel for buffering net_send events.
      +
      virtual std::string namespace_name() override
      Name of "our" namespace.
      +
      static std::string get_register_type_for_ba_block(const ast::Block *block)
Return registration type for a given BEFORE/AFTER block \param block A BEFORE/AFTER block being regis...
      encapsulates code generation backend implementations
      Definition: ast_common.hpp:26
      std::shared_ptr< StatementBlock > get_statement_block() const noexcept override
      Getter for member variable InitialBlock::statement_block.
      @ index
      index / int variables
      -
      void print_fast_imem_calculation() override
      Print fast membrane current calculation code.
      +
      void print_fast_imem_calculation() override
      Print fast membrane current calculation code.
      virtual void print_dt_update_to_device() const
      Print the code to update dt from host to device.
      virtual void print_net_init_acc_serial_annotation_block_end()
      Print accelerator kernels end annotation for net_init kernel.
      -
      const ParamVector external_method_parameters(bool table=false) noexcept override
      Parameters for functions in generated code that are called back from external code.
      +
      const ParamVector external_method_parameters(bool table=false) noexcept override
      Parameters for functions in generated code that are called back from external code.
      Represent ions used in mod file.
      static constexpr char NRN_CONSTRUCTOR_METHOD[]
      nrn_constructor method in generated code
      -
      void print_net_init()
      Print initial block in the net receive block.
      -
      void print_net_event_call(const ast::FunctionCall &node) override
      Print call to net_event.
      -
      void print_nrn_destructor() override
      Print nrn_destructor function definition.
      -
      void print_sdlists_init(bool print_initializers) override
      - -
      void print_send_event_move()
      Print send event move block used in net receive as well as watch.
      -
      void append_conc_write_statements(std::vector< ShadowUseStatement > &statements, const Ion &ion, const std::string &concentration) override
      Generate Function call statement for nrn_wrote_conc.
      +
      void print_net_init()
      Print initial block in the net receive block.
      +
      void print_net_event_call(const ast::FunctionCall &node) override
      Print call to net_event.
      +
      void print_nrn_destructor() override
      Print nrn_destructor function definition.
      +
      void print_sdlists_init(bool print_initializers) override
      + +
      void print_send_event_move()
      Print send event move block used in net receive as well as watch.
      +
      void append_conc_write_statements(std::vector< ShadowUseStatement > &statements, const Ion &ion, const std::string &concentration) override
      Generate Function call statement for nrn_wrote_conc.
      int get_prefixsum_from_name(const std::vector< T > &variables, const std::string &name)
      std::string name
      name of the ion
      static constexpr char NTHREAD_DT_VARIABLE[]
      dt variable in neuron thread structure
      @@ -3249,20 +3248,20 @@
      void print_atomic_reduction_pragma() override
      Print atomic update pragma for reduction statements.
      Check if variable is used in given block.
      Implement string manipulation functions.
      -
      void print_global_variables_for_hoc() override
      Print byte arrays that register scalar and vector variables for hoc interface.
      +
      void print_global_variables_for_hoc() override
      Print byte arrays that register scalar and vector variables for hoc interface.
      static constexpr char NODE_AREA_VARIABLE[]
      inbuilt neuron variable for area of the compartment
      -
      void print_net_receive_common_code(const ast::Block &node, bool need_mech_inst=true)
      Print the common code section for net receive related methods.
      -
      void print_nrn_current(const ast::BreakpointBlock &node) override
      Print the nrn_current kernel.
      +
      void print_net_receive_common_code(const ast::Block &node, bool need_mech_inst=true)
      Print the common code section for net receive related methods.
      +
      void print_nrn_current(const ast::BreakpointBlock &node) override
      Print the nrn_current kernel.
      virtual bool is_function_block() const noexcept
      Check if the ast node is an instance of ast::FunctionBlock.
      Definition: ast.cpp:142
      static constexpr char NRN_ALLOC_METHOD[]
      nrn_alloc method in generated code
      bool is_integer
      if this is an integer (e.g.
      -
      void print_first_pointer_var_index_getter()
      Print the getter method for index position of first pointer variable.
      -
      void print_setup_range_variable()
Print the function that initializes range variables with different data types.
      -
      std::string internal_method_arguments() override
      Arguments for functions that are defined and used internally.
      +
      void print_first_pointer_var_index_getter()
      Print the getter method for index position of first pointer variable.
      +
      void print_setup_range_variable()
Print the function that initializes range variables with different data types.
      +
      std::string internal_method_arguments() override
      Arguments for functions that are defined and used internally.
      Class that binds all pieces together for parsing C verbatim blocks.
      Definition: c11_driver.hpp:37
      std::shared_ptr< StatementBlock > get_statement_block() const noexcept override
      Getter for member variable BreakpointBlock::statement_block.
      -
      std::string nrn_thread_arguments() const override
      Arguments for "_threadargs_" macro in neuron implementation.
      -
      void print_net_receive_arg_size_getter()
      Print the getter method for getting number of arguments for net_receive.
      +
      std::string nrn_thread_arguments() const override
      Arguments for "_threadargs_" macro in neuron implementation.
      +
      void print_net_receive_arg_size_getter()
      Print the getter method for getting number of arguments for net_receive.
      Helper visitor to gather AST information to help code generation.
      std::string get_node_name() const override
      Return name of the node.
      Definition: ast.cpp:3800
      void print_function_or_procedure(const ast::Block &node, const std::string &name, const std::unordered_set< CppObjectSpecifier > &specifiers={ CppObjectSpecifier::Inline}) override
      Print nmodl function or procedure (common code)
      @@ -3272,35 +3271,35 @@
      Base class for all block scoped nodes.
      Definition: block.hpp:41
      Represents a INITIAL block in the NMODL.
      virtual void print_nrn_cur_matrix_shadow_update()
      Print the update to matrix elements with/without shadow vectors.
      -
      std::string register_mechanism_arguments() const override
      Arguments for register_mech or point_register_mech function.
      +
      std::string register_mechanism_arguments() const override
      Arguments for register_mech or point_register_mech function.
      virtual void print_net_init_acc_serial_annotation_block_begin()
      Print accelerator kernels begin annotation for net_init kernel.
      virtual void print_deriv_advance_flag_transfer_to_device() const
      Print the code to copy derivative advance flag to device.
      -
      void print_first_random_var_index_getter()
      Print the getter method for index position of first RANDOM variable.
      +
      void print_first_random_var_index_getter()
      Print the getter method for index position of first RANDOM variable.
      static constexpr char NTHREAD_RHS_SHADOW[]
      shadow rhs variable in neuron thread structure
      Utility functions for visitors implementation.
      @ Equation
      breakpoint block
      static constexpr char USE_TABLE_VARIABLE[]
      global variable to indicate if table is used
      Represent WATCH statement in NMODL.
      -
      virtual void print_before_after_block(const ast::Block *node, size_t block_id)
      Print NMODL before / after block in target backend code.
      +
      virtual void print_before_after_block(const ast::Block *node, size_t block_id)
      Print NMODL before / after block in target backend code.
      Represents a BREAKPOINT block in NMODL.
      nmodl::parser::UnitDriver driver
      Definition: parser.cpp:28
      const std::regex regex_special_chars
      Visitor for printing C++ code compatible with legacy api of CoreNEURON
      static const std::unordered_map< std::string, std::string > VERBATIM_VARIABLES_MAPPING
      commonly used variables in verbatim block and how they should be mapped to new code generation backen...
      -
      void print_ion_variable() override
      Print the ion variable struct.
      +
      void print_ion_variable() override
      Print the ion variable struct.
      @ Constructor
      constructor block
      -
      void print_ion_var_structure()
      Print structure of ion variables used for local copies.
      +
      void print_ion_var_structure()
      Print structure of ion variables used for local copies.
      -
      ParamVector internal_method_parameters() override
      Parameters for internally defined functions.
      +
      ParamVector internal_method_parameters() override
      Parameters for internally defined functions.
      int position_of_int_var(const std::string &name) const override
      Determine the position in the data array for a given int variable.
      virtual void print_channel_iteration_block_parallel_hint(BlockType type, const ast::Block *block)
      Print pragma annotations for channel iterations.
      virtual bool is_constant_variable(const std::string &name) const
      Check if variable is qualified as constant.
      void print_function_procedure_helper(const ast::Block &node) override
      Common helper function to help printing function or procedure blocks.
      std::shared_ptr< StatementBlock > get_statement_block() const noexcept override
      Getter for member variable ForNetcon::statement_block.
      Definition: for_netcon.hpp:185
      -
      void visit_for_netcon(const ast::ForNetcon &node) override
      visit node of type ast::ForNetcon
      -
      void print_compute_functions() override
      Print all compute functions for every backend.
      -
      void print_nrn_cur_conductance_kernel(const ast::BreakpointBlock &node) override
      Print the nrn_cur kernel with NMODL conductance keyword provisions.
      +
      void visit_for_netcon(const ast::ForNetcon &node) override
      visit node of type ast::ForNetcon
      +
      void print_compute_functions() override
      Print all compute functions for every backend.
      +
      void print_nrn_cur_conductance_kernel(const ast::BreakpointBlock &node) override
      Print the nrn_cur kernel with NMODL conductance keyword provisions.
      bool is_intra_cell_conc(const std::string &text) const
      Check if variable name is internal cell concentration.
      virtual void print_memory_allocation_routine() const
      Print memory allocation routine.
      @@ -3313,10 +3312,10 @@
      static constexpr char NRN_INIT_METHOD[]
      nrn_init method in generated code
      virtual void print_rhs_d_shadow_variables()
      Print the setup method for setting matrix shadow vectors.
      const ExpressionVector & get_arguments() const noexcept
      Getter for member variable FunctionCall::arguments.
      -
      std::string process_verbatim_text(std::string const &text) override
      Process a verbatim block for possible variable renaming.
      +
      std::string process_verbatim_text(std::string const &text) override
      Process a verbatim block for possible variable renaming.
      static constexpr char THREAD_ARGS[]
      verbatim name of the variable for nrn thread arguments
      -
      void print_net_receive()
      Print net_receive function definition.
      -
      static void rename_net_receive_arguments(const ast::NetReceiveBlock &net_receive_node, const ast::Node &node)
      Rename arguments to NET_RECEIVE block with corresponding pointer variable.
      +
      void print_net_receive()
      Print net_receive function definition.
      +
      static void rename_net_receive_arguments(const ast::NetReceiveBlock &net_receive_node, const ast::Node &node)
      Rename arguments to NET_RECEIVE block with corresponding pointer variable.
      bool optimize_ion_variable_copies() const override
      Check if ion variable copies should be avoided.
      void print_top_verbatim_blocks()
      Print top level (global scope) verbatim blocks.
      Blindly rename given variable to new name
      @@ -3325,84 +3324,84 @@
      static constexpr char TQITEM_VARIABLE[]
      inbuilt neuron variable for tqitem process
      void set(const std::string &old_name, std::string new_name)
      std::vector< std::shared_ptr< const ast::Ast > > collect_nodes(const ast::Ast &node, const std::vector< ast::AstNodeType > &types)
      traverse node recursively and collect nodes of given types
      -
      void print_mechanism_global_var_structure(bool print_initializers) override
      Print the structure that wraps all global variables used in the NMODL.
      +
      void print_mechanism_global_var_structure(bool print_initializers) override
      Print the structure that wraps all global variables used in the NMODL.
      Visitor to return Def-Use chain for a given variable in the block/node
      -
      void print_nrn_constructor() override
      Print nrn_constructor function definition.
      +
      void print_nrn_constructor() override
      Print nrn_constructor function definition.
      virtual void print_kernel_data_present_annotation_block_end()
      Print matching block end of accelerator annotations for data presence on device.
      -
      void print_nrn_alloc() override
      Print nrn_alloc function definition.
      +
      void print_nrn_alloc() override
      Print nrn_alloc function definition.
      virtual void print_nrn_cur_matrix_shadow_reduction()
      Print the reduction to matrix elements from shadow vectors.
      -
      void print_memb_list_getter()
      Print the getter method for returning membrane list from NrnThread.
      - +
      void print_memb_list_getter()
      Print the getter method for returning membrane list from NrnThread.
      +
      static constexpr char INST_GLOBAL_MEMBER[]
      instance struct member pointing to the global variable structure
      @ PROTECT_STATEMENT
      type of ast::ProtectStatement
      -
      void print_instance_variable_setup()
Print the function that initializes the instance structure.
      -
      void print_nrn_init(bool skip_init_check=true)
      Print the nrn_init function definition.
      -
      std::string get_variable_name(const std::string &name, bool use_instance=true) const override
      Determine variable name in the structure of mechanism properties.
      +
      void print_instance_variable_setup()
Print the function that initializes the instance structure.
      +
      void print_nrn_init(bool skip_init_check=true)
      Print the nrn_init function definition.
      +
      std::string get_variable_name(const std::string &name, bool use_instance=true) const override
      Determine variable name in the structure of mechanism properties.
      NmodlType
      NMODL variable properties.
      @ BATYPE_BREAKPOINT
      Definition: ast_common.hpp:80
      virtual void print_abort_routine() const
      Print backend specific abort routine.
      const ArgumentVector & get_parameters() const noexcept override
      Getter for member variable ForNetcon::parameters.
      Definition: for_netcon.hpp:176
      -
      void print_nrn_cur() override
      Print nrn_cur / current update function definition.
      -
      std::string get_range_var_float_type(const SymbolType &symbol)
      Returns floating point type for given range variable symbol.
      -
      virtual void print_net_send_buffering_cnt_update() const
      Print the code related to the update of NetSendBuffer_t cnt.
      -
      void print_thread_memory_callbacks()
      Print thread related memory allocation and deallocation callbacks.
      +
      void print_nrn_cur() override
      Print nrn_cur / current update function definition.
      +
      std::string get_range_var_float_type(const SymbolType &symbol)
      Returns floating point type for given range variable symbol.
      +
      virtual void print_net_send_buffering_cnt_update() const
      Print the code related to the update of NetSendBuffer_t cnt.
      +
      void print_thread_memory_callbacks()
      Print thread related memory allocation and deallocation callbacks.
      Represent a callback to NEURON's derivimplicit solver.
      Represents ion write statement during code generation.
      -
      void print_net_receive_buffering(bool need_mech_inst=true)
      Print kernel for buffering net_receive events.
      -
      virtual void print_global_function_common_code(BlockType type, const std::string &function_name="") override
      Print common code for global functions like nrn_init, nrn_cur and nrn_state.
      -
      ParamVector functor_params() override
      The parameters of the Newton solver "functor".
      +
      void print_net_receive_buffering(bool need_mech_inst=true)
      Print kernel for buffering net_receive events.
      +
      virtual void print_global_function_common_code(BlockType type, const std::string &function_name="") override
      Print common code for global functions like nrn_init, nrn_cur and nrn_state.
      +
      ParamVector functor_params() override
      The parameters of the Newton solver "functor".
      Implement logger based on spdlog library.
      bool is_vdata
      if variable resides in vdata field of NrnThread typically true for bbcore pointer
      BlockType
      Helper to represent various block types.
      -
      void print_mechanism_register() override
      Print the mechanism registration function.
      +
      void print_mechanism_register() override
      Print the mechanism registration function.
      virtual void print_global_method_annotation()
      Print backend specific global method annotation.
      static constexpr char NRN_STATE_METHOD[]
      nrn_state method in generated code
      -
      void print_nrn_cur_kernel(const ast::BreakpointBlock &node) override
      Print main body of nrn_cur function.
      +
      void print_nrn_cur_kernel(const ast::BreakpointBlock &node) override
      Print main body of nrn_cur function.
      Version information and units file path.
      -
      std::string nrn_thread_internal_arguments() override
      Arguments for "_threadargs_" macro in neuron implementation.
      +
      std::string nrn_thread_internal_arguments() override
      Arguments for "_threadargs_" macro in neuron implementation.
      virtual void print_device_atomic_capture_annotation() const
      Print pragma annotation for increase and capture of variable in automatic way.
      -
      void visit_derivimplicit_callback(const ast::DerivimplicitCallback &node) override
      visit node of type ast::DerivimplicitCallback
      -
      virtual void print_get_memb_list()
      Print the target backend code for defining and checking a local Memb_list variable.
      -
      void print_num_variable_getter()
      Print the getter methods for float and integer variables count.
      +
      void visit_derivimplicit_callback(const ast::DerivimplicitCallback &node) override
      visit node of type ast::DerivimplicitCallback
      +
      virtual void print_get_memb_list()
      Print the target backend code for defining and checking a local Memb_list variable.
      +
      void print_num_variable_getter()
      Print the getter methods for float and integer variables count.
      virtual std::shared_ptr< StatementBlock > get_statement_block() const
      Return associated statement block for the AST node.
      Definition: ast.cpp:32
      -
      void print_standard_includes() override
      Print standard C/C++ includes.
      -
      virtual void print_net_receive_loop_end()
      Print the code for closing the main net_receive loop.
      +
      void print_standard_includes() override
      Print standard C/C++ includes.
      +
      virtual void print_net_receive_loop_end()
      Print the code for closing the main net_receive loop.
      void visit_children(visitor::Visitor &v) override
      visit children i.e.
      Definition: ast.cpp:388
      -
      void print_mech_type_getter()
      Print the getter method for returning mechtype.
      +
      void print_mech_type_getter()
      Print the getter method for returning mechtype.
      Represents a AFTER block in NMODL.
      Definition: after_block.hpp:51
      virtual bool is_before_block() const noexcept
      Check if the ast node is an instance of ast::BeforeBlock.
      Definition: ast.cpp:152
      static constexpr char NTHREAD_T_VARIABLE[]
      t variable in neuron thread structure
      -
      std::string float_variable_name(const SymbolType &symbol, bool use_instance) const override
      Determine the name of a float variable given its symbol.
      -
      virtual void print_global_variable_device_update_annotation()
      Print the pragma annotation to update global variables from host to the device.
      +
      std::string float_variable_name(const SymbolType &symbol, bool use_instance) const override
      Determine the name of a float variable given its symbol.
      +
      virtual void print_global_variable_device_update_annotation()
      Print the pragma annotation to update global variables from host to the device.
      Blindly rename given variable to new name
      @ State
      derivative block
      void print_function_tables(const ast::FunctionTableBlock &node)
      Print NMODL function_table in target backend code.
      THIS FILE IS GENERATED AT BUILD TIME AND SHALL NOT BE EDITED.
      -
      void add_variable_tqitem(std::vector< IndexVariableInfo > &variables) override
      Add the variable tqitem during get_int_variables.
      -
      void visit_watch_statement(const ast::WatchStatement &node) override
      visit node of type ast::WatchStatement
      -
      const std::string external_method_arguments() noexcept override
      Arguments for external functions called from generated code.
      -
      void print_nrn_cur_non_conductance_kernel() override
      Print the nrn_cur kernel without NMODL conductance keyword provisions.
      +
      void add_variable_tqitem(std::vector< IndexVariableInfo > &variables) override
      Add the variable tqitem during get_int_variables.
      +
      void visit_watch_statement(const ast::WatchStatement &node) override
      visit node of type ast::WatchStatement
      +
      const std::string external_method_arguments() noexcept override
      Arguments for external functions called from generated code.
      +
      void print_nrn_cur_non_conductance_kernel() override
      Print the nrn_cur kernel without NMODL conductance keyword provisions.
      bool variable_used(const ast::Node &node, std::string name)
      virtual void print_backend_includes()
      Print backend specific includes (none needed for C++ backend)
      std::string process_verbatim_token(const std::string &token)
      Process a token in a verbatim block for possible variable renaming.
      -
      void print_nrn_state() override
      Print nrn_state / state update function definition.
      -
      void add_variable_point_process(std::vector< IndexVariableInfo > &variables) override
      Add the variable point_process during get_int_variables.
      +
      void print_nrn_state() override
      Print nrn_state / state update function definition.
      +
      void add_variable_point_process(std::vector< IndexVariableInfo > &variables) override
      Add the variable point_process during get_int_variables.
      static constexpr char NRN_DESTRUCTOR_METHOD[]
      nrn_destructor method in generated code
      -
      void print_codegen_routines() override
      Print entry point to code generation.
      +
      void print_codegen_routines() override
      Print entry point to code generation.
      - +
      static constexpr char THREAD_ARGS_PROTO[]
      verbatim name of the variable for nrn thread arguments in prototype
      @ BeforeAfter
      before / after block
      -
      virtual void print_net_send_buffering_grow()
      Print statement that grows NetSendBuffering_t structure if needed.
      +
      virtual void print_net_send_buffering_grow()
      Print statement that grows NetSendBuffering_t structure if needed.
      constexpr char NRN_PRIVATE_CONSTRUCTOR_METHOD[]
      nrn_private_constructor method in generated code
      virtual std::string get_node_name() const
Return name of the node.
      Definition: ast.cpp:28
      virtual void print_net_send_buf_update_to_host() const
      Print the code to update NetSendBuffer_t from device to host.
      Auto generated AST classes declaration.
      -
      void print_data_structures(bool print_initializers) override
      Print all classes.
      -
      std::string replace_if_verbatim_variable(std::string name)
      Replace commonly used verbatim variables.
      -
      virtual void print_ion_var_constructor(const std::vector< std::string > &members)
      Print constructor of ion variables.
      +
      void print_data_structures(bool print_initializers) override
      Print all classes.
      +
      std::string replace_if_verbatim_variable(std::string name)
      Replace commonly used verbatim variables.
      +
      virtual void print_ion_var_constructor(const std::vector< std::string > &members)
      Print constructor of ion variables.
      int position_of_float_var(const std::string &name) const override
      Determine the position in the data array for a given float variable.
      std::vector< std::tuple< std::string, std::string, std::string, std::string > > ParamVector
      A vector of parameters represented by a 4-tuple of strings:
      static constexpr char ION_VARNAME_PREFIX[]
      prefix for ion variable
      diff --git a/doxygen/src_2codegen_2codegen__neuron__cpp__visitor_8cpp_source.html b/doxygen/src_2codegen_2codegen__neuron__cpp__visitor_8cpp_source.html index 8ef272e15..2b404647d 100644 --- a/doxygen/src_2codegen_2codegen__neuron__cpp__visitor_8cpp_source.html +++ b/doxygen/src_2codegen_2codegen__neuron__cpp__visitor_8cpp_source.html @@ -233,7 +233,7 @@
      137 }
      138 
      139 
      - +
      141  if (info.table_count == 0) {
      142  return;
      143  }
      @@ -279,2443 +279,2445 @@
      183  printer->pop_block();
      184 }
      185 
      - -
      187  printer->add_line("/* Neuron setdata functions */");
      -
      188  printer->add_line("extern void _nrn_setdata_reg(int, void(*)(Prop*));");
      -
      189  printer->push_block("static void _setdata(Prop* _prop)");
      -
      190  if (!info.point_process) {
      -
      191  printer->add_multi_line(R"CODE(
      -
      192  _extcall_prop = _prop;
      -
      193  _prop_id = _nrn_get_prop_id(_prop);
      -
      194  )CODE");
      -
      195  }
      -
      196  printer->pop_block();
      -
      197 
      -
      198  if (info.point_process) {
      -
      199  printer->push_block("static void _hoc_setdata(void* _vptr)");
      -
      200  printer->add_multi_line(R"CODE(
      -
      201  Prop* _prop;
      -
      202  _prop = ((Point_process*)_vptr)->prop;
      -
      203  _setdata(_prop);
      -
      204  )CODE");
      -
      205  } else {
      -
      206  printer->push_block("static void _hoc_setdata()");
      -
      207  printer->add_multi_line(R"CODE(
      -
      208  Prop *_prop = hoc_getdata_range(mech_type);
      -
      209  _setdata(_prop);
      -
      210  hoc_retpushx(1.);
      -
      211  )CODE");
      -
      212  }
      -
      213  printer->pop_block();
      -
      214 
      -
      215  printer->add_line("/* Mechanism procedures and functions */");
      -
      216  for (const auto& node: info.functions) {
      -
      217  print_function_declaration(*node, node->get_node_name());
      -
      218  printer->add_text(';');
      -
      219  printer->add_newline();
      -
      220  }
      -
      221  for (const auto& node: info.procedures) {
      -
      222  print_function_declaration(*node, node->get_node_name());
      -
      223  printer->add_text(';');
      -
      224  printer->add_newline();
      -
      225  }
      -
      226 }
      -
      227 
      -
      228 
      - -
      230  printer->add_newline(2);
      -
      231 
      -
      232  print_point_process_function_definitions();
      -
      233  print_setdata_functions();
      -
      234  print_check_table_function_prototypes();
      -
      235 }
      -
      236 
      +
      186 
      + +
      188  printer->add_line("/* Neuron setdata functions */");
      +
      189  printer->add_line("extern void _nrn_setdata_reg(int, void(*)(Prop*));");
      +
      190  printer->push_block("static void _setdata(Prop* _prop)");
      +
      191  if (!info.point_process) {
      +
      192  printer->add_multi_line(R"CODE(
      +
      193  _extcall_prop = _prop;
      +
      194  _prop_id = _nrn_get_prop_id(_prop);
      +
      195  )CODE");
      +
      196  }
      +
      197  printer->pop_block();
      +
      198 
      +
      199  if (info.point_process) {
      +
      200  printer->push_block("static void _hoc_setdata(void* _vptr)");
      +
      201  printer->add_multi_line(R"CODE(
      +
      202  Prop* _prop;
      +
      203  _prop = ((Point_process*)_vptr)->prop;
      +
      204  _setdata(_prop);
      +
      205  )CODE");
      +
      206  } else {
      +
      207  printer->push_block("static void _hoc_setdata()");
      +
      208  printer->add_multi_line(R"CODE(
      +
      209  Prop *_prop = hoc_getdata_range(mech_type);
      +
      210  _setdata(_prop);
      +
      211  hoc_retpushx(1.);
      +
      212  )CODE");
      +
      213  }
      +
      214  printer->pop_block();
      +
      215 }
      +
      216 
      +
      217 
      + +
      219  printer->add_newline(2);
      +
      220 
      +
      221  printer->add_line("/* Mechanism procedures and functions */");
      +
      222  for (const auto& node: info.functions) {
      +
      223  print_function_declaration(*node, node->get_node_name());
      +
      224  printer->add_text(';');
      +
      225  printer->add_newline();
      +
      226  }
      +
      227  for (const auto& node: info.procedures) {
      +
      228  print_function_declaration(*node, node->get_node_name());
      +
      229  printer->add_text(';');
      +
      230  printer->add_newline();
      +
      231  }
      +
      232 
      +
      233  print_point_process_function_definitions();
      +
      234  print_setdata_functions();
      +
      235  print_check_table_entrypoint();
      +
      236 }
      237 
      - -
      239  const ast::Block& node,
      -
      240  const std::string& name,
      -
      241  const std::unordered_set<CppObjectSpecifier>& specifiers) {
      -
      242  printer->add_newline(2);
      -
      243  print_function_declaration(node, name, specifiers);
      -
      244  printer->add_text(" ");
      -
      245  printer->push_block();
      -
      246 
      -
      247  // function requires return variable declaration
      -
      248  if (node.is_function_block()) {
      -
      249  auto type = default_float_data_type();
      -
      250  printer->fmt_line("{} ret_{} = 0.0;", type, name);
      -
      251  } else {
      -
      252  printer->fmt_line("int ret_{} = 0;", name);
      -
      253  }
      -
      254 
      -
      255  if (!info.artificial_cell) {
      -
      256  printer->add_line("auto v = node_data.node_voltages[node_data.nodeindices[id]];");
      -
      257  }
      -
      258 
      -
      259  print_statement_block(*node.get_statement_block(), false, false);
      -
      260  printer->fmt_line("return ret_{};", name);
      -
      261  printer->pop_block();
      -
      262 }
      -
      263 
      +
      238 
      + +
      240  const ast::Block& node,
      +
      241  const std::string& name,
      +
      242  const std::unordered_set<CppObjectSpecifier>& specifiers) {
      +
      243  printer->add_newline(2);
      +
      244  print_function_declaration(node, name, specifiers);
      +
      245  printer->add_text(" ");
      +
      246  printer->push_block();
      +
      247 
      +
      248  // function requires return variable declaration
      +
      249  if (node.is_function_block()) {
      +
      250  auto type = default_float_data_type();
      +
      251  printer->fmt_line("{} ret_{} = 0.0;", type, name);
      +
      252  } else {
      +
      253  printer->fmt_line("int ret_{} = 0;", name);
      +
      254  }
      +
      255 
      +
      256  if (!info.artificial_cell) {
      +
      257  printer->add_line("auto v = node_data.node_voltages[node_data.nodeindices[id]];");
      +
      258  }
      +
      259 
      +
      260  print_statement_block(*node.get_statement_block(), false, false);
      +
      261  printer->fmt_line("return ret_{};", name);
      +
      262  printer->pop_block();
      +
      263 }
      264 
      - -
      266  auto name = node.get_node_name();
      -
      267  if (info.function_uses_table(name)) {
      -
      268  auto new_name = "f_" + name;
      -
      269  print_function_or_procedure(node,
      -
      270  new_name,
      - -
      272  print_table_check_function(node);
      -
      273  print_table_replacement_function(node);
      -
      274  } else {
      -
      275  print_function_or_procedure(node, name);
      -
      276  }
      -
      277 }
      -
      278 
      +
      265 
      + +
      267  auto name = node.get_node_name();
      +
      268  if (info.function_uses_table(name)) {
      +
      269  auto new_name = "f_" + name;
      +
      270  print_function_or_procedure(node,
      +
      271  new_name,
      + +
      273  print_table_check_function(node);
      +
      274  print_table_replacement_function(node);
      +
      275  } else {
      +
      276  print_function_or_procedure(node, name);
      +
      277  }
      +
      278 }
      279 
      - -
      281  const ast::Block* function_or_procedure_block,
      -
      282  InterpreterWrapper wrapper_type) {
      -
      283  if (info.point_process && wrapper_type == InterpreterWrapper::Python) {
      -
      284  return;
      -
      285  }
      -
      286  const auto block_name = function_or_procedure_block->get_node_name();
      -
      287  if (info.point_process) {
      -
      288  printer->fmt_push_block("static double _hoc_{}(void* _vptr)", block_name);
      -
      289  } else if (wrapper_type == InterpreterWrapper::HOC) {
      -
      290  printer->fmt_push_block("static void _hoc_{}(void)", block_name);
      -
      291  } else {
      -
      292  printer->fmt_push_block("static double _npy_{}(Prop* _prop)", block_name);
      -
      293  }
      -
      294  printer->add_multi_line(R"CODE(
      -
      295  double _r{};
      -
      296  Datum* _ppvar;
      -
      297  Datum* _thread;
      -
      298  NrnThread* nt;
      -
      299  )CODE");
      -
      300 
      -
      301  std::string prop_name;
      -
      302  if (info.point_process) {
      -
      303  printer->add_multi_line(R"CODE(
      -
      304  auto* const _pnt = static_cast<Point_process*>(_vptr);
      -
      305  auto* const _p = _pnt->prop;
      -
      306  if (!_p) {
      -
      307  hoc_execerror("POINT_PROCESS data instance not valid", NULL);
      -
      308  }
      -
      309  _nrn_mechanism_cache_instance _lmc{_p};
      -
      310  size_t const id{};
      -
      311  _ppvar = _nrn_mechanism_access_dparam(_p);
      -
      312  _thread = _extcall_thread.data();
      -
      313  nt = static_cast<NrnThread*>(_pnt->_vnt);
      -
      314  )CODE");
      -
      315 
      -
      316  prop_name = "_p";
      -
      317  } else if (wrapper_type == InterpreterWrapper::HOC) {
      -
      318  if (program_symtab->lookup(block_name)->has_all_properties(NmodlType::use_range_ptr_var)) {
      -
      319  printer->push_block("if (!_prop_id)");
      -
      320  printer->fmt_line(
      -
      321  "hoc_execerror(\"No data for {}_{}. Requires prior call to setdata_{} and that the "
      -
      322  "specified mechanism instance still be in existence.\", NULL);",
      -
      323  function_or_procedure_block->get_node_name(),
      -
      324  info.mod_suffix,
      -
      325  info.mod_suffix);
      -
      326  printer->pop_block();
      -
      327  printer->add_line("Prop* _local_prop = _extcall_prop;");
      -
      328  } else {
      -
      329  printer->add_line("Prop* _local_prop = _prop_id ? _extcall_prop : nullptr;");
      -
      330  }
      -
      331  printer->add_multi_line(R"CODE(
      -
      332  _nrn_mechanism_cache_instance _lmc{_local_prop};
      -
      333  size_t const id{};
      -
      334  _ppvar = _local_prop ? _nrn_mechanism_access_dparam(_local_prop) : nullptr;
      -
      335  _thread = _extcall_thread.data();
      -
      336  nt = nrn_threads;
      -
      337  )CODE");
      -
      338  prop_name = "_local_prop";
      -
      339  } else { // wrapper_type == InterpreterWrapper::Python
      -
      340  printer->add_multi_line(R"CODE(
      -
      341  _nrn_mechanism_cache_instance _lmc{_prop};
      -
      342  size_t const id = 0;
      -
      343  _ppvar = _nrn_mechanism_access_dparam(_prop);
      -
      344  _thread = _extcall_thread.data();
      -
      345  nt = nrn_threads;
      -
      346  )CODE");
      -
      347  prop_name = "_prop";
      -
      348  }
      -
      349 
      -
      350  printer->fmt_line("auto inst = make_instance_{}(_lmc);", info.mod_suffix);
      -
      351  if (!info.artificial_cell) {
      -
      352  printer->fmt_line("auto node_data = make_node_data_{}({});", info.mod_suffix, prop_name);
      -
      353  }
      -
      354  if (!codegen_thread_variables.empty()) {
      -
      355  printer->fmt_line("auto _thread_vars = {}(_thread[{}].get<double*>());",
      -
      356  thread_variables_struct(),
      -
      357  info.thread_var_thread_id);
      -
      358  }
      -
      359  if (info.function_uses_table(block_name)) {
      -
      360  printer->fmt_line("{}({});",
      -
      361  table_update_function_name(block_name),
      -
      362  internal_method_arguments());
      -
      363  }
      -
      364  const auto get_func_call_str = [&]() {
      -
      365  const auto params = function_or_procedure_block->get_parameters();
      -
      366  const auto func_proc_name = block_name + "_" + info.mod_suffix;
      -
      367  auto func_call = fmt::format("{}({}", func_proc_name, internal_method_arguments());
      -
      368  for (int i = 0; i < params.size(); ++i) {
      -
      369  func_call.append(fmt::format(", *getarg({})", i + 1));
      -
      370  }
      -
      371  func_call.append(")");
      -
      372  return func_call;
      -
      373  };
      -
      374  if (function_or_procedure_block->is_function_block()) {
      -
      375  printer->add_indent();
      -
      376  printer->fmt_text("_r = {};", get_func_call_str());
      -
      377  printer->add_newline();
      -
      378  } else {
      -
      379  printer->add_line("_r = 1.;");
      -
      380  printer->fmt_line("{};", get_func_call_str());
      -
      381  }
      -
      382  if (info.point_process || wrapper_type != InterpreterWrapper::HOC) {
      -
      383  printer->add_line("return(_r);");
      -
      384  } else if (wrapper_type == InterpreterWrapper::HOC) {
      -
      385  printer->add_line("hoc_retpushx(_r);");
      -
      386  }
      -
      387  printer->pop_block();
      -
      388 }
      -
      389 
      +
      280 
+
281 void CodegenNeuronCppVisitor::print_hoc_py_wrapper_function_body(
+
      282  const ast::Block* function_or_procedure_block,
      +
      283  InterpreterWrapper wrapper_type) {
      +
      284  if (info.point_process && wrapper_type == InterpreterWrapper::Python) {
      +
      285  return;
      +
      286  }
      +
      287  const auto block_name = function_or_procedure_block->get_node_name();
      +
      288  if (info.point_process) {
      +
      289  printer->fmt_push_block("static double _hoc_{}(void* _vptr)", block_name);
      +
      290  } else if (wrapper_type == InterpreterWrapper::HOC) {
      +
      291  printer->fmt_push_block("static void _hoc_{}(void)", block_name);
      +
      292  } else {
      +
      293  printer->fmt_push_block("static double _npy_{}(Prop* _prop)", block_name);
      +
      294  }
      +
      295  printer->add_multi_line(R"CODE(
      +
      296  double _r{};
      +
      297  Datum* _ppvar;
      +
      298  Datum* _thread;
      +
      299  NrnThread* nt;
      +
      300  )CODE");
      +
      301 
      +
      302  std::string prop_name;
      +
      303  if (info.point_process) {
      +
      304  printer->add_multi_line(R"CODE(
      +
      305  auto* const _pnt = static_cast<Point_process*>(_vptr);
      +
      306  auto* const _p = _pnt->prop;
      +
      307  if (!_p) {
      +
      308  hoc_execerror("POINT_PROCESS data instance not valid", nullptr);
      +
      309  }
      +
      310  _nrn_mechanism_cache_instance _lmc{_p};
      +
      311  size_t const id{};
      +
      312  _ppvar = _nrn_mechanism_access_dparam(_p);
      +
      313  _thread = _extcall_thread.data();
      +
      314  nt = static_cast<NrnThread*>(_pnt->_vnt);
      +
      315  )CODE");
      +
      316 
      +
      317  prop_name = "_p";
      +
      318  } else if (wrapper_type == InterpreterWrapper::HOC) {
      +
      319  if (program_symtab->lookup(block_name)->has_all_properties(NmodlType::use_range_ptr_var)) {
      +
      320  printer->push_block("if (!_prop_id)");
      +
      321  printer->fmt_line(
      +
      322  "hoc_execerror(\"No data for {}_{}. Requires prior call to setdata_{} and that the "
      +
      323  "specified mechanism instance still be in existence.\", nullptr);",
      +
      324  function_or_procedure_block->get_node_name(),
      +
      325  info.mod_suffix,
      +
      326  info.mod_suffix);
      +
      327  printer->pop_block();
      +
      328  printer->add_line("Prop* _local_prop = _extcall_prop;");
      +
      329  } else {
      +
      330  printer->add_line("Prop* _local_prop = _prop_id ? _extcall_prop : nullptr;");
      +
      331  }
      +
      332  printer->add_multi_line(R"CODE(
      +
      333  _nrn_mechanism_cache_instance _lmc{_local_prop};
      +
      334  size_t const id{};
      +
      335  _ppvar = _local_prop ? _nrn_mechanism_access_dparam(_local_prop) : nullptr;
      +
      336  _thread = _extcall_thread.data();
      +
      337  nt = nrn_threads;
      +
      338  )CODE");
      +
      339  prop_name = "_local_prop";
      +
      340  } else { // wrapper_type == InterpreterWrapper::Python
      +
      341  printer->add_multi_line(R"CODE(
      +
      342  _nrn_mechanism_cache_instance _lmc{_prop};
      +
      343  size_t const id = 0;
      +
      344  _ppvar = _nrn_mechanism_access_dparam(_prop);
      +
      345  _thread = _extcall_thread.data();
      +
      346  nt = nrn_threads;
      +
      347  )CODE");
      +
      348  prop_name = "_prop";
      +
      349  }
      +
      350 
      +
      351  printer->fmt_line("auto inst = make_instance_{}(_lmc);", info.mod_suffix);
      +
      352  if (!info.artificial_cell) {
      +
      353  printer->fmt_line("auto node_data = make_node_data_{}({});", info.mod_suffix, prop_name);
      +
      354  }
      +
      355  if (!codegen_thread_variables.empty()) {
      +
      356  printer->fmt_line("auto _thread_vars = {}(_thread[{}].get<double*>());",
      +
      357  thread_variables_struct(),
      +
      358  info.thread_var_thread_id);
      +
      359  }
      +
      360  if (info.function_uses_table(block_name)) {
      +
      361  printer->fmt_line("{}({});",
      +
      362  table_update_function_name(block_name),
      +
      363  internal_method_arguments());
      +
      364  }
      +
      365  const auto get_func_call_str = [&]() {
      +
      366  const auto& params = function_or_procedure_block->get_parameters();
      +
      367  const auto func_proc_name = block_name + "_" + info.mod_suffix;
      +
      368  auto func_call = fmt::format("{}({}", func_proc_name, internal_method_arguments());
      +
      369  for (int i = 0; i < params.size(); ++i) {
      +
      370  func_call.append(fmt::format(", *getarg({})", i + 1));
      +
      371  }
      +
      372  func_call.append(")");
      +
      373  return func_call;
      +
      374  };
      +
      375  if (function_or_procedure_block->is_function_block()) {
      +
      376  printer->add_indent();
      +
      377  printer->fmt_text("_r = {};", get_func_call_str());
      +
      378  printer->add_newline();
      +
      379  } else {
      +
      380  printer->add_line("_r = 1.;");
      +
      381  printer->fmt_line("{};", get_func_call_str());
      +
      382  }
      +
      383  if (info.point_process || wrapper_type != InterpreterWrapper::HOC) {
      +
      384  printer->add_line("return(_r);");
      +
      385  } else if (wrapper_type == InterpreterWrapper::HOC) {
      +
      386  printer->add_line("hoc_retpushx(_r);");
      +
      387  }
      +
      388  printer->pop_block();
      +
      389 }
      390 
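For illustration only (outside the generated listing): the call string assembled by get_func_call_str above can be reproduced in isolation. The names below (rates_hh, the fixed internal argument list, the two MOD parameters) are hypothetical.

#include <fmt/format.h>

#include <iostream>
#include <string>
#include <vector>

int main() {
    const std::string func_proc_name = "rates_hh";  // hypothetical "<block>_<mod suffix>"
    const std::string internal_args = "_lmc, inst, id, _ppvar, _thread, nt";
    const std::vector<std::string> params = {"v", "celsius"};  // hypothetical MOD parameters

    // Same assembly pattern as get_func_call_str: internal arguments first,
    // then one "*getarg(i)" per MOD-level parameter, with a 1-based index.
    auto func_call = fmt::format("{}({}", func_proc_name, internal_args);
    for (std::size_t i = 0; i < params.size(); ++i) {
        func_call.append(fmt::format(", *getarg({})", i + 1));
    }
    func_call.append(")");

    // prints: rates_hh(_lmc, inst, id, _ppvar, _thread, nt, *getarg(1), *getarg(2))
    std::cout << func_call << "\n";
    return 0;
}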
-
391 void CodegenNeuronCppVisitor::print_hoc_py_wrapper_function_definitions() {
-
      392  for (const auto& procedure: info.procedures) {
      -
      393  print_hoc_py_wrapper_function_body(procedure, InterpreterWrapper::HOC);
      -
      394  print_hoc_py_wrapper_function_body(procedure, InterpreterWrapper::Python);
      -
      395  }
      -
      396  for (const auto& function: info.functions) {
      -
      397  print_hoc_py_wrapper_function_body(function, InterpreterWrapper::HOC);
      -
      398  print_hoc_py_wrapper_function_body(function, InterpreterWrapper::Python);
      -
      399  }
      -
      400 }
      -
      401 
      -
      402 
      -
      403 /****************************************************************************************/
      -
      404 /* Code-specific helper routines */
      +
      391 
+
392 void CodegenNeuronCppVisitor::print_hoc_py_wrapper_function_definitions() {
+
      393  auto print_wrappers = [this](const auto& callables) {
      +
      394  for (const auto& callable: callables) {
      +
      395  print_hoc_py_wrapper_function_body(callable, InterpreterWrapper::HOC);
      +
      396  print_hoc_py_wrapper_function_body(callable, InterpreterWrapper::Python);
      +
      397  }
      +
      398  };
      +
      399 
      +
      400  print_wrappers(info.procedures);
      +
      401  print_wrappers(info.functions);
      +
      402 }
      +
      403 
      +
      404 
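As an aside, the refactor above relies on a generic lambda being applied to two different containers. A minimal standalone sketch of that pattern (names are hypothetical, unrelated to the visitor):

#include <iostream>
#include <string>
#include <vector>

int main() {
    // A generic lambda: the same body is instantiated for each container type.
    auto print_all = [](const auto& items) {
        for (const auto& item: items) {
            std::cout << item << "\n";
        }
    };
    const std::vector<std::string> procedures = {"rates"};
    const std::vector<std::string> functions = {"alpha", "beta"};
    print_all(procedures);
    print_all(functions);
    return 0;
}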
      405 /****************************************************************************************/
      -
      406 
      -
      407 void CodegenNeuronCppVisitor::add_variable_tqitem(std::vector<IndexVariableInfo>& variables) {
      -
      408  if (info.net_send_used) {
      -
      409  variables.emplace_back(make_symbol(naming::TQITEM_VARIABLE), false, false, true);
      -
      410  variables.back().is_constant = true;
      -
      411  info.tqitem_index = static_cast<int>(variables.size() - 1);
      -
      412  }
      -
      413 }
      -
      414 
-
415 void CodegenNeuronCppVisitor::add_variable_point_process(
-
      416  std::vector<IndexVariableInfo>& variables) {
      -
      417  variables.emplace_back(make_symbol(naming::POINT_PROCESS_VARIABLE), false, false, true);
      -
      418  variables.back().is_constant = true;
      -
      419 }
      -
      420 
-
421 std::string CodegenNeuronCppVisitor::internal_method_arguments() {
-
      422  const auto& args = internal_method_parameters();
      -
      423  return get_arg_str(args);
      -
      424 }
      -
      425 
      -
      426 
-
427 CodegenNeuronCppVisitor::ParamVector CodegenNeuronCppVisitor::internal_method_parameters() {
-
      428  ParamVector params;
      -
      429  params.emplace_back("", "_nrn_mechanism_cache_range&", "", "_lmc");
      -
      430  params.emplace_back("", fmt::format("{}&", instance_struct()), "", "inst");
      -
      431  if (!info.artificial_cell) {
      -
      432  params.emplace_back("", fmt::format("{}&", node_data_struct()), "", "node_data");
      -
      433  }
      -
      434  params.emplace_back("", "size_t", "", "id");
      -
      435  params.emplace_back("", "Datum*", "", "_ppvar");
      -
      436  params.emplace_back("", "Datum*", "", "_thread");
      -
      437  if (!codegen_thread_variables.empty()) {
      -
      438  params.emplace_back("", fmt::format("{}&", thread_variables_struct()), "", "_thread_vars");
      -
      439  }
      -
      440  params.emplace_back("", "NrnThread*", "", "nt");
      -
      441  return params;
      -
      442 }
      -
      443 
      -
      444 
      -
      445 /// TODO: Edit for NEURON
      - -
      447  return {};
      -
      448 }
      -
      449 
      -
      450 
      -
      451 /// TODO: Edit for NEURON
      - -
      453  bool table) noexcept {
      -
      454  return {};
      -
      455 }
      -
      456 
      -
      457 
      -
      458 /// TODO: Edit for NEURON
      - -
      460  return {};
      -
      461 }
      -
      462 
      -
      463 
      -
      464 /// TODO: Edit for NEURON
      - -
      466  return {};
      -
      467 }
      -
      468 
      -
      469 
      -
      470 /// TODO: Write for NEURON
      -
      471 std::string CodegenNeuronCppVisitor::process_verbatim_text(std::string const& text) {
      -
      472  return {};
      -
      473 }
      -
      474 
      -
      475 
      -
      476 /// TODO: Write for NEURON
      - -
      478  return {};
      -
      479 };
      -
      480 
      -
      481 
-
482 std::string CodegenNeuronCppVisitor::hoc_function_name(
-
      483  const std::string& function_or_procedure_name) const {
      -
      484  return fmt::format("_hoc_{}", function_or_procedure_name);
      -
      485 }
      -
      486 
      -
      487 
-
488 std::string CodegenNeuronCppVisitor::hoc_function_signature(
-
      489  const std::string& function_or_procedure_name) const {
      -
      490  return fmt::format("static {} {}(void{})",
      -
      491  info.point_process ? "double" : "void",
      -
      492  hoc_function_name(function_or_procedure_name),
      -
      493  info.point_process ? "*" : "");
      -
      494 }
      -
      495 
      -
      496 
-
497 std::string CodegenNeuronCppVisitor::py_function_name(
-
      498  const std::string& function_or_procedure_name) const {
      -
      499  return fmt::format("_npy_{}", function_or_procedure_name);
      -
      500 }
      -
      501 
      -
      502 
-
503 std::string CodegenNeuronCppVisitor::py_function_signature(
-
      504  const std::string& function_or_procedure_name) const {
      -
      505  return fmt::format("static double {}(Prop*)", py_function_name(function_or_procedure_name));
      -
      506 }
      -
      507 
      -
      508 
      -
      509 /****************************************************************************************/
      -
      510 /* Code-specific printing routines for code generation */
      +
      406 /* Code-specific helper routines */
      +
      407 /****************************************************************************************/
      +
      408 
      +
      409 void CodegenNeuronCppVisitor::add_variable_tqitem(std::vector<IndexVariableInfo>& variables) {
      +
      410  if (info.net_send_used) {
      +
      411  variables.emplace_back(make_symbol(naming::TQITEM_VARIABLE), false, false, true);
      +
      412  variables.back().is_constant = true;
      +
      413  info.tqitem_index = static_cast<int>(variables.size() - 1);
      +
      414  }
      +
      415 }
      +
      416 
+
417 void CodegenNeuronCppVisitor::add_variable_point_process(
+
      418  std::vector<IndexVariableInfo>& variables) {
      +
      419  variables.emplace_back(make_symbol(naming::POINT_PROCESS_VARIABLE), false, false, true);
      +
      420  variables.back().is_constant = true;
      +
      421 }
      +
      422 
+
423 std::string CodegenNeuronCppVisitor::internal_method_arguments() {
+
      424  const auto& args = internal_method_parameters();
      +
      425  return get_arg_str(args);
      +
      426 }
      +
      427 
      +
      428 
+
429 CodegenNeuronCppVisitor::ParamVector CodegenNeuronCppVisitor::internal_method_parameters() {
+
      430  ParamVector params;
      +
      431  params.emplace_back("", "_nrn_mechanism_cache_range&", "", "_lmc");
      +
      432  params.emplace_back("", fmt::format("{}&", instance_struct()), "", "inst");
      +
      433  if (!info.artificial_cell) {
      +
      434  params.emplace_back("", fmt::format("{}&", node_data_struct()), "", "node_data");
      +
      435  }
      +
      436  params.emplace_back("", "size_t", "", "id");
      +
      437  params.emplace_back("", "Datum*", "", "_ppvar");
      +
      438  params.emplace_back("", "Datum*", "", "_thread");
      +
      439  if (!codegen_thread_variables.empty()) {
      +
      440  params.emplace_back("", fmt::format("{}&", thread_variables_struct()), "", "_thread_vars");
      +
      441  }
      +
      442  params.emplace_back("", "NrnThread*", "", "nt");
      +
      443  return params;
      +
      444 }
      +
      445 
      +
      446 
      +
      447 /// TODO: Edit for NEURON
      + +
      449  return {};
      +
      450 }
      +
      451 
      +
      452 
      +
      453 /// TODO: Edit for NEURON
      + +
      455  bool table) noexcept {
      +
      456  return {};
      +
      457 }
      +
      458 
      +
      459 
      +
      460 /// TODO: Edit for NEURON
      + +
      462  return {};
      +
      463 }
      +
      464 
      +
      465 
      +
      466 /// TODO: Edit for NEURON
      + +
      468  return {};
      +
      469 }
      +
      470 
      +
      471 
      +
      472 /// TODO: Write for NEURON
      +
      473 std::string CodegenNeuronCppVisitor::process_verbatim_text(std::string const& text) {
      +
      474  return {};
      +
      475 }
      +
      476 
      +
      477 
      +
      478 /// TODO: Write for NEURON
      + +
      480  return {};
      +
      481 };
      +
      482 
      +
      483 
+
484 std::string CodegenNeuronCppVisitor::hoc_function_name(
+
      485  const std::string& function_or_procedure_name) const {
      +
      486  return fmt::format("_hoc_{}", function_or_procedure_name);
      +
      487 }
      +
      488 
      +
      489 
+
490 std::string CodegenNeuronCppVisitor::hoc_function_signature(
+
      491  const std::string& function_or_procedure_name) const {
      +
      492  return fmt::format("static {} {}(void{})",
      +
      493  info.point_process ? "double" : "void",
      +
      494  hoc_function_name(function_or_procedure_name),
      +
      495  info.point_process ? "*" : "");
      +
      496 }
      +
      497 
      +
      498 
+
499 std::string CodegenNeuronCppVisitor::py_function_name(
+
      500  const std::string& function_or_procedure_name) const {
      +
      501  return fmt::format("_npy_{}", function_or_procedure_name);
      +
      502 }
      +
      503 
      +
      504 
+
505 std::string CodegenNeuronCppVisitor::py_function_signature(
+
      506  const std::string& function_or_procedure_name) const {
      +
      507  return fmt::format("static double {}(Prop*)", py_function_name(function_or_procedure_name));
      +
      508 }
      +
      509 
      +
      510 
      511 /****************************************************************************************/
      -
      512 
-
513 std::string CodegenNeuronCppVisitor::simulator_name() {
-
      514  return "neuron";
      -
      515 }
      -
      516 
      -
      517 
-
518 void CodegenNeuronCppVisitor::append_conc_write_statements(
-
      519  std::vector<ShadowUseStatement>& statements,
      -
      520  const Ion& ion,
      -
      521  const std::string& /* concentration */) {
      -
      522  auto ion_name = ion.name;
      -
      523  int dparam_index = get_int_variable_index(fmt::format("style_{}", ion_name));
      -
      524 
      -
      525  auto style_name = fmt::format("_style_{}", ion_name);
      -
      526  auto style_stmt = fmt::format("int {} = *(_ppvar[{}].get<int*>())", style_name, dparam_index);
      -
      527  statements.push_back(ShadowUseStatement{style_stmt, "", ""});
      -
      528 
      -
      529 
      -
      530  auto wrote_conc_stmt = fmt::format("nrn_wrote_conc(_{}_sym, {}, {}, {}, {})",
      -
      531  ion_name,
      -
      532  get_variable_name(ion.rev_potential_pointer_name()),
      -
      533  get_variable_name(ion.intra_conc_pointer_name()),
      -
      534  get_variable_name(ion.extra_conc_pointer_name()),
      -
      535  style_name);
      -
      536  statements.push_back(ShadowUseStatement{wrote_conc_stmt, "", ""});
      -
      537 }
      -
      538 
      -
      539 /****************************************************************************************/
      -
      540 /* Routines for returning variable name */
      +
      512 /* Code-specific printing routines for code generation */
      +
      513 /****************************************************************************************/
      +
      514 
+
515 std::string CodegenNeuronCppVisitor::simulator_name() {
+
      516  return "neuron";
      +
      517 }
      +
      518 
      +
      519 
+
520 void CodegenNeuronCppVisitor::append_conc_write_statements(
+
      521  std::vector<ShadowUseStatement>& statements,
      +
      522  const Ion& ion,
      +
      523  const std::string& /* concentration */) {
      +
      524  auto ion_name = ion.name;
      +
      525  int dparam_index = get_int_variable_index(fmt::format("style_{}", ion_name));
      +
      526 
      +
      527  auto style_name = fmt::format("_style_{}", ion_name);
      +
      528  auto style_stmt = fmt::format("int {} = *(_ppvar[{}].get<int*>())", style_name, dparam_index);
      +
      529  statements.push_back(ShadowUseStatement{style_stmt, "", ""});
      +
      530 
      +
      531 
      +
      532  auto wrote_conc_stmt = fmt::format("nrn_wrote_conc(_{}_sym, {}, {}, {}, {})",
      +
      533  ion_name,
      +
      534  get_variable_name(ion.rev_potential_pointer_name()),
      +
      535  get_variable_name(ion.intra_conc_pointer_name()),
      +
      536  get_variable_name(ion.extra_conc_pointer_name()),
      +
      537  style_name);
      +
      538  statements.push_back(ShadowUseStatement{wrote_conc_stmt, "", ""});
      +
      539 }
      +
      540 
      541 /****************************************************************************************/
      -
      542 
      -
      543 
-
544 std::string CodegenNeuronCppVisitor::float_variable_name(const SymbolType& symbol,
-
      545  bool use_instance) const {
      -
      546  if (!use_instance) {
      -
      547  throw std::runtime_error("Printing non-instance variables is not implemented.");
      -
      548  }
      -
      549 
      -
      550  auto name = symbol->get_name();
      -
      551  auto dimension = symbol->get_length();
      -
      552  if (symbol->is_array()) {
      -
      553  return fmt::format("(inst.{}+id*{})", name, dimension);
      -
      554  } else {
      -
      555  return fmt::format("inst.{}[id]", name);
      -
      556  }
      -
      557 }
      -
      558 
      -
      559 
-
560 std::string CodegenNeuronCppVisitor::int_variable_name(const IndexVariableInfo& symbol,
-
      561  const std::string& name,
      -
      562  bool use_instance) const {
      -
      563  auto position = position_of_int_var(name);
      -
      564 
      -
      565  if (info.semantics[position].name == naming::RANDOM_SEMANTIC) {
      -
      566  return fmt::format("_ppvar[{}].literal_value<void*>()", position);
      -
      567  }
      -
      568 
      -
      569  if (info.semantics[position].name == naming::FOR_NETCON_SEMANTIC) {
      -
      570  return fmt::format("_ppvar[{}].literal_value<void*>()", position);
      -
      571  }
      -
      572 
      -
      573  if (symbol.is_index) {
      -
      574  if (use_instance) {
      -
      575  throw std::runtime_error("Not implemented. [wiejo]");
      -
      576  // return fmt::format("inst->{}[{}]", name, position);
      -
      577  }
      -
      578  throw std::runtime_error("Not implemented. [ncuwi]");
      -
      579  // return fmt::format("indexes[{}]", position);
      -
      580  }
      -
      581  if (symbol.is_integer) {
      -
      582  if (use_instance) {
      -
      583  return fmt::format("inst.{}[id]", name);
      -
      584  }
      -
      585  return fmt::format("_ppvar[{}]", position);
      -
      586  }
      -
      587  if (use_instance) {
      -
      588  return fmt::format("(*inst.{}[id])", name);
      -
      589  }
      -
      590 
      -
      591 
      -
      592  throw std::runtime_error("Not implemented. [nvueir]");
      -
      593 }
      -
      594 
      -
      595 
-
596 std::string CodegenNeuronCppVisitor::thread_variable_name(const ThreadVariableInfo& var_info,
-
      597  bool use_instance) const {
      -
      598  auto i_var = var_info.offset;
      -
      599  auto var_name = var_info.symbol->get_name();
      -
      600 
      -
      601  if (use_instance) {
      -
      602  if (var_info.symbol->is_array()) {
      -
      603  return fmt::format("(_thread_vars.{}_ptr(id))", var_name);
      -
      604  } else {
      -
      605  return fmt::format("_thread_vars.{}(id)", var_name);
      -
      606  }
      -
      607  } else {
      -
      608  if (var_info.symbol->is_array()) {
      -
      609  return fmt::format("({}.thread_data + {})", global_struct_instance(), i_var);
      -
      610  } else {
      -
      611  return fmt::format("{}.thread_data[{}]", global_struct_instance(), i_var);
      -
      612  }
      -
      613  }
      -
      614 }
      -
      615 
      -
      616 
-
617 std::string CodegenNeuronCppVisitor::global_variable_name(const SymbolType& symbol,
-
      618  bool use_instance) const {
      -
      619  if (use_instance) {
      -
      620  return fmt::format("inst.{}->{}", naming::INST_GLOBAL_MEMBER, symbol->get_name());
      -
      621  } else {
      -
      622  return fmt::format("{}.{}", global_struct_instance(), symbol->get_name());
      -
      623  }
      -
      624 }
      -
      625 
      -
      626 
      -
      627 std::string CodegenNeuronCppVisitor::get_variable_name(const std::string& name,
      -
      628  bool use_instance) const {
      -
      629  const std::string& varname = update_if_ion_variable_name(name);
      -
      630 
      -
      631  auto name_comparator = [&varname](const auto& sym) { return varname == get_name(sym); };
      +
      542 /* Routines for returning variable name */
      +
      543 /****************************************************************************************/
      +
      544 
      +
      545 
+
546 std::string CodegenNeuronCppVisitor::float_variable_name(const SymbolType& symbol,
+
      547  bool use_instance) const {
      +
      548  if (!use_instance) {
      +
      549  throw std::runtime_error("Printing non-instance variables is not implemented.");
      +
      550  }
      +
      551 
      +
      552  auto name = symbol->get_name();
      +
      553  auto dimension = symbol->get_length();
      +
      554  if (symbol->is_array()) {
      +
      555  return fmt::format("(inst.{}+id*{})", name, dimension);
      +
      556  } else {
      +
      557  return fmt::format("inst.{}[id]", name);
      +
      558  }
      +
      559 }
      +
      560 
      +
      561 
+
562 std::string CodegenNeuronCppVisitor::int_variable_name(const IndexVariableInfo& symbol,
+
      563  const std::string& name,
      +
      564  bool use_instance) const {
      +
      565  auto position = position_of_int_var(name);
      +
      566 
      +
      567  if (info.semantics[position].name == naming::RANDOM_SEMANTIC) {
      +
      568  return fmt::format("_ppvar[{}].literal_value<void*>()", position);
      +
      569  }
      +
      570 
      +
      571  if (info.semantics[position].name == naming::FOR_NETCON_SEMANTIC) {
      +
      572  return fmt::format("_ppvar[{}].literal_value<void*>()", position);
      +
      573  }
      +
      574 
      +
      575  if (symbol.is_index) {
      +
      576  if (use_instance) {
      +
      577  throw std::runtime_error("Not implemented. [wiejo]");
      +
      578  // return fmt::format("inst->{}[{}]", name, position);
      +
      579  }
      +
      580  throw std::runtime_error("Not implemented. [ncuwi]");
      +
      581  // return fmt::format("indexes[{}]", position);
      +
      582  }
      +
      583  if (symbol.is_integer) {
      +
      584  if (use_instance) {
      +
      585  return fmt::format("inst.{}[id]", name);
      +
      586  }
      +
      587  return fmt::format("_ppvar[{}]", position);
      +
      588  }
      +
      589  if (use_instance) {
      +
      590  return fmt::format("(*inst.{}[id])", name);
      +
      591  }
      +
      592 
      +
      593 
      +
      594  throw std::runtime_error("Not implemented. [nvueir]");
      +
      595 }
      +
      596 
      +
      597 
+
598 std::string CodegenNeuronCppVisitor::thread_variable_name(const ThreadVariableInfo& var_info,
+
      599  bool use_instance) const {
      +
      600  auto i_var = var_info.offset;
      +
      601  auto var_name = var_info.symbol->get_name();
      +
      602 
      +
      603  if (use_instance) {
      +
      604  if (var_info.symbol->is_array()) {
      +
      605  return fmt::format("(_thread_vars.{}_ptr(id))", var_name);
      +
      606  } else {
      +
      607  return fmt::format("_thread_vars.{}(id)", var_name);
      +
      608  }
      +
      609  } else {
      +
      610  if (var_info.symbol->is_array()) {
      +
      611  return fmt::format("({}.thread_data + {})", global_struct_instance(), i_var);
      +
      612  } else {
      +
      613  return fmt::format("{}.thread_data[{}]", global_struct_instance(), i_var);
      +
      614  }
      +
      615  }
      +
      616 }
      +
      617 
      +
      618 
+
619 std::string CodegenNeuronCppVisitor::global_variable_name(const SymbolType& symbol,
+
      620  bool use_instance) const {
      +
      621  if (use_instance) {
      +
      622  return fmt::format("inst.{}->{}", naming::INST_GLOBAL_MEMBER, symbol->get_name());
      +
      623  } else {
      +
      624  return fmt::format("{}.{}", global_struct_instance(), symbol->get_name());
      +
      625  }
      +
      626 }
      +
      627 
      +
      628 
      +
      629 std::string CodegenNeuronCppVisitor::get_variable_name(const std::string& name,
      +
      630  bool use_instance) const {
      +
      631  const std::string& varname = update_if_ion_variable_name(name);
      632 
      -
      633  if (name == naming::POINT_PROCESS_VARIABLE) {
      -
      634  if (printing_net_receive) {
      -
      635  // In net_receive blocks, the point process is passed in as an
      -
      636  // argument called:
      -
      637  return "_pnt";
      -
      638  }
      -
      639  // The "integer variable" branch will pick up the correct `_ppvar` when
      -
      640  // not printing a NET_RECEIVE block.
      -
      641  }
      -
      642 
      -
      643  // float variable
      -
      644  auto f = std::find_if(codegen_float_variables.begin(),
      -
      645  codegen_float_variables.end(),
      -
      646  name_comparator);
      -
      647  if (f != codegen_float_variables.end()) {
      -
      648  return float_variable_name(*f, use_instance);
      -
      649  }
      -
      650 
      -
      651  // integer variable
      -
      652  auto i =
      -
      653  std::find_if(codegen_int_variables.begin(), codegen_int_variables.end(), name_comparator);
      -
      654  if (i != codegen_int_variables.end()) {
      -
      655  return int_variable_name(*i, varname, use_instance);
      -
      656  }
      -
      657 
      -
      658  // thread variable
      -
      659  auto t = std::find_if(codegen_thread_variables.begin(),
      -
      660  codegen_thread_variables.end(),
      -
      661  name_comparator);
      -
      662  if (t != codegen_thread_variables.end()) {
      -
      663  return thread_variable_name(*t, use_instance);
      -
      664  }
      -
      665 
      -
      666  // global variable
      -
      667  auto g = std::find_if(codegen_global_variables.begin(),
      -
      668  codegen_global_variables.end(),
      -
      669  name_comparator);
      -
      670  if (g != codegen_global_variables.end()) {
      -
      671  return global_variable_name(*g, use_instance);
      -
      672  }
      -
      673 
      -
      674  if (varname == naming::NTHREAD_DT_VARIABLE) {
      -
      675  return std::string("nt->_") + naming::NTHREAD_DT_VARIABLE;
      -
      676  }
      -
      677 
      -
      678  if (varname == naming::NTHREAD_T_VARIABLE) {
      -
      679  return std::string("nt->_") + naming::NTHREAD_T_VARIABLE;
      -
      680  }
      -
      681 
      -
      682  // external variable
      -
      683  auto e = std::find_if(info.external_variables.begin(),
      -
      684  info.external_variables.end(),
      -
      685  name_comparator);
      -
      686  if (e != info.external_variables.end()) {
      -
      687  return fmt::format("{}()", varname);
      -
      688  }
      -
      689 
      -
      690  auto const iter =
      -
      691  std::find_if(info.neuron_global_variables.begin(),
      -
      692  info.neuron_global_variables.end(),
      -
      693  [&varname](auto const& entry) { return entry.first->get_name() == varname; });
      -
      694  if (iter != info.neuron_global_variables.end()) {
      -
      695  std::string ret;
      -
      696  if (use_instance) {
      -
      697  ret = "*(inst.";
      -
      698  }
      -
      699  ret.append(varname);
      -
      700  if (use_instance) {
      -
      701  ret.append(")");
      -
      702  }
      -
      703  return ret;
      -
      704  }
      -
      705 
      -
      706  // otherwise return original name
      -
      707  return varname;
      -
      708 }
      -
      709 
      -
      710 
      -
      711 /****************************************************************************************/
      -
      712 /* Main printing routines for code generation */
      +
      633  auto name_comparator = [&varname](const auto& sym) { return varname == get_name(sym); };
      +
      634 
      +
      635  if (name == naming::POINT_PROCESS_VARIABLE) {
      +
      636  if (printing_net_receive) {
      +
      637  // In net_receive blocks, the point process is passed in as an
      +
      638  // argument called:
      +
      639  return "_pnt";
      +
      640  }
      +
      641  // The "integer variable" branch will pick up the correct `_ppvar` when
      +
      642  // not printing a NET_RECEIVE block.
      +
      643  }
      +
      644 
      +
      645  // float variable
      +
      646  auto f = std::find_if(codegen_float_variables.begin(),
      +
      647  codegen_float_variables.end(),
      +
      648  name_comparator);
      +
      649  if (f != codegen_float_variables.end()) {
      +
      650  return float_variable_name(*f, use_instance);
      +
      651  }
      +
      652 
      +
      653  // integer variable
      +
      654  auto i =
      +
      655  std::find_if(codegen_int_variables.begin(), codegen_int_variables.end(), name_comparator);
      +
      656  if (i != codegen_int_variables.end()) {
      +
      657  return int_variable_name(*i, varname, use_instance);
      +
      658  }
      +
      659 
      +
      660  // thread variable
      +
      661  auto t = std::find_if(codegen_thread_variables.begin(),
      +
      662  codegen_thread_variables.end(),
      +
      663  name_comparator);
      +
      664  if (t != codegen_thread_variables.end()) {
      +
      665  return thread_variable_name(*t, use_instance);
      +
      666  }
      +
      667 
      +
      668  // global variable
      +
      669  auto g = std::find_if(codegen_global_variables.begin(),
      +
      670  codegen_global_variables.end(),
      +
      671  name_comparator);
      +
      672  if (g != codegen_global_variables.end()) {
      +
      673  return global_variable_name(*g, use_instance);
      +
      674  }
      +
      675 
      +
      676  if (varname == naming::NTHREAD_DT_VARIABLE) {
      +
      677  return std::string("nt->_") + naming::NTHREAD_DT_VARIABLE;
      +
      678  }
      +
      679 
      +
      680  if (varname == naming::NTHREAD_T_VARIABLE) {
      +
      681  return std::string("nt->_") + naming::NTHREAD_T_VARIABLE;
      +
      682  }
      +
      683 
      +
      684  // external variable
      +
      685  auto e = std::find_if(info.external_variables.begin(),
      +
      686  info.external_variables.end(),
      +
      687  name_comparator);
      +
      688  if (e != info.external_variables.end()) {
      +
      689  return fmt::format("{}()", varname);
      +
      690  }
      +
      691 
      +
      692  auto const iter =
      +
      693  std::find_if(info.neuron_global_variables.begin(),
      +
      694  info.neuron_global_variables.end(),
      +
      695  [&varname](auto const& entry) { return entry.first->get_name() == varname; });
      +
      696  if (iter != info.neuron_global_variables.end()) {
      +
      697  std::string ret;
      +
      698  if (use_instance) {
      +
      699  ret = "*(inst.";
      +
      700  }
      +
      701  ret.append(varname);
      +
      702  if (use_instance) {
      +
      703  ret.append(")");
      +
      704  }
      +
      705  return ret;
      +
      706  }
      +
      707 
      +
      708  // otherwise return original name
      +
      709  return varname;
      +
      710 }
      +
      711 
      +
      712 
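For illustration only: get_variable_name above tries each variable category in a fixed order and falls back to the original name. A minimal standalone sketch of that lookup chain, using hypothetical variable lists and hard-coded result strings:

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

int main() {
    const std::vector<std::string> float_vars = {"m", "h", "gna"};       // hypothetical
    const std::vector<std::string> int_vars = {"point_process"};         // hypothetical
    const std::vector<std::string> global_vars = {"usetable"};           // hypothetical
    const std::string name = "gna";

    auto contains = [&name](const std::vector<std::string>& vars) {
        return std::find(vars.begin(), vars.end(), name) != vars.end();
    };

    std::string result;
    if (contains(float_vars)) {
        result = "inst." + name + "[id]";   // instance (range) data
    } else if (contains(int_vars)) {
        result = "_ppvar[0]";               // dparam/index data (position hard-coded here)
    } else if (contains(global_vars)) {
        result = "global_struct." + name;   // per-mechanism global
    } else {
        result = name;                      // otherwise return the original name
    }
    std::cout << result << "\n";            // prints: inst.gna[id]
    return 0;
}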
      713 /****************************************************************************************/
      -
      714 
      -
      715 
-
716 void CodegenNeuronCppVisitor::print_standard_includes() {
-
      717  printer->add_newline();
      -
      718  printer->add_multi_line(R"CODE(
      -
      719  #include <Eigen/Dense>
      -
      720  #include <Eigen/LU>
      -
      721  #include <math.h>
      -
      722  #include <stdio.h>
      -
      723  #include <stdlib.h>
      -
      724  #include <vector>
      -
      725  )CODE");
      -
      726  if (info.eigen_newton_solver_exist) {
      -
      727  printer->add_multi_line(nmodl::solvers::newton_hpp);
      -
      728  }
      -
      729 }
      -
      730 
      -
      731 
-
732 void CodegenNeuronCppVisitor::print_neuron_includes() {
-
      733  printer->add_newline();
      -
      734  printer->add_multi_line(R"CODE(
      -
      735  #include "mech_api.h"
      -
      736  #include "neuron/cache/mechanism_range.hpp"
      -
      737  #include "nrniv_mf.h"
      -
      738  #include "section_fwd.hpp"
      -
      739  )CODE");
      -
      740 }
      -
      741 
      -
      742 
      -
      743 void CodegenNeuronCppVisitor::print_sdlists_init([[maybe_unused]] bool print_initializers) {
      -
      744  /// _initlists() should only be called once by the mechanism registration function
      -
      745  /// (_<mod_file>_reg())
      -
      746  printer->add_newline(2);
      -
      747  printer->push_block("static void _initlists()");
      -
      748  for (auto i = 0; i < info.prime_variables_by_order.size(); ++i) {
      -
      749  const auto& prime_var = info.prime_variables_by_order[i];
      -
      750  /// TODO: Something similar needs to happen for slist/dlist2 but I don't know their usage at
      -
      751  // the moment
      -
      752  /// TODO: We have to do checks and add errors similar to nocmodl in the
      -
      753  // SemanticAnalysisVisitor
      -
      754  if (prime_var->is_array()) {
      -
      755  /// TODO: Needs a for loop here. Look at
      -
      756  // https://github.com/neuronsimulator/nrn/blob/df001a436bcb4e23d698afe66c2a513819a6bfe8/src/nmodl/deriv.cpp#L524
      -
      757  /// TODO: Also needs a test
      -
      758  printer->fmt_push_block("for (int _i = 0; _i < {}; ++_i)", prime_var->get_length());
      -
      759  printer->fmt_line("/* {}[{}] */", prime_var->get_name(), prime_var->get_length());
      -
      760  printer->fmt_line("_slist1[{}+_i] = {{{}, _i}};",
      -
      761  i,
      -
      762  position_of_float_var(prime_var->get_name()));
      -
      763  const auto prime_var_deriv_name = "D" + prime_var->get_name();
      -
      764  printer->fmt_line("/* {}[{}] */", prime_var_deriv_name, prime_var->get_length());
      -
      765  printer->fmt_line("_dlist1[{}+_i] = {{{}, _i}};",
      -
      766  i,
      -
      767  position_of_float_var(prime_var_deriv_name));
      -
      768  printer->pop_block();
      -
      769  } else {
      -
      770  printer->fmt_line("/* {} */", prime_var->get_name());
      -
      771  printer->fmt_line("_slist1[{}] = {{{}, 0}};",
      -
      772  i,
      -
      773  position_of_float_var(prime_var->get_name()));
      -
      774  const auto prime_var_deriv_name = "D" + prime_var->get_name();
      -
      775  printer->fmt_line("/* {} */", prime_var_deriv_name);
      -
      776  printer->fmt_line("_dlist1[{}] = {{{}, 0}};",
      -
      777  i,
      -
      778  position_of_float_var(prime_var_deriv_name));
      -
      779  }
      -
      780  }
      -
      781  printer->pop_block();
      -
      782 }
      -
      783 
      - -
      785  auto params = internal_method_parameters();
      -
      786  params.push_back({"", "double", "", "v"});
      -
      787 
      -
      788  return params;
      -
      789 }
      -
      790 
-
791 void CodegenNeuronCppVisitor::print_mechanism_global_var_structure(bool print_initializers) {
-
      792  const auto value_initialize = print_initializers ? "{}" : "";
      -
      793 
      -
      794  /// TODO: Print only global variables printed in NEURON
      -
      795  printer->add_newline(2);
      -
      796  printer->add_line("/* NEURON global variables */");
      -
      797  if (info.primes_size != 0) {
      -
      798  printer->fmt_line("static neuron::container::field_index _slist1[{0}], _dlist1[{0}];",
      -
      799  info.primes_size);
      -
      800  }
      -
      801 
      -
      802  for (const auto& ion: info.ions) {
      -
      803  printer->fmt_line("static Symbol* _{}_sym;", ion.name);
      -
      804  }
      -
      805 
      -
      806  printer->add_line("static int mech_type;");
      +
      714 /* Main printing routines for code generation */
      +
      715 /****************************************************************************************/
      +
      716 
      +
      717 
+
718 void CodegenNeuronCppVisitor::print_standard_includes() {
+
      719  printer->add_newline();
      +
      720  printer->add_multi_line(R"CODE(
      +
      721  #include <Eigen/Dense>
      +
      722  #include <Eigen/LU>
      +
      723  #include <math.h>
      +
      724  #include <stdio.h>
      +
      725  #include <stdlib.h>
      +
      726  #include <vector>
      +
      727  )CODE");
      +
      728  if (info.eigen_newton_solver_exist) {
      +
      729  printer->add_multi_line(nmodl::solvers::newton_hpp);
      +
      730  }
      +
      731 }
      +
      732 
      +
      733 
+
734 void CodegenNeuronCppVisitor::print_neuron_includes() {
+
      735  printer->add_newline();
      +
      736  printer->add_multi_line(R"CODE(
      +
      737  #include "mech_api.h"
      +
      738  #include "neuron/cache/mechanism_range.hpp"
      +
      739  #include "nrniv_mf.h"
      +
      740  #include "section_fwd.hpp"
      +
      741  )CODE");
      +
      742 }
      +
      743 
      +
      744 
      +
      745 void CodegenNeuronCppVisitor::print_sdlists_init([[maybe_unused]] bool print_initializers) {
      +
      746  /// _initlists() should only be called once by the mechanism registration function
      +
      747  /// (_<mod_file>_reg())
      +
      748  printer->add_newline(2);
      +
      749  printer->push_block("static void _initlists()");
      +
      750  for (auto i = 0; i < info.prime_variables_by_order.size(); ++i) {
      +
      751  const auto& prime_var = info.prime_variables_by_order[i];
      +
      752  /// TODO: Something similar needs to happen for slist/dlist2 but I don't know their usage at
      +
      753  // the moment
      +
      754  /// TODO: We have to do checks and add errors similar to nocmodl in the
      +
      755  // SemanticAnalysisVisitor
      +
      756  if (prime_var->is_array()) {
      +
      757  /// TODO: Needs a for loop here. Look at
      +
      758  // https://github.com/neuronsimulator/nrn/blob/df001a436bcb4e23d698afe66c2a513819a6bfe8/src/nmodl/deriv.cpp#L524
      +
      759  /// TODO: Also needs a test
      +
      760  printer->fmt_push_block("for (int _i = 0; _i < {}; ++_i)", prime_var->get_length());
      +
      761  printer->fmt_line("/* {}[{}] */", prime_var->get_name(), prime_var->get_length());
      +
      762  printer->fmt_line("_slist1[{}+_i] = {{{}, _i}};",
      +
      763  i,
      +
      764  position_of_float_var(prime_var->get_name()));
      +
      765  const auto prime_var_deriv_name = "D" + prime_var->get_name();
      +
      766  printer->fmt_line("/* {}[{}] */", prime_var_deriv_name, prime_var->get_length());
      +
      767  printer->fmt_line("_dlist1[{}+_i] = {{{}, _i}};",
      +
      768  i,
      +
      769  position_of_float_var(prime_var_deriv_name));
      +
      770  printer->pop_block();
      +
      771  } else {
      +
      772  printer->fmt_line("/* {} */", prime_var->get_name());
      +
      773  printer->fmt_line("_slist1[{}] = {{{}, 0}};",
      +
      774  i,
      +
      775  position_of_float_var(prime_var->get_name()));
      +
      776  const auto prime_var_deriv_name = "D" + prime_var->get_name();
      +
      777  printer->fmt_line("/* {} */", prime_var_deriv_name);
      +
      778  printer->fmt_line("_dlist1[{}] = {{{}, 0}};",
      +
      779  i,
      +
      780  position_of_float_var(prime_var_deriv_name));
      +
      781  }
      +
      782  }
      +
      783  printer->pop_block();
      +
      784 }
      +
      785 
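For illustration only: _slist1/_dlist1 above pair each ODE state with its D-variable via (field, element) indices. A minimal standalone sketch of that pairing, with a hypothetical field_index struct and a hypothetical float-variable layout:

#include <cstdio>
#include <vector>

struct field_index {
    int field;    // column in the mechanism data (hypothetical stand-in)
    int element;  // element within an array variable
};

int main() {
    // hypothetical layout: 0:m, 1:h, 2:Dm, 3:Dh
    const std::vector<field_index> slist = {{0, 0}, {1, 0}};  // states m, h
    const std::vector<field_index> dlist = {{2, 0}, {3, 0}};  // derivatives Dm, Dh
    for (std::size_t i = 0; i < slist.size(); ++i) {
        std::printf("state %zu: data[%d], derivative: data[%d]\n",
                    i, slist[i].field, dlist[i].field);
    }
    return 0;
}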
      + +
      787  auto params = internal_method_parameters();
      +
      788  params.push_back({"", "double", "", "v"});
      +
      789 
      +
      790  return params;
      +
      791 }
      +
      792 
+
793 void CodegenNeuronCppVisitor::print_mechanism_global_var_structure(bool print_initializers) {
+
      794  const auto value_initialize = print_initializers ? "{}" : "";
      +
      795 
      +
      796  /// TODO: Print only global variables printed in NEURON
      +
      797  printer->add_newline(2);
      +
      798  printer->add_line("/* NEURON global variables */");
      +
      799  if (info.primes_size != 0) {
      +
      800  printer->fmt_line("static neuron::container::field_index _slist1[{0}], _dlist1[{0}];",
      +
      801  info.primes_size);
      +
      802  }
      +
      803 
      +
      804  for (const auto& ion: info.ions) {
      +
      805  printer->fmt_line("static Symbol* _{}_sym;", ion.name);
      +
      806  }
      807 
      -
      808  if (info.point_process) {
      -
      809  printer->add_line("static int _pointtype;");
      -
      810  } else {
      -
      811  printer->add_multi_line(R"CODE(
      -
      812  static Prop* _extcall_prop;
      -
      813  /* _prop_id kind of shadows _extcall_prop to allow validity checking. */
      -
      814  static _nrn_non_owning_id_without_container _prop_id{};)CODE");
      -
      815  }
      -
      816 
      -
      817  printer->fmt_line("static int {} = {};",
      - -
      819  info.pointer_variables.size() > 0
      -
      820  ? static_cast<int>(info.pointer_variables.size())
      -
      821  : -1);
      -
      822 
      -
      823  printer->add_line("static _nrn_mechanism_std_vector<Datum> _extcall_thread;");
      +
      808  printer->add_line("static int mech_type;");
      +
      809 
      +
      810  if (info.point_process) {
      +
      811  printer->add_line("static int _pointtype;");
      +
      812  } else {
      +
      813  printer->add_multi_line(R"CODE(
      +
      814  static Prop* _extcall_prop;
      +
      815  /* _prop_id kind of shadows _extcall_prop to allow validity checking. */
      +
      816  static _nrn_non_owning_id_without_container _prop_id{};)CODE");
      +
      817  }
      +
      818 
      +
      819  printer->fmt_line("static int {} = {};",
      + +
      821  info.pointer_variables.size() > 0
      +
      822  ? static_cast<int>(info.pointer_variables.size())
      +
      823  : -1);
      824 
      -
      825  // Start printing the CNRN-style global variables.
      -
      826  auto float_type = default_float_data_type();
      -
      827  printer->add_newline(2);
      -
      828  printer->add_line("/** all global variables */");
      -
      829  printer->fmt_push_block("struct {}", global_struct());
      -
      830 
      -
      831  if (!info.ions.empty()) {
      -
      832  // TODO implement these when needed.
      -
      833  }
      -
      834 
      -
      835  if (!info.thread_variables.empty()) {
      -
      836  size_t prefix_sum = 0;
      -
      837  for (size_t i = 0; i < info.thread_variables.size(); ++i) {
      -
      838  const auto& var = info.thread_variables[i];
      -
      839  codegen_thread_variables.push_back({var, i, prefix_sum});
      -
      840 
      -
      841  prefix_sum += var->get_length();
      -
      842  }
      -
      843  }
      -
      844 
      -
      845 
      -
      846  for (const auto& var: info.global_variables) {
      -
      847  codegen_global_variables.push_back(var);
      -
      848  }
      -
      849 
      -
      850  if (info.vectorize && !info.top_local_variables.empty()) {
      -
      851  size_t prefix_sum = info.thread_var_data_size;
      -
      852  size_t n_thread_vars = codegen_thread_variables.size();
      -
      853  for (size_t i = 0; i < info.top_local_variables.size(); ++i) {
      -
      854  const auto& var = info.top_local_variables[i];
      -
      855  codegen_thread_variables.push_back({var, n_thread_vars + i, prefix_sum});
      -
      856 
      -
      857  prefix_sum += var->get_length();
      -
      858  }
      -
      859  }
      -
      860 
      -
      861  if (!info.vectorize && !info.top_local_variables.empty()) {
      -
      862  for (size_t i = 0; i < info.top_local_variables.size(); ++i) {
      -
      863  const auto& var = info.top_local_variables[i];
      -
      864  codegen_global_variables.push_back(var);
      -
      865  }
      -
      866  }
      -
      867 
      -
      868 
      -
      869  if (!codegen_thread_variables.empty()) {
      -
      870  if (!info.vectorize) {
      -
      871  // MOD files that aren't "VECTORIZED" don't have thread data.
      -
      872  throw std::runtime_error("Found thread variables with `vectorize == false`.");
      -
      873  }
      -
      874 
      -
      875  codegen_global_variables.push_back(make_symbol("thread_data_in_use"));
      +
      825  printer->add_line("static _nrn_mechanism_std_vector<Datum> _extcall_thread;");
      +
      826 
      +
      827  // Start printing the CNRN-style global variables.
      +
      828  auto float_type = default_float_data_type();
      +
      829  printer->add_newline(2);
      +
      830  printer->add_line("/** all global variables */");
      +
      831  printer->fmt_push_block("struct {}", global_struct());
      +
      832 
      +
      833  if (!info.ions.empty()) {
      +
      834  // TODO implement these when needed.
      +
      835  }
      +
      836 
      +
      837  if (!info.thread_variables.empty()) {
      +
      838  size_t prefix_sum = 0;
      +
      839  for (size_t i = 0; i < info.thread_variables.size(); ++i) {
      +
      840  const auto& var = info.thread_variables[i];
      +
      841  codegen_thread_variables.push_back({var, i, prefix_sum});
      +
      842 
      +
      843  prefix_sum += var->get_length();
      +
      844  }
      +
      845  }
      +
      846 
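For illustration only: the loop above packs variable-length thread variables into one flat array, using a running prefix sum as each variable's offset. A standalone sketch with hypothetical variables:

#include <cstdio>
#include <string>
#include <vector>

struct ThreadVar {
    std::string name;
    std::size_t length;  // number of doubles this variable occupies
};

int main() {
    const std::vector<ThreadVar> vars = {{"tau", 1}, {"rates", 4}, {"scale", 1}};  // hypothetical
    std::size_t prefix_sum = 0;
    for (std::size_t i = 0; i < vars.size(); ++i) {
        // prefix_sum is the offset of vars[i] inside the flat thread_data array
        std::printf("%s -> index %zu, offset %zu\n", vars[i].name.c_str(), i, prefix_sum);
        prefix_sum += vars[i].length;
    }
    std::printf("total thread_data size: %zu\n", prefix_sum);  // prints 6
    return 0;
}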
      +
      847 
      +
      848  for (const auto& var: info.global_variables) {
      +
      849  codegen_global_variables.push_back(var);
      +
      850  }
      +
      851 
      +
      852  if (info.vectorize && !info.top_local_variables.empty()) {
      +
      853  size_t prefix_sum = info.thread_var_data_size;
      +
      854  size_t n_thread_vars = codegen_thread_variables.size();
      +
      855  for (size_t i = 0; i < info.top_local_variables.size(); ++i) {
      +
      856  const auto& var = info.top_local_variables[i];
      +
      857  codegen_thread_variables.push_back({var, n_thread_vars + i, prefix_sum});
      +
      858 
      +
      859  prefix_sum += var->get_length();
      +
      860  }
      +
      861  }
      +
      862 
      +
      863  if (!info.vectorize && !info.top_local_variables.empty()) {
      +
      864  for (size_t i = 0; i < info.top_local_variables.size(); ++i) {
      +
      865  const auto& var = info.top_local_variables[i];
      +
      866  codegen_global_variables.push_back(var);
      +
      867  }
      +
      868  }
      +
      869 
      +
      870 
      +
      871  if (!codegen_thread_variables.empty()) {
      +
      872  if (!info.vectorize) {
      +
      873  // MOD files that aren't "VECTORIZED" don't have thread data.
      +
      874  throw std::runtime_error("Found thread variables with `vectorize == false`.");
      +
      875  }
      876 
      -
      877  auto symbol = make_symbol("thread_data");
      -
      878  auto thread_data_size = info.thread_var_data_size + info.top_local_thread_size;
      -
      879  symbol->set_as_array(thread_data_size);
      -
      880  codegen_global_variables.push_back(symbol);
      -
      881  }
      -
      882 
      -
      883  for (const auto& var: info.state_vars) {
      -
      884  auto name = var->get_name() + "0";
      -
      885  auto symbol = program_symtab->lookup(name);
      -
      886  if (symbol == nullptr) {
      -
      887  codegen_global_variables.push_back(make_symbol(name));
      -
      888  }
      -
      889  }
      -
      890 
      -
      891  for (const auto& var: info.constant_variables) {
      -
      892  codegen_global_variables.push_back(var);
      -
      893  }
      -
      894 
      -
      895  for (const auto& var: codegen_global_variables) {
      -
      896  auto name = var->get_name();
      -
      897  auto length = var->get_length();
      -
      898  if (var->is_array()) {
      -
      899  printer->fmt_line("{} {}[{}] /* TODO init const-array */;", float_type, name, length);
      -
      900  } else {
      -
      901  double value{};
      -
      902  if (auto const& value_ptr = var->get_value()) {
      -
      903  value = *value_ptr;
      -
      904  }
      -
      905  printer->fmt_line("{} {}{};",
      -
      906  float_type,
      -
      907  name,
      -
      908  print_initializers ? fmt::format("{{{:g}}}", value) : std::string{});
      -
      909  }
      -
      910  }
      -
      911 
      -
      912  if (info.table_count > 0) {
      -
      913  // basically the same code as coreNEURON uses
      -
      914  printer->fmt_line("double usetable{};", print_initializers ? "{1}" : "");
      -
      915  codegen_global_variables.push_back(make_symbol(naming::USE_TABLE_VARIABLE));
      -
      916 
      -
      917  for (const auto& block: info.functions_with_table) {
      -
      918  const auto& name = block->get_node_name();
      -
      919  printer->fmt_line("{} tmin_{}{};", float_type, name, value_initialize);
      -
      920  printer->fmt_line("{} mfac_{}{};", float_type, name, value_initialize);
      -
      921  codegen_global_variables.push_back(make_symbol("tmin_" + name));
      -
      922  codegen_global_variables.push_back(make_symbol("mfac_" + name));
      -
      923  }
      -
      924 
      -
      925  for (const auto& variable: info.table_statement_variables) {
      -
      926  auto const name = "t_" + variable->get_name();
      -
      927  auto const num_values = variable->get_num_values();
      -
      928  if (variable->is_array()) {
      -
      929  int array_len = variable->get_length();
      -
      930  printer->fmt_line(
      -
      931  "{} {}[{}][{}]{};", float_type, name, array_len, num_values, value_initialize);
      -
      932  } else {
      -
      933  printer->fmt_line("{} {}[{}]{};", float_type, name, num_values, value_initialize);
      -
      934  }
      -
      935  codegen_global_variables.push_back(make_symbol(name));
      -
      936  }
      -
      937  }
      -
      938 
      -
      939  if (!info.function_tables.empty()) {
      -
      940  throw std::runtime_error("Not implemented, global function tables.");
      -
      941  }
      -
      942 
      -
      943  if (info.vectorize && info.thread_data_index) {
      -
      944  // TODO compare CoreNEURON something extcall stuff.
      -
      945  // throw std::runtime_error("Not implemented, global vectorize something else.");
      -
      946  }
      -
      947 
      -
      948  if (info.diam_used) {
      -
      949  printer->fmt_line("Symbol* _morphology_sym;");
      -
      950  }
      -
      951 
      -
      952  printer->pop_block(";");
      +
      877  codegen_global_variables.push_back(make_symbol("thread_data_in_use"));
      +
      878 
      +
      879  auto symbol = make_symbol("thread_data");
      +
      880  auto thread_data_size = info.thread_var_data_size + info.top_local_thread_size;
      +
      881  symbol->set_as_array(thread_data_size);
      +
      882  codegen_global_variables.push_back(symbol);
      +
      883  }
      +
      884 
      +
      885  for (const auto& var: info.state_vars) {
      +
      886  auto name = var->get_name() + "0";
      +
      887  auto symbol = program_symtab->lookup(name);
      +
      888  if (symbol == nullptr) {
      +
      889  codegen_global_variables.push_back(make_symbol(name));
      +
      890  }
      +
      891  }
      +
      892 
      +
      893  for (const auto& var: info.constant_variables) {
      +
      894  codegen_global_variables.push_back(var);
      +
      895  }
      +
      896 
      +
      897  for (const auto& var: codegen_global_variables) {
      +
      898  auto name = var->get_name();
      +
      899  auto length = var->get_length();
      +
      900  if (var->is_array()) {
      +
      901  printer->fmt_line("{} {}[{}] /* TODO init const-array */;", float_type, name, length);
      +
      902  } else {
      +
      903  double value{};
      +
      904  if (auto const& value_ptr = var->get_value()) {
      +
      905  value = *value_ptr;
      +
      906  }
      +
      907  printer->fmt_line("{} {}{};",
      +
      908  float_type,
      +
      909  name,
      +
      910  print_initializers ? fmt::format("{{{:g}}}", value) : std::string{});
      +
      911  }
      +
      912  }
      +
      913 
      +
      914  if (info.table_count > 0) {
      +
      915  // basically the same code as coreNEURON uses
      +
      916  printer->fmt_line("double usetable{};", print_initializers ? "{1}" : "");
      +
      917  codegen_global_variables.push_back(make_symbol(naming::USE_TABLE_VARIABLE));
      +
      918 
      +
      919  for (const auto& block: info.functions_with_table) {
      +
      920  const auto& name = block->get_node_name();
      +
      921  printer->fmt_line("{} tmin_{}{};", float_type, name, value_initialize);
      +
      922  printer->fmt_line("{} mfac_{}{};", float_type, name, value_initialize);
      +
      923  codegen_global_variables.push_back(make_symbol("tmin_" + name));
      +
      924  codegen_global_variables.push_back(make_symbol("mfac_" + name));
      +
      925  }
      +
      926 
      +
      927  for (const auto& variable: info.table_statement_variables) {
      +
      928  auto const name = "t_" + variable->get_name();
      +
      929  auto const num_values = variable->get_num_values();
      +
      930  if (variable->is_array()) {
      +
      931  int array_len = variable->get_length();
      +
      932  printer->fmt_line(
      +
      933  "{} {}[{}][{}]{};", float_type, name, array_len, num_values, value_initialize);
      +
      934  } else {
      +
      935  printer->fmt_line("{} {}[{}]{};", float_type, name, num_values, value_initialize);
      +
      936  }
      +
      937  codegen_global_variables.push_back(make_symbol(name));
      +
      938  }
      +
      939  }
      +
      940 
      +
      941  if (!info.function_tables.empty()) {
      +
      942  throw std::runtime_error("Not implemented, global function tables.");
      +
      943  }
      +
      944 
      +
      945  if (info.vectorize && info.thread_data_index) {
      +
      946  // TODO compare CoreNEURON something extcall stuff.
      +
      947  // throw std::runtime_error("Not implemented, global vectorize something else.");
      +
      948  }
      +
      949 
      +
      950  if (info.diam_used) {
      +
      951  printer->fmt_line("Symbol* _morphology_sym;");
      +
      952  }
      953 
      -
      954  print_global_var_struct_assertions();
      -
      955  print_global_var_struct_decl();
      -
      956  print_global_var_external_access();
      -
      957 
      -
      958  print_global_param_default_values();
      -
      959 }
      -
      960 
      - -
      962  for (const auto& var: codegen_global_variables) {
      -
      963  auto var_name = get_name(var);
      -
      964  auto var_expr = get_variable_name(var_name, false);
      -
      965 
      -
      966  printer->fmt_push_block("auto {}() -> std::decay<decltype({})>::type ",
      -
      967  method_name(var_name),
      -
      968  var_expr);
      -
      969  printer->fmt_line("return {};", var_expr);
      -
      970  printer->pop_block();
      -
      971  }
      -
      972  if (!codegen_global_variables.empty()) {
      -
      973  printer->add_newline();
      -
      974  }
      -
      975 
      -
      976  for (const auto& var: info.external_variables) {
      -
      977  auto var_name = get_name(var);
      -
      978  printer->fmt_line("double {}();", var_name);
      -
      979  }
      -
      980  if (!info.external_variables.empty()) {
      -
      981  printer->add_newline();
      -
      982  }
      -
      983 }
      -
      984 
      - -
      986  printer->push_block("static std::vector<double> _parameter_defaults =");
      -
      987 
      -
      988  std::vector<std::string> defaults;
      -
      989  for (const auto& p: info.range_parameter_vars) {
      -
      990  double value = p->get_value() == nullptr ? 0.0 : *p->get_value();
      -
      991  defaults.push_back(fmt::format("{:g} /* {} */", value, p->get_name()));
      -
      992  }
      -
      993 
      -
      994  printer->add_multi_line(fmt::format("{}", fmt::join(defaults, ",\n")));
      -
      995  printer->pop_block(";");
      -
      996 }
      -
      997 
      - -
      999  auto variable_printer = [&](const std::vector<SymbolType>& variables, bool if_array) {
      -
      1000  for (const auto& variable: variables) {
      -
      1001  if (variable->is_array() == if_array) {
      -
      1002  // false => do not use the instance struct, which is not
      -
      1003  // defined in the global declaration that we are printing
      -
      1004  auto name = get_variable_name(variable->get_name(), false);
      -
      1005  auto ename = add_escape_quote(variable->get_name() + "_" + info.mod_suffix);
      -
      1006  if (if_array) {
      -
      1007  auto length = variable->get_length();
      -
      1008  printer->fmt_line("{{{}, {}, {}}},", ename, name, length);
      -
      1009  } else {
      -
      1010  printer->fmt_line("{{{}, &{}}},", ename, name);
      -
      1011  }
      -
      1012  }
      -
      1013  }
      -
      1014  };
      -
      1015 
      -
      1016  auto globals = info.global_variables;
      -
      1017  auto thread_vars = info.thread_variables;
      -
      1018 
      -
      1019  if (info.table_count > 0) {
      -
      1020  globals.push_back(make_symbol(naming::USE_TABLE_VARIABLE));
      -
      1021  }
      -
      1022 
      -
      1023  printer->add_newline(2);
      -
      1024  printer->add_line("/** connect global (scalar) variables to hoc -- */");
      -
      1025  printer->add_line("static DoubScal hoc_scalar_double[] = {");
      -
      1026  printer->increase_indent();
      -
      1027  variable_printer(globals, false);
      -
      1028  variable_printer(thread_vars, false);
      -
      1029  printer->add_line("{nullptr, nullptr}");
      -
      1030  printer->decrease_indent();
      -
      1031  printer->add_line("};");
      -
      1032 
      -
      1033  printer->add_newline(2);
      -
      1034  printer->add_line("/** connect global (array) variables to hoc -- */");
      -
      1035  printer->add_line("static DoubVec hoc_vector_double[] = {");
      -
      1036  printer->increase_indent();
      -
      1037  variable_printer(globals, true);
      -
      1038  variable_printer(thread_vars, true);
      -
      1039  printer->add_line("{nullptr, nullptr, 0}");
      -
      1040  printer->decrease_indent();
      -
      1041  printer->add_line("};");
      -
      1042 
      -
      1043  printer->add_newline(2);
      -
      1044  printer->add_line("/* declaration of user functions */");
      -
      1045  for (const auto& procedure: info.procedures) {
      -
      1046  const auto proc_name = procedure->get_node_name();
      -
      1047  printer->fmt_line("{};", hoc_function_signature(proc_name));
      -
      1048  }
      -
      1049  for (const auto& function: info.functions) {
      -
      1050  const auto func_name = function->get_node_name();
      -
      1051  printer->fmt_line("{};", hoc_function_signature(func_name));
      -
      1052  }
      -
      1053  if (!info.point_process) {
      -
      1054  for (const auto& procedure: info.procedures) {
      -
      1055  const auto proc_name = procedure->get_node_name();
      -
      1056  printer->fmt_line("{};", py_function_signature(proc_name));
      -
      1057  }
      -
      1058  for (const auto& function: info.functions) {
      -
      1059  const auto func_name = function->get_node_name();
      -
      1060  printer->fmt_line("{};", py_function_signature(func_name));
      -
      1061  }
      -
      1062  }
      -
      1063 
      -
      1064  printer->add_newline(2);
      -
      1065  printer->add_line("/* connect user functions to hoc names */");
      -
      1066  printer->add_line("static VoidFunc hoc_intfunc[] = {");
      -
      1067  printer->increase_indent();
      -
      1068  if (info.point_process) {
      -
      1069  printer->add_line("{0, 0}");
      -
      1070  printer->decrease_indent();
      -
      1071  printer->add_line("};");
      -
      1072  printer->add_line("static Member_func _member_func[] = {");
      -
      1073  printer->increase_indent();
      -
      1074  printer->add_multi_line(R"CODE(
      -
      1075  {"loc", _hoc_loc_pnt},
      -
      1076  {"has_loc", _hoc_has_loc},
      -
      1077  {"get_loc", _hoc_get_loc_pnt},)CODE");
      -
      1078  } else {
      -
      1079  printer->fmt_line("{{\"setdata_{}\", _hoc_setdata}},", info.mod_suffix);
      -
      1080  }
      -
      1081 
      -
      1082  for (const auto& procedure: info.procedures) {
      -
      1083  const auto proc_name = procedure->get_node_name();
      -
      1084  printer->fmt_line("{{\"{}{}\", {}}},",
      -
      1085  proc_name,
      -
      1086  info.rsuffix,
      -
      1087  hoc_function_name(proc_name));
      -
      1088  }
      -
      1089  for (const auto& function: info.functions) {
      -
      1090  const auto func_name = function->get_node_name();
      -
      1091  printer->fmt_line("{{\"{}{}\", {}}},",
      -
      1092  func_name,
      -
      1093  info.rsuffix,
      -
      1094  hoc_function_name(func_name));
      -
      1095  }
      -
      1096 
      -
      1097  printer->add_line("{nullptr, nullptr}");
      -
      1098  printer->decrease_indent();
      -
      1099  printer->add_line("};");
      -
      1100  if (!info.point_process) {
      -
      1101  printer->push_block("static NPyDirectMechFunc npy_direct_func_proc[] =");
      -
      1102  for (const auto& procedure: info.procedures) {
      -
      1103  const auto proc_name = procedure->get_node_name();
      -
      1104  printer->fmt_line("{{\"{}\", {}}},", proc_name, py_function_name(proc_name));
      -
      1105  }
      -
      1106  for (const auto& function: info.functions) {
      -
      1107  const auto func_name = function->get_node_name();
      -
      1108  printer->fmt_line("{{\"{}\", {}}},", func_name, py_function_name(func_name));
      -
      1109  }
      -
      1110  printer->add_line("{nullptr, nullptr}");
      -
      1111  printer->pop_block(";");
      -
      1112  }
      -
      1113 }
      -
      1114 
      - -
      1116  printer->add_newline(2);
      -
      1117  printer->add_line("/** register channel with the simulator */");
      -
      1118  printer->fmt_push_block("extern \"C\" void _{}_reg()", info.mod_file);
      -
      1119  printer->add_line("_initlists();");
      -
      1120  printer->add_newline();
      -
      1121 
      -
      1122  for (const auto& ion: info.ions) {
      -
      1123  double valence = ion.valence.value_or(-10000.0);
      -
      1124  printer->fmt_line("ion_reg(\"{}\", {});", ion.name, valence);
      -
      1125  }
      -
      1126  if (!info.ions.empty()) {
      -
      1127  printer->add_newline();
      -
      1128  }
      -
      1129 
      -
      1130  for (const auto& ion: info.ions) {
      -
      1131  printer->fmt_line("_{0}_sym = hoc_lookup(\"{0}_ion\");", ion.name);
      -
      1132  }
      -
      1133  if (!info.ions.empty()) {
      -
      1134  printer->add_newline();
      -
      1135  }
      -
      1136 
      -
      1137  const auto compute_functions_parameters =
      -
      1138  breakpoint_exist()
      -
      1139  ? fmt::format("{}, {}, {}",
      -
      1140  nrn_cur_required() ? method_name(naming::NRN_CUR_METHOD) : "nullptr",
      -
      1141  method_name(naming::NRN_JACOB_METHOD),
      -
      1142  nrn_state_required() ? method_name(naming::NRN_STATE_METHOD) : "nullptr")
      -
      1143  : "nullptr, nullptr, nullptr";
      -
      1144  const auto register_mech_args = fmt::format("{}, {}, {}, {}, {}, {}",
      -
      1145  get_channel_info_var_name(),
      -
      1146  method_name(naming::NRN_ALLOC_METHOD),
      -
      1147  compute_functions_parameters,
      -
      1148  method_name(naming::NRN_INIT_METHOD),
      - -
      1150  1 + info.thread_data_index);
      -
      1151  if (info.point_process) {
      -
      1152  printer->fmt_line(
      -
      1153  "_pointtype = point_register_mech({}, _hoc_create_pnt, _hoc_destroy_pnt, "
      -
      1154  "_member_func);",
      -
      1155  register_mech_args);
      -
      1156 
      -
      1157  if (info.destructor_node) {
      -
      1158  printer->fmt_line("register_destructor({});",
      -
      1159  method_name(naming::NRN_DESTRUCTOR_METHOD));
      -
      1160  }
      -
      1161  } else {
      -
      1162  printer->fmt_line("register_mech({});", register_mech_args);
      -
      1163  }
      -
      1164 
      -
      1165 
      -
      1166  if (info.thread_callback_register) {
      -
      1167  printer->fmt_line("_extcall_thread.resize({});", info.thread_data_index + 1);
      -
      1168  printer->fmt_line("thread_mem_init(_extcall_thread.data());");
      -
      1169  printer->fmt_line("{} = 0;", get_variable_name("thread_data_in_use", false));
      -
      1170  }
      -
      1171 
      -
      1172 
      -
      1173  /// type related information
      -
      1174  printer->add_newline();
      -
      1175  printer->fmt_line("mech_type = nrn_get_mechtype({}[1]);", get_channel_info_var_name());
      -
      1176 
      -
      1177  printer->add_line("hoc_register_parm_default(mech_type, &_parameter_defaults);");
      +
      954  printer->pop_block(";");
      +
      955 
      +
      956  print_global_var_struct_assertions();
      +
      957  print_global_var_struct_decl();
      +
      958  print_global_var_external_access();
      +
      959 
      +
      960  print_global_param_default_values();
      +
      961 }
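For orientation, the loops above fill in the body of the mechanism's global-variable struct, and the newly added printer->pop_block(";") closes it before the struct declaration, assertions and defaults are printed. For a hypothetical mechanism with suffix hh that declares a STATE variable m, tabulates a rates procedure and reads diam, the emitted fragment could look roughly like this (struct name, variable names and the table size are illustrative; the surrounding declaration comes from print_global_var_struct_decl()):

    struct hh_Store {
        double m0{0};             // default for STATE variable m
        double usetable{1};       // TABLE bookkeeping
        double tmin_rates{};
        double mfac_rates{};
        double t_minf[201]{};     // one slot per tabulated value
        Symbol* _morphology_sym;  // only emitted when diam is used
    };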
      +
      962 
      + +
      964  for (const auto& var: codegen_global_variables) {
      +
      965  auto var_name = get_name(var);
      +
      966  auto var_expr = get_variable_name(var_name, false);
      +
      967 
      +
      968  printer->fmt_push_block("auto {}() -> std::decay<decltype({})>::type ",
      +
      969  method_name(var_name),
      +
      970  var_expr);
      +
      971  printer->fmt_line("return {};", var_expr);
      +
      972  printer->pop_block();
      +
      973  }
      +
      974  if (!codegen_global_variables.empty()) {
      +
      975  printer->add_newline();
      +
      976  }
      +
      977 
      +
      978  for (const auto& var: info.external_variables) {
      +
      979  auto var_name = get_name(var);
      +
      980  printer->fmt_line("double {}();", var_name);
      +
      981  }
      +
      982  if (!info.external_variables.empty()) {
      +
      983  printer->add_newline();
      +
      984  }
      +
      985 }
      +
      986 
      + +
      988  printer->push_block("static std::vector<double> _parameter_defaults =");
      +
      989 
      +
      990  std::vector<std::string> defaults;
      +
      991  for (const auto& p: info.range_parameter_vars) {
      +
      992  double value = p->get_value() == nullptr ? 0.0 : *p->get_value();
      +
      993  defaults.push_back(fmt::format("{:g} /* {} */", value, p->get_name()));
      +
      994  }
      +
      995 
      +
      996  printer->add_multi_line(fmt::format("{}", fmt::join(defaults, ",\n")));
      +
      997  printer->pop_block(";");
      +
      998 }
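This block collects the default value of every RANGE parameter into one vector, which NEURON later picks up through the hoc_register_parm_default(mech_type, &_parameter_defaults) call emitted in the registration function further down. A sketch of the output for a hypothetical mechanism with three RANGE parameters (names and values are illustrative):

    static std::vector<double> _parameter_defaults = {
        0.12 /* gnabar */,
        0.036 /* gkbar */,
        0.0003 /* gl */
    };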
      +
      999 
      + +
      1001  auto variable_printer = [&](const std::vector<SymbolType>& variables, bool if_array) {
      +
      1002  for (const auto& variable: variables) {
      +
      1003  if (variable->is_array() == if_array) {
      +
      1004  // false => do not use the instance struct, which is not
      +
      1005  // defined in the global declaration that we are printing
      +
      1006  auto name = get_variable_name(variable->get_name(), false);
      +
      1007  auto ename = add_escape_quote(variable->get_name() + "_" + info.mod_suffix);
      +
      1008  if (if_array) {
      +
      1009  auto length = variable->get_length();
      +
      1010  printer->fmt_line("{{{}, {}, {}}},", ename, name, length);
      +
      1011  } else {
      +
      1012  printer->fmt_line("{{{}, &{}}},", ename, name);
      +
      1013  }
      +
      1014  }
      +
      1015  }
      +
      1016  };
      +
      1017 
      +
      1018  auto globals = info.global_variables;
      +
      1019  auto thread_vars = info.thread_variables;
      +
      1020 
      +
      1021  if (info.table_count > 0) {
      +
      1022  globals.push_back(make_symbol(naming::USE_TABLE_VARIABLE));
      +
      1023  }
      +
      1024 
      +
      1025  printer->add_newline(2);
      +
      1026  printer->add_line("/** connect global (scalar) variables to hoc -- */");
      +
      1027  printer->add_line("static DoubScal hoc_scalar_double[] = {");
      +
      1028  printer->increase_indent();
      +
      1029  variable_printer(globals, false);
      +
      1030  variable_printer(thread_vars, false);
      +
      1031  printer->add_line("{nullptr, nullptr}");
      +
      1032  printer->decrease_indent();
      +
      1033  printer->add_line("};");
      +
      1034 
      +
      1035  printer->add_newline(2);
      +
      1036  printer->add_line("/** connect global (array) variables to hoc -- */");
      +
      1037  printer->add_line("static DoubVec hoc_vector_double[] = {");
      +
      1038  printer->increase_indent();
      +
      1039  variable_printer(globals, true);
      +
      1040  variable_printer(thread_vars, true);
      +
      1041  printer->add_line("{nullptr, nullptr, 0}");
      +
      1042  printer->decrease_indent();
      +
      1043  printer->add_line("};");
      +
      1044 
      +
      1045  printer->add_newline(2);
      +
      1046  printer->add_line("/* declaration of user functions */");
      +
      1047  for (const auto& procedure: info.procedures) {
      +
      1048  const auto proc_name = procedure->get_node_name();
      +
      1049  printer->fmt_line("{};", hoc_function_signature(proc_name));
      +
      1050  }
      +
      1051  for (const auto& function: info.functions) {
      +
      1052  const auto func_name = function->get_node_name();
      +
      1053  printer->fmt_line("{};", hoc_function_signature(func_name));
      +
      1054  }
      +
      1055  if (!info.point_process) {
      +
      1056  for (const auto& procedure: info.procedures) {
      +
      1057  const auto proc_name = procedure->get_node_name();
      +
      1058  printer->fmt_line("{};", py_function_signature(proc_name));
      +
      1059  }
      +
      1060  for (const auto& function: info.functions) {
      +
      1061  const auto func_name = function->get_node_name();
      +
      1062  printer->fmt_line("{};", py_function_signature(func_name));
      +
      1063  }
      +
      1064  }
      +
      1065 
      +
      1066  printer->add_newline(2);
      +
      1067  printer->add_line("/* connect user functions to hoc names */");
      +
      1068  printer->add_line("static VoidFunc hoc_intfunc[] = {");
      +
      1069  printer->increase_indent();
      +
      1070  if (info.point_process) {
      +
      1071  printer->add_line("{0, 0}");
      +
      1072  printer->decrease_indent();
      +
      1073  printer->add_line("};");
      +
      1074  printer->add_line("static Member_func _member_func[] = {");
      +
      1075  printer->increase_indent();
      +
      1076  printer->add_multi_line(R"CODE(
      +
      1077  {"loc", _hoc_loc_pnt},
      +
      1078  {"has_loc", _hoc_has_loc},
      +
      1079  {"get_loc", _hoc_get_loc_pnt},)CODE");
      +
      1080  } else {
      +
      1081  printer->fmt_line("{{\"setdata_{}\", _hoc_setdata}},", info.mod_suffix);
      +
      1082  }
      +
      1083 
      +
      1084  for (const auto& procedure: info.procedures) {
      +
      1085  const auto proc_name = procedure->get_node_name();
      +
      1086  printer->fmt_line("{{\"{}{}\", {}}},",
      +
      1087  proc_name,
      +
      1088  info.rsuffix,
      +
      1089  hoc_function_name(proc_name));
      +
      1090  }
      +
      1091  for (const auto& function: info.functions) {
      +
      1092  const auto func_name = function->get_node_name();
      +
      1093  printer->fmt_line("{{\"{}{}\", {}}},",
      +
      1094  func_name,
      +
      1095  info.rsuffix,
      +
      1096  hoc_function_name(func_name));
      +
      1097  }
      +
      1098 
      +
      1099  printer->add_line("{nullptr, nullptr}");
      +
      1100  printer->decrease_indent();
      +
      1101  printer->add_line("};");
      +
      1102  if (!info.point_process) {
      +
      1103  printer->push_block("static NPyDirectMechFunc npy_direct_func_proc[] =");
      +
      1104  for (const auto& procedure: info.procedures) {
      +
      1105  const auto proc_name = procedure->get_node_name();
      +
      1106  printer->fmt_line("{{\"{}\", {}}},", proc_name, py_function_name(proc_name));
      +
      1107  }
      +
      1108  for (const auto& function: info.functions) {
      +
      1109  const auto func_name = function->get_node_name();
      +
      1110  printer->fmt_line("{{\"{}\", {}}},", func_name, py_function_name(func_name));
      +
      1111  }
      +
      1112  printer->add_line("{nullptr, nullptr}");
      +
      1113  printer->pop_block(";");
      +
      1114  }
      +
      1115 }
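This function wires GLOBAL variables and user-callable PROCEDUREs/FUNCTIONs into HOC and into the Python direct-call table. For a hypothetical non-POINT_PROCESS mechanism with suffix hh, one GLOBAL scalar vshift and one PROCEDURE rates, the generated tables could look roughly as follows; the _hoc_* and _npy_* wrapper names and the hh_global instance are assumptions, since their definitions are emitted elsewhere:

    static DoubScal hoc_scalar_double[] = {
        {"vshift_hh", &hh_global.vshift},
        {nullptr, nullptr}
    };
    static DoubVec hoc_vector_double[] = {
        {nullptr, nullptr, 0}
    };
    static VoidFunc hoc_intfunc[] = {
        {"setdata_hh", _hoc_setdata},
        {"rates_hh", _hoc_rates},
        {nullptr, nullptr}
    };
    static NPyDirectMechFunc npy_direct_func_proc[] = {
        {"rates", _npy_rates},
        {nullptr, nullptr}
    };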
      +
      1116 
      + +
      1118  printer->add_newline(2);
      +
      1119  printer->add_line("/** register channel with the simulator */");
      +
      1120  printer->fmt_push_block("extern \"C\" void _{}_reg()", info.mod_file);
      +
      1121  printer->add_line("_initlists();");
      +
      1122  printer->add_newline();
      +
      1123 
      +
      1124  for (const auto& ion: info.ions) {
      +
      1125  double valence = ion.valence.value_or(-10000.0);
      +
      1126  printer->fmt_line("ion_reg(\"{}\", {});", ion.name, valence);
      +
      1127  }
      +
      1128  if (!info.ions.empty()) {
      +
      1129  printer->add_newline();
      +
      1130  }
      +
      1131 
      +
      1132  for (const auto& ion: info.ions) {
      +
      1133  printer->fmt_line("_{0}_sym = hoc_lookup(\"{0}_ion\");", ion.name);
      +
      1134  }
      +
      1135  if (!info.ions.empty()) {
      +
      1136  printer->add_newline();
      +
      1137  }
      +
      1138 
      +
      1139  const auto compute_functions_parameters =
      +
      1140  breakpoint_exist()
      +
      1141  ? fmt::format("{}, {}, {}",
      +
      1142  nrn_cur_required() ? method_name(naming::NRN_CUR_METHOD) : "nullptr",
      +
      1143  method_name(naming::NRN_JACOB_METHOD),
      +
      1144  nrn_state_required() ? method_name(naming::NRN_STATE_METHOD) : "nullptr")
      +
      1145  : "nullptr, nullptr, nullptr";
      +
      1146  const auto register_mech_args = fmt::format("{}, {}, {}, {}, {}, {}",
      +
      1147  get_channel_info_var_name(),
      +
      1148  method_name(naming::NRN_ALLOC_METHOD),
      +
      1149  compute_functions_parameters,
      +
      1150  method_name(naming::NRN_INIT_METHOD),
      + +
      1152  1 + info.thread_data_index);
      +
      1153  if (info.point_process) {
      +
      1154  printer->fmt_line(
      +
      1155  "_pointtype = point_register_mech({}, _hoc_create_pnt, _hoc_destroy_pnt, "
      +
      1156  "_member_func);",
      +
      1157  register_mech_args);
      +
      1158 
      +
      1159  if (info.destructor_node) {
      +
      1160  printer->fmt_line("register_destructor({});",
      +
      1161  method_name(naming::NRN_DESTRUCTOR_METHOD));
      +
      1162  }
      +
      1163  } else {
      +
      1164  printer->fmt_line("register_mech({});", register_mech_args);
      +
      1165  }
      +
      1166 
      +
      1167 
      +
      1168  if (info.thread_callback_register) {
      +
      1169  printer->fmt_line("_extcall_thread.resize({});", info.thread_data_index + 1);
      +
      1170  printer->fmt_line("thread_mem_init(_extcall_thread.data());");
      +
      1171  printer->fmt_line("{} = 0;", get_variable_name("thread_data_in_use", false));
      +
      1172  }
      +
      1173 
      +
      1174 
      +
      1175  /// type related information
      +
      1176  printer->add_newline();
      +
      1177  printer->fmt_line("mech_type = nrn_get_mechtype({}[1]);", get_channel_info_var_name());
      1178 
      -
      1179  // register the table-checking function
      -
      1180  if (info.table_count > 0) {
      -
      1181  printer->fmt_line("_nrn_thread_table_reg(mech_type, {});", table_thread_function_name());
      -
      1182  }
      -
      1183 
      -
      1184  printer->add_line("_nrn_mechanism_register_data_fields(mech_type,");
      -
      1185  printer->increase_indent();
      -
      1186 
      -
      1187  const auto codegen_float_variables_size = codegen_float_variables.size();
      -
      1188  std::vector<std::string> mech_register_args;
      -
      1189 
      -
      1190  for (int i = 0; i < codegen_float_variables_size; ++i) {
      -
      1191  const auto& float_var = codegen_float_variables[i];
      -
      1192  if (float_var->is_array()) {
      -
      1193  mech_register_args.push_back(
      -
      1194  fmt::format("_nrn_mechanism_field<double>{{\"{}\", {}}} /* {} */",
      -
      1195  float_var->get_name(),
      -
      1196  float_var->get_length(),
      -
      1197  i));
      -
      1198  } else {
      -
      1199  mech_register_args.push_back(fmt::format(
      -
      1200  "_nrn_mechanism_field<double>{{\"{}\"}} /* {} */", float_var->get_name(), i));
      -
      1201  }
      -
      1202  }
      -
      1203 
      -
      1204  const auto codegen_int_variables_size = codegen_int_variables.size();
      -
      1205  for (int i = 0; i < codegen_int_variables_size; ++i) {
      -
      1206  const auto& int_var = codegen_int_variables[i];
      -
      1207  const auto& name = int_var.symbol->get_name();
      -
      1208  if (i != info.semantics[i].index) {
      -
      1209  throw std::runtime_error("Broken logic.");
      -
      1210  }
      -
      1211  const auto& semantic = info.semantics[i].name;
      -
      1212 
      -
      1213  auto type = "double*";
      -
      1214  if (name == naming::POINT_PROCESS_VARIABLE) {
      -
      1215  type = "Point_process*";
      -
      1216  } else if (name == naming::TQITEM_VARIABLE) {
      -
      1217  type = "void*";
      -
      1218  } else if (stringutils::starts_with(name, "style_") &&
      -
      1219  stringutils::starts_with(semantic, "#") &&
      -
      1220  stringutils::ends_with(semantic, "_ion")) {
      -
      1221  type = "int*";
      -
      1222  } else if (semantic == naming::FOR_NETCON_SEMANTIC) {
      -
      1223  type = "void*";
      -
      1224  }
      -
      1225 
      -
      1226  mech_register_args.push_back(
      -
      1227  fmt::format("_nrn_mechanism_field<{}>{{\"{}\", \"{}\"}} /* {} */",
      -
      1228  type,
      -
      1229  name,
      -
      1230  info.semantics[i].name,
      -
      1231  i));
      -
      1232  }
      -
      1233 
      -
      1234  printer->add_multi_line(fmt::format("{}", fmt::join(mech_register_args, ",\n")));
      +
      1179  printer->add_line("hoc_register_parm_default(mech_type, &_parameter_defaults);");
      +
      1180 
      +
      1181  // register the table-checking function
      +
      1182  if (info.table_count > 0) {
      +
      1183  printer->fmt_line("_nrn_thread_table_reg(mech_type, {});", table_thread_function_name());
      +
      1184  }
      +
      1185 
      +
      1186  printer->add_line("_nrn_mechanism_register_data_fields(mech_type,");
      +
      1187  printer->increase_indent();
      +
      1188 
      +
      1189  const auto codegen_float_variables_size = codegen_float_variables.size();
      +
      1190  std::vector<std::string> mech_register_args;
      +
      1191 
      +
      1192  for (int i = 0; i < codegen_float_variables_size; ++i) {
      +
      1193  const auto& float_var = codegen_float_variables[i];
      +
      1194  if (float_var->is_array()) {
      +
      1195  mech_register_args.push_back(
      +
      1196  fmt::format("_nrn_mechanism_field<double>{{\"{}\", {}}} /* {} */",
      +
      1197  float_var->get_name(),
      +
      1198  float_var->get_length(),
      +
      1199  i));
      +
      1200  } else {
      +
      1201  mech_register_args.push_back(fmt::format(
      +
      1202  "_nrn_mechanism_field<double>{{\"{}\"}} /* {} */", float_var->get_name(), i));
      +
      1203  }
      +
      1204  }
      +
      1205 
      +
      1206  const auto codegen_int_variables_size = codegen_int_variables.size();
      +
      1207  for (int i = 0; i < codegen_int_variables_size; ++i) {
      +
      1208  const auto& int_var = codegen_int_variables[i];
      +
      1209  const auto& name = int_var.symbol->get_name();
      +
      1210  if (i != info.semantics[i].index) {
      +
      1211  throw std::runtime_error("Broken logic.");
      +
      1212  }
      +
      1213  const auto& semantic = info.semantics[i].name;
      +
      1214 
      +
      1215  auto type = "double*";
      +
      1216  if (name == naming::POINT_PROCESS_VARIABLE) {
      +
      1217  type = "Point_process*";
      +
      1218  } else if (name == naming::TQITEM_VARIABLE) {
      +
      1219  type = "void*";
      +
      1220  } else if (stringutils::starts_with(name, "style_") &&
      +
      1221  stringutils::starts_with(semantic, "#") &&
      +
      1222  stringutils::ends_with(semantic, "_ion")) {
      +
      1223  type = "int*";
      +
      1224  } else if (semantic == naming::FOR_NETCON_SEMANTIC) {
      +
      1225  type = "void*";
      +
      1226  }
      +
      1227 
      +
      1228  mech_register_args.push_back(
      +
      1229  fmt::format("_nrn_mechanism_field<{}>{{\"{}\", \"{}\"}} /* {} */",
      +
      1230  type,
      +
      1231  name,
      +
      1232  info.semantics[i].name,
      +
      1233  i));
      +
      1234  }
      1235 
      -
      1236  printer->decrease_indent();
      -
      1237  printer->add_line(");");
      -
      1238  printer->add_newline();
      -
      1239 
      -
      1240 
      -
      1241  printer->fmt_line("hoc_register_prop_size(mech_type, {}, {});",
      -
      1242  float_variables_size(),
      -
      1243  int_variables_size());
      -
      1244 
      -
      1245  for (int i = 0; i < codegen_int_variables_size; ++i) {
      -
      1246  if (i != info.semantics[i].index) {
      -
      1247  throw std::runtime_error("Broken logic.");
      -
      1248  }
      -
      1249 
      -
      1250  printer->fmt_line("hoc_register_dparam_semantics(mech_type, {}, \"{}\");",
      -
      1251  i,
      -
      1252  info.semantics[i].name);
      -
      1253  }
      -
      1254 
      -
      1255  if (info.write_concentration) {
      -
      1256  printer->fmt_line("nrn_writes_conc(mech_type, 0);");
      -
      1257  }
      -
      1258 
      -
      1259  if (info.artificial_cell) {
      -
      1260  printer->fmt_line("add_nrn_artcell(mech_type, {});", info.tqitem_index);
      -
      1261  }
      -
      1262 
      -
      1263  if (info.net_event_used) {
      -
      1264  printer->add_line("add_nrn_has_net_event(mech_type);");
      -
      1265  }
      -
      1266 
      -
      1267  if (info.for_netcon_used) {
      -
      1268  auto dparam_it =
      -
      1269  std::find_if(info.semantics.begin(), info.semantics.end(), [](const IndexSemantics& a) {
      -
      1270  return a.name == naming::FOR_NETCON_SEMANTIC;
      -
      1271  });
      -
      1272  if (dparam_it == info.semantics.end()) {
      -
      1273  throw std::runtime_error("Couldn't find `fornetcon` variable.");
      -
      1274  }
      -
      1275 
      -
      1276  int dparam_index = dparam_it->index;
      -
      1277  printer->fmt_line("add_nrn_fornetcons(mech_type, {});", dparam_index);
      -
      1278  }
      -
      1279 
      -
      1280  printer->add_line("hoc_register_var(hoc_scalar_double, hoc_vector_double, hoc_intfunc);");
      -
      1281  if (!info.point_process) {
      -
      1282  printer->add_line("hoc_register_npy_direct(mech_type, npy_direct_func_proc);");
      -
      1283  }
      -
      1284  if (info.net_receive_node) {
      -
      1285  printer->fmt_line("pnt_receive[mech_type] = nrn_net_receive_{};", info.mod_suffix);
      -
      1286  printer->fmt_line("pnt_receive_size[mech_type] = {};", info.num_net_receive_parameters);
      -
      1287  }
      -
      1288 
      -
      1289  if (info.net_receive_initial_node) {
      -
      1290  printer->fmt_line("pnt_receive_init[mech_type] = net_init;");
      -
      1291  }
      -
      1292 
      -
      1293  if (info.thread_callback_register) {
      -
      1294  printer->add_line("_nrn_thread_reg(mech_type, 1, thread_mem_init);");
      -
      1295  printer->add_line("_nrn_thread_reg(mech_type, 0, thread_mem_cleanup);");
      -
      1296  }
      -
      1297 
      -
      1298  if (info.diam_used) {
      -
      1299  printer->fmt_line("{}._morphology_sym = hoc_lookup(\"morphology\");",
      -
      1300  global_struct_instance());
      -
      1301  }
      -
      1302 
      -
      1303  printer->pop_block();
      -
      1304 }
      -
      1305 
      -
      1306 
      - -
      1308  if (!info.thread_callback_register) {
      -
      1309  return;
      -
      1310  }
      -
      1311 
      -
      1312  auto static_thread_data = get_variable_name("thread_data", false);
      -
      1313  auto inuse = get_variable_name("thread_data_in_use", false);
      -
      1314  auto thread_data_index = info.thread_var_thread_id;
      -
      1315  printer->push_block("static void thread_mem_init(Datum* _thread) ");
      -
      1316  printer->push_block(fmt::format("if({})", inuse));
      -
      1317  printer->fmt_line("_thread[{}] = {{neuron::container::do_not_search, new double[{}]{{}}}};",
      -
      1318  thread_data_index,
      -
      1319  info.thread_var_data_size + info.top_local_thread_size);
      -
      1320  printer->pop_block();
      -
      1321  printer->push_block("else");
      -
      1322  printer->fmt_line("_thread[{}] = {{neuron::container::do_not_search, {}}};",
      -
      1323  thread_data_index,
      -
      1324  static_thread_data);
      -
      1325  printer->fmt_line("{} = 1;", inuse);
      -
      1326  printer->pop_block();
      -
      1327  printer->pop_block();
      -
      1328 
      -
      1329  printer->push_block("static void thread_mem_cleanup(Datum* _thread) ");
      -
      1330  printer->fmt_line("double * _thread_data_ptr = _thread[{}].get<double*>();", thread_data_index);
      -
      1331  printer->push_block(fmt::format("if(_thread_data_ptr == {})", static_thread_data));
      -
      1332  printer->fmt_line("{} = 0;", inuse);
      -
      1333  printer->pop_block();
      -
      1334  printer->push_block("else");
      -
      1335  printer->add_line("delete[] _thread_data_ptr;");
      -
      1336  printer->pop_block();
      -
      1337  printer->pop_block();
      -
      1338 }
      -
      1339 
      -
      1340 
      - -
      1342  for (auto const& [var, type]: info.neuron_global_variables) {
      -
      1343  auto const name = var->get_name();
      -
      1344  printer->fmt_line("extern {} {};", type, name);
      -
      1345  }
      -
      1346 }
      -
      1347 
      - -
      1349  auto const value_initialize = print_initializers ? "{}" : "";
      -
      1350  printer->add_newline(2);
      -
      1351  printer->add_line("/** all mechanism instance variables and global variables */");
      -
      1352  printer->fmt_push_block("struct {} ", instance_struct());
      -
      1353 
      -
      1354  for (auto const& [var, type]: info.neuron_global_variables) {
      -
      1355  auto const name = var->get_name();
      -
      1356  printer->fmt_line("{}* {}{};",
      -
      1357  type,
      -
      1358  name,
      -
      1359  print_initializers ? fmt::format("{{&::{}}}", name) : std::string{});
      -
      1360  }
      -
      1361  for (auto& var: codegen_float_variables) {
      -
      1362  const auto& name = var->get_name();
      -
      1363  printer->fmt_line("double* {}{};", name, value_initialize);
      -
      1364  }
      -
      1365  for (auto& var: codegen_int_variables) {
      -
      1366  const auto& name = var.symbol->get_name();
      -
      1367  if (name == naming::POINT_PROCESS_VARIABLE) {
      -
      1368  continue;
      -
      1369  } else if (var.is_index || var.is_integer) {
      -
      1370  // In NEURON we don't create caches for `int*`. Hence, do nothing.
      -
      1371  } else {
      -
      1372  auto qualifier = var.is_constant ? "const " : "";
      -
      1373  auto type = var.is_vdata ? "void*" : default_float_data_type();
      -
      1374  printer->fmt_line("{}{}* const* {}{};", qualifier, type, name, value_initialize);
      -
      1375  }
      -
      1376  }
      -
      1377 
      -
      1378  printer->fmt_line("{}* {}{};",
      -
      1379  global_struct(),
      - -
      1381  print_initializers ? fmt::format("{{&{}}}", global_struct_instance())
      -
      1382  : std::string{});
      -
      1383  printer->pop_block(";");
      -
      1384 }
      -
      1385 
      - -
      1387  printer->add_newline(2);
      -
      1388  printer->fmt_push_block("static {} make_instance_{}(_nrn_mechanism_cache_range& _lmc)",
      -
      1389  instance_struct(),
      -
      1390  info.mod_suffix);
      -
      1391  printer->fmt_push_block("return {}", instance_struct());
      -
      1392 
      -
      1393  std::vector<std::string> make_instance_args;
      +
      1236  printer->add_multi_line(fmt::format("{}", fmt::join(mech_register_args, ",\n")));
      +
      1237 
      +
      1238  printer->decrease_indent();
      +
      1239  printer->add_line(");");
      +
      1240  printer->add_newline();
      +
      1241 
      +
      1242 
      +
      1243  printer->fmt_line("hoc_register_prop_size(mech_type, {}, {});",
      +
      1244  float_variables_size(),
      +
      1245  int_variables_size());
      +
      1246 
      +
      1247  for (int i = 0; i < codegen_int_variables_size; ++i) {
      +
      1248  if (i != info.semantics[i].index) {
      +
      1249  throw std::runtime_error("Broken logic.");
      +
      1250  }
      +
      1251 
      +
      1252  printer->fmt_line("hoc_register_dparam_semantics(mech_type, {}, \"{}\");",
      +
      1253  i,
      +
      1254  info.semantics[i].name);
      +
      1255  }
      +
      1256 
      +
      1257  if (info.write_concentration) {
      +
      1258  printer->fmt_line("nrn_writes_conc(mech_type, 0);");
      +
      1259  }
      +
      1260 
      +
      1261  if (info.artificial_cell) {
      +
      1262  printer->fmt_line("add_nrn_artcell(mech_type, {});", info.tqitem_index);
      +
      1263  }
      +
      1264 
      +
      1265  if (info.net_event_used) {
      +
      1266  printer->add_line("add_nrn_has_net_event(mech_type);");
      +
      1267  }
      +
      1268 
      +
      1269  if (info.for_netcon_used) {
      +
      1270  auto dparam_it =
      +
      1271  std::find_if(info.semantics.begin(), info.semantics.end(), [](const IndexSemantics& a) {
      +
      1272  return a.name == naming::FOR_NETCON_SEMANTIC;
      +
      1273  });
      +
      1274  if (dparam_it == info.semantics.end()) {
      +
      1275  throw std::runtime_error("Couldn't find `fornetcon` variable.");
      +
      1276  }
      +
      1277 
      +
      1278  int dparam_index = dparam_it->index;
      +
      1279  printer->fmt_line("add_nrn_fornetcons(mech_type, {});", dparam_index);
      +
      1280  }
      +
      1281 
      +
      1282  printer->add_line("hoc_register_var(hoc_scalar_double, hoc_vector_double, hoc_intfunc);");
      +
      1283  if (!info.point_process) {
      +
      1284  printer->add_line("hoc_register_npy_direct(mech_type, npy_direct_func_proc);");
      +
      1285  }
      +
      1286  if (info.net_receive_node) {
      +
      1287  printer->fmt_line("pnt_receive[mech_type] = nrn_net_receive_{};", info.mod_suffix);
      +
      1288  printer->fmt_line("pnt_receive_size[mech_type] = {};", info.num_net_receive_parameters);
      +
      1289  }
      +
      1290 
      +
      1291  if (info.net_receive_initial_node) {
      +
      1292  printer->fmt_line("pnt_receive_init[mech_type] = net_init;");
      +
      1293  }
      +
      1294 
      +
      1295  if (info.thread_callback_register) {
      +
      1296  printer->add_line("_nrn_thread_reg(mech_type, 1, thread_mem_init);");
      +
      1297  printer->add_line("_nrn_thread_reg(mech_type, 0, thread_mem_cleanup);");
      +
      1298  }
      +
      1299 
      +
      1300  if (info.diam_used) {
      +
      1301  printer->fmt_line("{}._morphology_sym = hoc_lookup(\"morphology\");",
      +
      1302  global_struct_instance());
      +
      1303  }
      +
      1304 
      +
      1305  printer->pop_block();
      +
      1306 }
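The routine printed here is the entry point NEURON calls when the compiled mechanism library is loaded. Condensed, the emitted function for a hypothetical density mechanism hh that uses one ion and a TABLE might read roughly as below; mechanism_info, the nrn_*_hh callback names, check_table_thread, the field list and the numeric sizes stand in for values produced by helpers (get_channel_info_var_name(), method_name(), table_thread_function_name(), ...) defined elsewhere, and the seventh register_mech argument is not visible in this hunk:

    extern "C" void _hh_reg() {
        _initlists();
        ion_reg("na", -10000.);
        _na_sym = hoc_lookup("na_ion");
        register_mech(mechanism_info, nrn_alloc_hh, nrn_cur_hh, nrn_jacob_hh,
                      nrn_state_hh, nrn_init_hh, -1 /* not shown in this hunk */,
                      2 /* 1 + thread data index */);
        mech_type = nrn_get_mechtype(mechanism_info[1]);
        hoc_register_parm_default(mech_type, &_parameter_defaults);
        _nrn_thread_table_reg(mech_type, check_table_thread);
        _nrn_mechanism_register_data_fields(mech_type,
            _nrn_mechanism_field<double>{"m"} /* 0 */,
            _nrn_mechanism_field<double*>{"ena", "na_ion"} /* 0 */);
        hoc_register_prop_size(mech_type, 1, 1);
        hoc_register_dparam_semantics(mech_type, 0, "na_ion");
        hoc_register_var(hoc_scalar_double, hoc_vector_double, hoc_intfunc);
        hoc_register_npy_direct(mech_type, npy_direct_func_proc);
    }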
      +
      1307 
      +
      1308 
      + +
      1310  if (!info.thread_callback_register) {
      +
      1311  return;
      +
      1312  }
      +
      1313 
      +
      1314  auto static_thread_data = get_variable_name("thread_data", false);
      +
      1315  auto inuse = get_variable_name("thread_data_in_use", false);
      +
      1316  auto thread_data_index = info.thread_var_thread_id;
      +
      1317  printer->push_block("static void thread_mem_init(Datum* _thread) ");
      +
      1318  printer->push_block(fmt::format("if({})", inuse));
      +
      1319  printer->fmt_line("_thread[{}] = {{neuron::container::do_not_search, new double[{}]{{}}}};",
      +
      1320  thread_data_index,
      +
      1321  info.thread_var_data_size + info.top_local_thread_size);
      +
      1322  printer->pop_block();
      +
      1323  printer->push_block("else");
      +
      1324  printer->fmt_line("_thread[{}] = {{neuron::container::do_not_search, {}}};",
      +
      1325  thread_data_index,
      +
      1326  static_thread_data);
      +
      1327  printer->fmt_line("{} = 1;", inuse);
      +
      1328  printer->pop_block();
      +
      1329  printer->pop_block();
      +
      1330 
      +
      1331  printer->push_block("static void thread_mem_cleanup(Datum* _thread) ");
      +
      1332  printer->fmt_line("double * _thread_data_ptr = _thread[{}].get<double*>();", thread_data_index);
      +
      1333  printer->push_block(fmt::format("if(_thread_data_ptr == {})", static_thread_data));
      +
      1334  printer->fmt_line("{} = 0;", inuse);
      +
      1335  printer->pop_block();
      +
      1336  printer->push_block("else");
      +
      1337  printer->add_line("delete[] _thread_data_ptr;");
      +
      1338  printer->pop_block();
      +
      1339  printer->pop_block();
      +
      1340 }
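These two callbacks manage per-thread storage for thread variables: the first thread reuses the statically allocated buffer, later threads receive their own heap copy, and cleanup frees only the heap copies. Substituting illustrative names (hh_global, thread slot 0, three thread variables), the emitted code would look roughly like:

    static void thread_mem_init(Datum* _thread) {
        if(hh_global.thread_data_in_use) {
            _thread[0] = {neuron::container::do_not_search, new double[3]{}};
        }
        else {
            _thread[0] = {neuron::container::do_not_search, hh_global.thread_data};
            hh_global.thread_data_in_use = 1;
        }
    }
    static void thread_mem_cleanup(Datum* _thread) {
        double * _thread_data_ptr = _thread[0].get<double*>();
        if(_thread_data_ptr == hh_global.thread_data) {
            hh_global.thread_data_in_use = 0;
        }
        else {
            delete[] _thread_data_ptr;
        }
    }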
      +
      1341 
      +
      1342 
      + +
      1344  for (auto const& [var, type]: info.neuron_global_variables) {
      +
      1345  auto const name = var->get_name();
      +
      1346  printer->fmt_line("extern {} {};", type, name);
      +
      1347  }
      +
      1348 }
      +
      1349 
      + +
      1351  auto const value_initialize = print_initializers ? "{}" : "";
      +
      1352  printer->add_newline(2);
      +
      1353  printer->add_line("/** all mechanism instance variables and global variables */");
      +
      1354  printer->fmt_push_block("struct {} ", instance_struct());
      +
      1355 
      +
      1356  for (auto const& [var, type]: info.neuron_global_variables) {
      +
      1357  auto const name = var->get_name();
      +
      1358  printer->fmt_line("{}* {}{};",
      +
      1359  type,
      +
      1360  name,
      +
      1361  print_initializers ? fmt::format("{{&::{}}}", name) : std::string{});
      +
      1362  }
      +
      1363  for (auto& var: codegen_float_variables) {
      +
      1364  const auto& name = var->get_name();
      +
      1365  printer->fmt_line("double* {}{};", name, value_initialize);
      +
      1366  }
      +
      1367  for (auto& var: codegen_int_variables) {
      +
      1368  const auto& name = var.symbol->get_name();
      +
      1369  if (name == naming::POINT_PROCESS_VARIABLE) {
      +
      1370  continue;
      +
      1371  } else if (var.is_index || var.is_integer) {
      +
      1372  // In NEURON we don't create caches for `int*`. Hence, do nothing.
      +
      1373  } else {
      +
      1374  auto qualifier = var.is_constant ? "const " : "";
      +
      1375  auto type = var.is_vdata ? "void*" : default_float_data_type();
      +
      1376  printer->fmt_line("{}{}* const* {}{};", qualifier, type, name, value_initialize);
      +
      1377  }
      +
      1378  }
      +
      1379 
      +
      1380  printer->fmt_line("{}* {}{};",
      +
      1381  global_struct(),
      + +
      1383  print_initializers ? fmt::format("{{&{}}}", global_struct_instance())
      +
      1384  : std::string{});
      +
      1385  printer->pop_block(";");
      +
      1386 }
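The instance struct gathers one pointer per float (RANGE/STATE) variable, optional pointers to NEURON globals, cached dparam fields, and a back-pointer to the global struct. For the hypothetical hh mechanism it might render as follows (all names are illustrative, and the member holding the global-struct pointer is not visible in this hunk):

    struct hh_Instance {
        double* celsius{&::celsius};     // NEURON global, when referenced
        double* m{};                     // one pointer per float variable
        double* gna{};
        const double* const* ion_ena{};  // cached dparam field
        hh_Store* global{&hh_global};    // member name assumed
    };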
      +
      1387 
      + +
      1389  printer->add_newline(2);
      +
      1390  printer->fmt_push_block("static {} make_instance_{}(_nrn_mechanism_cache_range& _lmc)",
      +
      1391  instance_struct(),
      +
      1392  info.mod_suffix);
      +
      1393  printer->fmt_push_block("return {}", instance_struct());
      1394 
      -
      1395 
      -
      1396  for (auto const& [var, type]: info.neuron_global_variables) {
      -
      1397  auto const name = var->get_name();
      -
      1398  make_instance_args.push_back(fmt::format("&::{}", name));
      -
      1399  }
      -
      1400 
      -
      1401 
      -
      1402  const auto codegen_float_variables_size = codegen_float_variables.size();
      -
      1403  for (int i = 0; i < codegen_float_variables_size; ++i) {
      -
      1404  const auto& float_var = codegen_float_variables[i];
      -
      1405  if (float_var->is_array()) {
      -
      1406  make_instance_args.push_back(
      -
      1407  fmt::format("_lmc.template data_array_ptr<{}, {}>()", i, float_var->get_length()));
      -
      1408  } else {
      -
      1409  make_instance_args.push_back(fmt::format("_lmc.template fpfield_ptr<{}>()", i));
      -
      1410  }
      -
      1411  }
      -
      1412 
      -
      1413  const auto codegen_int_variables_size = codegen_int_variables.size();
      -
      1414  for (size_t i = 0; i < codegen_int_variables_size; ++i) {
      -
      1415  const auto& var = codegen_int_variables[i];
      -
      1416  auto name = var.symbol->get_name();
      -
      1417  auto const variable = [&var, i]() -> std::string {
      -
      1418  if (var.is_index || var.is_integer) {
      -
      1419  return "";
      -
      1420  } else if (var.is_vdata) {
      +
      1395  std::vector<std::string> make_instance_args;
      +
      1396 
      +
      1397 
      +
      1398  for (auto const& [var, type]: info.neuron_global_variables) {
      +
      1399  auto const name = var->get_name();
      +
      1400  make_instance_args.push_back(fmt::format("&::{}", name));
      +
      1401  }
      +
      1402 
      +
      1403 
      +
      1404  const auto codegen_float_variables_size = codegen_float_variables.size();
      +
      1405  for (int i = 0; i < codegen_float_variables_size; ++i) {
      +
      1406  const auto& float_var = codegen_float_variables[i];
      +
      1407  if (float_var->is_array()) {
      +
      1408  make_instance_args.push_back(
      +
      1409  fmt::format("_lmc.template data_array_ptr<{}, {}>()", i, float_var->get_length()));
      +
      1410  } else {
      +
      1411  make_instance_args.push_back(fmt::format("_lmc.template fpfield_ptr<{}>()", i));
      +
      1412  }
      +
      1413  }
      +
      1414 
      +
      1415  const auto codegen_int_variables_size = codegen_int_variables.size();
      +
      1416  for (size_t i = 0; i < codegen_int_variables_size; ++i) {
      +
      1417  const auto& var = codegen_int_variables[i];
      +
      1418  auto name = var.symbol->get_name();
      +
      1419  auto const variable = [&var, i]() -> std::string {
      +
      1420  if (var.is_index || var.is_integer) {
      1421  return "";
      -
      1422  } else {
      -
      1423  return fmt::format("_lmc.template dptr_field_ptr<{}>()", i);
      -
      1424  }
      -
      1425  }();
      -
      1426  if (variable != "") {
      -
      1427  make_instance_args.push_back(variable);
      -
      1428  }
      -
      1429  }
      -
      1430 
      -
      1431  printer->add_multi_line(fmt::format("{}", fmt::join(make_instance_args, ",\n")));
      +
      1422  } else if (var.is_vdata) {
      +
      1423  return "";
      +
      1424  } else {
      +
      1425  return fmt::format("_lmc.template dptr_field_ptr<{}>()", i);
      +
      1426  }
      +
      1427  }();
      +
      1428  if (variable != "") {
      +
      1429  make_instance_args.push_back(variable);
      +
      1430  }
      +
      1431  }
      1432 
      -
      1433  printer->pop_block(";");
      -
      1434  printer->pop_block();
      -
      1435 }
      -
      1436 
      - -
      1438  printer->add_newline(2);
      -
      1439  printer->fmt_push_block("struct {} ", node_data_struct());
      -
      1440 
      -
      1441  // Pointers to node variables
      -
      1442  printer->add_line("int const * nodeindices;");
      -
      1443  printer->add_line("double const * node_voltages;");
      -
      1444  printer->add_line("double * node_diagonal;");
      -
      1445  printer->add_line("double * node_rhs;");
      -
      1446  printer->add_line("int nodecount;");
      -
      1447 
      -
      1448  printer->pop_block(";");
      -
      1449 }
      -
      1450 
      - -
      1452  printer->add_newline(2);
      -
      1453  printer->fmt_push_block("static {} make_node_data_{}(NrnThread& nt, Memb_list& _ml_arg)",
      -
      1454  node_data_struct(),
      -
      1455  info.mod_suffix);
      -
      1456 
      -
      1457  std::vector<std::string> make_node_data_args = {"_ml_arg.nodeindices",
      -
      1458  "nt.node_voltage_storage()",
      -
      1459  "nt.node_d_storage()",
      -
      1460  "nt.node_rhs_storage()",
      -
      1461  "_ml_arg.nodecount"};
      -
      1462 
      -
      1463  printer->fmt_push_block("return {}", node_data_struct());
      -
      1464  printer->add_multi_line(fmt::format("{}", fmt::join(make_node_data_args, ",\n")));
      -
      1465 
      -
      1466  printer->pop_block(";");
      -
      1467  printer->pop_block();
      -
      1468 
      -
      1469 
      -
      1470  printer->fmt_push_block("static {} make_node_data_{}(Prop * _prop)",
      -
      1471  node_data_struct(),
      -
      1472  info.mod_suffix);
      -
      1473  printer->add_line("static std::vector<int> node_index{0};");
      -
      1474  printer->add_line("Node* _node = _nrn_mechanism_access_node(_prop);");
      -
      1475 
      -
      1476  make_node_data_args = {"node_index.data()",
      -
      1477  "&_nrn_mechanism_access_voltage(_node)",
      -
      1478  "&_nrn_mechanism_access_d(_node)",
      -
      1479  "&_nrn_mechanism_access_rhs(_node)",
      -
      1480  "1"};
      -
      1481 
      -
      1482  printer->fmt_push_block("return {}", node_data_struct());
      -
      1483  printer->add_multi_line(fmt::format("{}", fmt::join(make_node_data_args, ",\n")));
      -
      1484 
      -
      1485  printer->pop_block(";");
      -
      1486  printer->pop_block();
      -
      1487  printer->add_newline();
      -
      1488 }
      -
      1489 
      - -
      1491  if (codegen_thread_variables.empty()) {
      -
      1492  return;
      -
      1493  }
      -
      1494 
      -
      1495  printer->add_newline(2);
      -
      1496  printer->fmt_push_block("struct {} ", thread_variables_struct());
      -
      1497  printer->add_line("double * thread_data;");
      -
      1498  printer->add_newline();
      -
      1499 
      -
      1500  std::string simd_width = "1";
      +
      1433  printer->add_multi_line(fmt::format("{}", fmt::join(make_instance_args, ",\n")));
      +
      1434 
      +
      1435  printer->pop_block(";");
      +
      1436  printer->pop_block();
      +
      1437 }
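make_instance_* then binds that struct to the sorted-storage cache _lmc, field by field, in the same order the fields were registered. A condensed sketch of the generated function under the same illustrative names (field indices and the array length are made up):

    static hh_Instance make_instance_hh(_nrn_mechanism_cache_range& _lmc) {
        return hh_Instance {
            &::celsius,
            _lmc.template fpfield_ptr<0>(),        // scalar float field
            _lmc.template data_array_ptr<1, 3>(),  // array float field of length 3
            _lmc.template dptr_field_ptr<0>()
        };
    }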
      +
      1438 
      + +
      1440  printer->add_newline(2);
      +
      1441  printer->fmt_push_block("struct {} ", node_data_struct());
      +
      1442 
      +
      1443  // Pointers to node variables
      +
      1444  printer->add_line("int const * nodeindices;");
      +
      1445  printer->add_line("double const * node_voltages;");
      +
      1446  printer->add_line("double * node_diagonal;");
      +
      1447  printer->add_line("double * node_rhs;");
      +
      1448  printer->add_line("int nodecount;");
      +
      1449 
      +
      1450  printer->pop_block(";");
      +
      1451 }
      +
      1452 
      + +
      1454  printer->add_newline(2);
      +
      1455  printer->fmt_push_block("static {} make_node_data_{}(NrnThread& nt, Memb_list& _ml_arg)",
      +
      1456  node_data_struct(),
      +
      1457  info.mod_suffix);
      +
      1458 
      +
      1459  std::vector<std::string> make_node_data_args = {"_ml_arg.nodeindices",
      +
      1460  "nt.node_voltage_storage()",
      +
      1461  "nt.node_d_storage()",
      +
      1462  "nt.node_rhs_storage()",
      +
      1463  "_ml_arg.nodecount"};
      +
      1464 
      +
      1465  printer->fmt_push_block("return {}", node_data_struct());
      +
      1466  printer->add_multi_line(fmt::format("{}", fmt::join(make_node_data_args, ",\n")));
      +
      1467 
      +
      1468  printer->pop_block(";");
      +
      1469  printer->pop_block();
      +
      1470 
      +
      1471 
      +
      1472  printer->fmt_push_block("static {} make_node_data_{}(Prop * _prop)",
      +
      1473  node_data_struct(),
      +
      1474  info.mod_suffix);
      +
      1475  printer->add_line("static std::vector<int> node_index{0};");
      +
      1476  printer->add_line("Node* _node = _nrn_mechanism_access_node(_prop);");
      +
      1477 
      +
      1478  make_node_data_args = {"node_index.data()",
      +
      1479  "&_nrn_mechanism_access_voltage(_node)",
      +
      1480  "&_nrn_mechanism_access_d(_node)",
      +
      1481  "&_nrn_mechanism_access_rhs(_node)",
      +
      1482  "1"};
      +
      1483 
      +
      1484  printer->fmt_push_block("return {}", node_data_struct());
      +
      1485  printer->add_multi_line(fmt::format("{}", fmt::join(make_node_data_args, ",\n")));
      +
      1486 
      +
      1487  printer->pop_block(";");
      +
      1488  printer->pop_block();
      +
      1489  printer->add_newline();
      +
      1490 }
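The node-data struct and its two make_node_data_* factories expose the voltage, diagonal and right-hand-side vectors either for a whole Memb_list or for a single Prop. The emitted code follows the printed lines closely; only the struct name below is illustrative:

    struct hh_NodeData {
        int const * nodeindices;
        double const * node_voltages;
        double * node_diagonal;
        double * node_rhs;
        int nodecount;
    };
    static hh_NodeData make_node_data_hh(NrnThread& nt, Memb_list& _ml_arg) {
        return hh_NodeData {
            _ml_arg.nodeindices,
            nt.node_voltage_storage(),
            nt.node_d_storage(),
            nt.node_rhs_storage(),
            _ml_arg.nodecount
        };
    }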
      +
      1491 
      + +
      1493  if (codegen_thread_variables.empty()) {
      +
      1494  return;
      +
      1495  }
      +
      1496 
      +
      1497  printer->add_newline(2);
      +
      1498  printer->fmt_push_block("struct {} ", thread_variables_struct());
      +
      1499  printer->add_line("double * thread_data;");
      +
      1500  printer->add_newline();
      1501 
      -
      1502 
      -
      1503  for (const auto& var_info: codegen_thread_variables) {
      -
      1504  printer->fmt_push_block("double * {}_ptr(size_t id)", var_info.symbol->get_name());
      -
      1505  printer->fmt_line("return thread_data + {} + (id % {});", var_info.offset, simd_width);
      -
      1506  printer->pop_block();
      -
      1507 
      -
      1508  printer->fmt_push_block("double & {}(size_t id)", var_info.symbol->get_name());
      -
      1509  printer->fmt_line("return thread_data[{} + (id % {})];", var_info.offset, simd_width);
      -
      1510  printer->pop_block();
      -
      1511  }
      -
      1512  printer->add_newline();
      -
      1513 
      -
      1514  printer->push_block(fmt::format("{}(double * const thread_data)", thread_variables_struct()));
      -
      1515  printer->fmt_line("this->thread_data = thread_data;");
      -
      1516  printer->pop_block();
      -
      1517 
      -
      1518  printer->pop_block(";");
      -
      1519 }
      -
      1520 
      -
      1521 
      - -
      1523  // read ion statements
      -
      1524  auto read_statements = ion_read_statements(BlockType::Initial);
      -
      1525  for (auto& statement: read_statements) {
      -
      1526  printer->add_line(statement);
      -
      1527  }
      -
      1528 
      -
      1529  // initial block
      -
      1530  if (node != nullptr) {
      -
      1531  const auto& block = node->get_statement_block();
      -
      1532  print_statement_block(*block, false, false);
      -
      1533  }
      -
      1534 
      -
      1535  // write ion statements
      -
      1536  auto write_statements = ion_write_statements(BlockType::Initial);
      -
      1537  for (auto& statement: write_statements) {
      -
      1538  auto text = process_shadow_update_statement(statement, BlockType::Initial);
      -
      1539  printer->add_line(text);
      -
      1540  }
      -
      1541 }
      -
      1542 
      -
      1543 
      - -
      1545  const std::string& function_name) {
      -
      1546  std::string method = function_name.empty() ? compute_method_name(type) : function_name;
      -
      1547  ParamVector args = {{"", "const _nrn_model_sorted_token&", "", "_sorted_token"},
      -
      1548  {"", "NrnThread*", "", "nt"},
      -
      1549  {"", "Memb_list*", "", "_ml_arg"},
      -
      1550  {"", "int", "", "_type"}};
      -
      1551  printer->fmt_push_block("void {}({})", method, get_parameter_str(args));
      -
      1552 
      -
      1553  printer->add_line("_nrn_mechanism_cache_range _lmc{_sorted_token, *nt, *_ml_arg, _type};");
      -
      1554  printer->fmt_line("auto inst = make_instance_{}(_lmc);", info.mod_suffix);
      -
      1555  printer->fmt_line("auto node_data = make_node_data_{}(*nt, *_ml_arg);", info.mod_suffix);
      -
      1556 
      -
      1557  printer->add_line("auto nodecount = _ml_arg->nodecount;");
      -
      1558  printer->add_line("auto* _thread = _ml_arg->_thread;");
      -
      1559  if (!codegen_thread_variables.empty()) {
      -
      1560  printer->fmt_line("auto _thread_vars = {}(_thread[{}].get<double*>());",
      -
      1561  thread_variables_struct(),
      -
      1562  info.thread_var_thread_id);
      -
      1563  }
      -
      1564 }
      -
      1565 
      -
      1566 
      -
      1567 void CodegenNeuronCppVisitor::print_nrn_init(bool skip_init_check) {
      -
      1568  printer->add_newline(2);
      -
      1569 
      -
      1570  print_global_function_common_code(BlockType::Initial);
      +
      1502  std::string simd_width = "1";
      +
      1503 
      +
      1504 
      +
      1505  for (const auto& var_info: codegen_thread_variables) {
      +
      1506  printer->fmt_push_block("double * {}_ptr(size_t id)", var_info.symbol->get_name());
      +
      1507  printer->fmt_line("return thread_data + {} + (id % {});", var_info.offset, simd_width);
      +
      1508  printer->pop_block();
      +
      1509 
      +
      1510  printer->fmt_push_block("double & {}(size_t id)", var_info.symbol->get_name());
      +
      1511  printer->fmt_line("return thread_data[{} + (id % {})];", var_info.offset, simd_width);
      +
      1512  printer->pop_block();
      +
      1513  }
      +
      1514  printer->add_newline();
      +
      1515 
      +
      1516  printer->push_block(fmt::format("{}(double * const thread_data)", thread_variables_struct()));
      +
      1517  printer->fmt_line("this->thread_data = thread_data;");
      +
      1518  printer->pop_block();
      +
      1519 
      +
      1520  printer->pop_block(";");
      +
      1521 }
      +
      1522 
      +
      1523 
+
1524  void CodegenNeuronCppVisitor::print_initial_block(const InitialBlock* node) {
+
      1525  // read ion statements
      +
      1526  auto read_statements = ion_read_statements(BlockType::Initial);
      +
      1527  for (auto& statement: read_statements) {
      +
      1528  printer->add_line(statement);
      +
      1529  }
      +
      1530 
      +
      1531  // initial block
      +
      1532  if (node != nullptr) {
      +
      1533  const auto& block = node->get_statement_block();
      +
      1534  print_statement_block(*block, false, false);
      +
      1535  }
      +
      1536 
      +
      1537  // write ion statements
      +
      1538  auto write_statements = ion_write_statements(BlockType::Initial);
      +
      1539  for (auto& statement: write_statements) {
      +
      1540  auto text = process_shadow_update_statement(statement, BlockType::Initial);
      +
      1541  printer->add_line(text);
      +
      1542  }
      +
      1543 }
      +
      1544 
      +
      1545 
+
1546  void CodegenNeuronCppVisitor::print_global_function_common_code(BlockType type,
+
      1547  const std::string& function_name) {
      +
      1548  std::string method = function_name.empty() ? compute_method_name(type) : function_name;
      +
      1549  ParamVector args = {{"", "const _nrn_model_sorted_token&", "", "_sorted_token"},
      +
      1550  {"", "NrnThread*", "", "nt"},
      +
      1551  {"", "Memb_list*", "", "_ml_arg"},
      +
      1552  {"", "int", "", "_type"}};
      +
      1553  printer->fmt_push_block("void {}({})", method, get_parameter_str(args));
      +
      1554 
      +
      1555  printer->add_line("_nrn_mechanism_cache_range _lmc{_sorted_token, *nt, *_ml_arg, _type};");
      +
      1556  printer->fmt_line("auto inst = make_instance_{}(_lmc);", info.mod_suffix);
      +
      1557  printer->fmt_line("auto node_data = make_node_data_{}(*nt, *_ml_arg);", info.mod_suffix);
      +
      1558 
      +
      1559  printer->add_line("auto nodecount = _ml_arg->nodecount;");
      +
      1560  printer->add_line("auto* _thread = _ml_arg->_thread;");
      +
      1561  if (!codegen_thread_variables.empty()) {
      +
      1562  printer->fmt_line("auto _thread_vars = {}(_thread[{}].get<double*>());",
      +
      1563  thread_variables_struct(),
      +
      1564  info.thread_var_thread_id);
      +
      1565  }
      +
      1566 }
      +
      1567 
      +
      1568 
      +
      1569 void CodegenNeuronCppVisitor::print_nrn_init(bool skip_init_check) {
      +
      1570  printer->add_newline(2);
      1571 
      -
      1572  printer->push_block("for (int id = 0; id < nodecount; id++)");
      +
      1572  print_global_function_common_code(BlockType::Initial);
      1573 
      -
      1574  printer->add_line("auto* _ppvar = _ml_arg->pdata[id];");
      -
      1575  if (!info.artificial_cell) {
      -
      1576  printer->add_line("int node_id = node_data.nodeindices[id];");
      -
      1577  printer->add_line("auto v = node_data.node_voltages[node_id];");
      -
      1578  }
      -
      1579 
      -
      1580  print_rename_state_vars();
      +
      1574  printer->push_block("for (int id = 0; id < nodecount; id++)");
      +
      1575 
      +
      1576  printer->add_line("auto* _ppvar = _ml_arg->pdata[id];");
      +
      1577  if (!info.artificial_cell) {
      +
      1578  printer->add_line("int node_id = node_data.nodeindices[id];");
      +
      1579  printer->add_line("auto v = node_data.node_voltages[node_id];");
      +
      1580  }
      1581 
      -
      1582  if (!info.changed_dt.empty()) {
      -
      1583  printer->fmt_line("double _save_prev_dt = {};",
      -
      1584  get_variable_name(naming::NTHREAD_DT_VARIABLE));
      -
      1585  printer->fmt_line("{} = {};",
      -
      1586  get_variable_name(naming::NTHREAD_DT_VARIABLE),
      -
      1587  info.changed_dt);
      -
      1588  }
      -
      1589 
      -
      1590  print_initial_block(info.initial_node);
      +
      1582  print_rename_state_vars();
      +
      1583 
      +
      1584  if (!info.changed_dt.empty()) {
      +
      1585  printer->fmt_line("double _save_prev_dt = {};",
      +
      1586  get_variable_name(naming::NTHREAD_DT_VARIABLE));
      +
      1587  printer->fmt_line("{} = {};",
      +
      1588  get_variable_name(naming::NTHREAD_DT_VARIABLE),
      +
      1589  info.changed_dt);
      +
      1590  }
      1591 
      -
      1592  if (!info.changed_dt.empty()) {
      -
      1593  printer->fmt_line("{} = _save_prev_dt;", get_variable_name(naming::NTHREAD_DT_VARIABLE));
      -
      1594  }
      -
      1595 
      -
      1596  printer->pop_block();
      -
      1597  printer->pop_block();
      -
      1598 }
      -
      1599 
-
1600  void CodegenNeuronCppVisitor::print_nrn_jacob() {
-
      1601  printer->add_newline(2);
      -
      1602 
      -
      1603  ParamVector args = {{"", "const _nrn_model_sorted_token&", "", "_sorted_token"},
      -
      1604  {"", "NrnThread*", "", "nt"},
      -
      1605  {"", "Memb_list*", "", "_ml_arg"},
      -
      1606  {"", "int", "", "_type"}};
      -
      1607 
      -
      1608  printer->fmt_push_block("static void {}({})",
      -
      1609  method_name(naming::NRN_JACOB_METHOD),
      -
      1610  get_parameter_str(args)); // begin function
      -
      1611 
      -
      1612 
      -
      1613  printer->add_multi_line(
      -
      1614  "_nrn_mechanism_cache_range _lmc{_sorted_token, *nt, *_ml_arg, _type};");
      -
      1615 
      -
      1616  printer->fmt_line("auto inst = make_instance_{}(_lmc);", info.mod_suffix);
      -
      1617  printer->fmt_line("auto node_data = make_node_data_{}(*nt, *_ml_arg);", info.mod_suffix);
      -
      1618  printer->fmt_line("auto nodecount = _ml_arg->nodecount;");
      -
      1619  printer->push_block("for (int id = 0; id < nodecount; id++)"); // begin for
      -
      1620 
      -
      1621  if (breakpoint_exist()) {
      -
      1622  printer->add_line("int node_id = node_data.nodeindices[id];");
      -
      1623  printer->fmt_line("node_data.node_diagonal[node_id] {} inst.{}[id];",
      -
      1624  operator_for_d(),
      -
      1625  info.vectorize ? naming::CONDUCTANCE_UNUSED_VARIABLE
-
1626  : naming::CONDUCTANCE_VARIABLE);
-
      1627  }
      -
      1628 
      -
      1629  printer->pop_block(); // end for
      -
      1630  printer->pop_block(); // end function
      -
      1631 }
      -
      1632 
      -
      1633 
-
1634  void CodegenNeuronCppVisitor::print_callable_preamble_from_prop() {
-
      1635  printer->add_line("Datum* _ppvar = _nrn_mechanism_access_dparam(prop);");
      -
      1636  printer->add_line("_nrn_mechanism_cache_instance _lmc{prop};");
      -
      1637  printer->add_line("const size_t id = 0;");
      -
      1638 
      -
      1639  printer->fmt_line("auto inst = make_instance_{}(_lmc);", info.mod_suffix);
      -
      1640  if (!info.artificial_cell) {
      -
      1641  printer->fmt_line("auto node_data = make_node_data_{}(prop);", info.mod_suffix);
      -
      1642  }
      -
      1643 
      -
      1644  if (!codegen_thread_variables.empty()) {
      -
      1645  printer->fmt_line("auto _thread_vars = {}({}_global.thread_data);",
      -
      1646  thread_variables_struct(),
      -
      1647  info.mod_suffix);
      -
      1648  }
      -
      1649 
      -
      1650  printer->add_newline();
      -
      1651 }
      -
      1652 
-
1653  void CodegenNeuronCppVisitor::print_nrn_constructor_declaration() {
-
      1654  if (info.constructor_node) {
      -
      1655  printer->fmt_line("void {}(Prop* prop);", method_name(naming::NRN_CONSTRUCTOR_METHOD));
      -
      1656  }
      -
      1657 }
      -
      1658 
-
1659  void CodegenNeuronCppVisitor::print_nrn_constructor() {
-
      1660  if (info.constructor_node) {
      -
      1661  printer->fmt_push_block("void {}(Prop* prop)", method_name(naming::NRN_CONSTRUCTOR_METHOD));
      -
      1662 
      -
      1663  print_callable_preamble_from_prop();
      +
      1592  print_initial_block(info.initial_node);
      +
      1593 
      +
      1594  if (!info.changed_dt.empty()) {
      +
      1595  printer->fmt_line("{} = _save_prev_dt;", get_variable_name(naming::NTHREAD_DT_VARIABLE));
      +
      1596  }
      +
      1597 
      +
      1598  printer->pop_block();
      +
      1599  printer->pop_block();
      +
      1600 }
      +
      1601 
+
1602  void CodegenNeuronCppVisitor::print_nrn_jacob() {
+
      1603  printer->add_newline(2);
      +
      1604 
      +
      1605  ParamVector args = {{"", "const _nrn_model_sorted_token&", "", "_sorted_token"},
      +
      1606  {"", "NrnThread*", "", "nt"},
      +
      1607  {"", "Memb_list*", "", "_ml_arg"},
      +
      1608  {"", "int", "", "_type"}};
      +
      1609 
      +
      1610  printer->fmt_push_block("static void {}({})",
      +
      1611  method_name(naming::NRN_JACOB_METHOD),
      +
      1612  get_parameter_str(args)); // begin function
      +
      1613 
      +
      1614 
      +
      1615  printer->add_multi_line(
      +
      1616  "_nrn_mechanism_cache_range _lmc{_sorted_token, *nt, *_ml_arg, _type};");
      +
      1617 
      +
      1618  printer->fmt_line("auto inst = make_instance_{}(_lmc);", info.mod_suffix);
      +
      1619  printer->fmt_line("auto node_data = make_node_data_{}(*nt, *_ml_arg);", info.mod_suffix);
      +
      1620  printer->fmt_line("auto nodecount = _ml_arg->nodecount;");
      +
      1621  printer->push_block("for (int id = 0; id < nodecount; id++)"); // begin for
      +
      1622 
      +
      1623  if (breakpoint_exist()) {
      +
      1624  printer->add_line("int node_id = node_data.nodeindices[id];");
      +
      1625  printer->fmt_line("node_data.node_diagonal[node_id] {} inst.{}[id];",
      +
      1626  operator_for_d(),
      +
      1627  info.vectorize ? naming::CONDUCTANCE_UNUSED_VARIABLE
+
1628  : naming::CONDUCTANCE_VARIABLE);
+
      1629  }
      +
      1630 
      +
      1631  printer->pop_block(); // end for
      +
      1632  printer->pop_block(); // end function
      +
      1633 }
      +
      1634 
      +
      1635 
+
1636  void CodegenNeuronCppVisitor::print_callable_preamble_from_prop() {
+
      1637  printer->add_line("Datum* _ppvar = _nrn_mechanism_access_dparam(prop);");
      +
      1638  printer->add_line("_nrn_mechanism_cache_instance _lmc{prop};");
      +
      1639  printer->add_line("const size_t id = 0;");
      +
      1640 
      +
      1641  printer->fmt_line("auto inst = make_instance_{}(_lmc);", info.mod_suffix);
      +
      1642  if (!info.artificial_cell) {
      +
      1643  printer->fmt_line("auto node_data = make_node_data_{}(prop);", info.mod_suffix);
      +
      1644  }
      +
      1645 
      +
      1646  if (!codegen_thread_variables.empty()) {
      +
      1647  printer->fmt_line("auto _thread_vars = {}({}_global.thread_data);",
      +
      1648  thread_variables_struct(),
      +
      1649  info.mod_suffix);
      +
      1650  }
      +
      1651 
      +
      1652  printer->add_newline();
      +
      1653 }
      +
      1654 
+
1655  void CodegenNeuronCppVisitor::print_nrn_constructor_declaration() {
+
      1656  if (info.constructor_node) {
      +
      1657  printer->fmt_line("void {}(Prop* prop);", method_name(naming::NRN_CONSTRUCTOR_METHOD));
      +
      1658  }
      +
      1659 }
      +
      1660 
+
1661  void CodegenNeuronCppVisitor::print_nrn_constructor() {
+
      1662  if (info.constructor_node) {
      +
      1663  printer->fmt_push_block("void {}(Prop* prop)", method_name(naming::NRN_CONSTRUCTOR_METHOD));
      1664 
      -
      1665  auto block = info.constructor_node->get_statement_block();
      -
      1666  print_statement_block(*block, false, false);
      -
      1667 
      -
      1668  printer->pop_block();
      -
      1669  }
      -
      1670 }
      -
      1671 
      -
      1672 
-
1673  void CodegenNeuronCppVisitor::print_nrn_destructor_declaration() {
-
      1674  printer->fmt_line("void {}(Prop* prop);", method_name(naming::NRN_DESTRUCTOR_METHOD));
      -
      1675 }
      -
      1676 
-
1677  void CodegenNeuronCppVisitor::print_nrn_destructor() {
-
      1678  printer->fmt_push_block("void {}(Prop* prop)", method_name(naming::NRN_DESTRUCTOR_METHOD));
      -
      1679  print_callable_preamble_from_prop();
      -
      1680 
      -
      1681  for (const auto& rv: info.random_variables) {
      -
      1682  printer->fmt_line("nrnran123_deletestream((nrnran123_State*) {});",
      -
      1683  get_variable_name(get_name(rv), false));
      -
      1684  }
      -
      1685 
      -
      1686 
      -
      1687  if (info.destructor_node) {
      -
      1688  auto block = info.destructor_node->get_statement_block();
      -
      1689  print_statement_block(*block, false, false);
      -
      1690  }
      -
      1691 
      -
      1692  printer->pop_block();
      -
      1693 }
      -
      1694 
      -
      1695 
-
1696  void CodegenNeuronCppVisitor::print_nrn_alloc() {
-
      1697  printer->add_newline(2);
      -
      1698 
      -
      1699  auto method = method_name(naming::NRN_ALLOC_METHOD);
      -
      1700  printer->fmt_push_block("static void {}(Prop* _prop)", method);
      -
      1701  printer->add_line("Datum *_ppvar = nullptr;");
      -
      1702 
      -
      1703  if (info.point_process) {
      -
      1704  printer->push_block("if (nrn_point_prop_)");
      -
      1705  printer->add_multi_line(R"CODE(
      -
      1706  _nrn_mechanism_access_alloc_seq(_prop) = _nrn_mechanism_access_alloc_seq(nrn_point_prop_);
      -
      1707  _ppvar = _nrn_mechanism_access_dparam(nrn_point_prop_);
      -
      1708  )CODE");
      -
      1709  printer->chain_block("else");
      -
      1710  }
      -
      1711  if (info.semantic_variable_count) {
      -
      1712  printer->fmt_line("_ppvar = nrn_prop_datum_alloc(mech_type, {}, _prop);",
      -
      1713  info.semantic_variable_count);
      -
      1714  printer->add_line("_nrn_mechanism_access_dparam(_prop) = _ppvar;");
      -
      1715  }
      -
      1716  printer->add_multi_line(R"CODE(
      -
      1717  _nrn_mechanism_cache_instance _lmc{_prop};
      -
      1718  size_t const _iml = 0;
      -
      1719  )CODE");
      -
      1720  printer->fmt_line("assert(_nrn_mechanism_get_num_vars(_prop) == {});",
      -
      1721  codegen_float_variables.size());
      -
      1722  if (float_variables_size()) {
      -
      1723  printer->add_line("/*initialize range parameters*/");
      -
      1724  for (size_t i_param = 0; i_param < info.range_parameter_vars.size(); ++i_param) {
      -
      1725  const auto var = info.range_parameter_vars[i_param];
      -
      1726  if (var->is_array()) {
      -
      1727  continue;
      -
      1728  }
      -
      1729  const auto& var_name = var->get_name();
      -
      1730  auto var_pos = position_of_float_var(var_name);
      -
      1731 
      -
      1732  printer->fmt_line("_lmc.template fpfield<{}>(_iml) = {}; /* {} */",
      -
      1733  var_pos,
      -
      1734  fmt::format("_parameter_defaults[{}]", i_param),
      -
      1735  var_name);
      -
      1736  }
      -
      1737  }
      -
      1738  if (info.point_process) {
      -
      1739  printer->pop_block();
      -
      1740  }
      -
      1741 
      -
      1742  if (info.semantic_variable_count) {
      -
      1743  printer->add_line("_nrn_mechanism_access_dparam(_prop) = _ppvar;");
      -
      1744  }
      -
      1745 
      -
      1746  const auto codegen_int_variables_size = codegen_int_variables.size();
      +
      1665  print_callable_preamble_from_prop();
      +
      1666 
      +
      1667  auto block = info.constructor_node->get_statement_block();
      +
      1668  print_statement_block(*block, false, false);
      +
      1669 
      +
      1670  printer->pop_block();
      +
      1671  }
      +
      1672 }
      +
      1673 
      +
      1674 
+
1675  void CodegenNeuronCppVisitor::print_nrn_destructor_declaration() {
+
      1676  printer->fmt_line("void {}(Prop* prop);", method_name(naming::NRN_DESTRUCTOR_METHOD));
      +
      1677 }
      +
      1678 
+
1679  void CodegenNeuronCppVisitor::print_nrn_destructor() {
+
      1680  printer->fmt_push_block("void {}(Prop* prop)", method_name(naming::NRN_DESTRUCTOR_METHOD));
      +
      1681  print_callable_preamble_from_prop();
      +
      1682 
      +
      1683  for (const auto& rv: info.random_variables) {
      +
      1684  printer->fmt_line("nrnran123_deletestream((nrnran123_State*) {});",
      +
      1685  get_variable_name(get_name(rv), false));
      +
      1686  }
      +
      1687 
      +
      1688 
      +
      1689  if (info.destructor_node) {
      +
      1690  auto block = info.destructor_node->get_statement_block();
      +
      1691  print_statement_block(*block, false, false);
      +
      1692  }
      +
      1693 
      +
      1694  printer->pop_block();
      +
      1695 }
      +
      1696 
      +
      1697 
+
1698  void CodegenNeuronCppVisitor::print_nrn_alloc() {
+
      1699  printer->add_newline(2);
      +
      1700 
      +
      1701  auto method = method_name(naming::NRN_ALLOC_METHOD);
      +
      1702  printer->fmt_push_block("static void {}(Prop* _prop)", method);
      +
      1703  printer->add_line("Datum *_ppvar = nullptr;");
      +
      1704 
      +
      1705  if (info.point_process) {
      +
      1706  printer->push_block("if (nrn_point_prop_)");
      +
      1707  printer->add_multi_line(R"CODE(
      +
      1708  _nrn_mechanism_access_alloc_seq(_prop) = _nrn_mechanism_access_alloc_seq(nrn_point_prop_);
      +
      1709  _ppvar = _nrn_mechanism_access_dparam(nrn_point_prop_);
      +
      1710  )CODE");
      +
      1711  printer->chain_block("else");
      +
      1712  }
      +
      1713  if (info.semantic_variable_count) {
      +
      1714  printer->fmt_line("_ppvar = nrn_prop_datum_alloc(mech_type, {}, _prop);",
      +
      1715  info.semantic_variable_count);
      +
      1716  printer->add_line("_nrn_mechanism_access_dparam(_prop) = _ppvar;");
      +
      1717  }
      +
      1718  printer->add_multi_line(R"CODE(
      +
      1719  _nrn_mechanism_cache_instance _lmc{_prop};
      +
      1720  size_t const _iml = 0;
      +
      1721  )CODE");
      +
      1722  printer->fmt_line("assert(_nrn_mechanism_get_num_vars(_prop) == {});",
      +
      1723  codegen_float_variables.size());
      +
      1724  if (float_variables_size()) {
      +
      1725  printer->add_line("/*initialize range parameters*/");
      +
      1726  for (size_t i_param = 0; i_param < info.range_parameter_vars.size(); ++i_param) {
      +
      1727  const auto var = info.range_parameter_vars[i_param];
      +
      1728  if (var->is_array()) {
      +
      1729  continue;
      +
      1730  }
      +
      1731  const auto& var_name = var->get_name();
      +
      1732  auto var_pos = position_of_float_var(var_name);
      +
      1733 
      +
      1734  printer->fmt_line("_lmc.template fpfield<{}>(_iml) = {}; /* {} */",
      +
      1735  var_pos,
      +
      1736  fmt::format("_parameter_defaults[{}]", i_param),
      +
      1737  var_name);
      +
      1738  }
      +
      1739  }
      +
      1740  if (info.point_process) {
      +
      1741  printer->pop_block();
      +
      1742  }
      +
      1743 
      +
      1744  if (info.semantic_variable_count) {
      +
      1745  printer->add_line("_nrn_mechanism_access_dparam(_prop) = _ppvar;");
      +
      1746  }
      1747 
      -
      1748  if (info.diam_used || info.area_used) {
      -
      1749  for (size_t i = 0; i < codegen_int_variables.size(); ++i) {
      -
      1750  auto var_info = codegen_int_variables[i];
      -
      1751  if (var_info.symbol->get_name() == naming::DIAM_VARIABLE) {
      -
      1752  printer->fmt_line("Prop * morphology_prop = need_memb({}._morphology_sym);",
      -
      1753  global_struct_instance());
      -
      1754  printer->fmt_line(
      -
      1755  "_ppvar[{}] = _nrn_mechanism_get_param_handle(morphology_prop, 0);", i);
      -
      1756  }
      -
      1757  if (var_info.symbol->get_name() == naming::AREA_VARIABLE) {
      -
      1758  printer->fmt_line("_ppvar[{}] = _nrn_mechanism_get_area_handle(nrn_alloc_node_);",
      -
      1759  i);
      -
      1760  }
      -
      1761  }
      -
      1762  }
      -
      1763 
      -
      1764  for (const auto& ion: info.ions) {
      -
      1765  printer->fmt_line("Symbol * {}_sym = hoc_lookup(\"{}_ion\");", ion.name, ion.name);
      -
      1766  printer->fmt_line("Prop * {}_prop = need_memb({}_sym);", ion.name, ion.name);
      -
      1767 
      -
      1768  if (ion.is_exterior_conc_written()) {
      -
      1769  printer->fmt_line("nrn_check_conc_write(_prop, {}_prop, 0);", ion.name);
      -
      1770  }
      -
      1771 
      -
      1772  if (ion.is_interior_conc_written()) {
      -
      1773  printer->fmt_line("nrn_check_conc_write(_prop, {}_prop, 1);", ion.name);
      -
      1774  }
      -
      1775 
      -
      1776  int conc = ion.is_conc_written() ? 3 : int(ion.is_conc_read());
      -
      1777  int rev = ion.is_rev_written() ? 3 : int(ion.is_rev_read());
      -
      1778 
      -
      1779  printer->fmt_line("nrn_promote({}_prop, {}, {});", ion.name, conc, rev);
      +
      1748  const auto codegen_int_variables_size = codegen_int_variables.size();
      +
      1749 
      +
      1750  if (info.diam_used || info.area_used) {
      +
      1751  for (size_t i = 0; i < codegen_int_variables.size(); ++i) {
      +
      1752  auto var_info = codegen_int_variables[i];
      +
      1753  if (var_info.symbol->get_name() == naming::DIAM_VARIABLE) {
      +
      1754  printer->fmt_line("Prop * morphology_prop = need_memb({}._morphology_sym);",
      +
      1755  global_struct_instance());
      +
      1756  printer->fmt_line(
      +
      1757  "_ppvar[{}] = _nrn_mechanism_get_param_handle(morphology_prop, 0);", i);
      +
      1758  }
      +
      1759  if (var_info.symbol->get_name() == naming::AREA_VARIABLE) {
      +
      1760  printer->fmt_line("_ppvar[{}] = _nrn_mechanism_get_area_handle(nrn_alloc_node_);",
      +
      1761  i);
      +
      1762  }
      +
      1763  }
      +
      1764  }
      +
      1765 
      +
      1766  for (const auto& ion: info.ions) {
      +
      1767  printer->fmt_line("Symbol * {}_sym = hoc_lookup(\"{}_ion\");", ion.name, ion.name);
      +
      1768  printer->fmt_line("Prop * {}_prop = need_memb({}_sym);", ion.name, ion.name);
      +
      1769 
      +
      1770  if (ion.is_exterior_conc_written()) {
      +
      1771  printer->fmt_line("nrn_check_conc_write(_prop, {}_prop, 0);", ion.name);
      +
      1772  }
      +
      1773 
      +
      1774  if (ion.is_interior_conc_written()) {
      +
      1775  printer->fmt_line("nrn_check_conc_write(_prop, {}_prop, 1);", ion.name);
      +
      1776  }
      +
      1777 
      +
      1778  int conc = ion.is_conc_written() ? 3 : int(ion.is_conc_read());
      +
      1779  int rev = ion.is_rev_written() ? 3 : int(ion.is_rev_read());
      1780 
      -
      1781  for (size_t i = 0; i < codegen_int_variables_size; ++i) {
      -
      1782  const auto& var = codegen_int_variables[i];
      -
      1783 
      -
      1784  const std::string& var_name = var.symbol->get_name();
      +
      1781  printer->fmt_line("nrn_promote({}_prop, {}, {});", ion.name, conc, rev);
      +
      1782 
      +
      1783  for (size_t i = 0; i < codegen_int_variables_size; ++i) {
      +
      1784  const auto& var = codegen_int_variables[i];
      1785 
      -
      1786  if (stringutils::starts_with(var_name, "ion_")) {
      -
      1787  std::string ion_var_name = std::string(var_name.begin() + 4, var_name.end());
      -
      1788  if (ion.is_ionic_variable(ion_var_name) ||
      -
      1789  ion.is_current_derivative(ion_var_name) || ion.is_rev_potential(ion_var_name)) {
      -
      1790  printer->fmt_line("_ppvar[{}] = _nrn_mechanism_get_param_handle({}_prop, {});",
      -
      1791  i,
      -
      1792  ion.name,
      -
      1793  ion.variable_index(ion_var_name));
      -
      1794  }
      -
      1795  } else {
      -
      1796  if (ion.is_style(var_name)) {
      -
      1797  printer->fmt_line(
      -
      1798  "_ppvar[{}] = {{neuron::container::do_not_search, "
      -
      1799  "&(_nrn_mechanism_access_dparam({}_prop)[0].literal_value<int>())}};",
      -
      1800  i,
      -
      1801  ion.name);
      -
      1802  }
      -
      1803  }
      -
      1804  }
      -
      1805  }
      -
      1806 
      -
      1807  if (!info.random_variables.empty()) {
      -
      1808  for (const auto& rv: info.random_variables) {
      -
      1809  printer->fmt_line("{} = nrnran123_newstream();",
      -
      1810  get_variable_name(get_name(rv), false));
      -
      1811  }
      -
      1812  printer->fmt_line("nrn_mech_inst_destruct[mech_type] = neuron::{};",
      -
      1813  method_name(naming::NRN_DESTRUCTOR_METHOD));
      -
      1814  }
      -
      1815 
      -
      1816  if (info.point_process || info.artificial_cell) {
      -
      1817  printer->fmt_push_block("if(!nrn_point_prop_)");
      -
      1818 
      -
      1819  if (info.constructor_node) {
      -
      1820  printer->fmt_line("{}(_prop);", method_name(naming::NRN_CONSTRUCTOR_METHOD));
      -
      1821  }
      -
      1822  printer->pop_block();
      -
      1823  }
      -
      1824 
      -
      1825  printer->pop_block();
      -
      1826 }
      -
      1827 
      -
      1828 
      -
      1829 /****************************************************************************************/
      -
      1830 /* Print nrn_state routine */
      +
      1786  const std::string& var_name = var.symbol->get_name();
      +
      1787 
      +
      1788  if (stringutils::starts_with(var_name, "ion_")) {
      +
      1789  std::string ion_var_name = std::string(var_name.begin() + 4, var_name.end());
      +
      1790  if (ion.is_ionic_variable(ion_var_name) ||
      +
      1791  ion.is_current_derivative(ion_var_name) || ion.is_rev_potential(ion_var_name)) {
      +
      1792  printer->fmt_line("_ppvar[{}] = _nrn_mechanism_get_param_handle({}_prop, {});",
      +
      1793  i,
      +
      1794  ion.name,
      +
      1795  ion.variable_index(ion_var_name));
      +
      1796  }
      +
      1797  } else {
      +
      1798  if (ion.is_style(var_name)) {
      +
      1799  printer->fmt_line(
      +
      1800  "_ppvar[{}] = {{neuron::container::do_not_search, "
      +
      1801  "&(_nrn_mechanism_access_dparam({}_prop)[0].literal_value<int>())}};",
      +
      1802  i,
      +
      1803  ion.name);
      +
      1804  }
      +
      1805  }
      +
      1806  }
      +
      1807  }
      +
      1808 
      +
      1809  if (!info.random_variables.empty()) {
      +
      1810  for (const auto& rv: info.random_variables) {
      +
      1811  printer->fmt_line("{} = nrnran123_newstream();",
      +
      1812  get_variable_name(get_name(rv), false));
      +
      1813  }
      +
      1814  printer->fmt_line("nrn_mech_inst_destruct[mech_type] = neuron::{};",
      +
      1815  method_name(naming::NRN_DESTRUCTOR_METHOD));
      +
      1816  }
      +
      1817 
      +
      1818  if (info.point_process || info.artificial_cell) {
      +
      1819  printer->fmt_push_block("if(!nrn_point_prop_)");
      +
      1820 
      +
      1821  if (info.constructor_node) {
      +
      1822  printer->fmt_line("{}(_prop);", method_name(naming::NRN_CONSTRUCTOR_METHOD));
      +
      1823  }
      +
      1824  printer->pop_block();
      +
      1825  }
      +
      1826 
      +
      1827  printer->pop_block();
      +
      1828 }
      +
      1829 
      +
      1830 
      1831 /****************************************************************************************/
      -
      1832 
-
1833  void CodegenNeuronCppVisitor::print_nrn_state() {
-
      1834  if (!nrn_state_required()) {
      -
      1835  return;
      -
      1836  }
      -
      1837 
      -
      1838  printer->add_newline(2);
      -
      1839  print_global_function_common_code(BlockType::State);
      -
      1840 
      -
      1841  printer->push_block("for (int id = 0; id < nodecount; id++)");
      -
      1842  printer->add_line("int node_id = node_data.nodeindices[id];");
      -
      1843  printer->add_line("auto* _ppvar = _ml_arg->pdata[id];");
      -
      1844  printer->add_line("auto v = node_data.node_voltages[node_id];");
      -
      1845 
      -
      1846  /**
      -
      1847  * \todo Eigen solver node also emits IonCurVar variable in the functor
      -
      1848  * but that shouldn't update ions in derivative block
      -
      1849  */
      -
      1850  if (ion_variable_struct_required()) {
      -
      1851  throw std::runtime_error("Not implemented.");
      -
      1852  }
      -
      1853 
      -
      1854  auto read_statements = ion_read_statements(BlockType::State);
      -
      1855  for (auto& statement: read_statements) {
      -
      1856  printer->add_line(statement);
      -
      1857  }
      -
      1858 
      -
      1859  if (info.nrn_state_block) {
      -
      1860  info.nrn_state_block->visit_children(*this);
      -
      1861  }
      -
      1862 
      -
      1863  if (info.currents.empty() && info.breakpoint_node != nullptr) {
      -
      1864  auto block = info.breakpoint_node->get_statement_block();
      -
      1865  print_statement_block(*block, false, false);
      -
      1866  }
      -
      1867 
      -
      1868  const auto& write_statements = ion_write_statements(BlockType::State);
      -
      1869  for (auto& statement: write_statements) {
      -
      1870  const auto& text = process_shadow_update_statement(statement, BlockType::State);
      -
      1871  printer->add_line(text);
      -
      1872  }
      -
      1873 
      -
      1874  printer->pop_block();
      -
      1875  printer->pop_block();
      -
      1876 }
      -
      1877 
      -
      1878 
      -
      1879 /****************************************************************************************/
      -
      1880 /* Print nrn_cur related routines */
      +
      1832 /* Print nrn_state routine */
      +
      1833 /****************************************************************************************/
      +
      1834 
+
1835  void CodegenNeuronCppVisitor::print_nrn_state() {
+
      1836  if (!nrn_state_required()) {
      +
      1837  return;
      +
      1838  }
      +
      1839 
      +
      1840  printer->add_newline(2);
      +
      1841  print_global_function_common_code(BlockType::State);
      +
      1842 
      +
      1843  printer->push_block("for (int id = 0; id < nodecount; id++)");
      +
      1844  printer->add_line("int node_id = node_data.nodeindices[id];");
      +
      1845  printer->add_line("auto* _ppvar = _ml_arg->pdata[id];");
      +
      1846  printer->add_line("auto v = node_data.node_voltages[node_id];");
      +
      1847 
      +
      1848  /**
      +
      1849  * \todo Eigen solver node also emits IonCurVar variable in the functor
      +
      1850  * but that shouldn't update ions in derivative block
      +
      1851  */
      +
      1852  if (ion_variable_struct_required()) {
      +
      1853  throw std::runtime_error("Not implemented.");
      +
      1854  }
      +
      1855 
      +
      1856  auto read_statements = ion_read_statements(BlockType::State);
      +
      1857  for (auto& statement: read_statements) {
      +
      1858  printer->add_line(statement);
      +
      1859  }
      +
      1860 
      +
      1861  if (info.nrn_state_block) {
      +
      1862  info.nrn_state_block->visit_children(*this);
      +
      1863  }
      +
      1864 
      +
      1865  if (info.currents.empty() && info.breakpoint_node != nullptr) {
      +
      1866  auto block = info.breakpoint_node->get_statement_block();
      +
      1867  print_statement_block(*block, false, false);
      +
      1868  }
      +
      1869 
      +
      1870  const auto& write_statements = ion_write_statements(BlockType::State);
      +
      1871  for (auto& statement: write_statements) {
      +
      1872  const auto& text = process_shadow_update_statement(statement, BlockType::State);
      +
      1873  printer->add_line(text);
      +
      1874  }
      +
      1875 
      +
      1876  printer->pop_block();
      +
      1877  printer->pop_block();
      +
      1878 }
      +
      1879 
      +
      1880 
      1881 /****************************************************************************************/
      -
      1882 
-
1883  std::string CodegenNeuronCppVisitor::nrn_current_arguments() {
-
      1884  return get_arg_str(nrn_current_parameters());
      -
      1885 }
      -
      1886 
      -
      1887 
-
1888  CodegenNeuronCppVisitor::ParamVector CodegenNeuronCppVisitor::nrn_current_parameters() {
-
      1889  if (ion_variable_struct_required()) {
      -
      1890  throw std::runtime_error("Not implemented.");
      -
      1891  }
      -
      1892 
      -
      1893  ParamVector params = {{"", "_nrn_mechanism_cache_range&", "", "_lmc"},
      -
      1894  {"", "NrnThread*", "", "nt"},
      -
      1895  {"", "Datum*", "", "_ppvar"},
      -
      1896  {"", "Datum*", "", "_thread"}};
      -
      1897 
      -
      1898  if (info.thread_callback_register) {
      -
      1899  auto type_name = fmt::format("{}&", thread_variables_struct());
      -
      1900  params.emplace_back("", type_name, "", "_thread_vars");
      -
      1901  }
      -
      1902  params.emplace_back("", "size_t", "", "id");
      -
      1903  params.emplace_back("", fmt::format("{}&", instance_struct()), "", "inst");
      -
      1904  params.emplace_back("", fmt::format("{}&", node_data_struct()), "", "node_data");
      -
      1905  params.emplace_back("", "double", "", "v");
      -
      1906  return params;
      -
      1907 }
      -
      1908 
      -
      1909 
-
1910  void CodegenNeuronCppVisitor::print_nrn_current(const BreakpointBlock& node) {
-
      1911  const auto& args = nrn_current_parameters();
      -
      1912  const auto& block = node.get_statement_block();
      -
      1913  printer->add_newline(2);
      -
      1914  printer->fmt_push_block("inline double nrn_current_{}({})",
      -
      1915  info.mod_suffix,
      -
      1916  get_parameter_str(args));
      -
      1917  printer->add_line("double current = 0.0;");
      -
      1918  print_statement_block(*block, false, false);
      -
      1919  for (auto& current: info.currents) {
      -
      1920  const auto& name = get_variable_name(current);
      -
      1921  printer->fmt_line("current += {};", name);
      -
      1922  }
      -
      1923  printer->add_line("return current;");
      -
      1924  printer->pop_block();
      -
      1925 }
      -
      1926 
      -
      1927 
-
1928  void CodegenNeuronCppVisitor::print_nrn_cur_conductance_kernel(const BreakpointBlock& node) {
-
      1929  const auto& block = node.get_statement_block();
      -
      1930  print_statement_block(*block, false, false);
      -
      1931  if (!info.currents.empty()) {
      -
      1932  std::string sum;
      -
      1933  for (const auto& current: info.currents) {
      -
      1934  auto var = breakpoint_current(current);
      -
      1935  sum += get_variable_name(var);
      -
      1936  if (&current != &info.currents.back()) {
      -
      1937  sum += "+";
      -
      1938  }
      -
      1939  }
      -
      1940  printer->fmt_line("double rhs = {};", sum);
      -
      1941  }
      -
      1942 
      -
      1943  std::string sum;
      -
      1944  for (const auto& conductance: info.conductances) {
      -
      1945  auto var = breakpoint_current(conductance.variable);
      -
      1946  sum += get_variable_name(var);
      -
      1947  if (&conductance != &info.conductances.back()) {
      -
      1948  sum += "+";
      -
      1949  }
      -
      1950  }
      -
      1951  printer->fmt_line("double g = {};", sum);
      -
      1952 
      -
      1953  for (const auto& conductance: info.conductances) {
      -
      1954  if (!conductance.ion.empty()) {
      -
      1955  const auto& lhs = std::string(naming::ION_VARNAME_PREFIX) + "di" + conductance.ion +
      -
      1956  "dv";
      -
      1957  const auto& rhs = get_variable_name(conductance.variable);
      -
      1958  const ShadowUseStatement statement{lhs, "+=", rhs};
      -
      1959  const auto& text = process_shadow_update_statement(statement, BlockType::Equation);
      -
      1960  printer->add_line(text);
      -
      1961  }
      -
      1962  }
      -
      1963 }
      -
      1964 
      -
      1965 
-
1966  void CodegenNeuronCppVisitor::print_nrn_cur_non_conductance_kernel() {
-
      1967  printer->fmt_line("double I1 = nrn_current_{}({}+0.001);",
      -
      1968  info.mod_suffix,
      -
      1969  nrn_current_arguments());
      -
      1970  for (auto& ion: info.ions) {
      -
      1971  for (auto& var: ion.writes) {
      -
      1972  if (ion.is_ionic_current(var)) {
      -
      1973  const auto& name = get_variable_name(var);
      -
      1974  printer->fmt_line("double di{} = {};", ion.name, name);
      -
      1975  }
      -
      1976  }
      -
      1977  }
      -
      1978  printer->fmt_line("double I0 = nrn_current_{}({});", info.mod_suffix, nrn_current_arguments());
      -
      1979  printer->add_line("double rhs = I0;");
      -
      1980 
      -
      1981  printer->add_line("double g = (I1-I0)/0.001;");
      -
      1982  for (auto& ion: info.ions) {
      -
      1983  for (auto& var: ion.writes) {
      -
      1984  if (ion.is_ionic_current(var)) {
      -
      1985  const auto& lhs = std::string(naming::ION_VARNAME_PREFIX) + "di" + ion.name + "dv";
      -
      1986  auto rhs = fmt::format("(di{}-{})/0.001", ion.name, get_variable_name(var));
      -
      1987  if (info.point_process) {
      -
      1988  auto area = get_variable_name(naming::NODE_AREA_VARIABLE);
      -
      1989  rhs += fmt::format("*1.e2/{}", area);
      -
      1990  }
      -
      1991  const ShadowUseStatement statement{lhs, "+=", rhs};
      -
      1992  const auto& text = process_shadow_update_statement(statement, BlockType::Equation);
      -
      1993  printer->add_line(text);
      -
      1994  }
      -
      1995  }
      -
      1996  }
      -
      1997 }
      -
      1998 
      -
      1999 
-
2000  void CodegenNeuronCppVisitor::print_nrn_cur_kernel(const BreakpointBlock& node) {
-
      2001  printer->add_line("int node_id = node_data.nodeindices[id];");
      -
      2002  printer->add_line("double v = node_data.node_voltages[node_id];");
      -
      2003  printer->add_line("auto* _ppvar = _ml_arg->pdata[id];");
      -
      2004  const auto& read_statements = ion_read_statements(BlockType::Equation);
      -
      2005  for (auto& statement: read_statements) {
      -
      2006  printer->add_line(statement);
      -
      2007  }
      -
      2008 
      -
      2009  if (info.conductances.empty()) {
      -
      2010  print_nrn_cur_non_conductance_kernel();
      -
      2011  } else {
      -
      2012  print_nrn_cur_conductance_kernel(node);
      -
      2013  }
      -
      2014 
      -
      2015  const auto& write_statements = ion_write_statements(BlockType::Equation);
      -
      2016  for (auto& statement: write_statements) {
      -
      2017  auto text = process_shadow_update_statement(statement, BlockType::Equation);
      -
      2018  printer->add_line(text);
      -
      2019  }
      -
      2020 
      -
      2021  if (info.point_process) {
      -
      2022  const auto& area = get_variable_name(naming::NODE_AREA_VARIABLE);
      -
      2023  printer->fmt_line("double mfactor = 1.e2/{};", area);
      -
      2024  printer->add_line("g = g*mfactor;");
      -
      2025  printer->add_line("rhs = rhs*mfactor;");
      -
      2026  }
      -
      2027 
      -
      2028  // print_g_unused();
      -
      2029 }
      -
      2030 
      -
      2031 
      -
      2032 /// TODO: Edit for NEURON
-
2033  void CodegenNeuronCppVisitor::print_fast_imem_calculation() {
-
      2034  return;
      -
      2035 }
      -
      2036 
      -
      2037 
      -
      2038 /// TODO: Edit for NEURON
-
2039  void CodegenNeuronCppVisitor::print_nrn_cur() {
-
      2040  if (!nrn_cur_required()) {
      -
      2041  return;
      -
      2042  }
      -
      2043 
      -
      2044  if (info.conductances.empty()) {
      -
      2045  print_nrn_current(*info.breakpoint_node);
      -
      2046  }
      -
      2047 
      -
      2048  printer->add_newline(2);
      -
      2049  printer->add_line("/** update current */");
      -
      2050  print_global_function_common_code(BlockType::Equation);
      -
      2051  // print_channel_iteration_block_parallel_hint(BlockType::Equation, info.breakpoint_node);
      -
      2052  printer->push_block("for (int id = 0; id < nodecount; id++)");
      -
      2053  print_nrn_cur_kernel(*info.breakpoint_node);
      -
      2054  // print_nrn_cur_matrix_shadow_update();
      -
      2055  // if (!nrn_cur_reduction_loop_required()) {
      -
      2056  // print_fast_imem_calculation();
      -
      2057  // }
      -
      2058 
      -
      2059 
      -
      2060  printer->fmt_line("node_data.node_rhs[node_id] {} rhs;", operator_for_rhs());
      +
      1882 /* Print nrn_cur related routines */
      +
      1883 /****************************************************************************************/
      +
      1884 
+
1885  std::string CodegenNeuronCppVisitor::nrn_current_arguments() {
+
      1886  return get_arg_str(nrn_current_parameters());
      +
      1887 }
      +
      1888 
      +
      1889 
+
1890  CodegenNeuronCppVisitor::ParamVector CodegenNeuronCppVisitor::nrn_current_parameters() {
+
      1891  if (ion_variable_struct_required()) {
      +
      1892  throw std::runtime_error("Not implemented.");
      +
      1893  }
      +
      1894 
      +
      1895  ParamVector params = {{"", "_nrn_mechanism_cache_range&", "", "_lmc"},
      +
      1896  {"", "NrnThread*", "", "nt"},
      +
      1897  {"", "Datum*", "", "_ppvar"},
      +
      1898  {"", "Datum*", "", "_thread"}};
      +
      1899 
      +
      1900  if (info.thread_callback_register) {
      +
      1901  auto type_name = fmt::format("{}&", thread_variables_struct());
      +
      1902  params.emplace_back("", type_name, "", "_thread_vars");
      +
      1903  }
      +
      1904  params.emplace_back("", "size_t", "", "id");
      +
      1905  params.emplace_back("", fmt::format("{}&", instance_struct()), "", "inst");
      +
      1906  params.emplace_back("", fmt::format("{}&", node_data_struct()), "", "node_data");
      +
      1907  params.emplace_back("", "double", "", "v");
      +
      1908  return params;
      +
      1909 }
      +
      1910 
      +
      1911 
+
1912  void CodegenNeuronCppVisitor::print_nrn_current(const BreakpointBlock& node) {
+
      1913  const auto& args = nrn_current_parameters();
      +
      1914  const auto& block = node.get_statement_block();
      +
      1915  printer->add_newline(2);
      +
      1916  printer->fmt_push_block("inline double nrn_current_{}({})",
      +
      1917  info.mod_suffix,
      +
      1918  get_parameter_str(args));
      +
      1919  printer->add_line("double current = 0.0;");
      +
      1920  print_statement_block(*block, false, false);
      +
      1921  for (auto& current: info.currents) {
      +
      1922  const auto& name = get_variable_name(current);
      +
      1923  printer->fmt_line("current += {};", name);
      +
      1924  }
      +
      1925  printer->add_line("return current;");
      +
      1926  printer->pop_block();
      +
      1927 }
      +
      1928 
      +
      1929 
+
1930  void CodegenNeuronCppVisitor::print_nrn_cur_conductance_kernel(const BreakpointBlock& node) {
+
      1931  const auto& block = node.get_statement_block();
      +
      1932  print_statement_block(*block, false, false);
      +
      1933  if (!info.currents.empty()) {
      +
      1934  std::string sum;
      +
      1935  for (const auto& current: info.currents) {
      +
      1936  auto var = breakpoint_current(current);
      +
      1937  sum += get_variable_name(var);
      +
      1938  if (&current != &info.currents.back()) {
      +
      1939  sum += "+";
      +
      1940  }
      +
      1941  }
      +
      1942  printer->fmt_line("double rhs = {};", sum);
      +
      1943  }
      +
      1944 
      +
      1945  std::string sum;
      +
      1946  for (const auto& conductance: info.conductances) {
      +
      1947  auto var = breakpoint_current(conductance.variable);
      +
      1948  sum += get_variable_name(var);
      +
      1949  if (&conductance != &info.conductances.back()) {
      +
      1950  sum += "+";
      +
      1951  }
      +
      1952  }
      +
      1953  printer->fmt_line("double g = {};", sum);
      +
      1954 
      +
      1955  for (const auto& conductance: info.conductances) {
      +
      1956  if (!conductance.ion.empty()) {
      +
      1957  const auto& lhs = std::string(naming::ION_VARNAME_PREFIX) + "di" + conductance.ion +
      +
      1958  "dv";
      +
      1959  const auto& rhs = get_variable_name(conductance.variable);
      +
      1960  const ShadowUseStatement statement{lhs, "+=", rhs};
      +
      1961  const auto& text = process_shadow_update_statement(statement, BlockType::Equation);
      +
      1962  printer->add_line(text);
      +
      1963  }
      +
      1964  }
      +
      1965 }
      +
      1966 
      +
      1967 
+
1968  void CodegenNeuronCppVisitor::print_nrn_cur_non_conductance_kernel() {
+
      1969  printer->fmt_line("double I1 = nrn_current_{}({}+0.001);",
      +
      1970  info.mod_suffix,
      +
      1971  nrn_current_arguments());
      +
      1972  for (auto& ion: info.ions) {
      +
      1973  for (auto& var: ion.writes) {
      +
      1974  if (ion.is_ionic_current(var)) {
      +
      1975  const auto& name = get_variable_name(var);
      +
      1976  printer->fmt_line("double di{} = {};", ion.name, name);
      +
      1977  }
      +
      1978  }
      +
      1979  }
      +
      1980  printer->fmt_line("double I0 = nrn_current_{}({});", info.mod_suffix, nrn_current_arguments());
      +
      1981  printer->add_line("double rhs = I0;");
      +
      1982 
      +
      1983  printer->add_line("double g = (I1-I0)/0.001;");
      +
      1984  for (auto& ion: info.ions) {
      +
      1985  for (auto& var: ion.writes) {
      +
      1986  if (ion.is_ionic_current(var)) {
      +
      1987  const auto& lhs = std::string(naming::ION_VARNAME_PREFIX) + "di" + ion.name + "dv";
      +
      1988  auto rhs = fmt::format("(di{}-{})/0.001", ion.name, get_variable_name(var));
      +
      1989  if (info.point_process) {
      +
      1990  auto area = get_variable_name(naming::NODE_AREA_VARIABLE);
      +
      1991  rhs += fmt::format("*1.e2/{}", area);
      +
      1992  }
      +
      1993  const ShadowUseStatement statement{lhs, "+=", rhs};
      +
      1994  const auto& text = process_shadow_update_statement(statement, BlockType::Equation);
      +
      1995  printer->add_line(text);
      +
      1996  }
      +
      1997  }
      +
      1998  }
      +
      1999 }
      +
      2000 
      +
      2001 
+
2002  void CodegenNeuronCppVisitor::print_nrn_cur_kernel(const BreakpointBlock& node) {
+
      2003  printer->add_line("int node_id = node_data.nodeindices[id];");
      +
      2004  printer->add_line("double v = node_data.node_voltages[node_id];");
      +
      2005  printer->add_line("auto* _ppvar = _ml_arg->pdata[id];");
      +
      2006  const auto& read_statements = ion_read_statements(BlockType::Equation);
      +
      2007  for (auto& statement: read_statements) {
      +
      2008  printer->add_line(statement);
      +
      2009  }
      +
      2010 
      +
      2011  if (info.conductances.empty()) {
      +
      2012  print_nrn_cur_non_conductance_kernel();
      +
      2013  } else {
      +
      2014  print_nrn_cur_conductance_kernel(node);
      +
      2015  }
      +
      2016 
      +
      2017  const auto& write_statements = ion_write_statements(BlockType::Equation);
      +
      2018  for (auto& statement: write_statements) {
      +
      2019  auto text = process_shadow_update_statement(statement, BlockType::Equation);
      +
      2020  printer->add_line(text);
      +
      2021  }
      +
      2022 
      +
      2023  if (info.point_process) {
      +
      2024  const auto& area = get_variable_name(naming::NODE_AREA_VARIABLE);
      +
      2025  printer->fmt_line("double mfactor = 1.e2/{};", area);
      +
      2026  printer->add_line("g = g*mfactor;");
      +
      2027  printer->add_line("rhs = rhs*mfactor;");
      +
      2028  }
      +
      2029 
      +
      2030  // print_g_unused();
      +
      2031 }
      +
      2032 
      +
      2033 
      +
      2034 /// TODO: Edit for NEURON
+
2035  void CodegenNeuronCppVisitor::print_fast_imem_calculation() {
+
      2036  return;
      +
      2037 }
      +
      2038 
      +
      2039 
      +
      2040 /// TODO: Edit for NEURON
+
2041  void CodegenNeuronCppVisitor::print_nrn_cur() {
+
      2042  if (!nrn_cur_required()) {
      +
      2043  return;
      +
      2044  }
      +
      2045 
      +
      2046  if (info.conductances.empty()) {
      +
      2047  print_nrn_current(*info.breakpoint_node);
      +
      2048  }
      +
      2049 
      +
      2050  printer->add_newline(2);
      +
      2051  printer->add_line("/** update current */");
      +
      2052  print_global_function_common_code(BlockType::Equation);
      +
      2053  // print_channel_iteration_block_parallel_hint(BlockType::Equation, info.breakpoint_node);
      +
      2054  printer->push_block("for (int id = 0; id < nodecount; id++)");
      +
      2055  print_nrn_cur_kernel(*info.breakpoint_node);
      +
      2056  // print_nrn_cur_matrix_shadow_update();
      +
      2057  // if (!nrn_cur_reduction_loop_required()) {
      +
      2058  // print_fast_imem_calculation();
      +
      2059  // }
      +
      2060 
      2061 
      -
      2062  if (breakpoint_exist()) {
      -
      2063  printer->fmt_line("inst.{}[id] = g;",
      -
      2064  info.vectorize ? naming::CONDUCTANCE_UNUSED_VARIABLE
-
2065  : naming::CONDUCTANCE_VARIABLE);
-
      2066  }
      -
      2067  printer->pop_block();
      -
      2068 
      -
      2069  // if (nrn_cur_reduction_loop_required()) {
      -
      2070  // printer->push_block("for (int id = 0; id < nodecount; id++)");
      -
      2071  // print_nrn_cur_matrix_shadow_reduction();
      -
      2072  // printer->pop_block();
      -
      2073  // print_fast_imem_calculation();
      -
      2074  // }
      -
      2075 
      -
      2076  // print_kernel_data_present_annotation_block_end();
      -
      2077  printer->pop_block();
      -
      2078 }
      -
      2079 
      -
      2080 
      -
      2081 /****************************************************************************************/
      -
      2082 /* Main code printing entry points */
      +
      2062  printer->fmt_line("node_data.node_rhs[node_id] {} rhs;", operator_for_rhs());
      +
      2063 
      +
      2064  if (breakpoint_exist()) {
      +
      2065  printer->fmt_line("inst.{}[id] = g;",
      +
      2066  info.vectorize ? naming::CONDUCTANCE_UNUSED_VARIABLE
+
2067  : naming::CONDUCTANCE_VARIABLE);
+
      2068  }
      +
      2069  printer->pop_block();
      +
      2070 
      +
      2071  // if (nrn_cur_reduction_loop_required()) {
      +
      2072  // printer->push_block("for (int id = 0; id < nodecount; id++)");
      +
      2073  // print_nrn_cur_matrix_shadow_reduction();
      +
      2074  // printer->pop_block();
      +
      2075  // print_fast_imem_calculation();
      +
      2076  // }
      +
      2077 
      +
      2078  // print_kernel_data_present_annotation_block_end();
      +
      2079  printer->pop_block();
      +
      2080 }
      +
      2081 
      +
      2082 
      2083 /****************************************************************************************/
      -
      2084 
-
2085  void CodegenNeuronCppVisitor::print_headers_include() {
-
      2086  print_standard_includes();
      -
      2087  print_neuron_includes();
      -
      2088 
      -
      2089  if (info.thread_callback_register) {
      -
      2090  printer->add_line("extern void _nrn_thread_reg(int, int, void(*)(Datum*));");
      -
      2091  }
      -
      2092 }
      -
      2093 
      -
      2094 
-
2095  void CodegenNeuronCppVisitor::print_macro_definitions() {
-
      2096  print_global_macros();
      -
      2097  print_mechanism_variables_macros();
      -
      2098 
      -
      2099  printer->add_line("extern Node* nrn_alloc_node_;");
      -
      2100 }
      -
      2101 
      -
      2102 
-
2103  void CodegenNeuronCppVisitor::print_global_macros() {
-
      2104  printer->add_newline();
      -
      2105  printer->add_line("/* NEURON global macro definitions */");
      -
      2106  if (info.vectorize) {
      -
      2107  printer->add_multi_line(R"CODE(
      -
      2108  /* VECTORIZED */
      -
      2109  #define NRN_VECTORIZED 1
      -
      2110  )CODE");
      -
      2111  } else {
      -
      2112  printer->add_multi_line(R"CODE(
      -
      2113  /* NOT VECTORIZED */
      -
      2114  #define NRN_VECTORIZED 0
      -
      2115  )CODE");
      -
      2116  }
      -
      2117 }
      -
      2118 
      -
      2119 
-
2120  void CodegenNeuronCppVisitor::print_mechanism_variables_macros() {
-
      2121  printer->add_newline();
      -
      2122  printer->add_line("static constexpr auto number_of_datum_variables = ",
      -
      2123  std::to_string(int_variables_size()),
      -
      2124  ";");
      -
      2125  printer->add_line("static constexpr auto number_of_floating_point_variables = ",
      -
      2126  std::to_string(codegen_float_variables.size()),
      -
      2127  ";");
      -
      2128  printer->add_newline();
      -
      2129  printer->add_multi_line(R"CODE(
      -
      2130  namespace {
      -
      2131  template <typename T>
      -
      2132  using _nrn_mechanism_std_vector = std::vector<T>;
      -
      2133  using _nrn_model_sorted_token = neuron::model_sorted_token;
      -
      2134  using _nrn_mechanism_cache_range = neuron::cache::MechanismRange<number_of_floating_point_variables, number_of_datum_variables>;
      -
      2135  using _nrn_mechanism_cache_instance = neuron::cache::MechanismInstance<number_of_floating_point_variables, number_of_datum_variables>;
      -
      2136  using _nrn_non_owning_id_without_container = neuron::container::non_owning_identifier_without_container;
      -
      2137  template <typename T>
      -
      2138  using _nrn_mechanism_field = neuron::mechanism::field<T>;
      -
      2139  template <typename... Args>
      -
      2140  void _nrn_mechanism_register_data_fields(Args&&... args) {
      -
      2141  neuron::mechanism::register_data_fields(std::forward<Args>(args)...);
      -
      2142  }
      -
      2143  } // namespace
      -
      2144  )CODE");
      -
      2145 
      -
      2146  if (info.point_process) {
      -
      2147  printer->add_line("extern Prop* nrn_point_prop_;");
      -
      2148  } else {
      -
      2149  printer->add_line("Prop* hoc_getdata_range(int type);");
      -
      2150  }
      -
      2151  // for registration of tables
      -
      2152  if (info.table_count > 0) {
      -
      2153  printer->add_line("void _nrn_thread_table_reg(int, nrn_thread_table_check_t);");
      -
      2154  }
      -
      2155  if (info.for_netcon_used) {
      -
      2156  printer->add_line("int _nrn_netcon_args(void*, double***);");
      -
      2157  }
      -
      2158 }
      -
      2159 
      -
      2160 
      -
      2161 void CodegenNeuronCppVisitor::print_data_structures(bool print_initializers) {
      -
      2162  print_mechanism_global_var_structure(print_initializers);
      -
      2163  print_mechanism_range_var_structure(print_initializers);
      -
      2164  print_node_data_structure(print_initializers);
      -
      2165  print_thread_variables_structure(print_initializers);
      -
      2166  print_make_instance();
      -
      2167  print_make_node_data();
      -
      2168 }
      -
      2169 
      -
      2170 
-
2171  void CodegenNeuronCppVisitor::print_v_unused() const {
-
      2172  if (!info.vectorize) {
      -
      2173  return;
      -
      2174  }
      -
      2175  printer->add_multi_line(R"CODE(
      -
      2176  #if NRN_PRCELLSTATE
      -
      2177  inst->v_unused[id] = v;
      -
      2178  #endif
      -
      2179  )CODE");
      -
      2180 }
      -
      2181 
      -
      2182 
-
2183  void CodegenNeuronCppVisitor::print_g_unused() const {
-
      2184  printer->add_multi_line(R"CODE(
      -
      2185  #if NRN_PRCELLSTATE
      -
      2186  inst->g_unused[id] = g;
      -
      2187  #endif
      -
      2188  )CODE");
      -
      2189 }
      -
      2190 
      -
      2191 
-
2192  void CodegenNeuronCppVisitor::print_compute_functions() {
-
      2193  print_hoc_py_wrapper_function_definitions();
      -
      2194  for (const auto& procedure: info.procedures) {
      -
      2195  print_procedure(*procedure);
      -
      2196  }
      -
      2197  for (const auto& function: info.functions) {
      -
      2198  print_function(*function);
      -
      2199  }
      -
      2200  print_nrn_init();
      -
      2201  print_nrn_cur();
      -
      2202  print_nrn_state();
      -
      2203  print_nrn_jacob();
      -
      2204  print_net_receive();
      -
      2205  print_net_init();
      -
      2206 }
      -
      2207 
      -
      2208 
-
2209 void CodegenNeuronCppVisitor::print_codegen_routines() {
-
      2210  print_backend_info();
      -
      2211  print_headers_include();
      -
      2212  print_macro_definitions();
      -
      2213  print_neuron_global_variable_declarations();
      -
      2214  print_namespace_start();
      -
      2215  print_nmodl_constants();
      -
      2216  print_prcellstate_macros();
      -
      2217  print_mechanism_info();
      -
      2218  print_data_structures(true);
      -
      2219  print_nrn_constructor_declaration();
      -
      2220  print_nrn_destructor_declaration();
      -
      2221  print_nrn_alloc();
      -
      2222  print_function_prototypes();
      -
      2223  print_functors_definitions();
      -
      2224  print_global_variables_for_hoc();
      -
      2225  print_thread_memory_callbacks();
      -
      2226  print_compute_functions(); // only nrn_cur and nrn_state
      -
      2227  print_nrn_constructor();
      -
      2228  print_nrn_destructor();
      -
      2229  print_sdlists_init(true);
      -
      2230  print_mechanism_register();
      -
      2231  print_namespace_stop();
      -
      2232 }
      -
      2233 
      - -
      2235  throw std::runtime_error("Not implemented.");
      -
      2236 }
      -
      2237 
      -
      2238 
-
2239 void CodegenNeuronCppVisitor::print_net_send_call(const ast::FunctionCall& node) {
-
      2240  auto const& arguments = node.get_arguments();
      -
      2241 
      -
      2242  if (printing_net_init) {
      -
      2243  throw std::runtime_error("Not implemented. [jfiwoei]");
      -
      2244  }
      -
      2245 
      -
      2246  std::string weight_pointer = "nullptr";
      -
      2247  auto point_process = get_variable_name(naming::POINT_PROCESS_VARIABLE,
      -
      2248  /* use_instance */ false);
      -
      2249  if (!printing_net_receive) {
      -
      2250  point_process += ".get<Point_process*>()";
      -
      2251  }
      -
      2252  const auto& tqitem = get_variable_name("tqitem", /* use_instance */ false);
      -
      2253 
      -
      2254  printer->fmt_text("{}(/* tqitem */ &{}, {}, {}, {} + ",
      -
      2255  info.artificial_cell ? "artcell_net_send" : "net_send",
      -
      2256  tqitem,
      -
      2257  weight_pointer,
      -
      2258  point_process,
      -
      2259  get_variable_name("t"));
      -
      2260  print_vector_elements(arguments, ", ");
      -
      2261  printer->add_text(')');
      -
      2262 }
      -
      2263 
-
2264 void CodegenNeuronCppVisitor::print_net_move_call(const ast::FunctionCall& node) {
-
      2265  const auto& point_process = get_variable_name("point_process", /* use_instance */ false);
      -
      2266  const auto& tqitem = get_variable_name("tqitem", /* use_instance */ false);
      -
      2267 
      -
      2268  printer->fmt_text("{}(/* tqitem */ &{}, {}, ",
      -
      2269  info.artificial_cell ? "artcell_net_move" : "net_move",
      -
      2270  tqitem,
      -
      2271  point_process);
      -
      2272 
      -
      2273  print_vector_elements(node.get_arguments(), ", ");
      -
      2274  printer->add_text(')');
      -
      2275 }
      -
      2276 
-
2277 void CodegenNeuronCppVisitor::print_net_event_call(const ast::FunctionCall& node) {
-
      2278  const auto& point_process = get_variable_name(naming::POINT_PROCESS_VARIABLE,
      -
      2279  /* use_instance */ false);
      -
      2280  printer->fmt_text("net_event({}, t)", point_process);
      -
      2281 }
      -
      2282 
      -
      2283 /**
      -
      2284  * Rename arguments to NET_RECEIVE block with corresponding pointer variable
      -
      2285  *
      -
      2286  * \code{.mod}
      -
      2287  * NET_RECEIVE (weight, R){
      -
      2288  * x = R
      -
      2289  * }
      -
      2290  * \endcode
      -
      2291  *
      -
      2292  * then generated code should be:
      +
      2084 /* Main code printing entry points */
      +
      2085 /****************************************************************************************/
      +
      2086 
+
2087 void CodegenNeuronCppVisitor::print_headers_include() {
+
      2088  print_standard_includes();
      +
      2089  print_neuron_includes();
      +
      2090 
      +
      2091  if (info.thread_callback_register) {
      +
      2092  printer->add_line("extern void _nrn_thread_reg(int, int, void(*)(Datum*));");
      +
      2093  }
      +
      2094 }
      +
      2095 
      +
      2096 
+
2097 void CodegenNeuronCppVisitor::print_macro_definitions() {
+
      2098  print_global_macros();
      +
      2099  print_mechanism_variables_macros();
      +
      2100 
      +
      2101  printer->add_line("extern Node* nrn_alloc_node_;");
      +
      2102 }
      +
      2103 
      +
      2104 
+
2105 void CodegenNeuronCppVisitor::print_global_macros() {
+
      2106  printer->add_newline();
      +
      2107  printer->add_line("/* NEURON global macro definitions */");
      +
      2108  if (info.vectorize) {
      +
      2109  printer->add_multi_line(R"CODE(
      +
      2110  /* VECTORIZED */
      +
      2111  #define NRN_VECTORIZED 1
      +
      2112  )CODE");
      +
      2113  } else {
      +
      2114  printer->add_multi_line(R"CODE(
      +
      2115  /* NOT VECTORIZED */
      +
      2116  #define NRN_VECTORIZED 0
      +
      2117  )CODE");
      +
      2118  }
      +
      2119 }
      +
      2120 
      +
      2121 
+
2122 void CodegenNeuronCppVisitor::print_mechanism_variables_macros() {
+
      2123  printer->add_newline();
      +
      2124  printer->add_line("static constexpr auto number_of_datum_variables = ",
      +
      2125  std::to_string(int_variables_size()),
      +
      2126  ";");
      +
      2127  printer->add_line("static constexpr auto number_of_floating_point_variables = ",
      +
      2128  std::to_string(codegen_float_variables.size()),
      +
      2129  ";");
      +
      2130  printer->add_newline();
      +
      2131  printer->add_multi_line(R"CODE(
      +
      2132  namespace {
      +
      2133  template <typename T>
      +
      2134  using _nrn_mechanism_std_vector = std::vector<T>;
      +
      2135  using _nrn_model_sorted_token = neuron::model_sorted_token;
      +
      2136  using _nrn_mechanism_cache_range = neuron::cache::MechanismRange<number_of_floating_point_variables, number_of_datum_variables>;
      +
      2137  using _nrn_mechanism_cache_instance = neuron::cache::MechanismInstance<number_of_floating_point_variables, number_of_datum_variables>;
      +
      2138  using _nrn_non_owning_id_without_container = neuron::container::non_owning_identifier_without_container;
      +
      2139  template <typename T>
      +
      2140  using _nrn_mechanism_field = neuron::mechanism::field<T>;
      +
      2141  template <typename... Args>
      +
      2142  void _nrn_mechanism_register_data_fields(Args&&... args) {
      +
      2143  neuron::mechanism::register_data_fields(std::forward<Args>(args)...);
      +
      2144  }
      +
      2145  } // namespace
      +
      2146  )CODE");
      +
      2147 
      +
      2148  if (info.point_process) {
      +
      2149  printer->add_line("extern Prop* nrn_point_prop_;");
      +
      2150  } else {
      +
      2151  printer->add_line("Prop* hoc_getdata_range(int type);");
      +
      2152  }
      +
      2153  // for registration of tables
      +
      2154  if (info.table_count > 0) {
      +
      2155  printer->add_line("void _nrn_thread_table_reg(int, nrn_thread_table_check_t);");
      +
      2156  }
      +
      2157  if (info.for_netcon_used) {
      +
      2158  printer->add_line("int _nrn_netcon_args(void*, double***);");
      +
      2159  }
      +
      2160 }
      +
      2161 
      +
      2162 
      +
      2163 void CodegenNeuronCppVisitor::print_data_structures(bool print_initializers) {
      +
      2164  print_mechanism_global_var_structure(print_initializers);
      +
      2165  print_mechanism_range_var_structure(print_initializers);
      +
      2166  print_node_data_structure(print_initializers);
      +
      2167  print_thread_variables_structure(print_initializers);
      +
      2168  print_make_instance();
      +
      2169  print_make_node_data();
      +
      2170 }
      +
      2171 
      +
      2172 
+
2173 void CodegenNeuronCppVisitor::print_v_unused() const {
+
      2174  if (!info.vectorize) {
      +
      2175  return;
      +
      2176  }
      +
      2177  printer->add_multi_line(R"CODE(
      +
      2178  #if NRN_PRCELLSTATE
      +
      2179  inst->v_unused[id] = v;
      +
      2180  #endif
      +
      2181  )CODE");
      +
      2182 }
      +
      2183 
      +
      2184 
+
2185 void CodegenNeuronCppVisitor::print_g_unused() const {
+
      2186  printer->add_multi_line(R"CODE(
      +
      2187  #if NRN_PRCELLSTATE
      +
      2188  inst->g_unused[id] = g;
      +
      2189  #endif
      +
      2190  )CODE");
      +
      2191 }
      +
      2192 
      +
      2193 
+
2194 void CodegenNeuronCppVisitor::print_compute_functions() {
+
      2195  print_hoc_py_wrapper_function_definitions();
      +
      2196  for (const auto& procedure: info.procedures) {
      +
      2197  print_procedure(*procedure);
      +
      2198  }
      +
      2199  for (const auto& function: info.functions) {
      +
      2200  print_function(*function);
      +
      2201  }
      +
      2202  print_nrn_init();
      +
      2203  print_nrn_cur();
      +
      2204  print_nrn_state();
      +
      2205  print_nrn_jacob();
      +
      2206  print_net_receive();
      +
      2207  print_net_init();
      +
      2208 }
      +
      2209 
      +
      2210 
+
2211 void CodegenNeuronCppVisitor::print_codegen_routines() {
+
      2212  print_backend_info();
      +
      2213  print_headers_include();
      +
      2214  print_macro_definitions();
      +
      2215  print_neuron_global_variable_declarations();
      +
      2216  print_namespace_start();
      +
      2217  print_nmodl_constants();
      +
      2218  print_prcellstate_macros();
      +
      2219  print_mechanism_info();
      +
      2220  print_data_structures(true);
      +
      2221  print_nrn_constructor_declaration();
      +
      2222  print_nrn_destructor_declaration();
      +
      2223  print_nrn_alloc();
      +
      2224  print_function_prototypes();
      +
      2225  print_functors_definitions();
      +
      2226  print_global_variables_for_hoc();
      +
      2227  print_thread_memory_callbacks();
      +
      2228  print_compute_functions(); // only nrn_cur and nrn_state
      +
      2229  print_nrn_constructor();
      +
      2230  print_nrn_destructor();
      +
      2231  print_sdlists_init(true);
      +
      2232  print_mechanism_register();
      +
      2233  print_namespace_stop();
      +
      2234 }
      +
      2235 
      + +
      2237  throw std::runtime_error("Not implemented.");
      +
      2238 }
      +
      2239 
      +
      2240 
+
2241 void CodegenNeuronCppVisitor::print_net_send_call(const ast::FunctionCall& node) {
+
      2242  auto const& arguments = node.get_arguments();
      +
      2243 
      +
      2244  if (printing_net_init) {
      +
      2245  throw std::runtime_error("Not implemented. [jfiwoei]");
      +
      2246  }
      +
      2247 
      +
      2248  std::string weight_pointer = "nullptr";
      +
      2249  auto point_process = get_variable_name(naming::POINT_PROCESS_VARIABLE,
      +
      2250  /* use_instance */ false);
      +
      2251  if (!printing_net_receive) {
      +
      2252  point_process += ".get<Point_process*>()";
      +
      2253  }
      +
      2254  const auto& tqitem = get_variable_name("tqitem", /* use_instance */ false);
      +
      2255 
      +
      2256  printer->fmt_text("{}(/* tqitem */ &{}, {}, {}, {} + ",
      +
      2257  info.artificial_cell ? "artcell_net_send" : "net_send",
      +
      2258  tqitem,
      +
      2259  weight_pointer,
      +
      2260  point_process,
      +
      2261  get_variable_name("t"));
      +
      2262  print_vector_elements(arguments, ", ");
      +
      2263  printer->add_text(')');
      +
      2264 }
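For orientation only, a hypothetical NET_RECEIVE statement `net_send(5)` in a regular (non-artificial) POINT_PROCESS would be emitted roughly as the line below; the `_ppvar[3]` expansion of `tqitem`, the `_pnt` expansion of the point-process Datum and the spelling of `t` are illustrative placeholders (they depend on get_variable_name and are not shown in this hunk):

    net_send(/* tqitem */ &_ppvar[3], nullptr, _pnt, t + 5.0)

i.e. the format string above filled with the weight pointer, the Point_process handle and the current time, followed by the call's own arguments printed by print_vector_elements.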
      +
      2265 
+
2266 void CodegenNeuronCppVisitor::print_net_move_call(const ast::FunctionCall& node) {
+
      2267  const auto& point_process = get_variable_name("point_process", /* use_instance */ false);
      +
      2268  const auto& tqitem = get_variable_name("tqitem", /* use_instance */ false);
      +
      2269 
      +
      2270  printer->fmt_text("{}(/* tqitem */ &{}, {}, ",
      +
      2271  info.artificial_cell ? "artcell_net_move" : "net_move",
      +
      2272  tqitem,
      +
      2273  point_process);
      +
      2274 
      +
      2275  print_vector_elements(node.get_arguments(), ", ");
      +
      2276  printer->add_text(')');
      +
      2277 }
      +
      2278 
+
2279 void CodegenNeuronCppVisitor::print_net_event_call(const ast::FunctionCall& node) {
+
      2280  const auto& point_process = get_variable_name(naming::POINT_PROCESS_VARIABLE,
      +
      2281  /* use_instance */ false);
      +
      2282  printer->fmt_text("net_event({}, t)", point_process);
      +
      2283 }
      +
      2284 
      +
      2285 /**
      +
      2286  * Rename arguments to NET_RECEIVE block with corresponding pointer variable
      +
      2287  *
      +
      2288  * \code{.mod}
      +
      2289  * NET_RECEIVE (weight, R){
      +
      2290  * x = R
      +
      2291  * }
      +
      2292  * \endcode
      2293  *
      -
      2294  * \code{.cpp}
      -
      2295  * x[id] = _args[1];
      -
      2296  * \endcode
      -
      2297  *
      -
      2298  * So, the `R` in AST needs to be renamed with `_args[1]`.
      -
      2299  */
      -
      2300 static void rename_net_receive_arguments(const ast::NetReceiveBlock& net_receive_node,
      -
      2301  const ast::Node& node) {
      -
      2302  const auto& parameters = net_receive_node.get_parameters();
      -
      2303 
      -
      2304  auto n_parameters = parameters.size();
      -
      2305  for (size_t i = 0; i < n_parameters; ++i) {
      -
      2306  const auto& name = parameters[i]->get_node_name();
      -
      2307  auto var_used = VarUsageVisitor().variable_used(node, name);
      -
      2308  if (var_used) {
      -
      2309  RenameVisitor vr(name, fmt::format("_args[{}]", i));
      -
      2310  node.get_statement_block()->visit_children(vr);
      -
      2311  }
      -
      2312  }
      -
      2313 }
      -
      2314 
-
2315 CodegenNeuronCppVisitor::ParamVector CodegenNeuronCppVisitor::net_receive_args() {
-
      2316  return {{"", "Point_process*", "", "_pnt"},
      -
      2317  {"", "double*", "", "_args"},
      -
      2318  {"", "double", "", "flag"}};
      -
      2319 }
      -
      2320 
      -
      2321 
-
2322 void CodegenNeuronCppVisitor::print_net_receive_common_code() {
-
      2323  printer->add_line("_nrn_mechanism_cache_instance _lmc{_pnt->prop};");
      -
      2324  printer->add_line("auto * nt = static_cast<NrnThread*>(_pnt->_vnt);");
      -
      2325  printer->add_line("auto * _ppvar = _nrn_mechanism_access_dparam(_pnt->prop);");
      -
      2326 
      -
      2327  printer->fmt_line("auto inst = make_instance_{}(_lmc);", info.mod_suffix);
      -
      2328  if (!info.artificial_cell) {
      -
      2329  printer->fmt_line("auto node_data = make_node_data_{}(_pnt->prop);", info.mod_suffix);
      -
      2330  }
      -
      2331  printer->fmt_line("// nocmodl has a nullptr dereference for thread variables.");
      -
      2332  printer->fmt_line("// NMODL will fail to compile at a later point, because of");
      -
      2333  printer->fmt_line("// missing '_thread_vars'.");
      -
      2334  printer->fmt_line("Datum * _thread = nullptr;");
      -
      2335 
      -
      2336  printer->add_line("size_t id = 0;");
      -
      2337  printer->add_line("double t = nt->_t;");
      -
      2338 }
      -
      2339 
-
2340 void CodegenNeuronCppVisitor::print_net_receive() {
-
      2341  printing_net_receive = true;
      -
      2342  auto node = info.net_receive_node;
      -
      2343  if (!node) {
      -
      2344  return;
      -
      2345  }
      -
      2346 
      -
      2347  printer->fmt_push_block("static void nrn_net_receive_{}({})",
      -
      2348  info.mod_suffix,
      -
      2349  get_parameter_str(net_receive_args()));
      -
      2350 
      -
      2351  rename_net_receive_arguments(*node, *node);
      -
      2352  print_net_receive_common_code();
      -
      2353 
      -
      2354 
      -
      2355  print_statement_block(*node->get_statement_block(), false, false);
      +
      2294  * then generated code should be:
      +
      2295  *
      +
      2296  * \code{.cpp}
      +
      2297  * x[id] = _args[1];
      +
      2298  * \endcode
      +
      2299  *
      +
      2300  * So, the `R` in AST needs to be renamed with `_args[1]`.
      +
      2301  */
      +
      2302 static void rename_net_receive_arguments(const ast::NetReceiveBlock& net_receive_node,
      +
      2303  const ast::Node& node) {
      +
      2304  const auto& parameters = net_receive_node.get_parameters();
      +
      2305 
      +
      2306  auto n_parameters = parameters.size();
      +
      2307  for (size_t i = 0; i < n_parameters; ++i) {
      +
      2308  const auto& name = parameters[i]->get_node_name();
      +
      2309  auto var_used = VarUsageVisitor().variable_used(node, name);
      +
      2310  if (var_used) {
      +
      2311  RenameVisitor vr(name, fmt::format("_args[{}]", i));
      +
      2312  node.get_statement_block()->visit_children(vr);
      +
      2313  }
      +
      2314  }
      +
      2315 }
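To make the index mapping concrete, consider a hypothetical block (not taken from the repository):

    NET_RECEIVE (weight, A, tau) {
        g = g + A
    }

Here `weight` (index 0) and `tau` (index 2) are reported unused by VarUsageVisitor and are left untouched, while the used argument `A` (index 1) is rewritten by RenameVisitor, so the statement later handed to the code printer is effectively

    g = g + _args[1]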
      +
      2316 
+
2317 CodegenNeuronCppVisitor::ParamVector CodegenNeuronCppVisitor::net_receive_args() {
+
      2318  return {{"", "Point_process*", "", "_pnt"},
      +
      2319  {"", "double*", "", "_args"},
      +
      2320  {"", "double", "", "flag"}};
      +
      2321 }
      +
      2322 
      +
      2323 
+
2324 void CodegenNeuronCppVisitor::print_net_receive_common_code() {
+
      2325  printer->add_line("_nrn_mechanism_cache_instance _lmc{_pnt->prop};");
      +
      2326  printer->add_line("auto * nt = static_cast<NrnThread*>(_pnt->_vnt);");
      +
      2327  printer->add_line("auto * _ppvar = _nrn_mechanism_access_dparam(_pnt->prop);");
      +
      2328 
      +
      2329  printer->fmt_line("auto inst = make_instance_{}(_lmc);", info.mod_suffix);
      +
      2330  if (!info.artificial_cell) {
      +
      2331  printer->fmt_line("auto node_data = make_node_data_{}(_pnt->prop);", info.mod_suffix);
      +
      2332  }
      +
      2333  printer->fmt_line("// nocmodl has a nullptr dereference for thread variables.");
      +
      2334  printer->fmt_line("// NMODL will fail to compile at a later point, because of");
      +
      2335  printer->fmt_line("// missing '_thread_vars'.");
      +
      2336  printer->fmt_line("Datum * _thread = nullptr;");
      +
      2337 
      +
      2338  printer->add_line("size_t id = 0;");
      +
      2339  printer->add_line("double t = nt->_t;");
      +
      2340 }
      +
      2341 
+
2342 void CodegenNeuronCppVisitor::print_net_receive() {
+
      2343  printing_net_receive = true;
      +
      2344  auto node = info.net_receive_node;
      +
      2345  if (!node) {
      +
      2346  return;
      +
      2347  }
      +
      2348 
      +
      2349  printer->fmt_push_block("static void nrn_net_receive_{}({})",
      +
      2350  info.mod_suffix,
      +
      2351  get_parameter_str(net_receive_args()));
      +
      2352 
      +
      2353  rename_net_receive_arguments(*node, *node);
      +
      2354  print_net_receive_common_code();
      +
      2355 
      2356 
      -
      2357  printer->add_newline();
      -
      2358  printer->pop_block();
      -
      2359  printing_net_receive = false;
      -
      2360 }
      -
      2361 
-
2362 void CodegenNeuronCppVisitor::print_net_init() {
-
      2363  const auto node = info.net_receive_initial_node;
      -
      2364  if (node == nullptr) {
      -
      2365  return;
      -
      2366  }
      -
      2367 
      -
      2368  // rename net_receive arguments used in the initial block of net_receive
      -
      2369  rename_net_receive_arguments(*info.net_receive_node, *node);
      -
      2370 
      -
      2371  printing_net_init = true;
      -
      2372  printer->add_newline(2);
      -
      2373  printer->fmt_push_block("static void net_init({})", get_parameter_str(net_receive_args()));
      -
      2374 
      -
      2375  auto block = node->get_statement_block().get();
      -
      2376  if (!block->get_statements().empty()) {
      -
      2377  print_net_receive_common_code();
      -
      2378  print_statement_block(*block, false, false);
      -
      2379  }
      -
      2380  printer->pop_block();
      -
      2381  printing_net_init = false;
      -
      2382 }
      -
      2383 
      -
      2384 
      -
      2385 /****************************************************************************************/
      -
      2386 /* Overloaded visitor routines */
      +
      2357  print_statement_block(*node->get_statement_block(), false, false);
      +
      2358 
      +
      2359  printer->add_newline();
      +
      2360  printer->pop_block();
      +
      2361  printing_net_receive = false;
      +
      2362 }
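Putting the pieces together, the generated callback has roughly the shape sketched below for a hypothetical POINT_PROCESS named ExpSynLike; the suffix, the `inst.g` access and the final statement are illustrative, while the parameter list and the preamble are the ones produced by net_receive_args() and print_net_receive_common_code() above:

    static void nrn_net_receive_ExpSynLike(Point_process* _pnt, double* _args, double flag) {
        _nrn_mechanism_cache_instance _lmc{_pnt->prop};
        auto * nt = static_cast<NrnThread*>(_pnt->_vnt);
        auto * _ppvar = _nrn_mechanism_access_dparam(_pnt->prop);
        auto inst = make_instance_ExpSynLike(_lmc);
        auto node_data = make_node_data_ExpSynLike(_pnt->prop);
        Datum * _thread = nullptr;
        size_t id = 0;
        double t = nt->_t;
        // NET_RECEIVE body, with its arguments already rewritten to _args[i]
        inst.g[id] = inst.g[id] + _args[0];
    }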
      +
      2363 
+
2364 void CodegenNeuronCppVisitor::print_net_init() {
+
      2365  const auto node = info.net_receive_initial_node;
      +
      2366  if (node == nullptr) {
      +
      2367  return;
      +
      2368  }
      +
      2369 
      +
      2370  // rename net_receive arguments used in the initial block of net_receive
      +
      2371  rename_net_receive_arguments(*info.net_receive_node, *node);
      +
      2372 
      +
      2373  printing_net_init = true;
      +
      2374  printer->add_newline(2);
      +
      2375  printer->fmt_push_block("static void net_init({})", get_parameter_str(net_receive_args()));
      +
      2376 
      +
      2377  auto block = node->get_statement_block().get();
      +
      2378  if (!block->get_statements().empty()) {
      +
      2379  print_net_receive_common_code();
      +
      2380  print_statement_block(*block, false, false);
      +
      2381  }
      +
      2382  printer->pop_block();
      +
      2383  printing_net_init = false;
      +
      2384 }
      +
      2385 
      +
      2386 
      2387 /****************************************************************************************/
      -
      2388 
      -
      2389 /// TODO: Edit for NEURON
-
2390 void CodegenNeuronCppVisitor::visit_watch_statement(const ast::WatchStatement& node) {
-
      2391  return;
      -
      2392 }
      -
      2393 
-
2394 void CodegenNeuronCppVisitor::visit_for_netcon(const ast::ForNetcon& node) {
-
      2395  // The setup for enabling this loop is:
      -
      2396  // double ** _fornetcon_data = ...;
      -
      2397  // for(size_t i = 0; i < n_netcons; ++i) {
      -
      2398  // double * _netcon_data = _fornetcon_data[i];
      -
      2399  //
      -
      2400  // // loop body.
      -
      2401  // }
      -
      2402  //
      -
      2403  // Where `_fornetcon_data` is an array of pointers to the arguments, one
      -
      2404  // for each netcon; and `_netcon_data` points to the arguments for the
      -
      2405  // current netcon.
      -
      2406  //
      -
      2407  // Similar to the CoreNEURON solution, we replace all arguments with the
      -
      2408  // C++ string that implements them, i.e. `_netcon_data[{}]`. The arguments
      -
      2409  // are positional and thus simply numbered through.
      -
      2410  const auto& args = node.get_parameters();
      -
      2411  RenameVisitor v;
      -
      2412  const auto& statement_block = node.get_statement_block();
      -
      2413  for (size_t i_arg = 0; i_arg < args.size(); ++i_arg) {
      -
      2414  auto old_name = args[i_arg]->get_node_name();
      -
      2415  auto new_name = fmt::format("_netcon_data[{}]", i_arg);
      -
      2416  v.set(old_name, new_name);
      -
      2417  statement_block->accept(v);
      -
      2418  }
      -
      2419 
      -
      2420  auto dparam_it =
      -
      2421  std::find_if(info.semantics.begin(), info.semantics.end(), [](const IndexSemantics& a) {
      -
      2422  return a.name == naming::FOR_NETCON_SEMANTIC;
      -
      2423  });
      -
      2424  if (dparam_it == info.semantics.end()) {
      -
      2425  throw std::runtime_error("Couldn't find `fornetcon` variable.");
      -
      2426  }
      -
      2427 
      -
      2428  int dparam_index = dparam_it->index;
      -
      2429  auto netcon_var = get_name(codegen_int_variables[dparam_index]);
      -
      2430 
      -
      2431  // This is called from `print_statement_block` which pre-indents the
      -
      2432  // current line. Hence `add_text` only.
      -
      2433  printer->add_text("double ** _fornetcon_data;");
      -
      2434  printer->add_newline();
      -
      2435 
      -
      2436  printer->fmt_line("int _n_netcons = _nrn_netcon_args({}, &_fornetcon_data);",
      -
      2437  get_variable_name(netcon_var, false));
      -
      2438 
      -
      2439  printer->push_block("for (size_t _i = 0; _i < _n_netcons; ++_i)");
      -
      2440  printer->add_line("double * _netcon_data = _fornetcon_data[_i];");
      -
      2441  print_statement_block(*statement_block, false, false);
      -
      2442  printer->pop_block();
      -
      2443 }
      -
      2444 
      -
      2445 
      -
      2446 } // namespace codegen
      -
      2447 } // namespace nmodl
      +
      2388 /* Overloaded visitor routines */
      +
      2389 /****************************************************************************************/
      +
      2390 
      +
      2391 /// TODO: Edit for NEURON
+
2392 void CodegenNeuronCppVisitor::visit_watch_statement(const ast::WatchStatement& node) {
+
      2393  return;
      +
      2394 }
      +
      2395 
+
2396 void CodegenNeuronCppVisitor::visit_for_netcon(const ast::ForNetcon& node) {
+
      2397  // The setup for enabling this loop is:
      +
      2398  // double ** _fornetcon_data = ...;
      +
      2399  // for(size_t i = 0; i < n_netcons; ++i) {
      +
      2400  // double * _netcon_data = _fornetcon_data[i];
      +
      2401  //
      +
      2402  // // loop body.
      +
      2403  // }
      +
      2404  //
      +
      2405  // Where `_fornetcon_data` is an array of pointers to the arguments, one
      +
      2406  // for each netcon; and `_netcon_data` points to the arguments for the
      +
      2407  // current netcon.
      +
      2408  //
      +
      2409  // Similar to the CoreNEURON solution, we replace all arguments with the
      +
      2410  // C++ string that implements them, i.e. `_netcon_data[{}]`. The arguments
      +
      2411  // are positional and thus simply numbered through.
      +
      2412  const auto& args = node.get_parameters();
      +
      2413  RenameVisitor v;
      +
      2414  const auto& statement_block = node.get_statement_block();
      +
      2415  for (size_t i_arg = 0; i_arg < args.size(); ++i_arg) {
      +
      2416  auto old_name = args[i_arg]->get_node_name();
      +
      2417  auto new_name = fmt::format("_netcon_data[{}]", i_arg);
      +
      2418  v.set(old_name, new_name);
      +
      2419  statement_block->accept(v);
      +
      2420  }
      +
      2421 
      +
      2422  auto dparam_it =
      +
      2423  std::find_if(info.semantics.begin(), info.semantics.end(), [](const IndexSemantics& a) {
      +
      2424  return a.name == naming::FOR_NETCON_SEMANTIC;
      +
      2425  });
      +
      2426  if (dparam_it == info.semantics.end()) {
      +
      2427  throw std::runtime_error("Couldn't find `fornetcon` variable.");
      +
      2428  }
      +
      2429 
      +
      2430  int dparam_index = dparam_it->index;
      +
      2431  auto netcon_var = get_name(codegen_int_variables[dparam_index]);
      +
      2432 
      +
      2433  // This is called from `print_statement_block` which pre-indents the
      +
      2434  // current line. Hence `add_text` only.
      +
      2435  printer->add_text("double ** _fornetcon_data;");
      +
      2436  printer->add_newline();
      +
      2437 
      +
      2438  printer->fmt_line("int _n_netcons = _nrn_netcon_args({}, &_fornetcon_data);",
      +
      2439  get_variable_name(netcon_var, false));
      +
      2440 
      +
      2441  printer->push_block("for (size_t _i = 0; _i < _n_netcons; ++_i)");
      +
      2442  printer->add_line("double * _netcon_data = _fornetcon_data[_i];");
      +
      2443  print_statement_block(*statement_block, false, false);
      +
      2444  printer->pop_block();
      +
      2445 }
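As a rough illustration, for a hypothetical `FOR_NETCON (w, a)` block, and assuming the fornetcon Datum happens to resolve to `_ppvar[4].get<void*>()` (an expansion chosen only for this example), the statements emitted by this visitor look like:

    double ** _fornetcon_data;
    int _n_netcons = _nrn_netcon_args(_ppvar[4].get<void*>(), &_fornetcon_data);
    for (size_t _i = 0; _i < _n_netcons; ++_i) {
        double * _netcon_data = _fornetcon_data[_i];
        // FOR_NETCON body, with `w` rewritten to _netcon_data[0] and `a` to _netcon_data[1]
    }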
      +
      2446 
      +
      2447 
      +
      2448 } // namespace codegen
      +
      2449 } // namespace nmodl
      static constexpr char RANDOM_SEMANTIC[]
      semantic type for RANDOM variable
      static bool starts_with(const std::string &haystack, const std::string &needle)
      Check if haystack starts with needle.
      -
      void print_nrn_destructor() override
      Print nrn_destructor function definition.
      -
      void print_callable_preamble_from_prop()
      Print the set of common variables from a Prop only.
      -
      void print_global_var_external_access()
      Print functions for EXTERNAL use.
      +
      void print_nrn_destructor() override
      Print nrn_destructor function definition.
      +
      void print_callable_preamble_from_prop()
      Print the set of common variables from a Prop only.
      +
      void print_global_var_external_access()
      Print functions for EXTERNAL use.
      Base class for all AST node.
      Definition: node.hpp:40
      bool is_index
      if this is pure index (e.g.
      -
      void print_nrn_cur_non_conductance_kernel() override
      Print the nrn_cur kernel without NMODL conductance keyword provisions.
      -
      std::string py_function_signature(const std::string &function_or_procedure_name) const
      Get the signature of the npy <func_or_proc_name> function.
      +
      void print_nrn_cur_non_conductance_kernel() override
      Print the nrn_cur kernel without NMODL conductance keyword provisions.
      +
      std::string py_function_signature(const std::string &function_or_procedure_name) const
      Get the signature of the npy <func_or_proc_name> function.
      Helper to represent information about index/int variables.
      -
      void print_nrn_cur() override
      Print nrn_cur / current update function definition.
      -
      void print_macro_definitions()
      Print all NEURON macros.
      -
      void print_net_event_call(const ast::FunctionCall &node) override
      Print call to net_event.
      +
      void print_nrn_cur() override
      Print nrn_cur / current update function definition.
      +
      void print_macro_definitions()
      Print all NEURON macros.
      +
      void print_net_event_call(const ast::FunctionCall &node) override
      Print call to net_event.
      static constexpr char POINT_PROCESS_VARIABLE[]
      inbuilt neuron variable for point process
      - -
      void print_neuron_includes()
      Print includes from NEURON.
      + +
      void print_neuron_includes()
      Print includes from NEURON.
      static constexpr char NRN_JACOB_METHOD[]
      nrn_jacob method in generated code
      -
      void print_nrn_init(bool skip_init_check=true)
      Print the nrn_init function definition.
      +
      void print_nrn_init(bool skip_init_check=true)
      Print the nrn_init function definition.
      -
      void print_hoc_py_wrapper_function_body(const ast::Block *function_or_procedure_block, InterpreterWrapper wrapper_type)
      -
      std::string internal_method_arguments() override
      Arguments for functions that are defined and used internally.
      +
      void print_hoc_py_wrapper_function_body(const ast::Block *function_or_procedure_block, InterpreterWrapper wrapper_type)
      +
      std::string internal_method_arguments() override
      Arguments for functions that are defined and used internally.
      std::shared_ptr< symtab::Symbol > SymbolType
      -
      void print_nrn_alloc() override
      Print nrn_alloc function definition.
      +
      void print_nrn_alloc() override
      Print nrn_alloc function definition.
      -
      std::string register_mechanism_arguments() const override
      Arguments for register_mech or point_register_mech function.
      +
      std::string register_mechanism_arguments() const override
      Arguments for register_mech or point_register_mech function.
      int position_of_int_var(const std::string &name) const override
      Determine the position in the data array for a given int variable.
      const ArgumentVector & get_parameters() const noexcept override
      Getter for member variable NetReceiveBlock::parameters.
      - +
      std::string table_thread_function_name() const
      Name of the threaded table checking function.
      bool optimize_ion_variable_copies() const override
      Check if ion variable copies should be avoided.
      -
      std::string hoc_function_name(const std::string &function_or_procedure_name) const
      All functions and procedures need a hoc <func_or_proc_name> to be available to the HOC interpreter.
      +
      std::string hoc_function_name(const std::string &function_or_procedure_name) const
      All functions and procedures need a hoc <func_or_proc_name> to be available to the HOC interpreter.
      encapsulates code generation backend implementations
      Definition: ast_common.hpp:26
      std::shared_ptr< StatementBlock > get_statement_block() const noexcept override
      Getter for member variable InitialBlock::statement_block.
      -
      std::string global_variable_name(const SymbolType &symbol, bool use_instance=true) const override
      Determine the variable name for a global variable given its symbol.
      +
      std::string global_variable_name(const SymbolType &symbol, bool use_instance=true) const override
      Determine the variable name for a global variable given its symbol.
      Represent ions used in mod file.
      -
      std::string nrn_thread_internal_arguments() override
      Arguments for "_threadargs_" macro in neuron implementation.
      +
      std::string nrn_thread_internal_arguments() override
      Arguments for "_threadargs_" macro in neuron implementation.
      static constexpr char NRN_CONSTRUCTOR_METHOD[]
      nrn_constructor method in generated code
      Visitor for printing C++ code compatible with legacy api of NEURON
      std::string backend_name() const override
      Name of the code generation backend.
      -
      std::string process_verbatim_text(std::string const &text) override
      Process a verbatim block for possible variable renaming.
      - -
      void print_global_param_default_values()
      Print global struct with default value of RANGE PARAMETERs.
      +
      std::string process_verbatim_text(std::string const &text) override
      Process a verbatim block for possible variable renaming.
      + +
      void print_global_param_default_values()
      Print global struct with default value of RANGE PARAMETERs.
      std::string name
      name of the ion
      static constexpr char NTHREAD_DT_VARIABLE[]
      dt variable in neuron thread structure
      -
      void print_g_unused() const override
      Set g_unused (conductance) for NRN_PRCELLSTATE feature.
      -
      void visit_watch_statement(const ast::WatchStatement &node) override
      TODO: Edit for NEURON.
      -
      void print_net_send_call(const ast::FunctionCall &node) override
      Print call to net_send.
      +
      void print_g_unused() const override
      Set g_unused (conductance) for NRN_PRCELLSTATE feature.
      +
      void visit_watch_statement(const ast::WatchStatement &node) override
      TODO: Edit for NEURON.
      +
      void print_net_send_call(const ast::FunctionCall &node) override
      Print call to net_send.
      Check if variable is used in given block.
      Implement string manipulation functions.
      -
      void print_net_move_call(const ast::FunctionCall &node) override
      Print call to net_move.
      -
      std::string thread_variable_name(const ThreadVariableInfo &var_info, bool use_instance=true) const
      Determine the C++ string to print for thread variables.
      +
      void print_net_move_call(const ast::FunctionCall &node) override
      Print call to net_move.
      +
      std::string thread_variable_name(const ThreadVariableInfo &var_info, bool use_instance=true) const
      Determine the C++ string to print for thread variables.
      static constexpr char NODE_AREA_VARIABLE[]
      inbuilt neuron variable for area of the compartment
      -
      void print_nrn_state() override
      Print nrn_state / state update function definition.
      +
      void print_nrn_state() override
      Print nrn_state / state update function definition.
      virtual bool is_function_block() const noexcept
      Check if the ast node is an instance of ast::FunctionBlock.
      Definition: ast.cpp:142
      static constexpr char NRN_ALLOC_METHOD[]
      nrn_alloc method in generated code
      -
      void print_initial_block(const ast::InitialBlock *node)
      Print the initial block.
      +
      void print_initial_block(const ast::InitialBlock *node)
      Print the initial block.
      bool is_integer
      if this is an integer (e.g.
      -
      void print_global_function_common_code(BlockType type, const std::string &function_name="") override
      Print common code for global functions like nrn_init, nrn_cur and nrn_state.
      -
      std::string float_variable_name(const SymbolType &symbol, bool use_instance) const override
      Determine the name of a float variable given its symbol.
      +
      void print_global_function_common_code(BlockType type, const std::string &function_name="") override
      Print common code for global functions like nrn_init, nrn_cur and nrn_state.
      +
      std::string float_variable_name(const SymbolType &symbol, bool use_instance) const override
      Determine the name of a float variable given its symbol.
      std::string extra_conc_pointer_name() const
      std::shared_ptr< StatementBlock > get_statement_block() const noexcept override
      Getter for member variable BreakpointBlock::statement_block.
      - +
      std::string rev_potential_pointer_name() const
      -
      void print_v_unused() const override
      Set v_unused (voltage) for NRN_PRCELLSTATE feature.
      - +
      void print_v_unused() const override
      Set v_unused (voltage) for NRN_PRCELLSTATE feature.
      +
      static constexpr char CONDUCTANCE_VARIABLE[]
      range variable for conductance
      -
      ParamVector functor_params() override
      The parameters of the Newton solver "functor".
      +
      ParamVector functor_params() override
      The parameters of the Newton solver "functor".
      Visitor for printing C++ code compatible with legacy api of CoreNEURON
      Check if variable is used in given block.
      Base class for all block scoped nodes.
      Definition: block.hpp:41
      Represents a INITIAL block in the NMODL.
      void print_point_process_function_definitions()
      Print POINT_PROCESS related functions Wrap external NEURON functions related to POINT_PROCESS mechani...
      - +
      Utility functions for visitors implementation.
      -
      void print_net_init()
      Print NET_RECEIVE{ INITIAL{ ...
      +
      void print_net_init()
      Print NET_RECEIVE{ INITIAL{ ...
      @ Equation
      breakpoint block
      static constexpr char USE_TABLE_VARIABLE[]
      global variable to indicate if table is used
      Represent WATCH statement in NMODL.
      -
      void print_neuron_global_variable_declarations()
      Print extern declarations for neuron global variables.
      +
      void print_neuron_global_variable_declarations()
      Print extern declarations for neuron global variables.
      Represents a BREAKPOINT block in NMODL.
      -
      void print_nrn_cur_conductance_kernel(const ast::BreakpointBlock &node) override
      Print the nrn_cur kernel with NMODL conductance keyword provisions.
      -
      std::string int_variable_name(const IndexVariableInfo &symbol, const std::string &name, bool use_instance) const override
      Determine the name of an int variable given its symbol.
      +
      void print_nrn_cur_conductance_kernel(const ast::BreakpointBlock &node) override
      Print the nrn_cur kernel with NMODL conductance keyword provisions.
      +
      std::string int_variable_name(const IndexVariableInfo &symbol, const std::string &name, bool use_instance) const override
      Determine the name of an int variable given its symbol.
      InterpreterWrapper
      Enum to switch between HOC and Python wrappers for functions and procedures defined in mechanisms.
      -
      void print_global_variables_for_hoc() override
      Print byte arrays that register scalar and vector variables for hoc interface.
      -
      void print_standard_includes() override
      Print standard C/C++ includes.
      -
      void print_mechanism_range_var_structure(bool print_initializers) override
      Print the structure that wraps all range and int variables required for the NMODL.
      -
      void add_variable_tqitem(std::vector< IndexVariableInfo > &variables) override
      Add the variable tqitem during get_int_variables.
      -
      std::string hoc_function_signature(const std::string &function_or_procedure_name) const
      Get the signature of the hoc <func_or_proc_name> function.
      +
      void print_global_variables_for_hoc() override
      Print byte arrays that register scalar and vector variables for hoc interface.
      +
      void print_standard_includes() override
      Print standard C/C++ includes.
      +
      void print_mechanism_range_var_structure(bool print_initializers) override
      Print the structure that wraps all range and int variables required for the NMODL.
      +
      void add_variable_tqitem(std::vector< IndexVariableInfo > &variables) override
      Add the variable tqitem during get_int_variables.
      +
      std::string hoc_function_signature(const std::string &function_or_procedure_name) const
      Get the signature of the hoc <func_or_proc_name> function.
      std::shared_ptr< StatementBlock > get_statement_block() const noexcept override
      Getter for member variable ForNetcon::statement_block.
      Definition: for_netcon.hpp:185
      -
      void print_thread_variables_structure(bool print_initializers)
      Print the data structure used to access thread variables.
      +
      void print_thread_variables_structure(bool print_initializers)
      Print the data structure used to access thread variables.
      Represent semantic information for index variable.
      std::string to_string(const T &obj)
      -
      void print_compute_functions() override
      Print all compute functions for every backend.
      +
      void print_compute_functions() override
      Print all compute functions for every backend.
      static constexpr char NRN_INIT_METHOD[]
      nrn_init method in generated code
      const ExpressionVector & get_arguments() const noexcept
      Getter for member variable FunctionCall::arguments.
      -
      void print_thread_memory_callbacks()
      Print thread variable (de-)initialization functions.
      -
      static void rename_net_receive_arguments(const ast::NetReceiveBlock &net_receive_node, const ast::Node &node)
      Rename arguments to NET_RECEIVE block with corresponding pointer variable.
      -
      void print_nrn_cur_kernel(const ast::BreakpointBlock &node) override
      Print main body of nrn_cur function.
      +
      void print_thread_memory_callbacks()
      Print thread variable (de-)initialization functions.
      +
      static void rename_net_receive_arguments(const ast::NetReceiveBlock &net_receive_node, const ast::Node &node)
      Rename arguments to NET_RECEIVE block with corresponding pointer variable.
      +
      void print_nrn_cur_kernel(const ast::BreakpointBlock &node) override
      Print main body of nrn_cur function.
      Blindly rename given variable to new name
      static constexpr char NRN_CUR_METHOD[]
      nrn_cur method in generated code
      -
      void print_mechanism_register() override
      Print the mechanism registration function.
      +
      void print_mechanism_register() override
      Print the mechanism registration function.
      Implement utility functions for codegen visitors.
      static constexpr char TQITEM_VARIABLE[]
      inbuilt neuron variable for tqitem process
      -
      void print_nrn_jacob()
      Print nrn_jacob function definition.
      - +
      void print_nrn_jacob()
      Print nrn_jacob function definition.
      + -
      void print_fast_imem_calculation() override
      Print fast membrane current calculation code.
      +
      void print_fast_imem_calculation() override
      Print fast membrane current calculation code.
      void set(const std::string &old_name, std::string new_name)
      -
      void print_data_structures(bool print_initializers) override
      Print all classes.
      -
      void print_sdlists_init(bool print_initializers) override
      -
      const std::string external_method_arguments() noexcept override
      Arguments for external functions called from generated code.
      -
      std::string py_function_name(const std::string &function_or_procedure_name) const
      In non POINT_PROCESS mechanisms all functions and procedures need a py <func_or_proc_name> to be avai...
      +
      void print_data_structures(bool print_initializers) override
      Print all classes.
      +
      void print_sdlists_init(bool print_initializers) override
      +
      const std::string external_method_arguments() noexcept override
      Arguments for external functions called from generated code.
      +
      std::string py_function_name(const std::string &function_or_procedure_name) const
      In non POINT_PROCESS mechanisms all functions and procedures need a py <func_or_proc_name> to be avai...
      size_t offset
      The global variables ahead of this one require offset doubles to store.
      -
      void print_node_data_structure(bool print_initializers)
      Print the structure that wraps all node variables required for the NMODL.
      +
      void print_node_data_structure(bool print_initializers)
      Print the structure that wraps all node variables required for the NMODL.
      static constexpr char INST_GLOBAL_MEMBER[]
      instance struct member pointing to the global variable structure
      -
      void print_make_instance() const
      Print make_*_instance.
      +
      void print_make_instance() const
      Print make_*_instance.
      void print_atomic_reduction_pragma() override
      Print atomic update pragma for reduction statements.
      NmodlType
      NMODL variable properties.
      const ArgumentVector & get_parameters() const noexcept override
      Getter for member variable ForNetcon::parameters.
      Definition: for_netcon.hpp:176
      -
      void print_setdata_functions()
      Print NEURON functions related to setting global variables of the mechanism.
      +
      void print_setdata_functions()
      Print NEURON functions related to setting global variables of the mechanism.
      std::string get_name(const std::shared_ptr< symtab::Symbol > &sym)
      Represents ion write statement during code generation.
      -
      void print_mechanism_variables_macros()
      Print mechanism variables' related macros.
      -
      void print_nrn_constructor() override
      Print nrn_constructor function definition.
      -
      void print_check_table_function_prototypes()
      Print all check_* function declarations.
      +
      void print_mechanism_variables_macros()
      Print mechanism variables' related macros.
      +
      void print_nrn_constructor() override
      Print nrn_constructor function definition.
      BlockType
      Helper to represent various block types.
      -
      void print_headers_include() override
      Print all includes.
      -
      const ParamVector external_method_parameters(bool table=false) noexcept override
      Parameters for functions in generated code that are called back from external code.
      -
      void visit_for_netcon(const ast::ForNetcon &node) override
      visit node of type ast::ForNetcon
      -
      void print_function_or_procedure(const ast::Block &node, const std::string &name, const std::unordered_set< CppObjectSpecifier > &specifiers={ CppObjectSpecifier::Inline}) override
      Print nmodl function or procedure (common code)
      +
      void print_headers_include() override
      Print all includes.
      +
      void print_check_table_entrypoint()
      Print all check_* function declarations.
      +
      const ParamVector external_method_parameters(bool table=false) noexcept override
      Parameters for functions in generated code that are called back from external code.
      +
      void visit_for_netcon(const ast::ForNetcon &node) override
      visit node of type ast::ForNetcon
      +
      void print_function_or_procedure(const ast::Block &node, const std::string &name, const std::unordered_set< CppObjectSpecifier > &specifiers={ CppObjectSpecifier::Inline}) override
      Print nmodl function or procedure (common code)
      static constexpr char NRN_STATE_METHOD[]
      nrn_state method in generated code
      const std::shared_ptr< symtab::Symbol > symbol
      Version information and units file path.
      -
      void print_nrn_current(const ast::BreakpointBlock &node) override
      Print the nrn_current kernel.
      +
      void print_nrn_current(const ast::BreakpointBlock &node) override
      Print the nrn_current kernel.
      int position_of_float_var(const std::string &name) const override
      Determine the position in the data array for a given float variable.
      -
      std::string get_variable_name(const std::string &name, bool use_instance=true) const override
      Determine variable name in the structure of mechanism properties.
      +
      std::string get_variable_name(const std::string &name, bool use_instance=true) const override
      Determine variable name in the structure of mechanism properties.
      static constexpr char AREA_VARIABLE[]
      similar to node_area but user can explicitly declare it as area
      -
      void print_make_node_data() const
      Print make_*_node_data.
      +
      void print_make_node_data() const
      Print make_*_node_data.
      virtual std::shared_ptr< StatementBlock > get_statement_block() const
      Return associated statement block for the AST node.
      Definition: ast.cpp:32
      std::string simulator_name() override
      Name of the simulator the code was generated for.
      -
      void print_function_procedure_helper(const ast::Block &node) override
      Common helper function to help printing function or procedure blocks.
      +
      void print_function_procedure_helper(const ast::Block &node) override
      Common helper function to help printing function or procedure blocks.
      static constexpr char FOR_NETCON_SEMANTIC[]
      semantic type for for_netcon statement
      static constexpr char NTHREAD_T_VARIABLE[]
      t variable in neuron thread structure
      -
      void add_variable_point_process(std::vector< IndexVariableInfo > &variables) override
      Add the variable point_process during get_int_variables.
      +
      void add_variable_point_process(std::vector< IndexVariableInfo > &variables) override
      Add the variable point_process during get_int_variables.
      Blindly rename given variable to new name
      @ State
      derivative block
      static constexpr char DIAM_VARIABLE[]
      inbuilt neuron variable for diameter of the compartment
      virtual const ArgumentVector & get_parameters() const
      Definition: block.hpp:50
      bool variable_used(const ast::Node &node, std::string name)
      -
      std::string nrn_thread_arguments() const override
      Arguments for "_threadargs_" macro in neuron implementation.
      +
      std::string nrn_thread_arguments() const override
      Arguments for "_threadargs_" macro in neuron implementation.
      static bool ends_with(const std::string &haystack, const std::string &needle)
      Check if haystack ends with needle.
      static constexpr char NRN_DESTRUCTOR_METHOD[]
      nrn_destructor method in generated code
      static constexpr char CONDUCTANCE_UNUSED_VARIABLE[]
      range variable when conductance is not used (for vectorized model)
      -
      void print_codegen_routines() override
      Print entry point to code generation.
      +
      void print_codegen_routines() override
      Print entry point to code generation.
      -
      void append_conc_write_statements(std::vector< ShadowUseStatement > &statements, const Ion &ion, const std::string &concentration) override
      Generate Function call statement for nrn_wrote_conc.
      +
      void append_conc_write_statements(std::vector< ShadowUseStatement > &statements, const Ion &ion, const std::string &concentration) override
      Generate Function call statement for nrn_wrote_conc.
      static constexpr char NRN_POINTERINDEX[]
      hoc_nrnpointerindex name
      -
      ParamVector internal_method_parameters() override
      Parameters for internally defined functions.
      +
      ParamVector internal_method_parameters() override
      Parameters for internally defined functions.
      int get_index_from_name(const std::vector< T > &variables, const std::string &name)
      virtual std::string get_node_name() const
      Return name of of the node.
      Definition: ast.cpp:28
      - +
      Auto generated AST classes declaration.
      -
      void print_net_receive()
      Print net_receive call-back.
      -
      void print_function_prototypes() override
      Print function and procedures prototype declaration.
      -
      void print_mechanism_global_var_structure(bool print_initializers) override
      Print the structure that wraps all global variables used in the NMODL.
      -
      std::string namespace_name() override
      Name of "our" namespace.
      -
      void print_global_macros()
      Print NEURON global variable macros.
      +
      void print_net_receive()
      Print net_receive call-back.
      +
      void print_function_prototypes() override
      Print function and procedures prototype declaration.
      +
      void print_mechanism_global_var_structure(bool print_initializers) override
      Print the structure that wraps all global variables used in the NMODL.
      +
      std::string namespace_name() override
      Name of "our" namespace.
      +
      void print_global_macros()
      Print NEURON global variable macros.
      std::string intra_conc_pointer_name() const
      std::vector< std::tuple< std::string, std::string, std::string, std::string > > ParamVector
      A vector of parameters represented by a 4-tuple of strings:
      static constexpr char ION_VARNAME_PREFIX[]
      prefix for ion variable
diff --git a/notebooks/nmodl-kinetic-schemes.ipynb b/notebooks/nmodl-kinetic-schemes.ipynb
index 5998b378f..a5ff5c986 100644
--- a/notebooks/nmodl-kinetic-schemes.ipynb
+++ b/notebooks/nmodl-kinetic-schemes.ipynb
[hunks omitted: only the recorded cell execution timestamps change, from 2024-09-23T08:47 to 2024-09-23T14:54]
diff --git a/notebooks/nmodl-python-tutorial.ipynb b/notebooks/nmodl-python-tutorial.ipynb
index 66b7d7d82..f96b3943e 100644
--- a/notebooks/nmodl-python-tutorial.ipynb
+++ b/notebooks/nmodl-python-tutorial.ipynb
[hunks omitted: only the recorded cell execution timestamps change, from 2024-09-23T08:47 to 2024-09-23T14:54]
"iopub.status.busy": "2024-09-23T08:47:48.179402Z", - "iopub.status.idle": "2024-09-23T08:47:48.182754Z", - "shell.execute_reply": "2024-09-23T08:47:48.182189Z" + "iopub.execute_input": "2024-09-23T14:54:50.048072Z", + "iopub.status.busy": "2024-09-23T14:54:50.047875Z", + "iopub.status.idle": "2024-09-23T14:54:50.051230Z", + "shell.execute_reply": "2024-09-23T14:54:50.050698Z" } }, "outputs": [ @@ -584,10 +584,10 @@ "execution_count": 14, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:48.184773Z", - "iopub.status.busy": "2024-09-23T08:47:48.184386Z", - "iopub.status.idle": "2024-09-23T08:47:48.188131Z", - "shell.execute_reply": "2024-09-23T08:47:48.187583Z" + "iopub.execute_input": "2024-09-23T14:54:50.053148Z", + "iopub.status.busy": "2024-09-23T14:54:50.052923Z", + "iopub.status.idle": "2024-09-23T14:54:50.056343Z", + "shell.execute_reply": "2024-09-23T14:54:50.055805Z" } }, "outputs": [ @@ -622,10 +622,10 @@ "execution_count": 15, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:48.190002Z", - "iopub.status.busy": "2024-09-23T08:47:48.189702Z", - "iopub.status.idle": "2024-09-23T08:47:48.193867Z", - "shell.execute_reply": "2024-09-23T08:47:48.193328Z" + "iopub.execute_input": "2024-09-23T14:54:50.058214Z", + "iopub.status.busy": "2024-09-23T14:54:50.058013Z", + "iopub.status.idle": "2024-09-23T14:54:50.061864Z", + "shell.execute_reply": "2024-09-23T14:54:50.061311Z" } }, "outputs": [ @@ -669,10 +669,10 @@ "execution_count": 16, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:48.195898Z", - "iopub.status.busy": "2024-09-23T08:47:48.195519Z", - "iopub.status.idle": "2024-09-23T08:47:48.200935Z", - "shell.execute_reply": "2024-09-23T08:47:48.200382Z" + "iopub.execute_input": "2024-09-23T14:54:50.063752Z", + "iopub.status.busy": "2024-09-23T14:54:50.063555Z", + "iopub.status.idle": "2024-09-23T14:54:50.069086Z", + "shell.execute_reply": "2024-09-23T14:54:50.068491Z" } }, "outputs": [ @@ -733,10 +733,10 @@ "execution_count": 17, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:48.202693Z", - "iopub.status.busy": "2024-09-23T08:47:48.202515Z", - "iopub.status.idle": "2024-09-23T08:47:48.206198Z", - "shell.execute_reply": "2024-09-23T08:47:48.205645Z" + "iopub.execute_input": "2024-09-23T14:54:50.070878Z", + "iopub.status.busy": "2024-09-23T14:54:50.070684Z", + "iopub.status.idle": "2024-09-23T14:54:50.074190Z", + "shell.execute_reply": "2024-09-23T14:54:50.073666Z" } }, "outputs": [], @@ -761,10 +761,10 @@ "execution_count": 18, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:48.208208Z", - "iopub.status.busy": "2024-09-23T08:47:48.207830Z", - "iopub.status.idle": "2024-09-23T08:47:48.217883Z", - "shell.execute_reply": "2024-09-23T08:47:48.217336Z" + "iopub.execute_input": "2024-09-23T14:54:50.076138Z", + "iopub.status.busy": "2024-09-23T14:54:50.075756Z", + "iopub.status.idle": "2024-09-23T14:54:50.085734Z", + "shell.execute_reply": "2024-09-23T14:54:50.085181Z" } }, "outputs": [], @@ -857,10 +857,10 @@ "execution_count": 19, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:48.219758Z", - "iopub.status.busy": "2024-09-23T08:47:48.219377Z", - "iopub.status.idle": "2024-09-23T08:47:48.223036Z", - "shell.execute_reply": "2024-09-23T08:47:48.222461Z" + "iopub.execute_input": "2024-09-23T14:54:50.087687Z", + "iopub.status.busy": "2024-09-23T14:54:50.087306Z", + "iopub.status.idle": "2024-09-23T14:54:50.091014Z", + "shell.execute_reply": 
"2024-09-23T14:54:50.090461Z" } }, "outputs": [ @@ -897,10 +897,10 @@ "execution_count": 20, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:48.224980Z", - "iopub.status.busy": "2024-09-23T08:47:48.224602Z", - "iopub.status.idle": "2024-09-23T08:47:48.229813Z", - "shell.execute_reply": "2024-09-23T08:47:48.229252Z" + "iopub.execute_input": "2024-09-23T14:54:50.092953Z", + "iopub.status.busy": "2024-09-23T14:54:50.092549Z", + "iopub.status.idle": "2024-09-23T14:54:50.098222Z", + "shell.execute_reply": "2024-09-23T14:54:50.097670Z" } }, "outputs": [ diff --git a/notebooks/nmodl-sympy-conductance.ipynb b/notebooks/nmodl-sympy-conductance.ipynb index 019efb555..9a8bc5ce7 100644 --- a/notebooks/nmodl-sympy-conductance.ipynb +++ b/notebooks/nmodl-sympy-conductance.ipynb @@ -86,10 +86,10 @@ "execution_count": 1, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:50.494838Z", - "iopub.status.busy": "2024-09-23T08:47:50.494277Z", - "iopub.status.idle": "2024-09-23T08:47:51.310513Z", - "shell.execute_reply": "2024-09-23T08:47:51.309743Z" + "iopub.execute_input": "2024-09-23T14:54:52.416252Z", + "iopub.status.busy": "2024-09-23T14:54:52.415789Z", + "iopub.status.idle": "2024-09-23T14:54:53.264476Z", + "shell.execute_reply": "2024-09-23T14:54:53.263713Z" } }, "outputs": [], @@ -103,10 +103,10 @@ "execution_count": 2, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:51.312983Z", - "iopub.status.busy": "2024-09-23T08:47:51.312777Z", - "iopub.status.idle": "2024-09-23T08:47:51.342840Z", - "shell.execute_reply": "2024-09-23T08:47:51.342176Z" + "iopub.execute_input": "2024-09-23T14:54:53.267243Z", + "iopub.status.busy": "2024-09-23T14:54:53.266800Z", + "iopub.status.idle": "2024-09-23T14:54:53.297909Z", + "shell.execute_reply": "2024-09-23T14:54:53.297240Z" } }, "outputs": [], @@ -149,10 +149,10 @@ "execution_count": 3, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:51.344933Z", - "iopub.status.busy": "2024-09-23T08:47:51.344728Z", - "iopub.status.idle": "2024-09-23T08:47:51.615509Z", - "shell.execute_reply": "2024-09-23T08:47:51.614679Z" + "iopub.execute_input": "2024-09-23T14:54:53.300477Z", + "iopub.status.busy": "2024-09-23T14:54:53.300060Z", + "iopub.status.idle": "2024-09-23T14:54:53.580206Z", + "shell.execute_reply": "2024-09-23T14:54:53.579466Z" } }, "outputs": [ @@ -196,10 +196,10 @@ "execution_count": 4, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:51.617708Z", - "iopub.status.busy": "2024-09-23T08:47:51.617267Z", - "iopub.status.idle": "2024-09-23T08:47:51.749743Z", - "shell.execute_reply": "2024-09-23T08:47:51.749036Z" + "iopub.execute_input": "2024-09-23T14:54:53.582371Z", + "iopub.status.busy": "2024-09-23T14:54:53.582004Z", + "iopub.status.idle": "2024-09-23T14:54:53.713652Z", + "shell.execute_reply": "2024-09-23T14:54:53.713015Z" } }, "outputs": [ @@ -243,10 +243,10 @@ "execution_count": 5, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:51.751990Z", - "iopub.status.busy": "2024-09-23T08:47:51.751766Z", - "iopub.status.idle": "2024-09-23T08:47:51.759870Z", - "shell.execute_reply": "2024-09-23T08:47:51.759332Z" + "iopub.execute_input": "2024-09-23T14:54:53.715936Z", + "iopub.status.busy": "2024-09-23T14:54:53.715573Z", + "iopub.status.idle": "2024-09-23T14:54:53.723829Z", + "shell.execute_reply": "2024-09-23T14:54:53.723259Z" } }, "outputs": [ @@ -290,10 +290,10 @@ "execution_count": 6, "metadata": { "execution": { - "iopub.execute_input": 
"2024-09-23T08:47:51.761962Z", - "iopub.status.busy": "2024-09-23T08:47:51.761560Z", - "iopub.status.idle": "2024-09-23T08:47:51.779897Z", - "shell.execute_reply": "2024-09-23T08:47:51.779303Z" + "iopub.execute_input": "2024-09-23T14:54:53.726081Z", + "iopub.status.busy": "2024-09-23T14:54:53.725699Z", + "iopub.status.idle": "2024-09-23T14:54:53.743660Z", + "shell.execute_reply": "2024-09-23T14:54:53.743079Z" } }, "outputs": [ @@ -337,10 +337,10 @@ "execution_count": 7, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:51.782100Z", - "iopub.status.busy": "2024-09-23T08:47:51.781696Z", - "iopub.status.idle": "2024-09-23T08:47:51.899312Z", - "shell.execute_reply": "2024-09-23T08:47:51.898649Z" + "iopub.execute_input": "2024-09-23T14:54:53.745793Z", + "iopub.status.busy": "2024-09-23T14:54:53.745403Z", + "iopub.status.idle": "2024-09-23T14:54:53.865191Z", + "shell.execute_reply": "2024-09-23T14:54:53.864483Z" } }, "outputs": [ @@ -400,10 +400,10 @@ "execution_count": 8, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:51.901588Z", - "iopub.status.busy": "2024-09-23T08:47:51.901171Z", - "iopub.status.idle": "2024-09-23T08:47:51.995784Z", - "shell.execute_reply": "2024-09-23T08:47:51.995148Z" + "iopub.execute_input": "2024-09-23T14:54:53.867667Z", + "iopub.status.busy": "2024-09-23T14:54:53.867191Z", + "iopub.status.idle": "2024-09-23T14:54:53.962256Z", + "shell.execute_reply": "2024-09-23T14:54:53.961514Z" } }, "outputs": [ @@ -456,10 +456,10 @@ "execution_count": 9, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:51.997988Z", - "iopub.status.busy": "2024-09-23T08:47:51.997574Z", - "iopub.status.idle": "2024-09-23T08:47:52.070904Z", - "shell.execute_reply": "2024-09-23T08:47:52.070279Z" + "iopub.execute_input": "2024-09-23T14:54:53.964637Z", + "iopub.status.busy": "2024-09-23T14:54:53.964241Z", + "iopub.status.idle": "2024-09-23T14:54:54.038495Z", + "shell.execute_reply": "2024-09-23T14:54:54.037820Z" } }, "outputs": [ diff --git a/notebooks/nmodl-sympy-solver-cnexp.ipynb b/notebooks/nmodl-sympy-solver-cnexp.ipynb index f5fc8af89..e6d4b99c4 100644 --- a/notebooks/nmodl-sympy-solver-cnexp.ipynb +++ b/notebooks/nmodl-sympy-solver-cnexp.ipynb @@ -62,10 +62,10 @@ "execution_count": 1, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:53.935653Z", - "iopub.status.busy": "2024-09-23T08:47:53.935463Z", - "iopub.status.idle": "2024-09-23T08:47:54.767490Z", - "shell.execute_reply": "2024-09-23T08:47:54.766739Z" + "iopub.execute_input": "2024-09-23T14:54:56.004657Z", + "iopub.status.busy": "2024-09-23T14:54:56.004249Z", + "iopub.status.idle": "2024-09-23T14:54:56.818421Z", + "shell.execute_reply": "2024-09-23T14:54:56.817601Z" } }, "outputs": [], @@ -79,10 +79,10 @@ "execution_count": 2, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:54.770389Z", - "iopub.status.busy": "2024-09-23T08:47:54.769805Z", - "iopub.status.idle": "2024-09-23T08:47:54.801488Z", - "shell.execute_reply": "2024-09-23T08:47:54.800815Z" + "iopub.execute_input": "2024-09-23T14:54:56.820866Z", + "iopub.status.busy": "2024-09-23T14:54:56.820634Z", + "iopub.status.idle": "2024-09-23T14:54:56.852757Z", + "shell.execute_reply": "2024-09-23T14:54:56.852057Z" } }, "outputs": [], @@ -123,10 +123,10 @@ "execution_count": 3, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:54.804230Z", - "iopub.status.busy": "2024-09-23T08:47:54.803784Z", - "iopub.status.idle": "2024-09-23T08:47:55.220925Z", - 
"shell.execute_reply": "2024-09-23T08:47:55.220181Z" + "iopub.execute_input": "2024-09-23T14:54:56.855296Z", + "iopub.status.busy": "2024-09-23T14:54:56.855008Z", + "iopub.status.idle": "2024-09-23T14:54:57.299613Z", + "shell.execute_reply": "2024-09-23T14:54:57.298907Z" } }, "outputs": [ @@ -165,10 +165,10 @@ "execution_count": 4, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:55.223287Z", - "iopub.status.busy": "2024-09-23T08:47:55.222843Z", - "iopub.status.idle": "2024-09-23T08:47:55.334385Z", - "shell.execute_reply": "2024-09-23T08:47:55.333768Z" + "iopub.execute_input": "2024-09-23T14:54:57.301976Z", + "iopub.status.busy": "2024-09-23T14:54:57.301557Z", + "iopub.status.idle": "2024-09-23T14:54:57.416310Z", + "shell.execute_reply": "2024-09-23T14:54:57.415611Z" } }, "outputs": [ @@ -207,10 +207,10 @@ "execution_count": 5, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:55.336474Z", - "iopub.status.busy": "2024-09-23T08:47:55.336272Z", - "iopub.status.idle": "2024-09-23T08:47:55.535102Z", - "shell.execute_reply": "2024-09-23T08:47:55.534460Z" + "iopub.execute_input": "2024-09-23T14:54:57.418689Z", + "iopub.status.busy": "2024-09-23T14:54:57.418166Z", + "iopub.status.idle": "2024-09-23T14:54:57.624046Z", + "shell.execute_reply": "2024-09-23T14:54:57.623328Z" } }, "outputs": [ @@ -255,10 +255,10 @@ "execution_count": 6, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:55.537255Z", - "iopub.status.busy": "2024-09-23T08:47:55.537046Z", - "iopub.status.idle": "2024-09-23T08:47:55.693944Z", - "shell.execute_reply": "2024-09-23T08:47:55.693318Z" + "iopub.execute_input": "2024-09-23T14:54:57.626570Z", + "iopub.status.busy": "2024-09-23T14:54:57.626205Z", + "iopub.status.idle": "2024-09-23T14:54:57.790198Z", + "shell.execute_reply": "2024-09-23T14:54:57.789463Z" } }, "outputs": [ @@ -303,10 +303,10 @@ "execution_count": 7, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:55.696269Z", - "iopub.status.busy": "2024-09-23T08:47:55.695869Z", - "iopub.status.idle": "2024-09-23T08:47:57.449017Z", - "shell.execute_reply": "2024-09-23T08:47:57.448363Z" + "iopub.execute_input": "2024-09-23T14:54:57.792215Z", + "iopub.status.busy": "2024-09-23T14:54:57.792006Z", + "iopub.status.idle": "2024-09-23T14:54:59.652815Z", + "shell.execute_reply": "2024-09-23T14:54:59.652055Z" } }, "outputs": [ @@ -351,10 +351,10 @@ "execution_count": 8, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:47:57.451244Z", - "iopub.status.busy": "2024-09-23T08:47:57.450829Z", - "iopub.status.idle": "2024-09-23T08:47:59.713483Z", - "shell.execute_reply": "2024-09-23T08:47:59.712609Z" + "iopub.execute_input": "2024-09-23T14:54:59.655221Z", + "iopub.status.busy": "2024-09-23T14:54:59.655000Z", + "iopub.status.idle": "2024-09-23T14:55:01.941651Z", + "shell.execute_reply": "2024-09-23T14:55:01.940888Z" } }, "outputs": [ diff --git a/notebooks/nmodl-sympy-solver-derivimplicit.ipynb b/notebooks/nmodl-sympy-solver-derivimplicit.ipynb index 5ab7b0bd2..d76c3327f 100644 --- a/notebooks/nmodl-sympy-solver-derivimplicit.ipynb +++ b/notebooks/nmodl-sympy-solver-derivimplicit.ipynb @@ -39,10 +39,10 @@ "execution_count": 1, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:48:01.304819Z", - "iopub.status.busy": "2024-09-23T08:48:01.304623Z", - "iopub.status.idle": "2024-09-23T08:48:02.095402Z", - "shell.execute_reply": "2024-09-23T08:48:02.094589Z" + "iopub.execute_input": "2024-09-23T14:55:03.649752Z", + 
"iopub.status.busy": "2024-09-23T14:55:03.649067Z", + "iopub.status.idle": "2024-09-23T14:55:04.465369Z", + "shell.execute_reply": "2024-09-23T14:55:04.464537Z" } }, "outputs": [], @@ -56,10 +56,10 @@ "execution_count": 2, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:48:02.097800Z", - "iopub.status.busy": "2024-09-23T08:48:02.097586Z", - "iopub.status.idle": "2024-09-23T08:48:02.128563Z", - "shell.execute_reply": "2024-09-23T08:48:02.127779Z" + "iopub.execute_input": "2024-09-23T14:55:04.468008Z", + "iopub.status.busy": "2024-09-23T14:55:04.467574Z", + "iopub.status.idle": "2024-09-23T14:55:04.499399Z", + "shell.execute_reply": "2024-09-23T14:55:04.498533Z" } }, "outputs": [], @@ -100,10 +100,10 @@ "execution_count": 3, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:48:02.131268Z", - "iopub.status.busy": "2024-09-23T08:48:02.130867Z", - "iopub.status.idle": "2024-09-23T08:48:02.626362Z", - "shell.execute_reply": "2024-09-23T08:48:02.625667Z" + "iopub.execute_input": "2024-09-23T14:55:04.502392Z", + "iopub.status.busy": "2024-09-23T14:55:04.501931Z", + "iopub.status.idle": "2024-09-23T14:55:05.003552Z", + "shell.execute_reply": "2024-09-23T14:55:05.002806Z" } }, "outputs": [ @@ -165,10 +165,10 @@ "execution_count": 4, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:48:02.628519Z", - "iopub.status.busy": "2024-09-23T08:48:02.628274Z", - "iopub.status.idle": "2024-09-23T08:48:02.925526Z", - "shell.execute_reply": "2024-09-23T08:48:02.924883Z" + "iopub.execute_input": "2024-09-23T14:55:05.006257Z", + "iopub.status.busy": "2024-09-23T14:55:05.005798Z", + "iopub.status.idle": "2024-09-23T14:55:05.295366Z", + "shell.execute_reply": "2024-09-23T14:55:05.294706Z" } }, "outputs": [ diff --git a/notebooks/nmodl-sympy-solver-sparse.ipynb b/notebooks/nmodl-sympy-solver-sparse.ipynb index 8a08badac..8fb519395 100644 --- a/notebooks/nmodl-sympy-solver-sparse.ipynb +++ b/notebooks/nmodl-sympy-solver-sparse.ipynb @@ -39,10 +39,10 @@ "execution_count": 1, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:48:04.266731Z", - "iopub.status.busy": "2024-09-23T08:48:04.266535Z", - "iopub.status.idle": "2024-09-23T08:48:05.074066Z", - "shell.execute_reply": "2024-09-23T08:48:05.073268Z" + "iopub.execute_input": "2024-09-23T14:55:06.664522Z", + "iopub.status.busy": "2024-09-23T14:55:06.664318Z", + "iopub.status.idle": "2024-09-23T14:55:07.495171Z", + "shell.execute_reply": "2024-09-23T14:55:07.494256Z" } }, "outputs": [], @@ -56,10 +56,10 @@ "execution_count": 2, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:48:05.076748Z", - "iopub.status.busy": "2024-09-23T08:48:05.076318Z", - "iopub.status.idle": "2024-09-23T08:48:05.107088Z", - "shell.execute_reply": "2024-09-23T08:48:05.106453Z" + "iopub.execute_input": "2024-09-23T14:55:07.497657Z", + "iopub.status.busy": "2024-09-23T14:55:07.497431Z", + "iopub.status.idle": "2024-09-23T14:55:07.529391Z", + "shell.execute_reply": "2024-09-23T14:55:07.528689Z" } }, "outputs": [], @@ -100,10 +100,10 @@ "execution_count": 3, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:48:05.109372Z", - "iopub.status.busy": "2024-09-23T08:48:05.109036Z", - "iopub.status.idle": "2024-09-23T08:48:05.603395Z", - "shell.execute_reply": "2024-09-23T08:48:05.602753Z" + "iopub.execute_input": "2024-09-23T14:55:07.531758Z", + "iopub.status.busy": "2024-09-23T14:55:07.531489Z", + "iopub.status.idle": "2024-09-23T14:55:08.030408Z", + "shell.execute_reply": 
"2024-09-23T14:55:08.029729Z" } }, "outputs": [ @@ -165,10 +165,10 @@ "execution_count": 4, "metadata": { "execution": { - "iopub.execute_input": "2024-09-23T08:48:05.605758Z", - "iopub.status.busy": "2024-09-23T08:48:05.605274Z", - "iopub.status.idle": "2024-09-23T08:48:05.899859Z", - "shell.execute_reply": "2024-09-23T08:48:05.899123Z" + "iopub.execute_input": "2024-09-23T14:55:08.032822Z", + "iopub.status.busy": "2024-09-23T14:55:08.032311Z", + "iopub.status.idle": "2024-09-23T14:55:08.333627Z", + "shell.execute_reply": "2024-09-23T14:55:08.332912Z" } }, "outputs": [

[doxygen HTML residue: nmodl::codegen::CodegenNeuronCppVisitor member renamed from print_check_table_function_prototypes() to print_check_table_entrypoint()]